diff --git a/debian/msc-pygeoapi.cron.d b/debian/msc-pygeoapi.cron.d index ec82c3e1..d0d6c7dc 100644 --- a/debian/msc-pygeoapi.cron.d +++ b/debian/msc-pygeoapi.cron.d @@ -1,9 +1,9 @@ MAILTO="" # ================================================================= # -# Author: Tom Kralidis +# Author: Tom Kralidis # -# Copyright (c) 2021 Tom Kralidis +# Copyright (c) 2023 Tom Kralidis # # Permission is hereby granted, free of charge, to any person # obtaining a copy of this software and associated documentation diff --git a/debian/postinst b/debian/postinst index 60db6fd2..68e296dd 100644 --- a/debian/postinst +++ b/debian/postinst @@ -1,9 +1,9 @@ #!/bin/sh # ================================================================= # -# Author: Tom Kralidis +# Author: Tom Kralidis # -# Copyright (c) 2020 Tom Kralidis +# Copyright (c) 2023 Tom Kralidis # # Permission is hereby granted, free of charge, to any person # obtaining a copy of this software and associated documentation diff --git a/msc_pygeoapi/__init__.py b/msc_pygeoapi/__init__.py index f81c5a7e..ba33e72d 100644 --- a/msc_pygeoapi/__init__.py +++ b/msc_pygeoapi/__init__.py @@ -1,8 +1,8 @@ # ================================================================= # -# Author: Tom Kralidis +# Author: Tom Kralidis # -# Copyright (c) 2022 Tom Kralidis +# Copyright (c) 2023 Tom Kralidis # # Permission is hereby granted, free of charge, to any person # obtaining a copy of this software and associated documentation diff --git a/msc_pygeoapi/connector/base.py b/msc_pygeoapi/connector/base.py index fabd3a3d..15c3d716 100644 --- a/msc_pygeoapi/connector/base.py +++ b/msc_pygeoapi/connector/base.py @@ -3,7 +3,7 @@ # Author: Etienne # # Copyright (c) 2021 Etienne Pelletier -# Copyright (c) 2021 Tom Kralidis +# Copyright (c) 2023 Tom Kralidis # # Permission is hereby granted, free of charge, to any person # obtaining a copy of this software and associated documentation @@ -93,4 +93,4 @@ def delete(self, resources): raise NotImplementedError() def __repr__(self): - return ' {}'.format(self.name) + return f' {self.name}' diff --git a/msc_pygeoapi/env.py b/msc_pygeoapi/env.py index 741e6e12..00a5577e 100644 --- a/msc_pygeoapi/env.py +++ b/msc_pygeoapi/env.py @@ -1,8 +1,8 @@ # ================================================================= # -# Author: Tom Kralidis +# Author: Tom Kralidis # -# Copyright (c) 2021 Tom Kralidis +# Copyright (c) 2023 Tom Kralidis # # Permission is hereby granted, free of charge, to any person # obtaining a copy of this software and associated documentation diff --git a/msc_pygeoapi/event/file_.py b/msc_pygeoapi/event/file_.py index f483d694..aa3f1337 100644 --- a/msc_pygeoapi/event/file_.py +++ b/msc_pygeoapi/event/file_.py @@ -2,7 +2,7 @@ # # Author: Tom Kralidis # -# Copyright (c) 2021 Tom Kralidis +# Copyright (c) 2023 Tom Kralidis # # Permission is hereby granted, free of charge, to any person # obtaining a copy of this software and associated documentation @@ -48,10 +48,10 @@ def on_file(self, parent): from msc_pygeoapi.handler.core import CoreHandler filepath = parent.msg.local_file - parent.logger.debug('Filepath: {}'.format(filepath)) + parent.logger.debug(f'Filepath: {filepath}') handler = CoreHandler(filepath) result = handler.handle() - parent.logger.debug('Result: {}'.format(result)) + parent.logger.debug(f'Result: {result}') return True except Exception as err: parent.logger.warning(err) diff --git a/msc_pygeoapi/event/message.py b/msc_pygeoapi/event/message.py index 3ea8bf3a..67a4b214 100644 --- 
a/msc_pygeoapi/event/message.py +++ b/msc_pygeoapi/event/message.py @@ -2,7 +2,7 @@ # # Author: Tom Kralidis # -# Copyright (c) 2021 Tom Kralidis +# Copyright (c) 2023 Tom Kralidis # # Permission is hereby granted, free of charge, to any person # obtaining a copy of this software and associated documentation @@ -48,10 +48,10 @@ def on_message(self, parent): from msc_pygeoapi.handler.core import CoreHandler filepath = parent.msg.local_file - parent.logger.debug('Filepath: {}'.format(filepath)) + parent.logger.debug(f'Filepath: {filepath}') handler = CoreHandler(filepath) result = handler.handle() - parent.logger.debug('Result: {}'.format(result)) + parent.logger.debug(f'Result: {result}') return True except Exception as err: parent.logger.warning(err) diff --git a/msc_pygeoapi/handler/__init__.py b/msc_pygeoapi/handler/__init__.py index 10c560d4..901be014 100644 --- a/msc_pygeoapi/handler/__init__.py +++ b/msc_pygeoapi/handler/__init__.py @@ -1,8 +1,8 @@ # ================================================================= # -# Author: Tom Kralidis +# Author: Tom Kralidis # -# Copyright (c) 2020 Tom Kralidis +# Copyright (c) 2023 Tom Kralidis # # Permission is hereby granted, free of charge, to any person # obtaining a copy of this software and associated documentation diff --git a/msc_pygeoapi/handler/base.py b/msc_pygeoapi/handler/base.py index c6003136..645170ba 100644 --- a/msc_pygeoapi/handler/base.py +++ b/msc_pygeoapi/handler/base.py @@ -1,8 +1,8 @@ # ================================================================= # -# Author: Tom Kralidis +# Author: Tom Kralidis # -# Copyright (c) 2020 Tom Kralidis +# Copyright (c) 2023 Tom Kralidis # # Permission is hereby granted, free of charge, to any person # obtaining a copy of this software and associated documentation @@ -45,7 +45,7 @@ def __init__(self, filepath): """ self.filepath = filepath - LOGGER.debug('Filepath: {}'.format(self.filepath)) + LOGGER.debug(f'Filepath: {self.filepath}') def handle(self): """handle incoming file""" @@ -53,4 +53,4 @@ def handle(self): raise NotImplementedError() def __repr__(self): - return ' {}'.format(self.filepath) + return f' {self.filepath}' diff --git a/msc_pygeoapi/handler/core.py b/msc_pygeoapi/handler/core.py index 5660587e..6aeaeced 100644 --- a/msc_pygeoapi/handler/core.py +++ b/msc_pygeoapi/handler/core.py @@ -1,8 +1,8 @@ # ================================================================= # -# Author: Tom Kralidis +# Author: Tom Kralidis # -# Copyright (c) 2020 Tom Kralidis +# Copyright (c) 2023 Tom Kralidis # # Permission is hereby granted, free of charge, to any person # obtaining a copy of this software and associated documentation @@ -62,7 +62,7 @@ def handle(self): for key in PLUGINS['loader'].keys(): if PLUGINS['loader'][key]['filename_pattern'] in self.filepath: plugin_def = PLUGINS['loader'][key] - LOGGER.debug('Loading plugin {}'.format(plugin_def)) + LOGGER.debug(f'Loading plugin {plugin_def}') self.plugin = load_plugin('loader', plugin_def) if self.plugin is None: @@ -72,9 +72,9 @@ def handle(self): LOGGER.debug('Handling file') status = self.plugin.load_data(self.filepath) - LOGGER.debug('Status: {}'.format(status)) + LOGGER.debug(f'Status: {status}') return True def __repr__(self): - return ' {}'.format(self.filepath) + return f' {self.filepath}' diff --git a/msc_pygeoapi/loader/__init__.py b/msc_pygeoapi/loader/__init__.py index 21f3eda2..ef8a9fdf 100644 --- a/msc_pygeoapi/loader/__init__.py +++ b/msc_pygeoapi/loader/__init__.py @@ -1,9 +1,9 @@ # 
================================================================= # -# Author: Tom Kralidis +# Author: Tom Kralidis # Felix Laframboise # -# Copyright (c) 2022 Tom Kralidis +# Copyright (c) 2023 Tom Kralidis # Copyright (c) 2021 Felix Laframboise # # Permission is hereby granted, free of charge, to any person @@ -78,11 +78,9 @@ def metadata(): data.add_command(getattr(mod, name)) except ImportError as err: command_name = name.replace('_', '-') - LOGGER.info( - 'msc-pygeoapi data {} command unavailable.'.format(command_name) - ) - module_name = '{}.{}'.format(module, name) - msg = 'Import error when loading {}: {}'.format(module_name, err) + LOGGER.info(f'msc-pygeoapi data {command_name} command unavailable') + module_name = f'{module}.{name}' + msg = f'Import error when loading {module_name}: {err}' LOGGER.debug(msg) diff --git a/msc_pygeoapi/loader/ahccd.py b/msc_pygeoapi/loader/ahccd.py index 27a9fc10..7ed397e1 100644 --- a/msc_pygeoapi/loader/ahccd.py +++ b/msc_pygeoapi/loader/ahccd.py @@ -2,11 +2,11 @@ # # Author: Alex Hurka # Author: Etienne Pelletier -# Author: Tom Kralidis +# Author: Tom Kralidis # # Copyright (c) 2019 Alex Hurka # Copyright (c) 2021 Etienne Pelletier -# Copyright (c) 2020 Tom Kralidis +# Copyright (c) 2023 Tom Kralidis # Permission is hereby granted, free of charge, to any person # obtaining a copy of this software and associated documentation @@ -472,7 +472,7 @@ def generate_docs(self, fp, index): 'seasonal', 'trends', ]: - LOGGER.error('Unrecognized AHCCD data type {}'.format(index)) + LOGGER.error(f'Unrecognized AHCCD data type {index}') return try: @@ -494,22 +494,22 @@ def generate_docs(self, fp, index): record['properties']['identifier__identifiant'] = stn_id elif index == 'monthly': index_name = 'ahccd_monthly' - record['properties']['date'] = '{}-{}'.format( + record['properties']['date'] = '-'.join([ record['properties']['identifier__identifiant'].split('.')[ 1 ], record['properties']['identifier__identifiant'].split('.')[ 2 ], - ) + ]) del record['properties']['year__annee'] elif index == 'trends': index_name = 'ahccd_trends' - identifier = '{}.{}.{}'.format( + identifier = '.'.join([ record['properties']['station_id__id_station'], record['properties']['period__periode'], - record['properties']['measurement_type__type_mesure'], - ) + record['properties']['measurement_type__type_mesure'] + ]) record['properties']['identifier__identifiant'] = identifier action = { @@ -573,7 +573,7 @@ def add( with open(ctl, 'r') as f: ctl_dict = json.loads(f.read()) except Exception as err: - msg = 'Could not open JSON location file: {}'.format(err) + msg = f'Could not open JSON location file: {err}' click.ClickException(err) if dataset == 'all': @@ -587,16 +587,16 @@ def add( else: datasets_to_process = [dataset] - click.echo('Processing dataset(s): {}'.format(datasets_to_process)) + click.echo(f'Processing dataset(s): {datasets_to_process}') for dtp in datasets_to_process: try: - click.echo('Populating {} index'.format(dtp)) + click.echo(f'Populating {dtp} index') loader.create_index(dtp) dtp_data = loader.generate_docs(ctl_dict[dtp], dtp) loader.conn.submit_elastic_package(dtp_data, batch_size) except Exception as err: - msg = 'Could not populate {} index: {}'.format(dtp, err) + msg = f'Could not populate {dtp} index: {err}' raise click.ClickException(msg) diff --git a/msc_pygeoapi/loader/aqhi_realtime.py b/msc_pygeoapi/loader/aqhi_realtime.py index 5ddddc39..a3957cb5 100644 --- a/msc_pygeoapi/loader/aqhi_realtime.py +++ b/msc_pygeoapi/loader/aqhi_realtime.py @@ -8,7 
+8,7 @@ # Copyright (c) 2020 Etienne Pelletier # Copyright (c) 2021 Felix Laframboise # Copyright (c) 2021 Louis-Philippe Rousseau-Lambert -# Copyright (c) 2022 Tom Kralidis +# Copyright (c) 2023 Tom Kralidis # # Permission is hereby granted, free of charge, to any person # obtaining a copy of this software and associated documentation @@ -156,7 +156,7 @@ SETTINGS = { 'order': 0, 'version': 1, - 'index_patterns': ['{}*'.format(INDEX_BASENAME)], + 'index_patterns': [f'{INDEX_BASENAME}*'], 'settings': {'number_of_shards': 1, 'number_of_replicas': 0}, 'mappings': None } @@ -185,7 +185,7 @@ def __init__(self, conn_config={}): for aqhi_type in template_mappings: template_name = INDEX_BASENAME.format(aqhi_type) - SETTINGS['index_patterns'] = ['{}*'.format(template_name)] + SETTINGS['index_patterns'] = [f'{template_name}*'] SETTINGS['mappings'] = MAPPINGS[aqhi_type] self.conn.create_template(template_name, SETTINGS) @@ -289,7 +289,7 @@ def update_latest_status(self): try: self.conn.update_by_query(query, index_) except Exception as err: - LOGGER.warning('{}: failed to update ES index'.format(err)) + LOGGER.warning(f'Failed to update ES index: {err}') return True @@ -304,7 +304,7 @@ def load_data(self, filepath): # set class variables from filename self.parse_filename() - LOGGER.debug('Received file {}'.format(self.filepath)) + LOGGER.debug(f'Received file {self.filepath}') # generate geojson features package = self.generate_geojson_features() @@ -360,7 +360,7 @@ def add(ctx, file_, directory, es, username, password, ignore_certs): @click.pass_context @cli_options.OPTION_DAYS( default=DAYS_TO_KEEP, - help='Delete indexes older than n days (default={})'.format(DAYS_TO_KEEP), + help=f'Delete indexes older than n days (default={DAYS_TO_KEEP})', ) @cli_options.OPTION_DATASET( help='AQHI dataset indexes to delete.', @@ -387,7 +387,7 @@ def clean_indexes(ctx, days, dataset, es, username, password, ignore_certs): if indexes: indexes_to_delete = check_es_indexes_to_delete(indexes, days) if indexes_to_delete: - click.echo('Deleting indexes {}'.format(indexes_to_delete)) + click.echo(f'Deleting indexes {indexes_to_delete}') conn.delete(','.join(indexes_to_delete)) click.echo('Done') @@ -416,14 +416,14 @@ def delete_indexes(ctx, dataset, es, username, password, ignore_certs, else: indexes = '{}*'.format(INDEX_BASENAME.format(dataset)) - click.echo('Deleting indexes {}'.format(indexes)) + click.echo(f'Deleting indexes {indexes}') conn.delete(indexes) if index_template: for type_ in ('forecasts', 'observations'): index_name = INDEX_BASENAME.format(type_) - click.echo('Deleting index template {}'.format(index_name)) + click.echo(f'Deleting index template {index_name}') conn.delete_template(index_name) click.echo('Done') diff --git a/msc_pygeoapi/loader/aqhi_stations.py b/msc_pygeoapi/loader/aqhi_stations.py index a7188c1b..d0a004b6 100644 --- a/msc_pygeoapi/loader/aqhi_stations.py +++ b/msc_pygeoapi/loader/aqhi_stations.py @@ -5,6 +5,7 @@ # # # Copyright (c) 2023 Louis-Philippe Rousseau-Lambert +# Copyright (c) 2023 Tom Kralidis # # Permission is hereby granted, free of charge, to any person # obtaining a copy of this software and associated documentation @@ -51,8 +52,7 @@ INDEX_BASENAME = 'aqhi_stations' STATIONS_LIST_NAME = 'AQHI_XML_File_List.xml' -STATIONS_LIST_URL = 'https://dd.weather.gc.ca/air_quality/doc/{}' \ - .format(STATIONS_LIST_NAME) +STATIONS_LIST_URL = f'https://dd.weather.gc.ca/air_quality/doc/{STATIONS_LIST_NAME}' # noqa STATIONS_CACHE = os.path.join(MSC_PYGEOAPI_CACHEDIR, STATIONS_LIST_NAME) 
if not os.path.exists(MSC_PYGEOAPI_CACHEDIR): @@ -61,7 +61,7 @@ SETTINGS = { 'order': 0, 'version': 1, - 'index_patterns': ['{}'.format(INDEX_BASENAME)], + 'index_patterns': [INDEX_BASENAME], 'settings': { 'number_of_shards': 1, 'number_of_replicas': 0 @@ -243,7 +243,7 @@ def load_data(self): :returns: `bool` of status result """ - LOGGER.debug('Received file {}'.format(self.filepath)) + LOGGER.debug(f'Received file {self.filepath}') # generate geojson features package = self.generate_geojson_features() @@ -258,7 +258,7 @@ def download_stations(): :returns: void """ - LOGGER.debug('Caching {} to {}'.format(STATIONS_LIST_URL, STATIONS_CACHE)) + LOGGER.debug(f'Caching {STATIONS_LIST_URL} to {STATIONS_CACHE}') urllib.request.urlretrieve(STATIONS_LIST_URL, STATIONS_CACHE) @@ -307,7 +307,7 @@ def clean_index(ctx, es, username, password, ignore_certs): if indexes: indexes_to_delete = check_es_indexes_to_delete(indexes) if indexes_to_delete: - click.echo('Deleting indexes {}'.format(indexes_to_delete)) + click.echo(f'Deleting indexes {indexes_to_delete}') conn.delete(','.join(indexes_to_delete)) click.echo('Done') @@ -327,15 +327,15 @@ def delete_index(ctx, es, username, password, ignore_certs, conn_config = configure_es_connection(es, username, password, ignore_certs) conn = ElasticsearchConnector(conn_config) - indexes = '{}*'.format(INDEX_BASENAME) + indexes = f'{INDEX_BASENAME}*' - click.echo('Deleting indexes {}'.format(indexes)) + click.echo(f'Deleting indexes {indexes}') conn.delete(indexes) if index_template: index_name = INDEX_BASENAME - click.echo('Deleting index template {}'.format(index_name)) + click.echo(f'Deleting index template {index_name}') conn.delete_template(index_name) click.echo('Done') diff --git a/msc_pygeoapi/loader/base.py b/msc_pygeoapi/loader/base.py index d509ccd6..c3dceeb3 100644 --- a/msc_pygeoapi/loader/base.py +++ b/msc_pygeoapi/loader/base.py @@ -1,8 +1,8 @@ # ================================================================= # -# Author: Tom Kralidis +# Author: Tom Kralidis # -# Copyright (c) 2020 Tom Kralidis +# Copyright (c) 2023 Tom Kralidis # # Permission is hereby granted, free of charge, to any person # obtaining a copy of this software and associated documentation diff --git a/msc_pygeoapi/loader/bulletins_realtime.py b/msc_pygeoapi/loader/bulletins_realtime.py index 95312fba..d229106a 100644 --- a/msc_pygeoapi/loader/bulletins_realtime.py +++ b/msc_pygeoapi/loader/bulletins_realtime.py @@ -1,8 +1,8 @@ # ================================================================= # -# Author: Tom Kralidis +# Author: Tom Kralidis # -# Copyright (c) 2021 Tom Kralidis +# Copyright (c) 2023 Tom Kralidis # # Permission is hereby granted, free of charge, to any person # obtaining a copy of this software and associated documentation @@ -51,7 +51,7 @@ SETTINGS = { 'order': 0, 'version': 1, - 'index_patterns': ['{}*'.format(INDEX_BASENAME)], + 'index_patterns': [f'{INDEX_BASENAME}*'], 'settings': { 'number_of_shards': 1, 'number_of_replicas': 0 @@ -142,16 +142,16 @@ def load_data(self, filepath): b_dt = datetime.strptime(data['properties']['datetime'], '%Y-%m-%dT%H:%M') b_dt2 = b_dt.strftime('%Y-%m-%d') - es_index = '{}{}'.format(INDEX_BASENAME, b_dt2) + es_index = f'{INDEX_BASENAME}{b_dt2}' try: r = self.conn.Elasticsearch.index( index=es_index, id=data['id'], body=data ) - LOGGER.debug('Result: {}'.format(r)) + LOGGER.debug(f'Result: {r}') return True except Exception as err: - LOGGER.warning('Error indexing: {}'.format(err)) + LOGGER.warning(f'Error indexing: {err}') return
False def bulletin2dict(self, filepath): @@ -172,7 +172,7 @@ def bulletin2dict(self, filepath): try: bulletin_path = filepath.split('/alphanumeric/')[1] except IndexError as err: - LOGGER.warning('no bulletin path: {}'.format(err)) + LOGGER.warning(f'no bulletin path: {err}') raise RuntimeError(err) identifier = bulletin_path.replace('/', '.') @@ -193,7 +193,7 @@ def bulletin2dict(self, filepath): min_ = filename.split('_')[2][-2:] - datetime_ = '{}-{}-{}T{}:{}'.format(yyyy, mm, dd, hh, min_) + datetime_ = f'{yyyy}-{mm}-{dd}T{hh}:{min_}' # TODO: use real coordinates @@ -203,7 +203,7 @@ def bulletin2dict(self, filepath): dict_['properties']['issuer_name'] = issuer_name dict_['properties']['issuer_country'] = issuer_country dict_['properties']['issuing_office'] = tokens[2][2:] - dict_['properties']['url'] = '{}/{}'.format(self.DD_URL, bulletin_path) + dict_['properties']['url'] = f'{self.DD_URL}/{bulletin_path}' return dict_ @@ -218,7 +218,7 @@ def bulletins_realtime(): @click.pass_context @cli_options.OPTION_DAYS( default=DAYS_TO_KEEP, - help='Delete indexes older than n days (default={})'.format(DAYS_TO_KEEP) + help=f'Delete indexes older than n days (default={DAYS_TO_KEEP})' ) @cli_options.OPTION_ELASTICSEARCH() @cli_options.OPTION_ES_USERNAME() @@ -233,12 +233,12 @@ def clean_indexes(ctx, days, es, username, password, ignore_certs): conn_config = configure_es_connection(es, username, password, ignore_certs) conn = ElasticsearchConnector(conn_config) - indexes = conn.get('{}*'.format(INDEX_BASENAME)) + indexes = conn.get(f'{INDEX_BASENAME}*') if indexes: indexes_to_delete = check_es_indexes_to_delete(indexes, days) if indexes_to_delete: - click.echo('Deleting indexes {}'.format(indexes_to_delete)) + click.echo(f'Deleting indexes {indexes_to_delete}') conn.delete(','.join(indexes_to_delete)) click.echo('Done') @@ -260,13 +260,13 @@ def delete_indexes(ctx, es, username, password, ignore_certs, index_template): conn_config = configure_es_connection(es, username, password, ignore_certs) conn = ElasticsearchConnector(conn_config) - all_indexes = '{}*'.format(INDEX_BASENAME) + all_indexes = f'{INDEX_BASENAME}*' - click.echo('Deleting indexes {}'.format(all_indexes)) + click.echo(f'Deleting indexes {all_indexes}') conn.delete(all_indexes) if index_template: - click.echo('Deleting index template {}'.format(INDEX_BASENAME)) + click.echo(f'Deleting index template {INDEX_BASENAME}') conn.delete_template(INDEX_BASENAME) click.echo('Done') diff --git a/msc_pygeoapi/loader/cap_alerts_realtime.py b/msc_pygeoapi/loader/cap_alerts_realtime.py index ac393e9a..57b3d7d1 100644 --- a/msc_pygeoapi/loader/cap_alerts_realtime.py +++ b/msc_pygeoapi/loader/cap_alerts_realtime.py @@ -4,6 +4,7 @@ # # # Copyright (c) 2020 Louis-Philippe Rousseau-Lambert +# Copyright (c) 2023 Tom Kralidis # # Permission is hereby granted, free of charge, to any person # obtaining a copy of this software and associated documentation @@ -213,7 +214,7 @@ def load_data(self, filepath): index=INDEX_NAME, body=self.bulk_data ) - LOGGER.debug('Result: {}'.format(r)) + LOGGER.debug(f'Result: {r}') previous_alerts = self.delete_references_alerts() @@ -226,7 +227,7 @@ def load_data(self, filepath): return True except Exception as err: - LOGGER.warning('Error bulk indexing: {}'.format(err)) + LOGGER.warning(f'Error bulk indexing: {err}') return False def delete_references_alerts(self): @@ -274,14 +275,14 @@ def weather_warning2geojson(self, filepath): # we want to run a loop on every cap-xml in filepath and add them # in the geojson # we want to 
start by the newest file in the directory - LOGGER.info('Processing {} CAP documents'.format(len(filepath))) + LOGGER.info(f'Processing {len(filepath)} CAP documents') - LOGGER.debug('Processing {}'.format(filepath)) + LOGGER.debug(f'Processing {filepath}') # with the lxml library we parse the xml file try: tree = etree.parse(filepath) except Exception as err: - LOGGER.warning('Cannot parse {}: {}'.format(filepath, err)) + LOGGER.warning(f'Cannot parse {filepath}: {err}') url = 'https://dd.weather.gc.ca/alerts/{}'.\ format(filepath.split('alerts')[1]) @@ -290,52 +291,45 @@ b_xml = '{urn:oasis:names:tc:emergency:cap:1.2}' - identifier = _get_element(root, - '{}identifier'.format(b_xml)) + identifier = _get_element(root, f'{b_xml}identifier') - references = _get_element(root, - '{}references'.format(b_xml)) + references = _get_element(root, f'{b_xml}references') if references: for ref in references.split(' '): self.references_arr.append(ref.split(',')[1]) - for grandchild in root.iter('{}info'.format(b_xml)): + for grandchild in root.iter(f'{b_xml}info'): expires = _get_date_format(_get_element(grandchild, - '{}expires'.format(b_xml)))\ + f'{b_xml}expires'))\ .strftime(timeformat) status_alert = _get_element(grandchild, - '{}parameter[last()-4]/' - '{}value'.format(b_xml, - b_xml)) + f'{b_xml}parameter[last()-4]/' + f'{b_xml}value') if _get_date_format(expires) > now: - language = _get_element(grandchild, - '{}language'.format(b_xml)) + language = _get_element(grandchild, f'{b_xml}language') if language == 'fr-CA': - headline = _get_element(grandchild, - '{}headline'.format(b_xml)) + headline = _get_element(grandchild, f'{b_xml}headline') - description_fr = '{}description'.format(b_xml) + description_fr = f'{b_xml}description' descript = _get_element(grandchild, description_fr) descript = descript.replace("\n", " ").strip() - for i in grandchild.iter('{}area'.format(b_xml)): - tag = _get_element(i, - '{}polygon'.format(b_xml)) - name = _get_element(i, - '{}areaDesc'.format(b_xml)) + for i in grandchild.iter(f'{b_xml}area'): + tag = _get_element(i, f'{b_xml}polygon') + name = _get_element(i, f'{b_xml}areaDesc') - for j in grandchild.iter('{}geocode'.format(b_xml)): - str_value_name = '{}valueName'.format(b_xml) + for j in grandchild.iter(f'{b_xml}geocode'): + str_value_name = f'{b_xml}valueName' valueName = _get_element(j, str_value_name) if valueName == 'layer:EC-MSC-SMC:1.0:CLC': - geocode_value = '{}value'.format(b_xml) + geocode_value = f'{b_xml}value' geocode = _get_element(j, geocode_value) - id_warning = '{}_{}'.format(identifier, geocode) + id_warning = f'{identifier}_{geocode}' if id_warning not in french_alert: french_alert[id_warning] = (id_warning, @@ -343,40 +337,34 @@ headline, descript) else: - headline = _get_element(grandchild, - '{}headline'.format(b_xml)) + headline = _get_element(grandchild, f'{b_xml}headline') - description = '{}description'.format(b_xml) + description = f'{b_xml}description' descript = _get_element(grandchild, description) descript = descript.replace("\n", " ").strip() effective_date =\ - _get_element(grandchild, - '{}effective'.format(b_xml)) + _get_element(grandchild, f'{b_xml}effective') effective = _get_date_format(effective_date) effective = effective.strftime(timeformat) warning = _get_element(grandchild, - '{}parameter[1]/' - '{}value'.format(b_xml, - b_xml)) + f'{b_xml}parameter[1]/{b_xml}value') # There can be many covered by one # so we have to loop
through the info - for i in grandchild.iter('{}area'.format(b_xml)): - tag = _get_element(i, '{}polygon'.format(b_xml)) - name = _get_element(i, '{}areaDesc'.format(b_xml)) + for i in grandchild.iter(f'{b_xml}area'): + tag = _get_element(i, f'{b_xml}polygon') + name = _get_element(i, f'{b_xml}areaDesc') - for j in grandchild.iter('{}geocode'.format(b_xml)): - valueName = \ - _get_element(j, '{}valueName'.format(b_xml)) + for j in grandchild.iter(f'{b_xml}geocode'): + valueName = _get_element(j, f'{b_xml}valueName') if valueName == 'layer:EC-MSC-SMC:1.0:CLC': - geocode = \ - _get_element(j, '{}value'.format(b_xml)) + geocode = _get_element(j, f'{b_xml}value') split_tag = re.split(' |,', tag) - id_warning = '{}_{}'.format(identifier, geocode) + id_warning = f'{identifier}_{geocode}' if id_warning not in english_alert: english_alert[id_warning] = (split_tag, @@ -515,8 +503,7 @@ def clean_records(ctx, days, es, username, password, ignore_certs): older_than = (datetime.now() - timedelta(days=days)).strftime( '%Y-%m-%dT%H:%M:%SZ') - click.echo('Deleting documents older than {} ({} days)'.format( - older_than, days)) + click.echo(f'Deleting documents older than {older_than} ({days} days)') query = { 'query': { diff --git a/msc_pygeoapi/loader/citypageweather_realtime.py b/msc_pygeoapi/loader/citypageweather_realtime.py index 8f957270..02273699 100644 --- a/msc_pygeoapi/loader/citypageweather_realtime.py +++ b/msc_pygeoapi/loader/citypageweather_realtime.py @@ -4,6 +4,7 @@ # # # Copyright (c) 2020 Louis-Philippe Rousseau-Lambert +# Copyright (c) 2023 Tom Kralidis # # Permission is hereby granted, free of charge, to any person # obtaining a copy of this software and associated documentation @@ -265,10 +266,10 @@ def load_data(self, filepath): id=data['properties']['identifier'], body=data ) - LOGGER.debug('Result: {}'.format(r)) + LOGGER.debug(f'Result: {r}') return True except Exception as err: - LOGGER.warning('Error indexing: {}'.format(err)) + LOGGER.warning(f'Error indexing: {err}') return False def _get_element(self, node, path, attrib=None): @@ -320,21 +321,20 @@ def xml2json_cpw(self, wxo_lookup, xml): feature = {} row = {} - LOGGER.debug('Processing XML: {}'.format(xml)) + LOGGER.debug(f'Processing XML: {xml}') LOGGER.debug('Fetching English elements') try: root = etree.parse(xml).getroot() except Exception as err: - LOGGER.error('ERROR: cannot process data: {}'.format(err)) + LOGGER.error(f'ERROR: cannot process data: {err}') if root.findall("currentConditions/"): sitecode = os.path.basename(xml)[:-6] try: citycode = wxo_lookup[sitecode]['citycode'] except KeyError as err: - LOGGER.error('ERROR: cannot find sitecode {} : ' - 'err: {}'.format(sitecode, err)) + LOGGER.error(f'ERROR: cannot find sitecode {sitecode}: {err}') location_name = root.find('location/name') x = float(location_name.attrib.get('lon')[:-1]) @@ -349,8 +349,7 @@ def xml2json_cpw(self, wxo_lookup, xml): icon = self._get_element(root, 'currentConditions/iconCode') if icon: - row['icon'] = 'https://weather.gc.ca/' \ - 'weathericons/{}.gif'.format(icon) + row['icon'] = f'https://weather.gc.ca/weathericons/{icon}.gif' else: row['icon'] = None @@ -392,8 +391,7 @@ def xml2json_cpw(self, wxo_lookup, xml): root, 'currentConditions/pressure', 'tendency') - row['url_en'] = 'https://weather.gc.ca/city/pages/' \ - '{}_metric_e.html'.format(citycode) + row['url_en'] = f'https://weather.gc.ca/city/pages/{citycode}_metric_e.html' # noqa row['national'] = 0 if row['name'] in NATIONAL_CITIES: @@ -431,7 +429,7 @@ def xml2json_cpw(self, 
wxo_lookup, xml): } elif xml.endswith('f.xml'): - LOGGER.debug('Processing {}'.format(xml)) + LOGGER.debug(f'Processing {xml}') row['nom'] = self._get_element(root, 'location/name') row['station_fr'] = self._get_element( @@ -444,8 +442,7 @@ def xml2json_cpw(self, wxo_lookup, xml): root, 'currentConditions/pressure', 'tendency') - row['url_fr'] = 'https://meteo.gc.ca/city/pages/' \ - '{}_metric_f.html'.format(citycode) + row['url_fr'] = f'https://meteo.gc.ca/city/pages/{citycode}_metric_f.html' # noqa row['national'] = 0 if row['nom'] in NATIONAL_CITIES: @@ -512,8 +509,7 @@ def clean_records(ctx, days, es, username, password, ignore_certs): older_than = (datetime.now() - timedelta(days=days)).strftime( '%Y-%m-%d %H:%M') - click.echo('Deleting documents older than {} ({} days)'.format( - older_than, days)) + click.echo(f'Deleting documents older than {older_than} ({days} days)') query = { 'query': { diff --git a/msc_pygeoapi/loader/climate_archive.py b/msc_pygeoapi/loader/climate_archive.py index fe36f19a..0c702540 100644 --- a/msc_pygeoapi/loader/climate_archive.py +++ b/msc_pygeoapi/loader/climate_archive.py @@ -2,11 +2,11 @@ # # Author: Alex Hurka # Author: Etienne Pelletier -# Author: Tom Kralidis +# Author: Tom Kralidis # # Copyright (c) 2020 Etienne Pelletier # Copyright (c) 2019 Alex Hurka -# Copyright (c) 2020 Tom Kralidis +# Copyright (c) 2023 Tom Kralidis # Permission is hereby granted, free of charge, to any person # obtaining a copy of this software and associated documentation @@ -697,11 +697,11 @@ def generate_normals(self, stn_dict, normals_dict, periods_dict): if insert_dict[key] is not None else insert_dict[key] ) - insert_dict['ID'] = '{}.{}.{}'.format( + insert_dict['ID'] = '.'.join([ insert_dict['STN_ID'], insert_dict['NORMAL_ID'], - insert_dict['MONTH'], - ) + insert_dict['MONTH'] + ]) if insert_dict['STN_ID'] in stn_dict: coords = stn_dict[insert_dict['STN_ID']]['coordinates'] insert_dict['STATION_NAME'] = stn_dict[insert_dict['STN_ID']][ @@ -795,11 +795,11 @@ def generate_monthly_data(self, stn_dict, date=None): else insert_dict['LAST_UPDATED'] ) - insert_dict['ID'] = '{}.{}.{}'.format( + insert_dict['ID'] = '.'.join([ insert_dict['STN_ID'], insert_dict['LOCAL_YEAR'], - insert_dict['LOCAL_MONTH'], - ) + insert_dict['LOCAL_MONTH'] + ]) if insert_dict['STN_ID'] in stn_dict: coords = stn_dict[insert_dict['STN_ID']]['coordinates'] insert_dict['PROVINCE_CODE'] = stn_dict[insert_dict['STN_ID']][ @@ -877,12 +877,12 @@ def generate_daily_data(self, stn_dict, date=None): else insert_dict['LOCAL_DATE'] ) - insert_dict['ID'] = '{}.{}.{}.{}'.format( + insert_dict['ID'] = '.'.join([ insert_dict['CLIMATE_IDENTIFIER'], insert_dict['LOCAL_YEAR'], insert_dict['LOCAL_MONTH'], - insert_dict['LOCAL_DAY'], - ) + insert_dict['LOCAL_DAY'] + ]) if insert_dict['STN_ID'] in stn_dict: coords = stn_dict[insert_dict['STN_ID']]['coordinates'] insert_dict['PROVINCE_CODE'] = stn_dict[ @@ -963,13 +963,13 @@ def generate_hourly_data(self, stn_dict, date=None): else insert_dict['LOCAL_DATE'] ) - insert_dict['ID'] = '{}.{}.{}.{}.{}'.format( + insert_dict['ID'] = '.'.join([ insert_dict['CLIMATE_IDENTIFIER'], insert_dict['LOCAL_YEAR'], insert_dict['LOCAL_MONTH'], insert_dict['LOCAL_DAY'], - insert_dict['LOCAL_HOUR'], - ) + insert_dict['LOCAL_HOUR'] + ]) if insert_dict['STN_ID'] in stn_dict: coords = stn_dict[insert_dict['STN_ID']]['coordinates'] insert_dict['PROVINCE_CODE'] = stn_dict[ @@ -1182,7 +1182,7 @@ def add( else: datasets_to_process = [dataset] - click.echo('Processing dataset(s): 
{}'.format(datasets_to_process)) + click.echo(f'Processing dataset(s): {datasets_to_process}') if 'stations' in datasets_to_process: try: @@ -1191,7 +1191,7 @@ stations = loader.generate_stations() loader.conn.submit_elastic_package(stations, batch_size) except Exception as err: - msg = 'Could not populate stations index: {}'.format(err) + msg = f'Could not populate stations index: {err}' raise click.ClickException(msg) if 'normals' in datasets_to_process: @@ -1206,7 +1206,7 @@ ) loader.conn.submit_elastic_package(normals, batch_size) except Exception as err: - msg = 'Could not populate normals index: {}'.format(err) + msg = f'Could not populate normals index: {err}' raise click.ClickException(msg) if 'monthly' in datasets_to_process: @@ -1218,7 +1218,7 @@ monthlies = loader.generate_monthly_data(stn_dict, date) loader.conn.submit_elastic_package(monthlies, batch_size) except Exception as err: - msg = 'Could not populate montly index: {}'.format(err) + msg = f'Could not populate monthly index: {err}' raise click.ClickException(msg) if 'daily' in datasets_to_process: @@ -1230,7 +1230,7 @@ dailies = loader.generate_daily_data(stn_dict, date) loader.conn.submit_elastic_package(dailies, batch_size) except Exception as err: - msg = 'Could not populate daily index: {}'.format(err) + msg = f'Could not populate daily index: {err}' raise click.ClickException(msg) if 'hourly' in datasets_to_process: @@ -1242,7 +1242,7 @@ hourlies = loader.generate_hourly_data(stn_dict, date) loader.conn.submit_elastic_package(hourlies, batch_size) except Exception as err: - msg = 'Could not populate hourly index: {}'.format(err) + msg = f'Could not populate hourly index: {err}' raise click.ClickException(msg) loader.db_conn.close() diff --git a/msc_pygeoapi/loader/cumulative_effects_hs.py b/msc_pygeoapi/loader/cumulative_effects_hs.py index 6b275a9f..6d9880ac 100644 --- a/msc_pygeoapi/loader/cumulative_effects_hs.py +++ b/msc_pygeoapi/loader/cumulative_effects_hs.py @@ -1,9 +1,9 @@ # ================================================================= # -# Author: Philippe Theroux -# +# Author: Philippe Theroux # # Copyright (c) 2022 Philippe Theroux +# Copyright (c) 2023 Tom Kralidis # # Permission is hereby granted, free of charge, to any person # obtaining a copy of this software and associated documentation @@ -79,7 +79,7 @@ SETTINGS = { 'order': 0, 'version': 1, - 'index_patterns': ['{}*'.format(INDEX_BASENAME)], + 'index_patterns': [f'{INDEX_BASENAME}*'], 'settings': {'number_of_shards': 1, 'number_of_replicas': 0}, 'mappings': None } @@ -119,9 +119,7 @@ def generate_geojson_features(self): ) # set ES index name for feature - es_index = '{}{}'.format( - INDEX_BASENAME, self.datetime.strftime('%Y') - ) + es_index = f"{INDEX_BASENAME}{self.datetime.strftime('%Y')}" feature['id'] = feature['properties']['identifier'] # add properties @@ -148,16 +146,16 @@ def load_data(self, filepath): self.filepath = Path(filepath) - LOGGER.debug('Received file {}'.format(self.filepath)) + LOGGER.debug(f'Received file {self.filepath}') # generate geojson features package = self.generate_geojson_features() try: r = self.conn.submit_elastic_package(package, request_size=80000) - LOGGER.debug('Result: {}'.format(r)) + LOGGER.debug(f'Result: {r}') return True except Exception as err: - LOGGER.warning('Error indexing: {}'.format(err)) + LOGGER.warning(f'Error indexing: {err}') return False @@ -205,7 +203,7 @@ def add(ctx, file_, directory, es, username, password, ignore_certs): @click.pass_context
@cli_options.OPTION_DAYS( default=DAYS_TO_KEEP, - help='Delete indexes older than n days (default={})'.format(DAYS_TO_KEEP) + help=f'Delete indexes older than n days (default={DAYS_TO_KEEP})' ) @cli_options.OPTION_ELASTICSEARCH() @cli_options.OPTION_ES_USERNAME() @@ -220,12 +218,12 @@ def clean_indexes(ctx, days, es, username, password, ignore_certs): conn_config = configure_es_connection(es, username, password, ignore_certs) conn = ElasticsearchConnector(conn_config) - indexes = conn.get('{}*'.format(INDEX_BASENAME)) + indexes = conn.get(f'{INDEX_BASENAME}*') if indexes: indexes_to_delete = check_es_indexes_to_delete(indexes, days) if indexes_to_delete: - click.echo('Deleting indexes {}'.format(indexes_to_delete)) + click.echo(f'Deleting indexes {indexes_to_delete}') conn.delete(','.join(indexes_to_delete)) click.echo('Done') @@ -247,13 +245,13 @@ def delete_index(ctx, es, username, password, ignore_certs, index_template): conn_config = configure_es_connection(es, username, password, ignore_certs) conn = ElasticsearchConnector(conn_config) - all_indexes = '{}*'.format(INDEX_BASENAME) + all_indexes = f'{INDEX_BASENAME}*' - click.echo('Deleting indexes {}'.format(all_indexes)) + click.echo(f'Deleting indexes {all_indexes}') conn.delete(all_indexes) if index_template: - click.echo('Deleting index template {}'.format(INDEX_BASENAME)) + click.echo(f'Deleting index template {INDEX_BASENAME}') conn.delete_template(INDEX_BASENAME) click.echo('Done') diff --git a/msc_pygeoapi/loader/discovery_metadata.py b/msc_pygeoapi/loader/discovery_metadata.py index fea9206f..870590f8 100644 --- a/msc_pygeoapi/loader/discovery_metadata.py +++ b/msc_pygeoapi/loader/discovery_metadata.py @@ -2,7 +2,7 @@ # # Author: Tom Kralidis # -# Copyright (c) 2022 Tom Kralidis +# Copyright (c) 2023 Tom Kralidis # # Permission is hereby granted, free of charge, to any person # obtaining a copy of this software and associated documentation @@ -71,7 +71,7 @@ def __init__(self, conn_config={}): self.items = [] if not self.conn.exists(INDEX_NAME): - LOGGER.debug('Creating index {}'.format(INDEX_NAME)) + LOGGER.debug(f'Creating index {INDEX_NAME}') self.conn.create(INDEX_NAME, SETTINGS) def load_data(self, json_dict): @@ -85,7 +85,7 @@ def load_data(self, json_dict): identifier = json_dict['id'] - LOGGER.debug('Adding record {} to index {}'.format(identifier, INDEX_NAME)) # noqa + LOGGER.debug(f'Adding record {identifier} to index {INDEX_NAME}') package = { '_id': identifier, @@ -108,12 +108,12 @@ def generate_metadata(self, filepath): :returns: `dict` of discovery metadata """ - LOGGER.info('Processing MCF: {}'.format(filepath)) + LOGGER.info(f'Processing MCF: {filepath}') try: m = read_mcf(filepath) except Exception as err: - msg = 'ERROR: cannot read MCF: {}'.format(err) + msg = f'ERROR: cannot read MCF: {err}' LOGGER.error(msg) raise @@ -122,7 +122,7 @@ def generate_metadata(self, filepath): try: metadata = output_schema().write(m, stringify=False) except Exception as err: - msg = 'ERROR: cannot generate metadata: {}'.format(err) + msg = f'ERROR: cannot generate metadata: {err}' LOGGER.error(msg) raise @@ -156,16 +156,16 @@ def add(ctx, directory, es, username, password, ignore_certs): passed = 0 failed = 0 - click.echo('Processing discovery metadata in {}'.format(directory)) - for root, dirs, files in os.walk('{}/mcf'.format(directory)): + click.echo(f'Processing discovery metadata in {directory}') + for root, dirs, files in os.walk(f'{directory}/mcf'): for name in files: total += 1 if any(['shared' in root, 'template' in 
name, not name.endswith('yml')]): continue - mcf_file = ('{}/{}'.format(root, name)) + mcf_file = f'{root}/{name}' try: - click.echo('Processing MCF file {}'.format(mcf_file)) + click.echo(f'Processing MCF file {mcf_file}') metadata = loader.generate_metadata(mcf_file) _ = loader.load_data(metadata) passed += 1 @@ -174,9 +174,9 @@ def add(ctx, directory, es, username, password, ignore_certs): continue click.echo('Results') - click.echo('Total: {}'.format(total)) - click.echo('Passed: {}'.format(passed)) - click.echo('Failed: {}'.format(failed)) + click.echo(f'Total: {total}') + click.echo(f'Passed: {passed}') + click.echo(f'Failed: {failed}') @click.command() @@ -195,7 +195,7 @@ def delete_index(ctx, es, username, password, ignore_certs): conn = ElasticsearchConnector(conn_config) if conn.exists(INDEX_NAME): - click.echo('Deleting index {}'.format(INDEX_NAME)) + click.echo(f'Deleting index {INDEX_NAME}') conn.delete(INDEX_NAME) click.echo('Done') diff --git a/msc_pygeoapi/loader/forecast_polygons.py b/msc_pygeoapi/loader/forecast_polygons.py index 343185db..d3e6741c 100644 --- a/msc_pygeoapi/loader/forecast_polygons.py +++ b/msc_pygeoapi/loader/forecast_polygons.py @@ -3,7 +3,7 @@ # Author: Etienne Pelletier # # Copyright (c) 2020 Etienne Pelletier -# Copyright (c) 2022 Tom Kralidis +# Copyright (c) 2023 Tom Kralidis # # Permission is hereby granted, free of charge, to any person # obtaining a copy of this software and associated documentation @@ -348,7 +348,7 @@ def generate_geojson_features(self, shapefile_name): """ filepath = str((self.filepath / self.filepath.stem / shapefile_name).resolve()) - data = ogr.Open(r'/vsizip/{}'.format(filepath)) + data = ogr.Open(rf'/vsizip/{filepath}') lyr = data.GetLayer() for feature in lyr: @@ -361,7 +361,7 @@ def generate_geojson_features(self, shapefile_name): self.items.append(feature_json) action = { - '_id': '{}'.format(_id), + '_id': _id, '_index': INDEX_NAME.format(self.zone.lower(), shapefile_name.split('_')[2] ), @@ -382,7 +382,7 @@ def load_data(self, filepath): # set class variables from filename self.parse_filename() - LOGGER.debug('Received file {}'.format(self.filepath)) + LOGGER.debug(f'Received file {self.filepath}') for shapefile in SHAPEFILES_TO_LOAD[self.filepath.stem]: # generate geojson features @@ -453,7 +453,7 @@ def delete_indexes(ctx, index_name, es, username, password, ignore_certs): if click.confirm( 'Are you sure you want to delete ES index named: {}?'.format( click.style(index_name, fg='red')), abort=True): - LOGGER.info('Deleting ES index {}'.format(index_name)) + LOGGER.info(f'Deleting ES index {index_name}') conn.delete(index_name) return True else: diff --git a/msc_pygeoapi/loader/hurricanes_realtime.py b/msc_pygeoapi/loader/hurricanes_realtime.py index de4d8aa5..ebc15ab4 100644 --- a/msc_pygeoapi/loader/hurricanes_realtime.py +++ b/msc_pygeoapi/loader/hurricanes_realtime.py @@ -3,7 +3,7 @@ # Author: Etienne Pelletier # # Copyright (c) 2020 Etienne Pelletier -# Copyright (c) 2022 Tom Kralidis +# Copyright (c) 2023 Tom Kralidis # # Permission is hereby granted, free of charge, to any person # obtaining a copy of this software and associated documentation @@ -424,11 +424,11 @@ def generate_geojson_features(self): self.items.append(feature_json) action = { - '_id': '{}-{}-{}-{}-{}'.format(self.storm_name, - self.storm_variable, - file_datetime_str, - self.fh, - feature_json['id']), + '_id': '-'.join([self.storm_name, + self.storm_variable, + file_datetime_str, + self.fh, + feature_json['id']]), '_index': 
INDEX_NAME.format(self.storm_variable), '_op_type': 'update', 'doc': feature_json, @@ -448,7 +448,7 @@ def load_data(self, filepath): # set class variables from filename self.parse_filename() - LOGGER.debug('Received file {}'.format(self.filepath)) + LOGGER.debug(f'Received file {self.filepath}') # check for shapefile dependencies if self.check_shapefile_deps(): @@ -525,11 +525,11 @@ def deactivate(ctx, days, es, username, password, ignore_certs): for index in INDICES: query = { - "script": "ctx._source.properties.active=false", - "query": { - "range": { - "properties.filedate": { - "lte": "now-{}d".format(days) + 'script': 'ctx._source.properties.active=false', + 'query': { + 'range': { + 'properties.filedate': { + 'lte': f'now-{days}d' } } } diff --git a/msc_pygeoapi/loader/hydat.py b/msc_pygeoapi/loader/hydat.py index 7dca9df7..0301254f 100644 --- a/msc_pygeoapi/loader/hydat.py +++ b/msc_pygeoapi/loader/hydat.py @@ -2,11 +2,11 @@ # # Author: Alex Hurka # Author: Etienne Pelletier -# Author: Tom Kralidis +# Author: Tom Kralidis # # Copyright (c) 2019 Alex Hurka # Copyright (c) 2020 Etienne Pelletier -# Copyright (c) 2021 Tom Kralidis +# Copyright (c) 2023 Tom Kralidis # Permission is hereby granted, free of charge, to any person # obtaining a copy of this software and associated documentation @@ -70,7 +70,7 @@ def __init__(self, db_string, conn_config={}): super().__init__() self.conn = ElasticsearchConnector(conn_config) - self.db_string = 'sqlite:///{}'.format(db_string) + self.db_string = f'sqlite:///{db_string}' self.engine, self.session, self.metadata = self.connect_db() @@ -85,7 +85,7 @@ def zero_pad(self, val): the string representation of the value otherwise. """ if len(str(val)) == 1: - return '0{}'.format(val) + return f'0{val}' else: return str(val) @@ -403,7 +403,7 @@ def connect_db(self): """ try: - LOGGER.info('Connecting to database {}.'.format(self.db_string)) + LOGGER.info(f'Connecting to database {self.db_string}') LOGGER.info('Creating engine...') engine = create_engine(self.db_string) LOGGER.info('Success. 
Database engine created.') @@ -476,11 +476,11 @@ def generate_obs(self, station, var, symbol_table, discharge=True): word_out: '', 'IDENTIFIER': '', } - date = '{}-{}-{}'.format( + date_ = '-'.join([ str(row[1]), self.zero_pad(row[2]), self.zero_pad(i) - ) - insert_dict['DATE'] = date - insert_dict['IDENTIFIER'] = '{}.{}'.format(row[0], date) + ]) + insert_dict['DATE'] = date_ + insert_dict['IDENTIFIER'] = f'{row[0]}.{date_}' value = row[keys.index(word_in.upper() + str(i))] symbol = row[keys.index(word_in.upper() + '_SYMBOL' + str(i))] if symbol is not None and symbol.strip(): @@ -509,9 +509,9 @@ def generate_obs(self, station, var, symbol_table, discharge=True): ) mean_dict = {} - date = '{}-{}'.format(str(row[1]), self.zero_pad(row[2])) - mean_dict['DATE'] = date - mean_dict['IDENTIFIER'] = '{}.{}'.format(row[0], date) + date_ = '-'.join([str(row[1]), self.zero_pad(row[2])]) + mean_dict['DATE'] = date_ + mean_dict['IDENTIFIER'] = f'{row[0]}.{date_}' if row[keys.index('MONTHLY_MEAN')]: mean_dict['MONTHLY_MEAN_' + word_out] = float( @@ -561,11 +561,7 @@ def generate_means( set(discharge_station_codes).union(level_station_codes) ) for station in station_codes: - LOGGER.debug( - 'Generating discharge and level values for station {}'.format( - station - ) - ) + LOGGER.debug(f'Generating discharge and level values for station {station}') # noqa discharge_lst, discharge_means = self.generate_obs( station, discharge_var, symbol_table, True ) @@ -703,11 +699,7 @@ def generate_stations(self, station_table, annual_peaks_table, agency_fr = agency_metadata[agency_keys.index('AGENCY_FR')] else: agency_en = agency_fr = '' - LOGGER.warning( - 'Could not find agency information for station {}'.format( - station - ) - ) + LOGGER.warning(f'Could not find agency information for station {station}') # noqa if datum_id is not None: datum_args = {'DATUM_ID': datum_id} datum_table = self.get_table_var('DATUM_LIST') @@ -720,11 +712,7 @@ def generate_stations(self, station_table, annual_peaks_table, datum_en = datum_metadata[datum_keys.index('DATUM_EN')] else: datum_en = '' - LOGGER.warning( - 'Could not find datum information for station {}'.format( - station - ) - ) + LOGGER.warning(f'Could not find datum information for station {station}') # noqa if station_status is not None: status_args = {'STATUS_CODE': station_status} status_table = self.get_table_var('STN_STATUS_CODES') @@ -738,11 +726,7 @@ def generate_stations(self, station_table, annual_peaks_table, status_fr = status_metadata[status_keys.index('STATUS_FR')] else: status_en = status_fr = '' - LOGGER.warning( - 'Could not find status information for station {}'.format( - station - ) - ) + LOGGER.warning(f'Could not find status information for station {station}') # noqa station_number_args = {'STATION_NUMBER': station} @@ -786,9 +770,7 @@ def generate_stations(self, station_table, annual_peaks_table, { 'type': 'text/html', 'rel': 'alternate', - 'title': 'Station Information for {} ({})'.format( - station_name, station - ), + 'title': f'Station Information for {station_name} ({station})', # noqa 'href': f'https://wateroffice.ec.gc.ca/report/historical_e.html?stn={station}', # noqa 'hreflang': 'en-CA', }, @@ -907,18 +889,18 @@ def generate_annual_stats( f'Could not find min date for station {station_number}' ) else: - min_date = '{}-{}-{}'.format( + min_date = '-'.join([ year, self.zero_pad(min_month), self.zero_pad(min_day) - ) + ]) if max_month is None or max_day is None: max_date = None LOGGER.warning( f'Could not find max date for station 
{station_number}' ) else: - max_date = '{}-{}-{}'.format( + max_date = '-'.join([ year, self.zero_pad(max_month), self.zero_pad(max_day) - ) + ]) symbol_keys = symbol_table.columns.keys() if min_symbol is not None and min_symbol.strip(): args = {'SYMBOL_ID': min_symbol} @@ -945,15 +927,18 @@ f'Could not find max symbol for station {station_number}' ) if data_type_en == 'Water Level': - es_id = '{}.{}.level-niveaux'.format(station_number, year) + level_name_fr = 'level-niveaux' elif data_type_en == 'Discharge': - es_id = '{}.{}.discharge-debit'.format(station_number, year) + level_name_fr = 'discharge-debit' elif data_type_en == 'Sediment in mg/L': - es_id = '{}.{}.sediment-sediment'.format(station_number, year) + level_name_fr = 'sediment-sediment' elif data_type_en == 'Daily Mean Tonnes': - es_id = '{}.{}.tonnes-tonnes'.format(station_number, year) + level_name_fr = 'tonnes-tonnes' else: - es_id = '{}.{}.None'.format(station_number, year) + level_name_fr = 'None' + + es_id = f'{station_number}.{year}.{level_name_fr}' + insert_dict = { 'type': 'Feature', 'properties': { @@ -1046,22 +1031,19 @@ peak_value = result[annual_peaks_keys.index('PEAK')] symbol_id = result[annual_peaks_keys.index('SYMBOL')] if month is None or day is None: - date = None + date_ = None LOGGER.warning( f'Could not find date for station {station_number}' ) elif hour is None or minute is None: - date = '{}-{}-{}'.format( + date_ = '-'.join([ year, self.zero_pad(month), self.zero_pad(day) - ) + ]) else: - date = '{}-{}-{}T{}:{}'.format( - year, - self.zero_pad(month), - self.zero_pad(day), - self.zero_pad(hour), - self.zero_pad(minute), - ) + ymd = '-'.join([year, self.zero_pad(month), self.zero_pad(day)]) # noqa + hm = ':'.join([self.zero_pad(hour), self.zero_pad(minute)]) + date_ = f'{ymd}T{hm}' + args = {'STATION_NUMBER': station_number} try: station_metadata = list( @@ -1110,11 +1092,7 @@ unit_fr = unit_data[unit_keys.index('PRECISION_FR')] else: unit_en = unit_fr = None - LOGGER.warning( - 'Could not find units for station {}'.format( - station_number - ) - ) + LOGGER.warning(f'Could not find units for station {station_number}') # noqa if peak_id: peak_codes = self.get_table_var('PEAK_CODES') peak_keys = peak_codes.columns.keys() @@ -1150,23 +1128,18 @@ peak = None if data_type_en == 'Water Level': - es_id = '{}.{}.level-niveaux.{}'.format( - station_number, year, peak - ) + level_name_fr = 'level-niveaux' elif data_type_en == 'Discharge': - es_id = '{}.{}.discharge-debit.{}'.format( - station_number, year, peak - ) + level_name_fr = 'discharge-debit' elif data_type_en == 'Sediment in mg/L': - es_id = '{}.{}.sediment-sediment.{}'.format( - station_number, year, peak - ) + level_name_fr = 'sediment-sediment' elif data_type_en == 'Daily Mean Tonnes': - es_id = '{}.{}.tonnes-tonnes.{}'.format( - station_number, year, peak - ) + level_name_fr = 'tonnes-tonnes' else: - es_id = '{}.{}.None'.format(station_number, year) + level_name_fr = 'None' + + es_id = f'{station_number}.{year}.{level_name_fr}.{peak}' + insert_dict = { 'type': 'Feature', 'properties': { @@ -1176,7 +1149,7 @@ 'IDENTIFIER': es_id, 'DATA_TYPE_EN': data_type_en, 'DATA_TYPE_FR': data_type_fr, - 'DATE': date, + 'DATE': date_, 'TIMEZONE_OFFSET': time_zone, 'PEAK_CODE_EN': peak_en, 'PEAK_CODE_FR': peak_fr, @@ -1238,9 +1211,9 @@ conn_config = configure_es_connection(es, username, password, ignore_certs) loader =
HydatLoader(db, conn_config) - click.echo('Accessing SQLite database {}'.format(db)) + click.echo(f'Accessing SQLite database {db}') try: - click.echo('Accessing SQLite database {}'.format(db)) + click.echo(f'Accessing SQLite database {db}') discharge_var = level_var = station_table = None level_var = loader.get_table_var('DLY_LEVELS') @@ -1251,7 +1224,7 @@ symbol_table = loader.get_table_var('DATA_SYMBOLS') annual_peaks_table = loader.get_table_var('ANNUAL_INSTANT_PEAKS') except Exception as err: - msg = 'Could not create table variables: {}'.format(err) + msg = f'Could not create table variables: {err}' raise click.ClickException(msg) if dataset == 'all': @@ -1264,7 +1237,7 @@ else: datasets_to_process = [dataset] - click.echo('Processing dataset(s): {}'.format(datasets_to_process)) + click.echo(f'Processing dataset(s): {datasets_to_process}') if 'stations' in datasets_to_process: if MSC_PYGEOAPI_OGC_API_URL is None: @@ -1277,7 +1250,7 @@ station_table, annual_peaks_table, annual_stats_table) loader.conn.submit_elastic_package(stations, batch_size) except Exception as err: - msg = 'Could not populate stations index: {}'.format(err) + msg = f'Could not populate stations index: {err}' raise click.ClickException(msg) if 'observations' in datasets_to_process: @@ -1288,7 +1261,7 @@ station_table, symbol_table) loader.conn.submit_elastic_package(means, batch_size) except Exception as err: - msg = 'Could not populate observations indexes: {}'.format(err) + msg = f'Could not populate observations indexes: {err}' raise click.ClickException(msg) if 'annual-statistics' in datasets_to_process: @@ -1300,7 +1273,7 @@ station_table, symbol_table) loader.conn.submit_elastic_package(stats, batch_size) except Exception as err: - msg = 'Could not populate annual statistics index: {}'.format(err) + msg = f'Could not populate annual statistics index: {err}' raise click.ClickException(msg) if 'annual-peaks' in datasets_to_process: @@ -1312,7 +1285,7 @@ symbol_table, station_table) loader.conn.submit_elastic_package(peaks, batch_size) except Exception as err: - msg = 'Could not populate annual peaks index: {}'.format(err) + msg = f'Could not populate annual peaks index: {err}' raise click.ClickException(msg) diff --git a/msc_pygeoapi/loader/hydrometric_realtime.py b/msc_pygeoapi/loader/hydrometric_realtime.py index f579e0b3..7b579454 100644 --- a/msc_pygeoapi/loader/hydrometric_realtime.py +++ b/msc_pygeoapi/loader/hydrometric_realtime.py @@ -2,7 +2,7 @@ # # Author: Tom Kralidis # -# Copyright (c) 2022 Tom Kralidis +# Copyright (c) 2023 Tom Kralidis # # Permission is hereby granted, free of charge, to any person # obtaining a copy of this software and associated documentation @@ -47,8 +47,7 @@ LOGGER = logging.getLogger(__name__) STATIONS_LIST_NAME = 'hydrometric_StationList.csv' -STATIONS_LIST_URL = 'https://dd.weather.gc.ca/hydrometric/doc/{}' \ .format(STATIONS_LIST_NAME) +STATIONS_LIST_URL = f'https://dd.weather.gc.ca/hydrometric/doc/{STATIONS_LIST_NAME}' # noqa STATIONS_CACHE = os.path.join(MSC_PYGEOAPI_CACHEDIR, STATIONS_LIST_NAME) @@ -61,7 +60,7 @@ SETTINGS = { 'order': 0, 'version': 1, - 'index_patterns': ['{}*'.format(INDEX_BASENAME)], + 'index_patterns': [f'{INDEX_BASENAME}*'], 'settings': { 'number_of_shards': 1, 'number_of_replicas': 0 @@ -199,17 +198,16 @@ def read_stations_list(self): # Discard one row of headers next(reader) except StopIteration: - raise EOFError('Stations file at {} is empty' .format(STATIONS_CACHE)) + raise EOFError(f'Stations
file at {STATIONS_CACHE} is empty') self.stations.clear() for row in reader: if len(row) > 6: - LOGGER.warning('Station list row has too many values: {}' - ' (using first 6)'.format(row)) + LOGGER.warning(f'Station list row has too many values: {row}' # noqa + ' (using first 6)') elif len(row) < 6: - LOGGER.error('Station list row has too few values: {}' - ' (skipping)'.format(row)) + LOGGER.error(f'Station list row has too few values: {row}' + ' (skipping)') continue stn_id, name, lat, lon, province, timezone = row[:6] @@ -218,21 +216,18 @@ def read_stations_list(self): lat = float(lat) lon = float(lon) except ValueError: - LOGGER.error('Cannot interpret coordinates ({}, {}) for' - ' station {} (skipping)' - .format(lon, lat, stn_id)) + LOGGER.error(f'Cannot interpret coordinates ({lon}, {lat}) for' # noqa + f' station {stn_id} (skipping)') continue utcoffset = timezone[4:] if utcoffset.strip() == '': - LOGGER.error('Cannot interpret UTC offset {} for station' - ' {} (skipping)'.format(timezone, stn_id)) + LOGGER.error(f'Cannot interpret UTC offset {timezone} for station {stn_id} (skipping)') # noqa continue LOGGER.debug( - 'Station {}: name={}, province/territory={},' - ' coordinates={}, utcoffset={}' - .format(stn_id, name, province, (lon, lat), utcoffset)) + f'Station {stn_id}: name={name}, province/territory={province},' # noqa + f' coordinates={(lon, lat)}, utcoffset={utcoffset}') stn_info = { 'STATION_NAME': name, @@ -243,8 +238,7 @@ def read_stations_list(self): self.stations[stn_id] = stn_info - LOGGER.debug('Collected stations information: loaded {} stations' - .format(len(self.stations))) + LOGGER.debug(f'Collected stations information: loaded {len(self.stations)} stations') # noqa def generate_observations(self, filepath): """ @@ -271,64 +265,57 @@ def generate_observations(self, filepath): for row in reader: if len(row) > 10: - LOGGER.warning('Data row in {} has too many values:' - ' {} (using only first 10)' - .format(filepath, row)) + LOGGER.warning(f'Data row in {filepath} has too many values:' # noqa + f' {row} (using only first 10)') elif len(row) < 10: - LOGGER.error('Data row in {} has too few values: {}' - ' (skipping)'.format(filepath, row)) + LOGGER.error(f'Data row in {filepath} has too few values: {row}') # noqa continue - station, date, level, _, level_symbol, _, \ + station, date_, level, _, level_symbol, _, \ discharge, _, discharge_symbol, _ = row if station in self.stations: stn_info = self.stations[station] - LOGGER.debug('Found info for station {}'.format(station)) + LOGGER.debug(f'Found info for station {station}') else: - LOGGER.error('Cannot find info for station {} (skipping)' - .format(station)) + LOGGER.error(f'Cannot find info for station {station} (skipping)') # noqa continue try: # Convert timestamp to UTC time. - utc_datetime = delocalize_date(date) + utc_datetime = delocalize_date(date_) utc_datestamp = utc_datetime.strftime('%Y-%m-%d.%H:%M:%S') # Generate an ID now that all fields are known. 
- observation_id = '{}.{}'.format(station, utc_datestamp) + observation_id = f'{station}.{utc_datestamp}' utc_datestamp = utc_datestamp.replace('.', 'T') except Exception as err: - LOGGER.error('Cannot interpret datetime value {} in {}' - ' due to: {} (skipping)' - .format(date, filepath, str(err))) + LOGGER.error(f'Cannot interpret datetime value {date_} in {filepath}' # noqa + f' due to: {err} (skipping)') continue if 'daily' in filepath and utc_datetime > hourly_domain_start: - LOGGER.debug('Daily observation {} overlaps hourly data' - ' (skipping)'.format(observation_id)) + LOGGER.debug(f'Daily observation {observation_id} overlaps hourly data' # noqa + ' (skipping)') continue elif utc_datetime < daily_domain_start: - LOGGER.debug('Daily observation {} precedes retention' - ' period (skipping)'.format(observation_id)) + LOGGER.debug(f'Daily observation {observation_id} precedes retention' # noqa + ' period (skipping)') continue - LOGGER.debug('Generating observation {} from {}: datetime={},' - ' level={}, discharge={}' - .format(observation_id, filepath, utc_datestamp, - level, discharge)) + LOGGER.debug(f'Generating observation {observation_id} from {filepath}: datetime={utc_datestamp},' # noqa + f' level={level}, discharge={discharge}') try: level = float(level) if level.strip() else None except ValueError: - LOGGER.error('Cannot interpret level value {}' - ' (setting null)'.format(level)) + LOGGER.error(f'Cannot interpret level value {level}' + ' (setting null)') try: discharge = float(discharge) if discharge.strip() else None except ValueError: - LOGGER.error('Cannot interpret discharge value {}' - ' (setting null)'.format(discharge)) + LOGGER.error(f'Cannot interpret discharge value {discharge} (setting null)') # noqa if level_symbol.strip() == '': level_symbol_en = None @@ -349,7 +336,7 @@ def generate_observations(self, filepath): 'STATION_NAME': stn_info['STATION_NAME'], 'PROV_TERR_STATE_LOC': stn_info['PROV_TERR_STATE_LOC'], 'DATETIME': utc_datestamp, - 'DATETIME_LST': date, + 'DATETIME_LST': date_, 'LEVEL': level, 'DISCHARGE': discharge, 'LEVEL_SYMBOL_EN': level_symbol_en, @@ -359,11 +346,9 @@ def generate_observations(self, filepath): } } - LOGGER.debug('Observation {} created successfully' - .format(observation_id)) + LOGGER.debug(f'Observation {observation_id} created successfully') # noqa - es_index = '{}{}'.format(INDEX_BASENAME, - utc_datetime.strftime('%Y-%m-%d')) + es_index = f"{INDEX_BASENAME}{utc_datetime.strftime('%Y-%m-%d')}" # noqa action = { '_id': observation_id, @@ -387,7 +372,7 @@ def load_data(self, filepath): if filepath.endswith('hydrometric_StationList.csv'): return True - LOGGER.debug('Received file {}'.format(filepath)) + LOGGER.debug(f'Received file {filepath}') package = self.generate_observations(filepath) self.conn.submit_elastic_package(package, request_size=80000) @@ -402,7 +387,7 @@ def download_stations(): :returns: void """ - LOGGER.debug('Caching {} to {}'.format(STATIONS_LIST_URL, STATIONS_CACHE)) + LOGGER.debug(f'Caching {STATIONS_LIST_URL} to {STATIONS_CACHE}') urllib.request.urlretrieve(STATIONS_LIST_URL, STATIONS_CACHE) @@ -450,7 +435,7 @@ def add(ctx, file_, directory, es, username, password, ignore_certs): def cache_stations(ctx): """Cache local copy of hydrometric realtime stations index""" - click.echo('Caching realtime stations to {}'.format(STATIONS_CACHE)) + click.echo(f'Caching realtime stations to {STATIONS_CACHE}') download_stations() @@ -458,7 +443,7 @@ def cache_stations(ctx): @click.pass_context @cli_options.OPTION_DAYS( 
default=DAYS_TO_KEEP, - help='Delete indexes older than n days (default={})'.format(DAYS_TO_KEEP) + help=f'Delete indexes older than n days (default={DAYS_TO_KEEP})' ) @cli_options.OPTION_ELASTICSEARCH() @cli_options.OPTION_ES_USERNAME() @@ -473,12 +458,12 @@ def clean_indexes(ctx, days, es, username, password, ignore_certs): conn_config = configure_es_connection(es, username, password, ignore_certs) conn = ElasticsearchConnector(conn_config) - indexes = conn.get('{}*'.format(INDEX_BASENAME)) + indexes = conn.get(f'{INDEX_BASENAME}*') if indexes: indexes_to_delete = check_es_indexes_to_delete(indexes, days) if indexes_to_delete: - click.echo('Deleting indexes {}'.format(indexes_to_delete)) + click.echo(f'Deleting indexes {indexes_to_delete}') conn.delete(','.join(indexes_to_delete)) click.echo('Done') @@ -500,13 +485,13 @@ def delete_indexes(ctx, es, username, password, ignore_certs, index_template): conn_config = configure_es_connection(es, username, password, ignore_certs) conn = ElasticsearchConnector(conn_config) - all_indexes = '{}*'.format(INDEX_BASENAME) + all_indexes = f'{INDEX_BASENAME}*' - click.echo('Deleting indexes {}'.format(all_indexes)) + click.echo(f'Deleting indexes {all_indexes}') conn.delete(all_indexes) if index_template: - click.echo('Deleting index template {}'.format(INDEX_BASENAME)) + click.echo(f'Deleting index template {INDEX_BASENAME}') conn.delete_template(INDEX_BASENAME) click.echo('Done') diff --git a/msc_pygeoapi/loader/ltce.py b/msc_pygeoapi/loader/ltce.py index a7d840a2..57bb953d 100644 --- a/msc_pygeoapi/loader/ltce.py +++ b/msc_pygeoapi/loader/ltce.py @@ -3,7 +3,7 @@ # Author: Etienne Pelletier # # Copyright (c) 2020 Etienne Pelletier -# Copyright (c) 2022 Tom Kralidis +# Copyright (c) 2023 Tom Kralidis # # Permission is hereby granted, free of charge, to any person # obtaining a copy of this software and associated documentation @@ -536,7 +536,7 @@ def generate_stations(self): ) ) except Exception as err: - LOGGER.error(f'Could not fetch records from oracle due to: {err}.') + LOGGER.error(f'Could not fetch records from Oracle due to: {err}.') # fetch records and ensure that some records were retrieved records = self.cur.fetchall() @@ -555,13 +555,13 @@ def generate_stations(self): ) es_id = slugify( - '{}-{}-{}-{}-{}'.format( + '-'.join([ insert_dict['VIRTUAL_CLIMATE_ID'], insert_dict["ELEMENT_NAME_E"], insert_dict["CLIMATE_IDENTIFIER"], insert_dict["START_DATE"], - insert_dict["END_DATE"], - ) + insert_dict["END_DATE"] + ]) ) coords = [ @@ -657,11 +657,7 @@ def generate_daily_temp_extremes(self): ) ) except Exception as err: - LOGGER.error( - 'Could not fetch records from oracle due to: {}.'.format( - str(err) - ) - ) + LOGGER.error(f'Could not fetch records from Oracle due to: {err}') # dictionnary to store stations information once retrieved stations_dict = {} @@ -684,11 +680,11 @@ def generate_daily_temp_extremes(self): ) virtual_climate_id = insert_dict['VIRTUAL_CLIMATE_ID'] - es_id = '{}-{}-{}'.format( + es_id = '-'.join([ insert_dict['VIRTUAL_CLIMATE_ID'], - insert_dict["LOCAL_MONTH"], - insert_dict["LOCAL_DAY"], - ) + insert_dict['LOCAL_MONTH'], + insert_dict['LOCAL_DAY'] + ]) # check if we have station IDs record begin and end. 
If not # retrieve the information and store in stations_dict @@ -742,10 +738,10 @@ def generate_daily_temp_extremes(self): for level in ['MIN', 'MAX']: # set new insert_dict keys insert_dict[ - '{}_TEMP_RECORD_BEGIN'.format(level) + f'{level}_TEMP_RECORD_BEGIN' ] = stations_dict[virtual_climate_id][level]['record_begin'] insert_dict[ - '{}_TEMP_RECORD_END'.format(level) + f'{level}_TEMP_RECORD_END' ] = stations_dict[virtual_climate_id][level]['record_end'] # cleanup unwanted fields retained from SQL join @@ -808,11 +804,7 @@ def generate_daily_precip_extremes(self): ) ) except Exception as err: - LOGGER.error( - 'Could not fetch records from oracle due to: {}.'.format( - str(err) - ) - ) + LOGGER.error(f'Could not fetch records from Oracle due to: {err}') stations_dict = {} @@ -836,11 +828,11 @@ def generate_daily_precip_extremes(self): ) virtual_climate_id = insert_dict['VIRTUAL_CLIMATE_ID'] - es_id = '{}-{}-{}'.format( + es_id = '-'.join([ insert_dict['VIRTUAL_CLIMATE_ID'], insert_dict["LOCAL_MONTH"], - insert_dict["LOCAL_DAY"], - ) + insert_dict["LOCAL_DAY"] + ]) # check if we have station IDs record begin and end if not retrieve if virtual_climate_id not in stations_dict: @@ -927,11 +919,7 @@ def generate_daily_snow_extremes(self): ) ) except Exception as err: - LOGGER.error( - 'Could not fetch records from oracle due to: {}.'.format( - str(err) - ) - ) + LOGGER.error(f'Could not fetch records from Oracle due to: {err}') stations_dict = {} @@ -953,11 +941,11 @@ def generate_daily_snow_extremes(self): ) virtual_climate_id = insert_dict['VIRTUAL_CLIMATE_ID'] - es_id = '{}-{}-{}'.format( + es_id = '-'.join([ insert_dict['VIRTUAL_CLIMATE_ID'], insert_dict["LOCAL_MONTH"], - insert_dict["LOCAL_DAY"], - ) + insert_dict["LOCAL_DAY"] + ]) # check if we have station IDs record begin and end if not retrieve if virtual_climate_id not in stations_dict: @@ -1188,8 +1176,7 @@ def add( @click.pass_context @cli_options.OPTION_DAYS( default=DAYS_TO_KEEP, - help='Delete indexes older than n days (default={})'.format(DAYS_TO_KEEP), -) + help=f'Delete indexes older than n days (default={DAYS_TO_KEEP})') @cli_options.OPTION_ELASTICSEARCH() @cli_options.OPTION_ES_USERNAME() @cli_options.OPTION_ES_PASSWORD() @@ -1210,7 +1197,7 @@ def clean_indexes(ctx, days, es, username, password, ignore_certs): pattern=INDEX_PATTERN ) if indexes_to_delete: - click.echo('Deleting indexes {}'.format(indexes_to_delete)) + click.echo(f'Deleting indexes {indexes_to_delete}') conn.delete(','.join(indexes_to_delete)) click.echo('Done') @@ -1264,7 +1251,7 @@ def delete_indexes( if index_name: for i in index_name.split(","): if i in INDICES: - LOGGER.info('Deleting ES index {}'.format(i)) + LOGGER.info(f'Deleting ES index {i}') loader.conn.delete(f'{i}.*') else: LOGGER.info('Deleting all LTCE ES indices') diff --git a/msc_pygeoapi/loader/marine_weather_realtime.py b/msc_pygeoapi/loader/marine_weather_realtime.py index 71a8f39a..2b7f0101 100644 --- a/msc_pygeoapi/loader/marine_weather_realtime.py +++ b/msc_pygeoapi/loader/marine_weather_realtime.py @@ -3,7 +3,7 @@ # Author: Etienne Pelletier # # Copyright (c) 2020 Etienne Pelletier -# Copyright (c) 2022 Tom Kralidis +# Copyright (c) 2023 Tom Kralidis # # Permission is hereby granted, free of charge, to any person # obtaining a copy of this software and associated documentation @@ -423,10 +423,7 @@ def set_area_info(self): return True except exceptions.NotFoundError: - LOGGER.warning( - "Could not get forecast polygon document with id: " - "{}".format(forecast_id) - ) + 
LOGGER.warning(f'Could not get forecast polygon document with id: {forecast_id}') # noqa def generate_warnings(self): """ @@ -444,16 +441,12 @@ def generate_warnings(self): feature['geometry'] = self.area['geometry'] - feature['properties']['area_{}'.format(self.language)] = self.area[ - 'name' - ] - feature['properties']['region_{}'.format(self.language)] = self.area[ - 'region' - ] + feature['properties'][f'area_{self.language}'] = self.area['name'] + feature['properties'][f'region_{self.language}'] = self.area['region'] feature['properties'][ - 'sub_region_{}'.format(self.language) + f'sub_region_{self.language}' ] = self.area['subRegion'] - feature['properties']['warnings_{}'.format(self.language)] = [] + feature['properties'][f'warnings_{self.language}'] = [] if len(warnings) > 0: for elem in warnings: @@ -461,29 +454,15 @@ def generate_warnings(self): elem.findall('event/' 'dateTime') ) location = { - 'location_{}'.format(self.language): elem.attrib['name'], - 'issued_datetime_utc_{}'.format( - self.language - ): strftime_rfc3339(datetimes['utc']), - 'issued_datetime_local_{}'.format( - self.language - ): strftime_rfc3339(datetimes['local']), - 'event_type_{}'.format(self.language): elem.find( - 'event' - ).attrib['type'], - 'event_category_{}'.format(self.language): elem.find( - 'event' - ).attrib['category'], - 'event_name_{}'.format(self.language): elem.find( - 'event' - ).attrib['name'], - 'event_status_{}'.format(self.language): elem.find( - 'event' - ).attrib['status'], + f'location_{self.language}': elem.attrib['name'], + f'issued_datetime_utc_{self.language}': strftime_rfc3339(datetimes['utc']), # noqa + f'issued_datetime_local_{self.language}': strftime_rfc3339(datetimes['local']), # noqa + f'event_type_{self.language}': elem.find('event').attrib['type'], # noqa + f'event_category_{self.language}': elem.find('event').attrib['category'], # noqa + f'event_name_{self.language}': elem.find('event').attrib['name'], # noqa + f'event_status_{self.language}': elem.find('event').attrib['status'] # noqa } - feature['properties'][ - 'warnings_{}'.format(self.language) - ].append(location) + feature['properties'][f'warnings_{self.language}'].append(location) # noqa self.items.append(feature) @@ -510,16 +489,16 @@ def generate_regular_forecasts(self): feature = {'type': 'Feature', 'geometry': {}, 'properties': {}} feature['geometry'] = self.area['geometry'] - feature['properties']['area_{}'.format(self.language)] = self.area[ + feature['properties'][f'area_{self.language}'] = self.area[ 'name' ] - feature['properties']['region_{}'.format(self.language)] = self.area[ + feature['properties'][f'region_{self.language}'] = self.area[ 'region' ] feature['properties'][ - 'sub_region_{}'.format(self.language) + f'sub_region_{self.language}' ] = self.area['subRegion'] - feature['properties']['forecasts_{}'.format(self.language)] = [] + feature['properties'][f'forecasts_{self.language}'] = [] if len(regular_forecasts) > 0: datetimes = self.create_datetime_dict( @@ -543,49 +522,37 @@ def generate_regular_forecasts(self): ] for location in locations: location = { - 'location_{}'.format(self.language): location.attrib[ - 'name' - ] - if 'name' in location.attrib - else self.area['name'], - 'period_of_coverage_{}'.format( - self.language - ): location.find('weatherCondition/periodOfCoverage').text + f'location_{self.language}': location.attrib['name'] + if 'name' in location.attrib else self.area['name'], + f'period_of_coverage_{self.language}': + 
location.find('weatherCondition/periodOfCoverage').text if location.find('weatherCondition/periodOfCoverage') - is not None - else None, - 'wind_{}'.format(self.language): location.find( + is not None else None, + f'wind_{self.language}': location.find( 'weatherCondition/wind' ).text if location.find('weatherCondition/wind') is not None else None, - 'weather_visibility_{}'.format( - self.language - ): location.find('weatherCondition/weatherVisibility').text + f'weather_visibility_{self.language}': + location.find('weatherCondition/weatherVisibility').text if location.find('weatherCondition/weatherVisibility') - is not None - else None, - 'air_temperature_{}'.format(self.language): location.find( + is not None else None, + f'air_temperature_{self.language}': location.find( 'weatherCondition/airTemperature' ).text if location.find('weatherCondition/airTemperature') - is not None - else None, - 'freezing_spray_{}'.format(self.language): location.find( + is not None else None, + f'freezing_spray_{self.language}': location.find( 'weatherCondition/freezingSpray' ).text if location.find('weatherCondition/freezingSpray') - is not None - else None, - 'status_statement_{}'.format(self.language): location.find( + is not None else None, + f'status_statement_{self.language}': location.find( 'statusStatement' ).text - if location.find('statusStatement') is not None - else None, + if location.find('statusStatement') is not None else None } - feature['properties'][ - 'forecasts_{}'.format(self.language) - ].append(location) + feature['properties'][f'forecasts_{self.language}'].append(location) # noqa self.items.append(feature) @@ -612,18 +579,10 @@ def generate_extended_forecasts(self): feature = {'type': 'Feature', 'geometry': {}, 'properties': {}} feature['geometry'] = self.area['geometry'] - feature['properties']['area_{}'.format(self.language)] = self.area[ - 'name' - ] - feature['properties']['region_{}'.format(self.language)] = self.area[ - 'region' - ] - feature['properties'][ - 'sub_region_{}'.format(self.language) - ] = self.area['subRegion'] - feature['properties'][ - 'extended_forecasts_{}'.format(self.language) - ] = [] + feature['properties'][f'area_{self.language}'] = self.area['name'] + feature['properties'][f'region_{self.language}'] = self.area['region'] + feature['properties'][f'sub_region_{self.language}'] = self.area['subRegion'] # noqa + feature['properties'][f'extended_forecasts_{self.language}'] = [] if len(extended_forecasts) > 0: datetimes = self.create_datetime_dict( @@ -647,34 +606,29 @@ def generate_extended_forecasts(self): ] for location in locations: location = { - 'location_{}'.format(self.language): location.attrib[ - 'name' - ] + f'location_{self.language}': location.attrib['name'] if 'name' in location.attrib else self.area['name'], - 'forecast_periods_{}'.format(self.language): [ + f'forecast_periods_{self.language}': [ { - 'forecast_period_{}'.format( - self.language - ): forecast_period.attrib['name'], - 'forecast_{}'.format( - self.language - ): forecast_period.text, + f'forecast_period_{self.language}': + forecast_period.attrib['name'], + f'forecast_{self.language}': + forecast_period.text, } for forecast_period in location.findall( 'weatherCondition/' ) if location.findall('weatherCondition/') is not None ], - 'status_statement_{}'.format(self.language): location.find( + f'status_statement_{self.language}': location.find( 'statusStatement' ).text if location.find('statusStatement') is not None else None, } feature['properties'][ - 
'extended_forecasts_{}'.format(self.language) - ].append(location) + f'extended_forecasts_{self.language}'].append(location) self.items.append(feature) @@ -697,7 +651,7 @@ def load_data(self, filepath): self.filepath = Path(filepath) self.parse_filename() - LOGGER.debug('Received file {}'.format(self.filepath)) + LOGGER.debug(f'Received file {self.filepath}') self.root = etree.parse(str(self.filepath.resolve())).getroot() @@ -775,7 +729,7 @@ def delete_index(ctx, index_name, es, username, password, ignore_certs): ), abort=True, ): - LOGGER.info('Deleting ES index {}'.format(index_name)) + LOGGER.info(f'Deleting ES index {index_name}') conn.delete(index_name) return True else: diff --git a/msc_pygeoapi/loader/metnotes.py b/msc_pygeoapi/loader/metnotes.py index 56f7e36c..56c550f3 100644 --- a/msc_pygeoapi/loader/metnotes.py +++ b/msc_pygeoapi/loader/metnotes.py @@ -4,6 +4,7 @@ # # # Copyright (c) 2022 Louis-Philippe Rousseau-Lambert +# Copyright (c) 2023 Tom Kralidis # # Permission is hereby granted, free of charge, to any person # obtaining a copy of this software and associated documentation @@ -164,7 +165,7 @@ SETTINGS = { 'order': 0, 'version': 1, - 'index_patterns': ['{}*'.format(INDEX_BASENAME)], + 'index_patterns': [f'{INDEX_BASENAME}*'], 'settings': {'number_of_shards': 1, 'number_of_replicas': 0}, 'mappings': None } @@ -214,12 +215,9 @@ def load_data(self, filepath): '%Y-%m-%dT%H:%M:%S.%fZ' ) b_dt2 = b_dt.strftime('%Y-%m-%d') - es_index = '{}{}'.format(INDEX_BASENAME, b_dt2) + es_index = f'{INDEX_BASENAME}{b_dt2}' - id_ = '{}_{}'.format( - feature['id'], - feature['properties']['publication_version'] - ) + id_ = f"{feature['id']}_{feature['properties']['publication_version']}" # noqa feature['properties']['metnote_id'] = feature['id'] feature['id'] = feature['properties']['id'] = id_ @@ -229,7 +227,7 @@ def load_data(self, filepath): try: self.update_es_index(es_index, id_, feature) except Exception as err: - LOGGER.warning('Error indexing: {}'.format(err)) + LOGGER.warning(f'Error indexing: {err}') return False self.update_temporal_config() @@ -254,10 +252,10 @@ def update_es_index(self, es_index, id, feature): r = self.conn.Elasticsearch.index( index=es_index, id=id, body=feature, refresh=True ) - LOGGER.debug('Result: {}'.format(r)) + LOGGER.debug(f'Result: {r}') return True except Exception as err: - LOGGER.warning('Error indexing: {}'.format(err)) + LOGGER.warning(f'Error indexing: {err}') return False def update_temporal_config(self): @@ -334,9 +332,9 @@ def set_active_metnote(self): } try: - self.conn.update_by_query(query, '{}*'.format(INDEX_BASENAME)) + self.conn.update_by_query(query, f'{INDEX_BASENAME}*') except Exception as err: - LOGGER.warning('{}: failed to update ES index'.format(err)) + LOGGER.warning(f'Failed to update ES index: {err}') return True @@ -373,7 +371,7 @@ def add(ctx, file_, es, username, password, ignore_certs): @click.pass_context @cli_options.OPTION_DAYS( default=DAYS_TO_KEEP, - help='Delete indexes older than n days (default={})'.format(DAYS_TO_KEEP) + help=f'Delete indexes older than n days (default={DAYS_TO_KEEP})' ) @cli_options.OPTION_ELASTICSEARCH() @cli_options.OPTION_ES_USERNAME() @@ -386,12 +384,12 @@ def clean_indexes(ctx, days, es, username, password, ignore_certs): conn_config = configure_es_connection(es, username, password, ignore_certs) conn = ElasticsearchConnector(conn_config) - indexes = conn.get('{}*'.format(INDEX_BASENAME)) + indexes = conn.get(f'{INDEX_BASENAME}*') if indexes: indexes_to_delete = 
check_es_indexes_to_delete(indexes, days) if indexes_to_delete: - click.echo('Deleting indexes {}'.format(indexes_to_delete)) + click.echo(f'Deleting indexes {indexes_to_delete}') conn.delete(','.join(indexes_to_delete)) click.echo('Done') @@ -411,13 +409,13 @@ def delete_index(ctx, es, username, password, ignore_certs, index_template): conn_config = configure_es_connection(es, username, password, ignore_certs) conn = ElasticsearchConnector(conn_config) - all_indexes = '{}*'.format(INDEX_BASENAME) + all_indexes = f'{INDEX_BASENAME}*' - click.echo('Deleting indexes {}'.format(all_indexes)) + click.echo(f'Deleting indexes {all_indexes}') conn.delete(all_indexes) if index_template: - click.echo('Deleting index template {}'.format(INDEX_BASENAME)) + click.echo(f'Deleting index template {INDEX_BASENAME}') conn.delete_template(INDEX_BASENAME) click.echo('Done') diff --git a/msc_pygeoapi/loader/nwp_dataset_footprints.py b/msc_pygeoapi/loader/nwp_dataset_footprints.py index 329d45aa..1731a9cb 100644 --- a/msc_pygeoapi/loader/nwp_dataset_footprints.py +++ b/msc_pygeoapi/loader/nwp_dataset_footprints.py @@ -7,6 +7,7 @@ # # Copyright (c) 2023 Nicolas Dion-Degodez # Copyright (c) 2023 Louis-Philippe Rousseau-Lambert +# Copyright (c) 2023 Tom Kralidis # # Permission is hereby granted, free of charge, to any person # obtaining a copy of this software and associated documentation @@ -91,7 +92,7 @@ SETTINGS = { 'order': 0, 'version': 1, - 'index_patterns': ['{}*'.format(INDEX_NAME)], + 'index_patterns': [f'{INDEX_NAME}*'], 'settings': {'number_of_shards': 1, 'number_of_replicas': 0}, 'mappings': None } @@ -257,13 +258,13 @@ def load_data(self, filepath): self.filepath = Path(filepath) - LOGGER.debug('Received file {}'.format(self.filepath)) + LOGGER.debug(f'Received file {self.filepath}') # Open the MCF try: opened_file = self.open_mcf(self.filepath) except yaml.parser.ParserError as err: - msg = 'Could not open {}: {}'.format(self.filepath, err) + msg = f'Could not open {self.filepath}: {err}' LOGGER.warning(msg) return False @@ -277,7 +278,7 @@ def load_data(self, filepath): polygon = self.get_reprojected_polygon() data['geometry'] = json.loads(polygon) except AttributeError as err: - msg = 'Error generating footprint polygon: {}'.format(err) + msg = f'Error generating footprint polygon: {err}' LOGGER.warning(msg) return False @@ -286,10 +287,10 @@ def load_data(self, filepath): r = self.conn.Elasticsearch.index( index=INDEX_NAME, id=data['id'], body=data ) - LOGGER.debug('Result: {}'.format(r)) + LOGGER.debug(f'Result: {r}') return True except Exception as err: - LOGGER.warning('Error indexing: {}'.format(err)) + LOGGER.warning(f'Error indexing: {err}') return False else: LOGGER.warning( @@ -335,7 +336,7 @@ def add(ctx, file_, directory, es, username, password, ignore_certs): loader = DatasetFootprintLoader(conn_config) result = loader.load_data(file_to_process) if not result: - click.echo('features not generated: {}'.format(file_to_process)) + click.echo(f'features not generated: {file_to_process}') @click.command() diff --git a/msc_pygeoapi/loader/radar_coverage_realtime.py b/msc_pygeoapi/loader/radar_coverage_realtime.py index 93146490..62f02e79 100644 --- a/msc_pygeoapi/loader/radar_coverage_realtime.py +++ b/msc_pygeoapi/loader/radar_coverage_realtime.py @@ -3,6 +3,7 @@ # Author: Etienne Pelletier # # Copyright (c) 2022 Etienne Pelletier +# Copyright (c) 2023 Tom Kralidis # # Permission is hereby granted, free of charge, to any person # obtaining a copy of this software and associated 
documentation @@ -480,7 +481,7 @@ def delete_indexes( else: indexes = '{}*'.format(INDEX_BASENAME.format(dataset)) - click.echo('Deleting indexes {}'.format(indexes)) + click.echo(f'Deleting indexes {indexes}') conn.delete(indexes) diff --git a/msc_pygeoapi/loader/swob_realtime.py b/msc_pygeoapi/loader/swob_realtime.py index a4c73fa2..5733c5c6 100644 --- a/msc_pygeoapi/loader/swob_realtime.py +++ b/msc_pygeoapi/loader/swob_realtime.py @@ -6,7 +6,7 @@ # # Copyright (c) 2020 Thinesh Sornalingam # Copyright (c) 2020 Robert Westhaver -# Copyright (c) 2022 Tom Kralidis +# Copyright (c) 2023 Tom Kralidis # # Permission is hereby granted, free of charge, to any person # obtaining a copy of this software and associated documentation @@ -52,9 +52,7 @@ LOGGER = logging.getLogger(__name__) STATIONS_LIST_NAME = 'swob-xml_station_list.csv' -STATIONS_LIST_URL = 'https://dd.weather.gc.ca/observations/doc/{}'.format( - STATIONS_LIST_NAME - ) +STATIONS_LIST_URL = f'https://dd.weather.gc.ca/observations/doc/{STATIONS_LIST_NAME}' # noqa STATIONS_CACHE = os.path.join(MSC_PYGEOAPI_CACHEDIR, STATIONS_LIST_NAME) @@ -67,7 +65,7 @@ SETTINGS = { 'order': 0, 'version': 1, - 'index_patterns': ['{}*'.format(INDEX_BASENAME)], + 'index_patterns': [f'{INDEX_BASENAME}*'], 'settings': {'number_of_shards': 1, 'number_of_replicas': 0}, 'mappings': { 'properties': { @@ -113,7 +111,7 @@ def parse_swob(swob_file): try: xml_tree = etree.parse(fh) except (FileNotFoundError, etree.ParseError): - msg = 'Error: file {} cannot be parsed as xml'.format(swob_file) + msg = f'Error: file {swob_file} cannot be parsed as xml' LOGGER.debug(msg) raise RuntimeError(msg) @@ -160,9 +158,7 @@ def parse_swob(swob_file): else: element_name = element.attrib[key] else: - properties[ - "{}-{}".format(element_name, key) - ] = element.attrib[key] + properties[f'{element_name}-{key}'] = element.attrib[key] # noqa # set up cords and time stamps swob_values['coordinates'] = [longitude, latitude, elevation] @@ -226,24 +222,14 @@ def parse_swob(swob_file): if all([name != 'qa_summary', name != 'data_flag']): properties[name] = value if uom: - properties["{}-{}".format(name, 'uom')] = uom + properties[f'{name}-uom'] = uom last_element = name elif name == 'qa_summary': - properties["{}-{}".format(last_element, 'qa')] = value + properties[f'{last_element}-qa'] = value elif name == 'data_flag': - properties[ - "{}-{}-{}".format(last_element, 'data_flag', 'uom') - ] = uom - properties[ - "{}-{}-{}".format( - last_element, 'data_flag', 'code_src' - ) - ] = nest_elem.attrib['code-src'] - properties[ - "{}-{}-{}".format( - last_element, 'data_flag', 'value' - ) - ] = value + properties[f'{last_element}-data_flag-uom'] = uom + properties[f'{last_element}-data_flag-code_src'] = nest_elem.attrib['code-src'] # noqa + properties[f'{last_element}-data_flag-value'] = value swob_values['properties'] = properties @@ -323,16 +309,14 @@ def generate_observations(self, filepath): observation = swob2geojson(filepath) observation_id = observation['id'] - LOGGER.debug( - 'Observation {} created successfully'.format(observation_id) - ) + LOGGER.debug(f'Observation {observation_id} created successfully') obs_dt = datetime.strptime( observation['properties']['date_tm-value'], DATETIME_RFC3339_MILLIS_FMT, ) obs_dt2 = obs_dt.strftime('%Y-%m-%d') - es_index = '{}{}'.format(INDEX_BASENAME, obs_dt2) + es_index = f'{INDEX_BASENAME}{obs_dt2}' action = { '_id': observation_id, @@ -355,7 +339,7 @@ def load_data(self, filepath): :returns: `bool` of status result """ - LOGGER.debug('Received 
file {}'.format(filepath)) + LOGGER.debug(f'Received file {filepath}') chunk_size = 80000 package = self.generate_observations(filepath) @@ -407,7 +391,7 @@ def add(ctx, file_, directory, es, username, password, ignore_certs): @click.pass_context @cli_options.OPTION_DAYS( default=DAYS_TO_KEEP, - help='Delete indexes older than n days (default={})'.format(DAYS_TO_KEEP) + help=f'Delete indexes older than n days (default={DAYS_TO_KEEP})' ) @cli_options.OPTION_ELASTICSEARCH() @cli_options.OPTION_ES_USERNAME() @@ -420,13 +404,13 @@ def clean_indexes(ctx, days, es, username, password, ignore_certs): conn_config = configure_es_connection(es, username, password, ignore_certs) conn = ElasticsearchConnector(conn_config) - indexes = conn.get('{}*'.format(INDEX_BASENAME)) + indexes = conn.get(f'{INDEX_BASENAME}*') click.echo(indexes) if indexes: indexes_to_delete = check_es_indexes_to_delete(indexes, days) if indexes_to_delete: - click.echo('Deleting indexes {}'.format(indexes_to_delete)) + click.echo(f'Deleting indexes {indexes_to_delete}') conn.delete(','.join(indexes_to_delete)) click.echo('Done') @@ -448,13 +432,13 @@ def delete_indexes(ctx, es, username, password, ignore_certs, index_template): conn_config = configure_es_connection(es, username, password, ignore_certs) conn = ElasticsearchConnector(conn_config) - all_indexes = '{}*'.format(INDEX_BASENAME) + all_indexes = f'{INDEX_BASENAME}*' - click.echo('Deleting indexes {}'.format(all_indexes)) + click.echo(f'Deleting indexes {all_indexes}') conn.delete(all_indexes) if index_template: - click.echo('Deleting index template {}'.format(INDEX_BASENAME)) + click.echo(f'Deleting index template {INDEX_BASENAME}') conn.delete_template(INDEX_BASENAME) click.echo('Done') diff --git a/msc_pygeoapi/plugin.py b/msc_pygeoapi/plugin.py index 49b0ff69..46ddcb84 100644 --- a/msc_pygeoapi/plugin.py +++ b/msc_pygeoapi/plugin.py @@ -1,9 +1,9 @@ # ================================================================= # -# Author: Tom Kralidis +# Author: Tom Kralidis # Felix Laframboise # -# Copyright (c) 2020 Tom Kralidis +# Copyright (c) 2023 Tom Kralidis # Copyright (c) 2021 Felix Laframboise # # Permission is hereby granted, free of charge, to any person @@ -95,7 +95,7 @@ def load_plugin(plugin_type, plugin_def, **kwargs): """ if plugin_type not in PLUGINS.keys(): - msg = 'Plugin {} not found'.format(plugin_type) + msg = f'Plugin {plugin_type} not found' LOGGER.exception(msg) raise InvalidPluginError(msg) @@ -103,8 +103,8 @@ def load_plugin(plugin_type, plugin_def, **kwargs): packagename, classname = handler.rsplit('.', 1) - LOGGER.debug('package name: {}'.format(packagename)) - LOGGER.debug('class name: {}'.format(classname)) + LOGGER.debug(f'package name: {packagename}') + LOGGER.debug(f'class name: {classname}') module = importlib.import_module(packagename) class_ = getattr(module, classname) diff --git a/msc_pygeoapi/process/cccs/raster_drill.py b/msc_pygeoapi/process/cccs/raster_drill.py index c3239b3a..90bd73a1 100644 --- a/msc_pygeoapi/process/cccs/raster_drill.py +++ b/msc_pygeoapi/process/cccs/raster_drill.py @@ -4,6 +4,7 @@ # # # Copyright (c) 2019 Louis-Philippe Rousseau-Lambert +# Copyright (c) 2023 Tom Kralidis # # Permission is hereby granted, free of charge, to any person # obtaining a copy of this software and associated documentation @@ -179,7 +180,7 @@ def get_time_info(cfg): if month == 0: year = year - 1 month = 12 - time_stamp = '{}-{}'.format(year, str(month).zfill(2)) + time_stamp = f'{year}-{month:02}' dates.append(time_stamp) return dates @@ 
-232,7 +233,7 @@ def get_location_info(file_, x, y, cfg, layer_keys): 'dates': [] } - LOGGER.debug('Opening {}'.format(file_)) + LOGGER.debug(f'Opening {file_}') try: LOGGER.debug('Fetching units') dict_['time_step'] = cfg['timestep'] @@ -246,13 +247,13 @@ def get_location_info(file_, x, y, cfg, layer_keys): except RuntimeError as err: ds = None - msg = 'Cannot open file: {}'.format(err) + msg = f'Cannot open file: {err}' LOGGER.exception(msg) LOGGER.debug('Running through bands') for band in range(1, ds.RasterCount + 1): - LOGGER.debug('Fetching band {}'.format(band)) + LOGGER.debug(f'Fetching band {band}') srcband = ds.GetRasterBand(band) array = srcband.ReadAsArray().tolist() @@ -260,7 +261,7 @@ def get_location_info(file_, x, y, cfg, layer_keys): try: dict_['values'].append(array[y_][x_]) except IndexError as err: - msg = 'Invalid x/y value: {}'.format(err) + msg = f'Invalid x/y value: {err}' LOGGER.exception(msg) dict_['dates'] = get_time_info(cfg) @@ -322,10 +323,8 @@ def serialize(values_dict, cfg, output_format, x, y): pctl_en = pctl_fr = '' if output_format == 'CSV': - time = 'time_{}/{}/{}'.format(time_begin, - time_end, - time_step) + time_ = f'time_{time_begin}/{time_end}/{time_step}' - row = [time, + row = [time_, 'values', 'longitude', 'latitude', @@ -459,7 +458,7 @@ def raster_drill(layer, x, y, format_): file_path = cfg['layers'][layer]['filepath'] inter_path = os.path.join(climate_model_path, file_path) - file_name = '{}.vrt'.format(cfg['layers'][layer]['filename']) + file_name = f"{cfg['layers'][layer]['filename']}.vrt" elif layer.startswith('SPEI'): keys = ['Variable', 'Variation', 'Scenario', 'Period', 'Percentile'] @@ -478,7 +477,7 @@ def raster_drill(layer, x, y, format_): file_name = cfg['layers'][layer]['filename'] else: - msg = 'Not a valid or time enabled layer: {}'.format(layer) + msg = f'Not a valid or time enabled layer: {layer}' LOGGER.error(msg) raise ValueError(msg) @@ -547,7 +546,7 @@ def execute(self, data): try: output = raster_drill(layer, x, y, format_) except ValueError as err: - msg = 'Process execution error: {}'.format(err) + msg = f'Process execution error: {err}' LOGGER.error(msg) raise ProcessorExecuteError(msg) @@ -564,6 +563,7 @@ def execute(self, data): return mimetype, dict_ def __repr__(self): - return ' {}'.format(self.name) + return f' {self.name}' + except (ImportError, RuntimeError): pass diff --git a/msc_pygeoapi/process/weather/extract_raster.py b/msc_pygeoapi/process/weather/extract_raster.py index 26eb04c5..60f1d123 100644 --- a/msc_pygeoapi/process/weather/extract_raster.py +++ b/msc_pygeoapi/process/weather/extract_raster.py @@ -3,6 +3,7 @@ # Author: Tom Cooney # # Copyright (c) 2021 Tom Cooney +# Copyright (c) 2023 Tom Kralidis # # Permission is hereby granted, free of charge, to any person # obtaining a copy of this software and associated documentation @@ -172,13 +173,13 @@ def get_files(layers, fh, mr): list_files.append(files) - except IndexError as error: - msg = 'invalid input value: {}' .format(error) + except IndexError as err: + msg = f'invalid input value: {err}' LOGGER.error(msg) return None, None - except exceptions.ElasticsearchException as error: - msg = 'ES search failed: {}' .format(error) + except exceptions.ElasticsearchException as err: + msg = f'ES search failed: {err}' LOGGER.error(msg) return None, None @@ -604,7 +605,7 @@ def execute(self, data): return 'application/json', output_geojson def __repr__(self): - return ' {}'.format(self.name) + return f' {self.name}' except (ImportError, RuntimeError) as err: - 
LOGGER.warning('Import errors: {}'.format(err)) + LOGGER.warning(f'Import errors: {err}') diff --git a/msc_pygeoapi/process/weather/extract_sounding_data.py b/msc_pygeoapi/process/weather/extract_sounding_data.py index 11f957f6..74db0d36 100644 --- a/msc_pygeoapi/process/weather/extract_sounding_data.py +++ b/msc_pygeoapi/process/weather/extract_sounding_data.py @@ -3,6 +3,7 @@ # Author: Philippe Theroux # # Copyright (c) 2022 Philippe Theroux +# Copyright (c) 2023 Tom Kralidis # # Permission is hereby granted, free of charge, to any person # obtaining a copy of this software and associated documentation @@ -253,7 +254,7 @@ def extract_sounding_data( file_name = f"/{date_formatted}T{run_hour}Z_MSC_HRDPS_{{info}}_ISBL_*_RLatLon0.0225_PT{forecast_hour}H.grib2" # noqa first_value = f"/{date_formatted}T{run_hour}Z_MSC_HRDPS_{{info}}_AGL-{{height}}m_RLatLon0.0225_PT{forecast_hour}H.grib2" # noqa else: - msg = "Not a valid model: {}".format(model) + msg = f'Not a valid model: {model}' LOGGER.error(msg) raise ValueError(msg) @@ -629,14 +630,14 @@ def execute(self, data): noval_above_100, ) except ValueError as err: - msg = "Process execution error: {}".format(err) + msg = f'Process execution error: {err}' LOGGER.error(msg) raise ProcessorExecuteError(msg) return mimetype, output def __repr__(self): - return " {}".format(self.name) + return f' {self.name}' except (ImportError, RuntimeError) as err: - LOGGER.warning("Import errors: {}".format(err)) + LOGGER.warning(f'Import errors: {err}') diff --git a/msc_pygeoapi/provider/cangrd_rasterio.py b/msc_pygeoapi/provider/cangrd_rasterio.py index 20e94bb0..1f78c5f6 100644 --- a/msc_pygeoapi/provider/cangrd_rasterio.py +++ b/msc_pygeoapi/provider/cangrd_rasterio.py @@ -5,7 +5,7 @@ # Tom Kralidis # # Copyright (c) 2022 Louis-Philippe Rousseau-Lambert -# Copyright (c) 2022 Tom Kralidis +# Copyright (c) 2023 Tom Kralidis # # Permission is hereby granted, free of charge, to any person # obtaining a copy of this software and associated documentation @@ -155,10 +155,10 @@ def get_coverage_domainset(self): time_axis['uomLabel'] = 'year' else: begin = search('_{:d}-{:d}.tif', begin_file) - begin = '{}-{}'.format(begin[0], str(begin[1]).zfill(2)) + begin = f'{begin[0]}-{begin[1]:02}' end = search('_{:d}-{:d}.tif', end_file) - end = '{}-{}'.format(end[0], str(end[1]).zfill(2)) + end = f'{end[0]}-{end[1]:02}' time_axis['uomLabel'] = 'month' time_axis['lowerBound'] = begin @@ -213,19 +213,19 @@ def get_coverage_rangetype(self, *args, **kwargs): var_key = var_dict.keys() for var in var_key: + units = var_dict[var]['units'] rangetype['field'].append({ 'id': var_dict[var]['id'], 'type': 'Quantity', 'name': var_dict[var]['name'], 'encodingInfo': { - 'dataType': 'http://www.opengis.net/def/dataType/OGC/0/{}'.format(dtype) # noqa + 'dataType': f'http://www.opengis.net/def/dataType/OGC/0/{dtype}' # noqa }, 'nodata': nodataval, 'uom': { - 'id': 'http://www.opengis.net/def/uom/UCUM/{}'.format( - var_dict[var]['units']), + 'id': f'http://www.opengis.net/def/uom/UCUM/{units}', 'type': 'UnitReference', - 'code': var_dict[var]['units'] + 'code': units }, '_meta': { 'tags': { @@ -293,10 +293,8 @@ def query(self, properties=['TMEAN'], subsets={}, bbox=[], temp_geom_max) maxx2, maxy2 = max_coord['coordinates'] - LOGGER.debug('Source coordinates: {}'.format( - [minx, miny, maxx, maxy])) - LOGGER.debug('Destination coordinates: {}'.format( - [minx2, miny2, maxx2, maxy2])) + LOGGER.debug(f'Source coordinates: {minx}, {miny}, {maxx}, {maxy}') # noqa + LOGGER.debug(f'Destination 
coordinates: {minx2}, {miny2}, {maxx2}, {maxy2}') # noqa shapes = [{ 'type': 'Polygon', @@ -363,7 +361,7 @@ def query(self, properties=['TMEAN'], subsets={}, bbox=[], if '/' not in datetime_: if 'month' in self.data: month = search('_{:d}-{:d}.tif', self.data) - period = '{}-{}'.format(month[0], str(month[1]).zfill(2)) + period = f'{month[0]}-{month[1]:02}' self.data = self.data.replace(str(month), str(datetime_)) else: period = search('_{:d}.tif', self.data)[0] @@ -428,8 +426,7 @@ def query(self, properties=['TMEAN'], subsets={}, bbox=[], self.filename = self.data.split('/')[-1] if 'trend' not in self.data and datetime_: self.filename = self.filename.split('_') - self.filename[-1] = '{}.tif'.format( - datetime_.replace('/', '-')) + self.filename[-1] = f"{datetime_.replace('/', '-')}.tif" self.filename = '_'.join(self.filename) # CovJSON output does not support multiple bands yet @@ -545,7 +542,7 @@ def gen_covjson(self, metadata, shapes, data): else: bands_select = metadata['bands'] - LOGGER.debug('bands selected: {}'.format(bands_select)) + LOGGER.debug(f'bands selected: {bands_select}') for bs in bands_select: pm = _get_parameter_metadata( self._data.profile['driver'], self._data.tags(bs)) @@ -614,9 +611,8 @@ def _get_coverage_properties(self): if self._data.crs is not None: if self._data.crs.is_projected: - properties['bbox_crs'] = '{}/{}'.format( - 'http://www.opengis.net/def/crs/OGC/1.3/', - self._data.crs.to_epsg()) + bbox_crs = f'http://www.opengis.net/def/crs/OGC/1.3/{self._data.crs.to_epsg()}' # noqa + properties['bbox_crs'] = bbox_crs properties['x_axis_label'] = 'x' properties['y_axis_label'] = 'y' @@ -640,8 +636,7 @@ def get_file_list(self, variable, datetime_=None): """ file_path = pathlib.Path(self.data).parent.resolve() - file_path_ = glob.glob(os.path.join(file_path, - '*{}*'.format(variable))) + file_path_ = glob.glob(os.path.join(file_path, f'*{variable}*')) file_path_.sort() if datetime_: diff --git a/msc_pygeoapi/provider/cansips_rasterio.py b/msc_pygeoapi/provider/cansips_rasterio.py index 5a10c277..2ae9ba26 100644 --- a/msc_pygeoapi/provider/cansips_rasterio.py +++ b/msc_pygeoapi/provider/cansips_rasterio.py @@ -4,8 +4,9 @@ # # # Copyright (c) 2022 Louis-Philippe Rousseau-Lambert +# Copyright (c) 2023 Tom Kralidis +# # - # Permission is hereby granted, free of charge, to any person # obtaining a copy of this software and associated documentation # files (the "Software"), to deal in the Software without @@ -211,7 +212,7 @@ def get_coverage_rangetype(self, *args, **kwargs): i, dtype, nodataval = self._data.indexes[0], \ self._data.dtypes[0], self._data.nodatavals[0] - LOGGER.debug('Determing rangetype for band {}'.format(i)) + LOGGER.debug(f'Determining rangetype for band {i}') tags = self._data.tags(i) keys_to_remove = ['GRIB_FORECAST_SECONDS', @@ -238,12 +239,11 @@ 'type': 'Quantity', 'name': name, 'encodingInfo': { - 'dataType': 'http://www.opengis.net/def/dataType/OGC/0/{}'.format(dtype) # noqa + 'dataType': f'http://www.opengis.net/def/dataType/OGC/0/{dtype}' # noqa }, 'nodata': nodataval, 'uom': { - 'id': 'http://www.opengis.net/def/uom/UCUM/{}'.format( - units), + 'id': f'http://www.opengis.net/def/uom/UCUM/{units}', 'type': 'UnitReference', 'code': units }, @@ -302,15 +302,13 @@ def query(self, properties=[1], subsets={}, bbox=[], if len(bbox) > 0: minx, miny, maxx, maxy = bbox - LOGGER.debug('Source coordinates: {}'.format( - [minx, miny, maxx, maxy])) + LOGGER.debug(f'Source coordinates: {minx}, {miny}, {maxx}, 
{maxy}') # because cansips long is from 0 to 360 minx += 180 maxx += 180 - LOGGER.debug('Destination coordinates: {}'.format( - [minx, miny, maxx, maxy])) + LOGGER.debug(f'Destination coordinates: {minx}, {miny}, {maxx}, {maxy}') # noqa shapes = [{ 'type': 'Polygon', @@ -497,7 +495,7 @@ def gen_covjson(self, metadata, data): else: bands_select = metadata['bands'] - LOGGER.debug('bands selected: {}'.format(bands_select)) + LOGGER.debug(f'bands selected: {bands_select}') for bs in bands_select: pm = _get_parameter_metadata( self._data.profile['driver'], self._data.tags(bs)) @@ -567,10 +565,9 @@ def _get_coverage_properties(self): if self._data.crs is not None: if self._data.crs.is_projected: - properties['bbox_crs'] = '{}/{}'.format( - 'http://www.opengis.net/def/crs/OGC/1.3/', - self._data.crs.to_epsg()) + bbox_crs = f'http://www.opengis.net/def/crs/OGC/1.3/{self._data.crs.to_epsg()}' # noqa + properties['bbox_crs'] = bbox_crs properties['x_axis_label'] = 'x' properties['y_axis_label'] = 'y' properties['bbox_units'] = self._data.crs.linear_units @@ -600,7 +597,7 @@ def get_file_list(self, variable, datetime_=None): file_path[-2] = '*' file_path_ = glob.glob(os.path.join('/'.join(file_path), - '{}*'.format(variable))) + f'{variable}*')) file_path_.sort() if datetime_: @@ -695,8 +692,7 @@ def get_band_datetime(self, datetime_, year, month): # making a list of the datetime for the given dim_ref_time possible_time = [] for i in range(1, 13): - possible_time.append(self.get_time_from_dim( - '{}-{}'.format(year, month), i)) + possible_time.append(self.get_time_from_dim(f'{year}-{month}', i)) if '/' not in datetime_: if datetime_ not in possible_time: diff --git a/msc_pygeoapi/provider/climate_xarray.py b/msc_pygeoapi/provider/climate_xarray.py index 28adf273..b9ce6376 100644 --- a/msc_pygeoapi/provider/climate_xarray.py +++ b/msc_pygeoapi/provider/climate_xarray.py @@ -4,6 +4,7 @@ # # # Copyright (c) 2022 Louis-Philippe Rousseau-Lambert +# Copyright (c) 2023 Tom Kralidis # # Permission is hereby granted, free of charge, to any person # obtaining a copy of this software and associated documentation @@ -154,7 +155,7 @@ def get_coverage_rangetype(self): } for name, var in self._data.variables.items(): - LOGGER.debug('Determining rangetype for {}'.format(name)) + LOGGER.debug(f'Determining rangetype for {name}') desc, units = None, None if len(var.shape) >= 2: @@ -177,12 +178,11 @@ 'type': 'Quantity', 'name': var.attrs.get('long_name') or desc, 'encodingInfo': { - 'dataType': 'http://www.opengis.net/def/dataType/OGC/0/{}'.format(str(var.dtype)) # noqa + 'dataType': f'http://www.opengis.net/def/dataType/OGC/0/{var.dtype}' # noqa }, 'nodata': 'null', 'uom': { - 'id': 'http://www.opengis.net/def/uom/UCUM/{}'.format( - units), + 'id': f'http://www.opengis.net/def/uom/UCUM/{units}', 'type': 'UnitReference', 'code': units }, @@ -243,9 +243,8 @@ def _get_coverage_properties(self): } if 'crs' in self._data.variables.keys(): - properties['bbox_crs'] = '{}/{}'.format( - 'http://www.opengis.net/def/crs/OGC/1.3/', - self._data.crs.epsg_code) + bbox_crs = f'http://www.opengis.net/def/crs/OGC/1.3/{self._data.crs.epsg_code}' # noqa + properties['bbox_crs'] = bbox_crs properties['inverse_flattening'] = self._data.crs.\ inverse_flattening @@ -311,7 +310,7 @@ def _to_datetime_string(self, datetime_): if any(month in self.data for month in self.monthly_data): month = datetime_.astype('datetime64[M]').astype(int) % 12 + 1 year = datetime_.astype('datetime64[Y]').astype(int) + 1970 - 
value = '{}-{}'.format(year, str(month).zfill(2)) + value = f'{year}-{month:02}' else: value = datetime_.astype('datetime64[Y]').astype(int) + 1970 value = str(value) @@ -355,8 +354,7 @@ def query(self, properties=[], subsets={}, try: if percentile != [50]: pctl = str(percentile[0]) - self.data = self.data.replace('pctl50', - 'pctl{}'.format(pctl)) + self.data = self.data.replace('pctl50', f'pctl{pctl}') except Exception as err: LOGGER.error(err) @@ -503,7 +501,7 @@ def query(self, properties=[], subsets={}, else: query_params[self.time_field] = datetime_ - LOGGER.debug('Query parameters: {}'.format(query_params)) + LOGGER.debug(f'Query parameters: {query_params}') try: data = data.loc[query_params] except Exception as err: diff --git a/msc_pygeoapi/provider/elasticsearch.py b/msc_pygeoapi/provider/elasticsearch.py index 833fde6c..e2360b4e 100644 --- a/msc_pygeoapi/provider/elasticsearch.py +++ b/msc_pygeoapi/provider/elasticsearch.py @@ -2,7 +2,7 @@ # # Authors: Tom Kralidis # -# Copyright (c) 2021 Tom Kralidis +# Copyright (c) 2023 Tom Kralidis # # Permission is hereby granted, free of charge, to any person # obtaining a copy of this software and associated documentation @@ -176,7 +176,7 @@ def mask_prop(self, property_name): return property_name def __repr__(self): - return ' {}'.format(self.data) # noqa + return f' {self.data}' class ElasticsearchWMOWIS2BrokerMessagesProvider(ElasticsearchCatalogueWMOWIS2GDCProvider): # noqa @@ -186,4 +186,4 @@ def __init__(self, provider_def): super().__init__(provider_def) def __repr__(self): - return ' {}'.format(self.data) # noqa \ No newline at end of file + return f' {self.data}' diff --git a/msc_pygeoapi/provider/rdpa_rasterio.py b/msc_pygeoapi/provider/rdpa_rasterio.py index ebf4c4ea..ddc3e24c 100644 --- a/msc_pygeoapi/provider/rdpa_rasterio.py +++ b/msc_pygeoapi/provider/rdpa_rasterio.py @@ -4,8 +4,9 @@ # # # Copyright (c) 2022 Louis-Philippe Rousseau-Lambert +# Copyright (c) 2023 Tom Kralidis +# # - # Permission is hereby granted, free of charge, to any person # obtaining a copy of this software and associated documentation # files (the "Software"), to deal in the Software without @@ -216,7 +217,7 @@ def get_coverage_rangetype(self, *args, **kwargs): for i, dtype, nodataval in zip(self._data.indexes, self._data.dtypes, self._data.nodatavals): - LOGGER.debug('Determing rangetype for band {}'.format(i)) + LOGGER.debug(f'Determining rangetype for band {i}') name, units = None, None if self._data.units[i-1] is None: @@ -230,12 +231,11 @@ def get_coverage_rangetype(self, *args, **kwargs): 'type': 'Quantity', 'name': name, 'encodingInfo': { - 'dataType': 'http://www.opengis.net/def/dataType/OGC/0/{}'.format(dtype) # noqa + 'dataType': f'http://www.opengis.net/def/dataType/OGC/0/{dtype}' # noqa }, 'nodata': nodataval, 'uom': { - 'id': 'http://www.opengis.net/def/uom/UCUM/{}'.format( - units), + 'id': f'http://www.opengis.net/def/uom/UCUM/{units}', 'type': 'UnitReference', 'code': units }, @@ -261,7 +261,7 @@ def query(self, properties=[1], subsets={}, bbox=[], nbits = 16 bands = properties - LOGGER.debug('Bands: {}, subsets: {}'.format(bands, subsets)) + LOGGER.debug(f'Bands: {bands}, subsets: {subsets}') args = { 'indexes': None } @@ -305,10 +305,8 @@ def query(self, properties=[1], subsets={}, bbox=[], temp_geom_maxdown) maxx2down, miny2down = downright_coord['coordinates'] - LOGGER.debug('Source coordinates: {}'.format( - [minx, miny, maxx, maxy])) - LOGGER.debug('Destination coordinates: {}'.format( - [minx2, miny2, maxx2, maxy2])) + 
LOGGER.debug(f'Source coordinates: {minx}, {miny}, {maxx}, {maxy}') + LOGGER.debug(f'Destination coordinates: {minx2}, {miny2}, {maxx2}, {maxy2}') # noqa shapes = [{ 'type': 'Polygon', @@ -517,7 +515,7 @@ def gen_covjson(self, metadata, data): else: bands_select = metadata['bands'] - LOGGER.debug('bands selected: {}'.format(bands_select)) + LOGGER.debug(f'bands selected: {bands_select}') for bs in bands_select: pm = _get_parameter_metadata( self._data.profile['driver'], self._data.tags(bs)) @@ -586,10 +584,8 @@ def _get_coverage_properties(self): if self._data.crs is not None: if self._data.crs.is_projected: - properties['bbox_crs'] = '{}/{}'.format( - 'http://www.opengis.net/def/crs/OGC/1.3/', - self._data.crs.to_epsg()) - + bbox_crs = f'http://www.opengis.net/def/crs/OGC/1.3/{self._data.crs.to_epsg()}' # noqa + properties['bbox_crs'] = bbox_crs properties['x_axis_label'] = 'x' properties['y_axis_label'] = 'y' properties['bbox_units'] = self._data.crs.linear_units @@ -618,8 +614,7 @@ def get_file_list(self, variable, datetime_=None): file_path[-1] = '*' file_path[-2] = '*' - file_path_ = glob.glob(os.path.join('/'.join(file_path), - '*{}*'.format(variable))) + file_path_ = glob.glob(os.path.join('/'.join(file_path), f'*{variable}*')) # noqa file_path_.sort() if datetime_: diff --git a/msc_pygeoapi/provider/spei_xarray.py b/msc_pygeoapi/provider/spei_xarray.py index 3c8c0753..40d9e6f5 100644 --- a/msc_pygeoapi/provider/spei_xarray.py +++ b/msc_pygeoapi/provider/spei_xarray.py @@ -4,6 +4,7 @@ # # # Copyright (c) 2022 Louis-Philippe Rousseau-Lambert +# Copyright (c) 2023 Tom Kralidis # # Permission is hereby granted, free of charge, to any person # obtaining a copy of this software and associated documentation @@ -126,8 +127,7 @@ def query(self, properties=['spei'], subsets={}, try: if percentile != [50]: pctl = str(percentile[0]) - self.data = self.data.replace('pctl50', - 'pctl{}'.format(pctl)) + self.data = self.data.replace('pctl50', f'pctl{pctl}') except Exception as err: LOGGER.error(err) @@ -187,7 +187,7 @@ def query(self, properties=['spei'], subsets={}, else: query_params[self.time_field] = datetime_ - LOGGER.debug('Query parameters: {}'.format(query_params)) + LOGGER.debug(f'Query parameters: {query_params}') try: data = self._data.loc[query_params] except Exception as err: diff --git a/msc_pygeoapi/util.py b/msc_pygeoapi/util.py index 92b43d39..27c09dca 100644 --- a/msc_pygeoapi/util.py +++ b/msc_pygeoapi/util.py @@ -1,8 +1,8 @@ # ================================================================= # -# Author: Tom Kralidis +# Author: Tom Kralidis # -# Copyright (c) 2021 Tom Kralidis +# Copyright (c) 2023 Tom Kralidis # # Permission is hereby granted, free of charge, to any person # obtaining a copy of this software and associated documentation @@ -78,7 +78,7 @@ def json_serial(obj): elif isinstance(obj, bytes): return obj.decode('utf-8') - msg = '{} type {} not serializable'.format(obj, type(obj)) + msg = f'{obj} type {type(obj)} not serializable' LOGGER.error(msg) raise TypeError(msg) diff --git a/setup.py b/setup.py index fe959e46..9b0363a1 100644 --- a/setup.py +++ b/setup.py @@ -1,8 +1,8 @@ # ================================================================= # -# Author: Tom Kralidis +# Author: Tom Kralidis # -# Copyright (c) 2020 Tom Kralidis +# Copyright (c) 2023 Tom Kralidis # # Permission is hereby granted, free of charge, to any person # obtaining a copy of this software and associated documentation @@ -73,7 +73,7 @@ def run(self): for file_ in os.listdir('..'): if 
file_.endswith(('.deb', '.build', '.changes')): - os.remove('../{}'.format(file_)) + os.remove(f'../{file_}') class PyTest(Command): @@ -153,7 +153,7 @@ def get_package_version(): author='Tom Kralidis', author_email='tom.kralidis@ec.gc.ca', maintainer='Tom Kralidis', - maintainer_email='tom.kralidis@canada.ca', + maintainer_email='tom.kralidis@ec.gc.ca', url='https://github.com/ECCC-MSC/msc-pygeoapi', install_requires=read('requirements.txt').splitlines(), packages=find_packages(exclude=['msc_pygeoapi.tests']), diff --git a/tests/conftest.py b/tests/conftest.py index 44d7c9a4..a4abf645 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,8 +1,8 @@ # ================================================================= # -# Author: Tom Kralidis +# Author: Tom Kralidis # -# Copyright (c) 2021 Tom Kralidis +# Copyright (c) 2023 Tom Kralidis # # Permission is hereby granted, free of charge, to any person # obtaining a copy of this software and associated documentation diff --git a/tests/test_aqhi_realtime.py b/tests/test_aqhi_realtime.py index 02d59184..9a71d17a 100644 --- a/tests/test_aqhi_realtime.py +++ b/tests/test_aqhi_realtime.py @@ -2,10 +2,11 @@ # # Author: Thinesh Sornalingam , # Robert Westhaver , -# Tom Kralidis +# Tom Kralidis # Felix Laframboise # # Copyright (c) 2021 Felix Laframboise +# Copyright (c) 2023 Tom Kralidis # # Permission is hereby granted, free of charge, to any person # obtaining a copy of this software and associated documentation @@ -50,7 +51,7 @@ def test_api(url): ] # display all aqhi realtime data feature collections - req = '{}/collections'.format(url) + req = f'{url}/collections' response = requests.get(req).json() @@ -61,7 +62,7 @@ def test_api(url): for collection in aqhi_collections: # describe aqhi realtime data feature collection - req = '{}/collections/{}'.format(url, collection) + req = f'{url}/collections/{collection}' response = requests.get(req).json() @@ -81,7 +82,7 @@ def test_api(url): assert 'links' in response # access a single aqhi realtime feature - req = '{}/collections/{}/items'.format(url, collection) + req = f'{url}/collections/{collection}/items' response = requests.get(req).json() # single realtime measurement is returned from the past 30 days @@ -93,7 +94,7 @@ def test_api(url): # query aqhi realtime feature collection based on # region property - req = '{}/collections/{}/items'.format(url, collection) + req = f'{url}/collections/{collection}/items' params = { 'region': 'AADCE' } @@ -106,7 +107,7 @@ def test_api(url): # query aqhi realtime feature collection based on # region sorted by id - req = '{}/collections/{}/items'.format(url, collection) + req = f'{url}/collections/{collection}/items' params = { 'region': 'AADCE', 'sortby': 'datetime_utc' @@ -123,7 +124,7 @@ def test_api(url): # query aqhi realtime feature collection sorted by # earliest date and time - req = '{}/collections/{}/items'.format(url, collection) + req = f'{url}/collections/{collection}/items' params = { 'sortby': 'datetime_utc' } @@ -139,7 +140,7 @@ def test_api(url): # query aqhi realtime feature collection sorted by # latest date and time - req = '{}/collections/{}/items'.format(url, collection) + req = f'{url}/collections/{collection}/items' params = { 'sortby': '-datetime_utc' } @@ -160,7 +161,7 @@ def test_api(url): assert delta.seconds < 86400 # query aqhi realtime feature collection based on spatial subsetting - req = '{}/collections/{}/items'.format(url, collection) + req = f'{url}/collections/{collection}/items' params = { 'bbox': '-60,45,-50,50' } 
diff --git a/tests/test_hydat.py b/tests/test_hydat.py
index 8f4657cf..b8f75ab7 100644
--- a/tests/test_hydat.py
+++ b/tests/test_hydat.py
@@ -1,8 +1,8 @@
 # =================================================================
 #
-# Author: Tom Kralidis
+# Author: Tom Kralidis
 #
-# Copyright (c) 2021 Tom Kralidis
+# Copyright (c) 2023 Tom Kralidis
 #
 # Permission is hereby granted, free of charge, to any person
 # obtaining a copy of this software and associated documentation
@@ -52,7 +52,7 @@ def test_api(url):
     ]
 
     # display all hydrometric data feature collections
-    req = '{}/collections'.format(url)
+    req = f'{url}/collections'
 
     response = requests.get(req).json()
     collections = [c['id'] for c in response['collections']]
@@ -61,7 +61,7 @@ def test_api(url):
     assert all(elem in collections for elem in hydat_collections)
 
     # describe hydrometric stations feature collection
-    req = '{}/collections/hydrometric-stations'.format(url)
+    req = f'{url}/collections/hydrometric-stations'
 
     response = requests.get(req).json()
     # feature collections metadata is displayed including appropriate title,
@@ -79,7 +79,7 @@ def test_api(url):
     assert 'links' in response
 
     # query hydrometric stations feature collection
-    req = '{}/collections/hydrometric-stations/items'.format(url)
+    req = f'{url}/collections/hydrometric-stations/items'
 
     response = requests.get(req).json()
     # default list of 500 stations is returned with correct data
@@ -87,7 +87,7 @@ def test_api(url):
 
     # query hydrometric stations feature collection and return
     # sorted by IDENTIFIER
-    req = '{}/collections/hydrometric-stations/items'.format(url)
+    req = f'{url}/collections/hydrometric-stations/items'
     params = {
         'sortby': 'IDENTIFIER'
     }
@@ -96,7 +96,7 @@ def test_api(url):
     assert response['features'][0]['id'] == '01AA002'
 
     # query hydrometric stations feature collection to return in CSV format
-    req = '{}/collections/hydrometric-stations/items'.format(url)
+    req = f'{url}/collections/hydrometric-stations/items'
     params = {
         'f': 'csv'
     }
@@ -107,7 +107,7 @@ def test_api(url):
     assert len(reader.fieldnames) == 12
 
     # access a single hydrometric stations feature
-    req = '{}/collections/hydrometric-stations/items/01BH001'.format(url)
+    req = f'{url}/collections/hydrometric-stations/items/01BH001'
 
     response = requests.get(req, params=params).json()
     # single station (01BH001) is returned with correct data
@@ -117,7 +117,7 @@ def test_api(url):
 
     # query hydrometric stations feature collection based on
     # PROV_TERR_STATE_LOC property
-    req = '{}/collections/hydrometric-stations/items'.format(url)
+    req = f'{url}/collections/hydrometric-stations/items'
     params = {
         'PROV_TERR_STATE_LOC': 'SK'
     }
@@ -129,7 +129,7 @@ def test_api(url):
         assert response['features'][feature]['properties']['PROV_TERR_STATE_LOC'] == 'SK'  # noqa
 
     # query hydrometric stations feature collection based on STATUS_EN property
-    req = '{}/collections/hydrometric-stations/items'.format(url)
+    req = f'{url}/collections/hydrometric-stations/items'
     params = {
         'STATUS_EN': 'Active'
     }
@@ -140,7 +140,7 @@ def test_api(url):
         assert response['features'][feature]['properties']['STATUS_EN'] == 'Active'
 
     # access a single hydrometric daily mean feature
-    req = '{}/collections/hydrometric-daily-mean/items/10NC001.1979-07-19'.format(url)  # noqa
+    req = f'{url}/collections/hydrometric-daily-mean/items/10NC001.1979-07-19'
 
     response = requests.get(req, params=params).json()
     # single daily mean is returned with correct data
@@ -149,7 +149,7 @@
 
     # query hydrometric daily means feature collection based on
     # STATION_NUMBER property
-    req = '{}/collections/hydrometric-daily-mean/items'.format(url)
+    req = f'{url}/collections/hydrometric-daily-mean/items'
     params = {
         'STATION_NUMBER': '10NC001'
     }
@@ -161,7 +161,7 @@
 
     # query hydrometric daily means feature collection based on
     # STATION_NUMBER property sorted by IDENTIFIER
-    req = '{}/collections/hydrometric-daily-mean/items'.format(url)
+    req = f'{url}/collections/hydrometric-daily-mean/items'
     params = {
         'STATION_NUMBER': '10NC001',
         'sortby': 'IDENTIFIER'
@@ -174,7 +174,7 @@
 
     # query hydrometric daily means feature collection based on
     # STATION_NUMBER property and temporal subsetting
-    req = '{}/collections/hydrometric-daily-mean/items'.format(url)
+    req = f'{url}/collections/hydrometric-daily-mean/items'
     params = {
         'STATION_NUMBER': '10NC001',
         'datetime': '1979-01-01/1981-01-01'
@@ -189,7 +189,7 @@
 
     # query hydrometric daily means feature collection based on spatial
     # subsetting
-    req = '{}/collections/hydrometric-daily-mean/items'.format(url)
+    req = f'{url}/collections/hydrometric-daily-mean/items'
     params = {
         'bbox': '-80,40,-50,50'
     }
@@ -205,7 +205,7 @@
         assert coords[1] < bbox[3]
 
     # access a single hydrometric monthly mean feature
-    req = '{}/collections/hydrometric-monthly-mean/items/09EA004.1979-09'.format(url)  # noqa
+    req = f'{url}/collections/hydrometric-monthly-mean/items/09EA004.1979-09'
 
     response = requests.get(req).json()
 
@@ -215,7 +215,7 @@
 
     # query hydrometric monthly means feature collection based on
     # STATION_NUMBER property
-    req = '{}/collections/hydrometric-monthly-mean/items'.format(url)
+    req = f'{url}/collections/hydrometric-monthly-mean/items'
     params = {
         'STATION_NUMBER': '10NC001'
     }
@@ -227,7 +227,7 @@
 
     # query hydrometric monthly means feature collection based on
     # STATION_NUMBER property sorted by STATION_NAME
-    req = '{}/collections/hydrometric-monthly-mean/items'.format(url)
+    req = f'{url}/collections/hydrometric-monthly-mean/items'
     params = {
         'STATION_NUMBER': '10NC001',
         'sortby': 'STATION_NAME'
@@ -240,7 +240,7 @@
 
     # query hydrometric monthly means feature collection based on
     # STATION_NUMBER property and temporal subsetting
-    req = '{}/collections/hydrometric-monthly-mean/items'.format(url)
+    req = f'{url}/collections/hydrometric-monthly-mean/items'
     params = {
         'STATION_NUMBER': '10NC001',
         'datetime': '1979-01/1981-01'
@@ -255,7 +255,7 @@
 
     # query hydrometric monthly means feature collection based on
     # spatial subsetting
-    req = '{}/collections/hydrometric-monthly-mean/items'.format(url)
+    req = f'{url}/collections/hydrometric-monthly-mean/items'
     params = {
         'bbox': '-80,40,-50,50'
     }
@@ -271,7 +271,7 @@
         assert coords[1] < bbox[3]
 
     # access a single hydrometric annual statistics feature
-    req = '{}/collections/hydrometric-annual-statistics/items/01AA002.1976.discharge-debit'.format(url)  # noqa
+    req = f'{url}/collections/hydrometric-annual-statistics/items/01AA002.1976.discharge-debit'  # noqa
 
     response = requests.get(req, params=params).json()
 
@@ -280,7 +280,7 @@
     assert response['properties']['MAX_VALUE'] == 281
 
     # query hydrometric annual statistics feature collection based on data type
-    req = '{}/collections/hydrometric-annual-statistics/items'.format(url)
+    req = f'{url}/collections/hydrometric-annual-statistics/items'
     params = {
         'DATA_TYPE_EN': 'Discharge'
     }
@@ -294,7 +294,7 @@
 
     # query hydrometric annual statistics feature collection based on
     # data type sorted by MAX_VALUE
-    req = '{}/collections/hydrometric-annual-statistics/items'.format(url)
+    req = f'{url}/collections/hydrometric-annual-statistics/items'
     params = {
         'DATA_TYPE_EN': 'Discharge',
         'sortby': 'MAX_VALUE'
@@ -307,7 +307,7 @@
     assert response['features'][0]['properties']['MAX_VALUE'] == 0
 
     # access a single hydrometric annual peaks feature
-    req = '{}/collections/hydrometric-annual-peaks/items/02FE012.1961.level-niveaux.maximum-maximale'.format(url)  # noqa
+    req = f'{url}/collections/hydrometric-annual-peaks/items/02FE012.1961.level-niveaux.maximum-maximale'  # noqa
 
     response = requests.get(req).json()
 
@@ -315,7 +315,7 @@
     assert response['id'] == '02FE012.1961.level-niveaux.maximum-maximale'
 
     # query hydrometric annual peaks feature collection based on STATION_NUMBER
-    req = '{}/collections/hydrometric-annual-statistics/items'.format(url)
+    req = f'{url}/collections/hydrometric-annual-statistics/items'
     params = {
         'STATION_NUMBER': '01AG003'
     }
@@ -328,7 +328,7 @@
 
     # query hydrometric annual peaks feature collection based on
     # STATION_NUMBER sorted by DATA_TYPE_EN
-    req = '{}/collections/hydrometric-annual-statistics/items'.format(url)
+    req = f'{url}/collections/hydrometric-annual-statistics/items'
     params = {
         'STATION_NUMBER': '01AG003',
         'sortby': 'DATA_TYPE_EN'
diff --git a/tests/test_hydrometric_realtime.py b/tests/test_hydrometric_realtime.py
index 8a3a0db7..199f06b3 100644
--- a/tests/test_hydrometric_realtime.py
+++ b/tests/test_hydrometric_realtime.py
@@ -1,8 +1,8 @@
 # =================================================================
 #
-# Author: Tom Kralidis
+# Author: Tom Kralidis
 #
-# Copyright (c) 2021 Tom Kralidis
+# Copyright (c) 2023 Tom Kralidis
 #
 # Permission is hereby granted, free of charge, to any person
 # obtaining a copy of this software and associated documentation
@@ -47,7 +47,7 @@ def test_api(url):
     ]
 
     # display all hydrometric realtime data feature collections
-    req = '{}/collections'.format(url)
+    req = f'{url}/collections'
 
     response = requests.get(req).json()
     collections = [c['id'] for c in response['collections']]
@@ -56,7 +56,7 @@ def test_api(url):
     assert all(elem in collections for elem in hydrometric_realtime_collections)  # noqa
 
     # describe hydrometric realtime data feature collection
-    req = '{}/collections/hydrometric-realtime'.format(url)
+    req = f'{url}/collections/hydrometric-realtime'
 
     response = requests.get(req).json()
     # feature collections metadata is displayed including appropriate title,
@@ -69,7 +69,7 @@ def test_api(url):
     assert 'links' in response
 
     # access a single hydrometric realtime feature
-    req = '{}/collections/hydrometric-realtime/items'.format(url)
+    req = f'{url}/collections/hydrometric-realtime/items'
 
     response = requests.get(req).json()
     # single realtime measurement is returned from the past 30 days
@@ -81,7 +81,7 @@ def test_api(url):
 
     # query hydrometric realtime feature collection based on
     # STATION_NUMBER property
-    req = '{}/collections/hydrometric-realtime/items'.format(url)
+    req = f'{url}/collections/hydrometric-realtime/items'
     params = {
         'STATION_NUMBER': '02KD004'
     }
@@ -94,7 +94,7 @@ def test_api(url):
 
     # query hydrometric realtime feature collection based on
     # STATION_NUMBER property sorted by IDENTIFIER
-    req = '{}/collections/hydrometric-realtime/items'.format(url)
+    req = f'{url}/collections/hydrometric-realtime/items'
     params = {
         'STATION_NUMBER': '02KD004',
         'sortby': 'IDENTIFIER'
@@ -111,7 +111,7 @@ def test_api(url):
 
     # query hydrometric realtime feature collection sorted by
     # earliest date and time
-    req = '{}/collections/hydrometric-realtime/items'.format(url)
+    req = f'{url}/collections/hydrometric-realtime/items'
     params = {
         'sortby': 'DATETIME'
     }
@@ -127,7 +127,7 @@ def test_api(url):
 
     # query hydrometric realtime feature collection sorted by
     # latest date and time
-    req = '{}/collections/hydrometric-realtime/items'.format(url)
+    req = f'{url}/collections/hydrometric-realtime/items'
     params = {
         'sortby': '-DATETIME'
     }
@@ -143,7 +143,7 @@ def test_api(url):
     assert delta.seconds < 3600
 
     # query hydrometric realtime feature collection based on spatial subsetting
-    req = '{}/collections/hydrometric-realtime/items'.format(url)
+    req = f'{url}/collections/hydrometric-realtime/items'
     params = {
         'bbox': '-80,50,-50,60'
     }
diff --git a/tests/test_swob_realtime.py b/tests/test_swob_realtime.py
index cad876d9..e9747022 100644
--- a/tests/test_swob_realtime.py
+++ b/tests/test_swob_realtime.py
@@ -2,11 +2,11 @@
 #
 # Author: Thinesh Sornalingam ,
 #         Robert Westhaver ,
-#         Tom Kralidis
+#         Tom Kralidis
 #
 # Copyright (c) 2020 Thinesh Sornalingam
 # Copyright (c) 2020 Robert Westhaver
-# Copyright (c) 2021 Tom Kralidis
+# Copyright (c) 2023 Tom Kralidis
 #
 # Permission is hereby granted, free of charge, to any person
 # obtaining a copy of this software and associated documentation
@@ -61,8 +61,8 @@ def url(pytestconfig):
 
 def test_loader(swob):
     """Test suite for converting swobs to geojson"""
-    xml = get_test_file_path('{}.xml'.format(swob))
-    geojson = read_json(get_test_file_path('{}.geojson'.format(swob)))
+    xml = get_test_file_path(f'{swob}.xml')
+    geojson = read_json(get_test_file_path(f'{swob}.geojson'))
 
     assert swob2geojson(xml) == geojson
 
@@ -74,7 +74,7 @@ def test_api(url):
     ]
 
     # display all swob realtime data feature collections
-    req = '{}/collections'.format(url)
+    req = f'{url}/collections'
 
     response = requests.get(req).json()
     collections = [c['id'] for c in response['collections']]
@@ -83,7 +83,7 @@ def test_api(url):
     assert all(elem in collections for elem in swob_collections)
 
     # describe swob realtime data feature collection
-    req = '{}/collections/swob-realtime'.format(url)
+    req = f'{url}/collections/swob-realtime'
 
     response = requests.get(req).json()
     # feature collections metadata is displayed including appropriate title,
@@ -96,7 +96,7 @@ def test_api(url):
     assert 'links' in response
 
     # access a single swob realtime feature
-    req = '{}/collections/swob-realtime/items'.format(url)
+    req = f'{url}/collections/swob-realtime/items'
 
     response = requests.get(req).json()
     # single realtime measurement is returned from the past 30 days
@@ -108,7 +108,7 @@ def test_api(url):
 
     # query swob realtime feature collection based on
     # msc_id-value property
-    req = '{}/collections/swob-realtime/items'.format(url)
+    req = f'{url}/collections/swob-realtime/items'
     params = {
         'msc_id-value': '3031875'
     }
@@ -121,7 +121,7 @@ def test_api(url):
 
     # query swob realtime feature collection based on
     # msc_id-value property sorted by id
-    req = '{}/collections/swob-realtime/items'.format(url)
+    req = f'{url}/collections/swob-realtime/items'
     params = {
         'msc_id-value': '3031875',
         'sortby': 'date_tm-value'
@@ -138,7 +138,7 @@ def test_api(url):
 
     # query swob realtime feature collection sorted by
     # earliest date and time
-    req = '{}/collections/swob-realtime/items'.format(url)
+    req = f'{url}/collections/swob-realtime/items'
     params = {
         'sortby': 'date_tm-value'
     }
@@ -154,7 +154,7 @@ def test_api(url):
 
     # query swob realtime feature collection sorted by
     # latest date and time
-    req = '{}/collections/swob-realtime/items'.format(url)
+    req = f'{url}/collections/swob-realtime/items'
     params = {
         'sortby': '-date_tm-value'
     }
@@ -170,7 +170,7 @@ def test_api(url):
     assert delta.seconds < 3600
 
     # query swob realtime feature collection based on spatial subsetting
-    req = '{}/collections/swob-realtime/items'.format(url)
+    req = f'{url}/collections/swob-realtime/items'
     params = {
         'bbox': '-80,50,-50,60'
    }
diff --git a/tests/util.py b/tests/util.py
index 82928ade..711d46df 100644
--- a/tests/util.py
+++ b/tests/util.py
@@ -1,8 +1,8 @@
 # =================================================================
 #
-# Author: Tom Kralidis
+# Author: Tom Kralidis
 #
-# Copyright (c) 2021 Tom Kralidis
+# Copyright (c) 2023 Tom Kralidis
 #
 # Permission is hereby granted, free of charge, to any person
 # obtaining a copy of this software and associated documentation
@@ -39,7 +39,7 @@ def get_test_file_path(filename):
     if os.path.isfile(filename):
         return filename
     else:
-        return 'tests/{}'.format(filename)
+        return f'tests/{filename}'
 
 
 def read_json(file_name):