Add scenarios (#10)
* feat: Add generate command for generating a scaler CSV file
* feat: Read scaler configuration from a CSV
* feat: Add configurations for ssp119, ssp126 and ssp245
* fix: Update register hook

See python-attrs/cattrs#206 (comment)
lewisjared authored Feb 13, 2023
1 parent 68ca942 commit fa05947
Showing 21 changed files with 546 additions and 39 deletions.
2 changes: 1 addition & 1 deletion .gitignore
@@ -273,6 +273,6 @@ cython_debug/

# TODO: verify with Vic EPA that files can be published
data/raw/inventories/victoria
data/processed/input4MIPs
data/processed
notebooks/*.ipynb
notebooks/**/*.ipynb
2 changes: 2 additions & 0 deletions CHANGELOG.md
@@ -1,3 +1,4 @@

# Changelog

All notable changes to this project will be documented in this file.
@@ -17,6 +18,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0

### Added

- Add `ssp119`, `ssp126` and `ssp245` scenario configurations [#10](https://github.com/climate-resource/spaemis/pull/10)
- Add `default_scaler` to the configuration for the scaler to be used if no specific scaler configuration is provided [#9](https://github.com/climate-resource/spaemis/pull/9)
- Move test configuration to `test-data` directory [#8](https://github.com/climate-resource/spaemis/pull/8)
- Add functionality to write out an xarray dataset as a set of CSVs that are formatted the same as the input emissions inventory data [#7](https://github.com/climate-resource/spaemis/pull/7)
1 change: 1 addition & 0 deletions src/spaemis/commands/__init__.py
@@ -2,5 +2,6 @@
CLI commands
"""
from .base import cli # noqa
from .generate_command import run_generate_command # noqa
from .gse_emis_command import run_gse_command # noqa
from .project_command import run_project_command # noqa
58 changes: 58 additions & 0 deletions src/spaemis/commands/generate_command.py
@@ -0,0 +1,58 @@
"""
generate CLI command
"""
import io
import logging
from typing import Dict

import click
import pandas as pd
from yaml import safe_load

from spaemis.commands.base import cli

logger = logging.getLogger(__name__)


@cli.command(name="generate")
@click.option("--scaler", default="relative_change", help="Name of the scaler to use")
@click.option("--scaler-source", help="Source scenario for the scaler")
@click.option(
    "--mappings",
    help="YAML file containing the sector and variable mappings",
    type=click.File(),
    required=True,
)
def run_generate_command(scaler, scaler_source, mappings):
    """
    Generate a scenario configuration file from a set of defaults
    This is helpful for setting up a CSV of scaling options for later tweaking
    """
    mappings = safe_load(mappings)

    sector_mapping: Dict[str, str] = mappings["sectors"]
    variable_mapping: Dict[str, str] = mappings["variables"]

    scaler_information = []
    for source_variable, target_variable in variable_mapping.items():
        for source_sector, target_sector in sector_mapping.items():
            scaler_information.append(
                {
                    "variable": source_variable,
                    "sector": source_sector,
                    "scaler_name": scaler,
                    "scaler_variable_id": target_variable,
                    "scaler_source_id": scaler_source,
                    "scaler_sector": target_sector,
                }
            )

    if not scaler_information:
        raise click.ClickException("No scaler information generated")
    scaler_information = pd.DataFrame(scaler_information)

    buff = io.StringIO()
    scaler_information.to_csv(buff, index=False)

    click.echo(buff.getvalue())
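For orientation, here is a minimal sketch (not part of the commit) of exercising the new generate command in-process with click's test runner; the mappings content and the ssp119 source id are illustrative values only.

from click.testing import CliRunner

from spaemis.commands import run_generate_command

# Illustrative mappings in the same shape as victoria_mappings.yaml further down
MAPPINGS = """\
variables:
  CO: CO-em-anthro
sectors:
  industry: Industrial Sector
  motor_vehicles: Transportation Sector
"""

runner = CliRunner()
with runner.isolated_filesystem():
    with open("mappings.yaml", "w") as handle:
        handle.write(MAPPINGS)

    result = runner.invoke(
        run_generate_command,
        ["--scaler-source", "ssp119", "--mappings", "mappings.yaml"],
    )

# stdout is a CSV with one row per (variable, sector) pair and the columns
# variable, sector, scaler_name, scaler_variable_id, scaler_source_id, scaler_sector
print(result.output)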
52 changes: 46 additions & 6 deletions src/spaemis/commands/project_command.py
@@ -8,10 +8,16 @@
from typing import Dict, Tuple

import click
import numpy as np
import xarray as xr

from spaemis.commands.base import cli
from spaemis.config import DownscalingScenarioConfig, VariableScalerConfig, load_config
from spaemis.config import (
    DownscalingScenarioConfig,
    VariableScalerConfig,
    converter,
    load_config,
)
from spaemis.inventory import EmissionsInventory, load_inventory, write_inventory_csvs
from spaemis.scaling import get_scaler_by_config

@@ -53,6 +59,29 @@ def scale_inventory(
    return scaled_field.expand_dims(["sector", "year"]).to_dataset(name=cfg.variable)


def _create_output_data(options, config, template: xr.Dataset):
    unique_variables = sorted(set([variable for variable, _ in options]))
    unique_sectors = sorted(set([sector for _, sector in options]))
    unique_years = sorted(config.timeslices)

    coords = dict(
        sector=unique_sectors,
        year=unique_years,
        lat=template.lat,
        lon=template.lon,
    )

    return xr.Dataset(
        data_vars={
            variable: xr.DataArray(
                np.nan, coords=coords, dims=("sector", "year", "lat", "lon")
            )
            for variable in unique_variables
        },
        coords=coords,
    )


def calculate_projections(
    config: DownscalingScenarioConfig, inventory: EmissionsInventory
) -> xr.Dataset:
@@ -89,7 +118,7 @@ def calculate_projections(
),
)

    projections = []
    output_ds = None

    for variable_config in scaling_configs.values():
        for slice_year in config.timeslices:
@@ -99,11 +128,14 @@ def calculate_projections(
                variable_config.sector,
                slice_year,
            )

            res = scale_inventory(variable_config, inventory, slice_year)
            projections.append(res)

    # Align dims and then merge
    return xr.merge(xr.align(*projections, join="outer"))
            if output_ds is None:
                output_ds = _create_output_data(scaling_configs.keys(), config, res)
            output_ds.update(res)

    return output_ds


@cli.command(name="project")
@@ -112,7 +144,7 @@ def calculate_projections(
help="Path to a configuration file for the scenario of interest",
required=True,
)
@click.option("-o", "--out_dir", help="Directory to write the updated inventory")
@click.option("-o", "--out-dir", help="Directory to write the updated inventory")
def run_project_command(config, out_dir):
    """
    Generate a set of emissions projections using an emissions inventory as a base
@@ -125,8 +157,16 @@ def run_project_command(config, out_dir):
    logger.info(f"Creating output directory: {out_dir}")
    os.makedirs(out_dir, exist_ok=True)

    logger.info("Saving loaded configuration to output directory")
    with open(os.path.join(out_dir, "config.yaml"), "w") as handle:
        handle.write(converter.dumps(config, DownscalingScenarioConfig))

    dataset = calculate_projections(config, inventory)

    logger.info("Writing output dataset as netcdf")
    dataset.to_netcdf(os.path.join(out_dir, "projections.nc"))

    logger.info("Writing CSV files")
    for year in config.timeslices:
        target_dir = os.path.join(out_dir, str(year))
        data_to_write = dataset.sel(year=year)
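To make the new pre-allocation step concrete, a small illustrative sketch of what _create_output_data returns (the grid, sectors and years below are assumptions, not repository data): an all-NaN dataset covering every variable and sector named in the scaler options plus every configured timeslice, which calculate_projections then fills slice by slice with Dataset.update.

import numpy as np
import xarray as xr

from spaemis.commands.project_command import _create_output_data

# Hypothetical template grid and scaler options
template = xr.Dataset(coords=dict(lat=np.linspace(-39.0, -34.0, 6), lon=np.linspace(141.0, 150.0, 10)))
options = [("CO", "industry"), ("CO", "motor_vehicles"), ("NOx", "industry")]

class FakeConfig:
    # stand-in for DownscalingScenarioConfig; only timeslices is needed here
    timeslices = [2020, 2040, 2060]

empty = _create_output_data(options, FakeConfig(), template)

print(sorted(empty.data_vars))  # ['CO', 'NOx']
print(empty["CO"].dims)         # ('sector', 'year', 'lat', 'lon')
print(empty["CO"].shape)        # (2, 3, 6, 10), all NaN until update() fills it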
45 changes: 40 additions & 5 deletions src/spaemis/config.py
@@ -1,12 +1,15 @@
"""
Description of the configuration
"""
import os.path
from typing import Any, ClassVar, List, Literal, Optional, Type, Union, get_args

from typing import Any, ClassVar, Literal, Optional, Type, Union, get_args

import pandas as pd
from attrs import define
from cattrs.preconf.pyyaml import make_converter

from spaemis.utils import chdir

converter = make_converter()
converter.register_unstructure_hook(str, lambda u: str(u))

@@ -48,6 +51,27 @@ class VariableScalerConfig:
    method: ScalerMethod


def _convert_filename_to_scalers(
    value: Union[dict, str], _
) -> List[VariableScalerConfig]:
    if isinstance(value, str):
        # load_config updates the current working directory to match the
        # directory of a loaded config file; otherwise an absolute filename is required
        data = pd.read_csv(value).to_dict(orient="records")

        def extract_scaler_info(data_item):
            sector_info = {}
            for key, value in data_item.copy().items():
                if key.startswith("scaler_"):
                    sector_info[key[7:]] = value
                    data_item.pop(key)
            return {**data_item, "method": sector_info}

        value = [extract_scaler_info(item) for item in data]

    return [converter.structure(item, VariableScalerConfig) for item in value]


@define
class DownscalingScenarioConfig:
    """
@@ -56,15 +80,25 @@ class DownscalingScenarioConfig:

    inventory_name: str
    inventory_year: int
    timeslices: list[int]
    scalers: list[VariableScalerConfig]
    timeslices: List[int]
    scalers: List[VariableScalerConfig]
    default_scaler: Optional[ScalerMethod] = None


# Ideally we could use converter.register_structure_hook. See
# https://github.com/python-attrs/cattrs/issues/206#issuecomment-1013714386
converter.register_structure_hook_func(
    lambda t: t == List[VariableScalerConfig], _convert_filename_to_scalers
)


def load_config(config_file: str) -> DownscalingScenarioConfig:
    """
    Load and parse configuration from a file

    Any filenames referenced in the configuration are relative to the configuration file,
    not the current directory.

    Parameters
    ----------
    config_file
@@ -75,4 +109,5 @@ def load_config(config_file: str) -> DownscalingScenarioConfig:
        Validated configuration
    """
    with open(config_file) as handle:
        return converter.loads(handle.read(), DownscalingScenarioConfig)
        with chdir(os.path.dirname(config_file)):
            return converter.loads(handle.read(), DownscalingScenarioConfig)
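As a hedged illustration of the new structure hook (the row values are hypothetical, but the column names match what the generate command writes above): every scaler_* column is folded into a nested method mapping before converter.structure builds each VariableScalerConfig.

# One record as read back from a scalers CSV by _convert_filename_to_scalers
row = {
    "variable": "CO",
    "sector": "industry",
    "scaler_name": "relative_change",
    "scaler_variable_id": "CO-em-anthro",
    "scaler_source_id": "ssp119",
    "scaler_sector": "Industrial Sector",
}

# Equivalent of extract_scaler_info: strip the "scaler_" prefix into "method"
method = {key[7:]: value for key, value in row.items() if key.startswith("scaler_")}
item = {key: value for key, value in row.items() if not key.startswith("scaler_")}
item["method"] = method

print(item)
# {'variable': 'CO', 'sector': 'industry',
#  'method': {'name': 'relative_change', 'variable_id': 'CO-em-anthro',
#             'source_id': 'ssp119', 'sector': 'Industrial Sector'}}
# converter.structure(item, VariableScalerConfig) is then applied to each item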
28 changes: 28 additions & 0 deletions src/spaemis/config/mappings/victoria_mappings.yaml
@@ -0,0 +1,28 @@
variables:
  CO: CO-em-anthro
  NOx: NOx-em-anthro
  SO2: SO2-em-anthro
  PM10: BC-em-anthro
  VOC: BC-em-anthro # Need to download VOC-em-anthro
sectors: # Any non-specified sectors are kept constant
  # aircraft: # We don't easily have aviation pathways from input4MIPs
  architect_coating: Residential, Commercial, Other
  bakery: Residential, Commercial, Other
  # charcoal
  crematoria: Residential, Commercial, Other
  cutback_bitumen: Transportation Sector
  domestic_solvents: Solvents production and application
  dry_cleaning: Residential, Commercial, Other
  gas_leak: Energy Sector
  industry_diffuse: Waste # or Industrial Sector
  industry: Industrial Sector
  motor_vehicles: Transportation Sector
  panel_beaters: Residential, Commercial, Other
  petcrematoria: Residential, Commercial, Other
  pizza: Residential, Commercial, Other
  printing: Residential, Commercial, Other
  rail: Transportation Sector
  servos: Residential, Commercial, Other
  shipping: International Shipping
  vicbakery: Residential, Commercial, Other
  woodheater: Residential, Commercial, Other
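A quick sketch (assuming the file is read from a repository checkout) of how the generate command expands this mappings file: one scaler row per (variable, sector) pair, while commented-out sectors such as aircraft are left out and therefore keep their inventory values unchanged.

from yaml import safe_load

with open("src/spaemis/config/mappings/victoria_mappings.yaml") as handle:
    mappings = safe_load(handle)

# 5 variables x 19 uncommented sectors -> 95 rows in the generated CSV
print(len(mappings["variables"]) * len(mappings["sectors"]))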
14 changes: 14 additions & 0 deletions src/spaemis/config/scenarios/ssp119.yaml
@@ -0,0 +1,14 @@
inventory_name: victoria
inventory_year: 2016

timeslices:
- 2020
- 2040
- 2060
- 2080
- 2100

default_scaler:
  name: constant

scalers: ssp119_scalers.csv
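Finally, a hedged sketch of consuming the new scenario file from a repository checkout; the printed values follow from the YAML above, while the number of scalers depends on ssp119_scalers.csv, which is not shown in this diff.

from spaemis.config import load_config

config = load_config("src/spaemis/config/scenarios/ssp119.yaml")

print(config.inventory_name, config.inventory_year)  # victoria 2016
print(config.timeslices)                             # [2020, 2040, 2060, 2080, 2100]
print(config.default_scaler)                         # the constant scaler used when no specific row matches
# config.scalers is the list of VariableScalerConfig entries parsed from ssp119_scalers.csv,
# resolved relative to the YAML's directory by the chdir in load_config
print(len(config.scalers))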