diff --git a/.gitattributes b/.gitattributes
new file mode 100644
index 00000000..f087b429
--- /dev/null
+++ b/.gitattributes
@@ -0,0 +1 @@
+*.tar.gz filter=lfs diff=lfs merge=lfs -text
diff --git a/.github/workflows/run_tests.yml b/.github/workflows/run_tests.yml
new file mode 100644
index 00000000..fc5d6498
--- /dev/null
+++ b/.github/workflows/run_tests.yml
@@ -0,0 +1,29 @@
+name: Test steps
+on:
+  pull_request: {}
+  push:
+    branches: [ main ]
+jobs:
+  python_run_scripts:
+    strategy:
+      fail-fast: false
+      matrix:
+        version: ['3.9']
+    runs-on: ubuntu-latest
+    steps:
+      - name: install mpi
+        run: sudo apt update && sudo apt-get install openmpi-bin openmpi-doc libopenmpi-dev
+      - uses: actions/checkout@v3
+        with:
+          lfs: true
+      - name: setup python
+        uses: actions/setup-python@v4
+        with:
+          python-version: ${{ matrix.version }} # install the python version needed
+          cache: "pip"
+      - name: install icesat2-tracks using pip
+        run: pip install .
+      - name: install pytest
+        run: pip install pytest pytest-xdist pytest-sugar pytest-durations
+      - name: Run tests
+        run: pytest --capture=sys --verbose --showlocals --tb=long --numprocesses=auto tests/test_steps.py
diff --git a/.github/workflows/test-B01_SL_load_single_file.yml b/.github/workflows/test-B01_SL_load_single_file.yml
deleted file mode 100644
index cfbf9d7a..00000000
--- a/.github/workflows/test-B01_SL_load_single_file.yml
+++ /dev/null
@@ -1,35 +0,0 @@
-name: Test B01_SL_load_single_file
-on:
-  pull_request: {}
-  push:
-    branches: [ main ]
-jobs:
-  python_run_scripts:
-    strategy:
-      fail-fast: false
-      matrix:
-        version: ['3.11']
-    runs-on: ubuntu-22.04
-    steps:
-      - name: install mpi
-        run: sudo apt update && sudo apt-get install openmpi-bin openmpi-doc libopenmpi-dev
-      - uses: actions/checkout@v3
-      - name: setup python
-        uses: actions/setup-python@v4
-        with:
-          python-version: ${{ matrix.version }} # install the python version needed
-          cache: "pip"
-      - name: install icesat2-tracks using pip
-        run: pip install .
-      - name: List dependencies
-        run: pip list
-      - name: first step B01_SL_load_single_file
-        run: python src/icesat2_tracks/analysis_db/B01_SL_load_single_file.py 20190502052058_05180312_005_01 SH_testSLsinglefile2 True
-      - name: second step make_spectra
-        run: python src/icesat2_tracks/analysis_db/B02_make_spectra_gFT.py SH_20190502_05180312 SH_testSLsinglefile2 True
-      - name: third step plot_spectra
-        run: python src/icesat2_tracks/analysis_db/B03_plot_spectra_ov.py SH_20190502_05180312 SH_testSLsinglefile2 True
-      - name: fourth step IOWAGA thredds
-        run: python src/icesat2_tracks/analysis_db/A02c_IOWAGA_thredds_prior.py SH_20190502_05180312 SH_testSLsinglefile2 True
-      - name: Fifth step B04_angle
-        run: python src/icesat2_tracks/analysis_db/B04_angle.py SH_20190502_05180312 SH_testSLsinglefile2 True
diff --git a/.gitignore b/.gitignore
index a6f0b6af..26928ff8 100644
--- a/.gitignore
+++ b/.gitignore
@@ -38,6 +38,9 @@
 analysis_db/support_files/
 *.egg-info/
 .installed.cfg
 *.egg
+/logs
+localci-runner.sh
+tests/test_tarballs.py
 *__pycache__/
 *__pycache__/*
diff --git a/config/2021_IceSAT2_startup_local.py b/config/2021_IceSAT2_startup_local.py
index 5b27eb7e..3b36dea2 100644
--- a/config/2021_IceSAT2_startup_local.py
+++ b/config/2021_IceSAT2_startup_local.py
@@ -1,29 +1,13 @@
 import os
-#os.environ["DISPLAY"] = "localhost:10.0"
-# 14, 16, work
-#standart libraries:
-import numpy as np
-import matplotlib
-#matplotlib.use('Agg')
 import matplotlib.pyplot as plt
-import pandas as pd
+import sys
 
-from matplotlib.gridspec import GridSpec
 import string
+from icesat2_tracks.local_modules import m_colormanager_ph3 as M_color
 
 import xarray as xr
-#xr.set_options(display_width=80, display_style='text')
 xr.set_options(display_style='text')
 
-import sys
-import imp
-
-import string
-
-# my own libraries:
-#import m_general as M
-
-#import AA_plot_base as AA
 def json_load(name, path, verbose=False):
     import json
     full_name= (os.path.join(path,name+ '.json'))
@@ -40,11 +24,10 @@ def json_load(name, path, verbose=False):
 sys.path.append(mconfig['paths']['local_script'])
 sys.path.append(mconfig['paths']['local_script'] +'/ICEsat2_SI_tools/')
-import m_colormanager_ph3 as M_color
-import m_tools_ph3 as MT
-import m_general_ph3 as M
+
 #load colorscheme
+# The color method produces output to stdout; suppressed in the class def.
CP col=M_color.color(path=mconfig['paths']['analysis']+'../config/', name='color_def') @@ -56,16 +39,7 @@ def json_load(name, path, verbose=False): SMALL_SIZE = 8 MEDIUM_SIZE = 10 BIGGER_SIZE = 12 -#csfont = {'fontname':'Comic Sans MS'} legend_properties = {'weight':'bold'} -#font.family: sans-serif -#font.sans-serif: Helvetica Neue - -#import matplotlib.font_manager as font_manager -#font_dirs = ['/home/mhell/HelveticaNeue/', ] -#font_files = font_manager.findSystemFonts(fontpaths=font_dirs) -#font_list = font_manager.createFontList(font_files) -#font_manager.fontManager.ttflist.extend(font_list) plt.rc('font', size=SMALL_SIZE, serif='Helvetica Neue', weight='normal') # controls default text sizes #plt.rc('font', size=SMALL_SIZE, serif='DejaVu Sans', weight='light') @@ -75,19 +49,11 @@ def json_load(name, path, verbose=False): plt.rc('xtick', labelsize=SMALL_SIZE) # fontsize of the tick labels plt.rc('ytick', labelsize=SMALL_SIZE) # fontsize of the tick labels plt.rc('legend', fontsize=SMALL_SIZE, frameon=False) # legend fontsize -plt.rc('figure', titlesize=MEDIUM_SIZE, titleweight='bold', autolayout=True) #, family='bold') # fontsize of the figure title - -#figure.autolayout : False -#matplotlib.rcParams['pdf.fonttype'] = 42 -#matplotlib.rcParams['ps.fonttype'] = 42 - +plt.rc('figure', titlesize=MEDIUM_SIZE, titleweight='bold', autolayout=True) plt.rc('path', simplify=True) plt.rcParams['figure.figsize'] = (10, 8)#(20.0, 10.0) #inline -#plt.rcParams['pcolor.shading'] = 'auto' -#rcParams['pcolor.shading'] = 'auto' -#plt.rc('pcolor', shading = 'auto') ### TICKS # see http://matplotlib.org/api/axis_api.html#matplotlib.axis.Tick @@ -125,50 +91,13 @@ def json_load(name, path, verbose=False): #ytick.minor.left : True # draw y axis left minor ticks #ytick.minor.right : True # draw y axis right minor ticks - plt.rc('xtick.major', size= 4, width=1 ) plt.rc('ytick.major', size= 3.8, width=1 ) -#axes.facecolor : white # axes background color -#axes.edgecolor : black # axes edge color -#axes.linewidth : 0.8 # edge linewidth -#axes.grid : False # display grid or not -#axes.titlesize : large # fontsize of the axes title -#axes.titlepad : 6.0 # pad between axes and title in points -#axes.labelsize : medium # fontsize of the x any y labels -#axes.labelpad : 4.0 # space between label and axis -#axes.labelweight : normal # weight of the x and y labels -#axes.labelcolor : black - plt.rc('axes', labelsize= MEDIUM_SIZE, labelweight='normal') - - - - -# axes.spines.left : True # display axis spines -# axes.spines.bottom : True -# axes.spines.top : True -# axes.spines.right : True plt.rc('axes.spines', top= False, right=False ) - - -def font_for_print(): - - SMALL_SIZE = 6 - MEDIUM_SIZE = 8 - BIGGER_SIZE = 10 - #csfont = {'fontname':'Comic Sans MS'} - legend_properties = {'weight':'bold'} - #font.family: sans-serif - #font.sans-serif: Helvetica Neue - - #import matplotlib.font_manager as font_manager - #font_dirs = ['/home/mhell/HelveticaNeue/', ] - #font_files = font_manager.findSystemFonts(fontpaths=font_dirs) - #font_list = font_manager.createFontList(font_files) - #font_manager.fontManager.ttflist.extend(font_list) - +def font_for_print(SMALL_SIZE = 6, MEDIUM_SIZE = 8, BIGGER_SIZE = 10): plt.rc('font', size=SMALL_SIZE, serif='Helvetica Neue', weight='normal') # controls default text sizes #plt.rc('font', size=SMALL_SIZE, serif='DejaVu Sans', weight='light') plt.rc('text', usetex='false') @@ -177,35 +106,11 @@ def font_for_print(): plt.rc('xtick', labelsize=SMALL_SIZE) # fontsize of the tick labels 
plt.rc('ytick', labelsize=SMALL_SIZE) # fontsize of the tick labels plt.rc('legend', fontsize=SMALL_SIZE, frameon=False) # legend fontsize - plt.rc('figure', titlesize=MEDIUM_SIZE, titleweight='bold', autolayout=True) #, family='bold') # fontsize of the figure title - - #figure.autolayout : False - #matplotlib.rcParams['pdf.fonttype'] = 42 - #matplotlib.rcParams['ps.fonttype'] = 42 - - - #plt.rc('xtick.major', size= 4, width=1 ) - #plt.rc('ytick.major', size= 3.8, width=1 ) - + plt.rc('figure', titlesize=MEDIUM_SIZE, titleweight='bold', autolayout=True) plt.rc('axes', labelsize= SMALL_SIZE, labelweight='normal') -def font_for_pres(): - - SMALL_SIZE = 10 - MEDIUM_SIZE = 12 - BIGGER_SIZE = 14 - #csfont = {'fontname':'Comic Sans MS'} - legend_properties = {'weight':'bold'} - #font.family: sans-serif - #font.sans-serif: Helvetica Neue - - #import matplotlib.font_manager as font_manager - #font_dirs = ['/home/mhell/HelveticaNeue/', ] - #font_files = font_manager.findSystemFonts(fontpaths=font_dirs) - #font_list = font_manager.createFontList(font_files) - #font_manager.fontManager.ttflist.extend(font_list) - +def font_for_pres(SMALL_SIZE = 10, MEDIUM_SIZE = 12, BIGGER_SIZE = 14): plt.rc('font', size=SMALL_SIZE, serif='Helvetica Neue', weight='normal') # controls default text sizes #plt.rc('font', size=SMALL_SIZE, serif='DejaVu Sans', weight='light') plt.rc('text', usetex='false') @@ -214,20 +119,6 @@ def font_for_pres(): plt.rc('xtick', labelsize=SMALL_SIZE) # fontsize of the tick labels plt.rc('ytick', labelsize=SMALL_SIZE) # fontsize of the tick labels plt.rc('legend', fontsize=SMALL_SIZE, frameon=False) # legend fontsize - plt.rc('figure', titlesize=MEDIUM_SIZE, titleweight='bold', autolayout=True) #, family='bold') # fontsize of the figure title - - #figure.autolayout : False - #matplotlib.rcParams['pdf.fonttype'] = 42 - #matplotlib.rcParams['ps.fonttype'] = 42 - - - #plt.rc('xtick.major', size= 4, width=1 ) - #plt.rc('ytick.major', size= 3.8, width=1 ) - + plt.rc('figure', titlesize=MEDIUM_SIZE, titleweight='bold', autolayout=True) plt.rc('axes', labelsize= SMALL_SIZE, labelweight='normal') - - - -# add project depenent libraries -#sys.path.append(config['paths']['local_script']) diff --git a/hooks/pre-push b/hooks/pre-push new file mode 100755 index 00000000..a01738e3 --- /dev/null +++ b/hooks/pre-push @@ -0,0 +1,18 @@ +#!/bin/sh + +# Count the number of 'def test_' in test_steps.py +n=$(grep -c '^def test_' tests/test_steps.py) + +# if n > $(nproc) then n = $(nproc) +if [ $n -gt $(nproc) ]; then + n=$(nproc) +fi + +# Run your Python script +pytest -n $n tests/test_steps.py + +# Check the exit status of the Python script +if [ $? -ne 0 ]; then + echo "Tests failed, aborting push." + exit 1 +fi diff --git a/pyproject.toml b/pyproject.toml index bc61136a..479de43b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -120,7 +120,11 @@ dependencies = [ # Optional "emcee >=3.1.4, <4.0.0", "netCDF4 >=1.6.5, <2.0.0", "siphon >=0.9, <1.0.0", - "h5py >=3.5.0, < 4.0.0" + "h5py >=3.5.0, < 4.0.0", + "termcolor >=2.4.0, < 3.0.0", + "pytest >=7.4.4, < 8.0.0", + "pytest-xdist >=3.5.0, < 4.0.0", + "typer >=0.9.0, < 1.0.0", ] # List additional groups of dependencies here (e.g. 
development diff --git a/src/icesat2_tracks/ICEsat2_SI_tools/generalized_FT.py b/src/icesat2_tracks/ICEsat2_SI_tools/generalized_FT.py index 3e88f22a..88a1f901 100644 --- a/src/icesat2_tracks/ICEsat2_SI_tools/generalized_FT.py +++ b/src/icesat2_tracks/ICEsat2_SI_tools/generalized_FT.py @@ -1,8 +1,17 @@ +import copy +import time + +import lmfit as LM +import matplotlib.pyplot as plt import numpy as np +import pandas as pd +import xarray as xr +from numpy import linalg +from scipy.signal import detrend -import icesat2_tracks.ICEsat2_SI_tools.spectral_estimates as spec import icesat2_tracks.ICEsat2_SI_tools.lanczos as lanczos -import matplotlib.pyplot as plt +import icesat2_tracks.ICEsat2_SI_tools.spectral_estimates as spec +import icesat2_tracks.local_modules.JONSWAP_gamma as spectal_models def rebin(data, dk): @@ -29,13 +38,9 @@ def smooth_data_to_weight(dd, m=150): dd_fake[2 * m : -2 * m] = dd weight = lanczos.lanczos_filter_1d_wrapping(np.arange(dd_fake.size), dd_fake, m) - weight = weight[2 * m : -2 * m] weight = weight / weight.max() - # ensure non-negative weights - weight[weight < 0.02] = 0.02 - return weight @@ -69,7 +74,7 @@ def get_weights_from_data( pars = Spec_fft.set_parameters(flim=np.sqrt(9.81 * k[-1]) / 2 / np.pi) k_max = (pars["f_max"].value * 2 * np.pi) ** 2 / 9.81 - if method is "gaussian": + if method == "gaussian": # simple gaussian weight def gaus(x, x_0, amp, sigma_g): return amp * np.exp(-0.5 * ((x - x_0) / sigma_g) ** 2) @@ -77,7 +82,7 @@ def gaus(x, x_0, amp, sigma_g): weight = gaus(k, k_max, 1, 0.02) ** (1 / 2) params = None - elif method is "parametric": + elif method == "parametric": # JONSWAP weight f = np.sqrt(9.81 * k) / (2 * np.pi) weight = Spec_fft.create_weight(freq=f, plot_flag=False, max_nfev=max_nfev) @@ -108,21 +113,7 @@ def gaus(x, x_0, amp, sigma_g): return weight, params -def define_weight_shutter(weight, k, Ncut=3): - "creates masking function to lower high wavenumber weights, Ncut is the numnber by which the spectral peak is multiplied" - # Limit high wavenumbers weights - weight_shutter = weight * 0 + 1 - ki_cut = weight.argmax() * Ncut # k of peak - N_res = weight.size - ki_cut - if N_res < 1: - return weight_shutter - weight_shutter[ki_cut:] = np.linspace(1, 0, N_res) - return weight_shutter - - def make_xarray_from_dict(D, name, dims, coords): - import xarray as xr - D_return = dict() for xi, I in D.items(): coords["x"] = xi @@ -136,29 +127,23 @@ def define_weights(stancil, prior, x, y, dx, k, max_nfev, plot_flag=False): return weights normalized to 1, prior_pars used for the next iteration """ - if (type(prior[0]) is bool) and not prior[0]: + if isinstance(prior[0], bool) and not prior[0]: # fit function to data weight, prior_pars = get_weights_from_data( x, y, dx, stancil, k, max_nfev, plot_flag=plot_flag, method="parametric" ) - weight_name = "$P_{init}$ from FFT" elif type(prior) is tuple: - # combine old and new weights weight = 0.2 * smooth_data_to_weight(prior[0]) + 0.8 * prior[1] + weight_name = "smth. $P_{i-1}$" prior_pars = {"alpha": None, "amp": None, "f_max": None, "gamma": None} - else: # prior = weight, this is all other iterations + else: weight = smooth_data_to_weight(prior) weight_name = "smth. 
from data" prior_pars = {"alpha": None, "amp": None, "f_max": None, "gamma": None} - # Limit high wavenumbers weights - weight = weight * define_weight_shutter(weight, k, Ncut=3) - if plot_flag: - import matplotlib.pyplot as plt - plt.plot(k, weight, zorder=12, c="darkgreen", linewidth=0.8, label=weight_name) # peak normlize weights by std of data @@ -188,7 +173,9 @@ def __init__(self, x, data, L, dx, wavenumber, data_error=None, ov=None): """ self.Lmeters = L - self.ov = int(L / 2) if ov is None else ov + self.ov = ( + int(L / 2) if ov is None else ov + ) # when not defined in create_chunk_boundaries then L/2 self.x = x self.dx = dx @@ -228,36 +215,29 @@ def cal_spectrogram( self.GG, params_dataframe params_dataframe is a pd.DataFrame that containes all the parameters of the fitting process (and may contain uncertainties too once they are calculated) """ - import xarray as xr - import copy - import pandas as pd - X = self.x if x is None else x # all x positions DATA = self.data if data is None else data # all data points ERR = self.error if error is None else error # all error for points Lmeters, dk = self.Lmeters, self.dk Lpoints = self.Lpoints Lpoints_full = int(Lmeters / self.dx) - self.xlims = (np.round(X.min()), X.max()) if xlims is None else xlims + # init Lomb scargle object with noise as nummy data () def calc_gFT_apply(stancil, prior): """ windows the data accoding to stencil and applies LS spectrogram returns: stancil center, spectrum for this stencil, number of datapoints in stancil """ - from scipy.signal import detrend - import matplotlib.pyplot as plt - import time ta = time.perf_counter() x_mask = (stancil[0] <= X) & (X <= stancil[-1]) + print(stancil[1]) x = X[x_mask] if ( - x.size / Lpoints < 0.40 + x.size / Lpoints < 0.1 ): # if there are not enough photos set results to nan - print(" -- data density to low, skip stancil") return { "stancil_center": stancil[1], "p_hat": np.concatenate([self.k * np.nan, self.k * np.nan]), @@ -277,8 +257,6 @@ def calc_gFT_apply(stancil, prior): FT = generalized_Fourier(x, y, self.k) if plot_flag: - import matplotlib.pyplot as plt - plt.figure(figsize=(3.34, 1.8), dpi=300) # define weights. 
Weights are normalized to 1 @@ -291,39 +269,14 @@ def calc_gFT_apply(stancil, prior): # define error err = ERR[x_mask] if ERR is not None else 1 - print("compute time weights : ", time.perf_counter() - ta) - + print("weights : ", time.perf_counter() - ta) ta = time.perf_counter() - FT.define_problem(weight, err) + FT.define_problem(weight, err) # 1st arg is Penalty, 2nd is error # solve problem: p_hat = FT.solve() - if np.isnan(np.mean(p_hat)): - print(" -- inversion nan!") - print(" -- data fraction", x.size / Lpoints) - print( - " -- weights:", - np.mean(weight), - "err:", - np.mean(err), - "y:", - np.mean(y), - ) - print(" -- skip stancil") - return { - "stancil_center": stancil[1], - "p_hat": np.concatenate([self.k * np.nan, self.k * np.nan]), - "inverse_stats": np.nan, - "y_model_grid": np.nan, - "y_data_grid": np.nan, - "x_size": x.size, - "PSD": False, - "weight": False, - "spec_adjust": np.nan, - } - - print("compute time solve : ", time.perf_counter() - ta) + print("solve : ", time.perf_counter() - ta) ta = time.perf_counter() x_pos = (np.round((x - stancil[0]) / self.dx, 0)).astype("int") @@ -335,7 +288,7 @@ def calc_gFT_apply(stancil, prior): y_data_grid = np.copy(eta) * np.nan y_data_grid[x_pos] = y - inverse_stats = FT.get_stats(self.dk, Lpoints_full, print_flag=plot_flag) + inverse_stats = FT.get_stats(self.dk, Lpoints_full, print_flag=True) # add fitting parameters of Prior to stats dict for k, I in prior_pars.items(): try: @@ -343,17 +296,17 @@ def calc_gFT_apply(stancil, prior): except: inverse_stats[k] = np.nan - print("compute time stats : ", time.perf_counter() - ta) + print("stats : ", time.perf_counter() - ta) # multiply with the standard deviation of the data to get dimensions right PSD = power_from_model(p_hat, dk, self.k.size, x.size, Lpoints) - if plot_flag: - if self.k.size * 2 > x.size: - col = "red" - else: - col = "blue" + if self.k.size * 2 > x.size: + col = "red" + else: + col = "blue" + if plot_flag: plt.plot(self.k, PSD, color=col, label="GFT fit", linewidth=0.5) plt.title( "non-dim Spectral Segment Models, 2M=" @@ -389,32 +342,26 @@ def calc_gFT_apply(stancil, prior): return return_dict - # derive L2 stancil - self.stancil_iter_list = spec.create_chunk_boundaries_unit_lengths( - Lmeters, self.xlims, ov=self.ov, iter_flag=False + # % derive L2 stancil + self.stancil_iter = spec.create_chunk_boundaries_unit_lengths( + Lmeters, self.xlims, ov=self.ov, iter_flag=True ) - self.stancil_iter = iter(self.stancil_iter_list.T.tolist()) # apply func to all stancils Spec_returns = list() - # form: PSD_from_GFT, weight_used in inversion prior = False, False - N_stencil = len(self.stancil_iter_list.T) - Ni = 1 for ss in copy.copy(self.stancil_iter): - print(Ni, "/", N_stencil, "Stancils") - # prior step if prior[0] is False: # make NL fit of piors do not exist - print("1st step: with NL-fit") + print("1st step with NL-fit") I_return = calc_gFT_apply(ss, prior=prior) prior = I_return["PSD"], I_return["weight"] + # 2nd step if prior[0] is False: - print("1st GFD failed (priors[0]=false), skip 2nd step") + print("priors still false skip 2nd step") else: - print("2nd step: use set priors:", type(prior[0]), type(prior[1])) - print(prior[0][0:3], prior[1][0:3]) + print("2nd step use set priors:", type(prior[0]), type(prior[0])) I_return = calc_gFT_apply(ss, prior=prior) prior = I_return["PSD"], I_return["weight"] @@ -434,8 +381,6 @@ def calc_gFT_apply(stancil, prior): ) ) - Ni += 1 - # unpack resutls of the mapping: GFT_model = dict() Z_model = dict() @@ -481,7 +426,7 @@ def 
calc_gFT_apply(stancil, prior): self.N_per_stancil = N_per_stancil chunk_positions = np.array(list(D_specs.keys())) - self.N_stancils = len(chunk_positions) # number of spectral realizatiobs + self.N_stancils = len(chunk_positions) # number of spectral realizations # repack data, create xarray # 1st LS spectal estimates @@ -489,16 +434,17 @@ def calc_gFT_apply(stancil, prior): G_power_data = make_xarray_from_dict( D_specs, "gFT_PSD_data", ["k"], {"k": self.k} ) - G_power_data = xr.concat(G_power_data.values(), dim="x").T + G_power_data = xr.concat(G_power_data.values(), dim="x").T # .to_dataset() G_power_model = make_xarray_from_dict( D_specs_model, "gFT_PSD_model", ["k"], {"k": self.k} ) - G_power_model = xr.concat(G_power_model.values(), dim="x").T + G_power_model = xr.concat(G_power_model.values(), dim="x").T # .to_dataset() self.G = G_power_model self.G.name = "gFT_PSD_model" + # 2nd FFT(Y_model) G_model_Z = make_xarray_from_dict(Z_model, "Z_hat", ["k"], {"k": self.k}) G_model_Z = xr.concat(G_model_Z.values(), dim="x").T @@ -518,8 +464,9 @@ def calc_gFT_apply(stancil, prior): # add weights to the data weights_k = make_xarray_from_dict(weights, "weight", ["k"], {"k": self.k}) - weights_k = xr.concat(weights_k.values(), dim="x").T # .to_dataset() + weights_k = xr.concat(weights_k.values(), dim="x").T + # 4th: model in real space eta = np.arange(0, self.Lmeters + self.dx, self.dx) - self.Lmeters / 2 y_model_eta = make_xarray_from_dict(y_model, "y_model", ["eta"], {"eta": eta}) y_data_eta = make_xarray_from_dict(y_data, "y_data", ["eta"], {"eta": eta}) @@ -575,13 +522,12 @@ def calc_gFT_apply(stancil, prior): ) sta, ste = xi - self.Lmeters / 2, xi + self.Lmeters / 2 - x_pos = (np.round((X[(sta <= X) & (X <= ste)] - sta) / self.dx)).astype( "int" ) x_err = np.copy(eta) * np.nan - # check sizes and adjust if necessary. 
+ if x_pos.size > I["model_error_x"].size: x_pos = x_pos[0 : I["model_error_x"].size] print("adjust x") @@ -634,15 +580,11 @@ def calc_var(self): def parceval(self, add_attrs=True, weight_data=False): "test Parceval theorem" - import copy DATA = self.data - L = self.Lmeters X = self.x def get_stancil_var_apply(stancil): - from scipy.signal import detrend - "returns the variance of yy for stancil" x_mask = (stancil[0] < X) & (X <= stancil[-1]) idata = DATA[x_mask] @@ -745,7 +687,7 @@ def power_from_model(p_hat, dk, M, N_x, N_x_full): """ Z = complex_represenation(p_hat, M, N_x_full) - spec, _ = Z_to_power_gFT(Z, dk, N_x, N_x_full) # use spec_incomplete + spec, _ = Z_to_power_gFT(Z, dk, N_x, N_x_full) # spectral density respesenting the incomplete data return spec @@ -756,8 +698,6 @@ def __init__(self, x, ydata, k): """ non_dimensionalize (bool, default=True) if True, then the data and R_data_uncertainty is non-dimensionalized by the std of the data """ - import numpy as np - from numpy import linalg self.x, self.ydata, self.k = x, ydata, k self.M = self.k.size # number of wavenumbers @@ -792,8 +732,6 @@ def define_problem(self, P_weight, R_data_uncertainty): self.R_1d = R_data_uncertainty def solve(self): - from numpy import linalg - inv = linalg.inv """ solves the linear inverse problem, return hessian and p_hat @@ -864,8 +802,6 @@ def parceval(self, dk, Nx_full): def get_stats(self, dk, Nx_full, print_flag=False): residual = self.ydata - self.model() - - Lmeters = self.x[-1] - self.x[0] pars = { "data_var": self.ydata.var(), "model_var": self.model().var(), @@ -896,7 +832,7 @@ def get_stats(self, dk, Nx_full, print_flag=False): class get_prior_spec: def __init__(self, freq, data): - import lmfit as LM + """ """ self.LM = LM self.data = data @@ -919,8 +855,6 @@ def set_parameters(self, flim=None): self.params LMfit.parameters class needed for optimization """ - import numpy as np - params = self.LM.Parameters() def get_peak_pos(y, smooth=30): @@ -949,9 +883,6 @@ def model_func(self, f, params): ) def non_dim_spec_model(self, f, f_max, amp, gamma=1, angle_rad=0): - import icesat2_tracks.local_modules.JONSWAP_gamma as spectal_models - - U = 20 # results are incensitive to U f_true = f * np.cos(angle_rad) model = spectal_models.JONSWAP_default_alt(f_true, f_max, 20, gamma=gamma) model = amp * model / np.nanmean(model) @@ -984,13 +915,9 @@ def optimize(self, fitting_args=None, method="dual_annealing", max_nfev=None): return self.fitter def plot_data(self): - import matplotlib.pyplot as plt - plt.plot(self.freq, self.data, "k") def plot_model(self, pars): - import matplotlib.pyplot as plt - plt.plot(self.freq, self.model_func(self.freq, pars), "b--") def runningmean(self, var, m, tailcopy=False): diff --git a/src/icesat2_tracks/ICEsat2_SI_tools/io.py b/src/icesat2_tracks/ICEsat2_SI_tools/io.py index 521e305c..713f9b07 100644 --- a/src/icesat2_tracks/ICEsat2_SI_tools/io.py +++ b/src/icesat2_tracks/ICEsat2_SI_tools/io.py @@ -1,41 +1,36 @@ - - def init_from_input(arguments): - if (len(arguments) <= 1) | ('-f' in set(arguments) ) : - - track_name='20190605061807_10380310_004_01' - batch_key='SH_batch01' + if (len(arguments) <= 1) | ("-f" in set(arguments)): + track_name = "20190605061807_10380310_004_01" + batch_key = "SH_batch01" test_flag = True - print('use standard values') + print("use standard values") else: + track_name = arguments[1] + batch_key = arguments[2] + # $(hemisphere) $(coords) $(config) - track_name=arguments[1] - batch_key =arguments[2] - #$(hemisphere) $(coords) $(config) - - 
print('read vars from file: ' +str(arguments[1]) ) + print("read vars from file: " + str(arguments[1])) - if (len(arguments) >= 4): - if arguments[3] == 'True': + if len(arguments) >= 4: + if arguments[3] == "True": test_flag = True - elif arguments[3] == 'False': + elif arguments[3] == "False": test_flag = False else: - test_flag= arguments[3] + test_flag = arguments[3] - print('test_flag found, test_flag= '+str(test_flag) ) + print("test_flag found, test_flag= " + str(test_flag)) else: - test_flag=False + test_flag = False print(track_name) - - print('----- batch ='+ batch_key) - print('----- test_flag: ' + str(test_flag)) + print("----- batch =" + batch_key) + print("----- test_flag: " + str(test_flag)) return track_name, batch_key, test_flag -def init_data(ID_name, batch_key, ID_flag, ID_root, prefix ='A01b_ID'): +def init_data(ID_name, batch_key, ID_flag, ID_root, prefix="A01b_ID"): """ Takes inputs and retrieves the ID, track_names that can be loaded, hemis, batch inputs: are the outputs from init_from_input, specifically @@ -49,103 +44,138 @@ def init_data(ID_name, batch_key, ID_flag, ID_root, prefix ='A01b_ID'): """ print(ID_name, batch_key, ID_flag) - hemis, batch = batch_key.split('_') + hemis, batch = batch_key.split("_") if ID_flag: - ID_path = ID_root +'/'+batch_key+'/'+prefix+'/' - ID = json_load( prefix +'_'+ID_name, ID_path ) - track_names = ID['tracks']['ATL03'] + ID_path = ID_root + "/" + batch_key + "/" + prefix + "/" + ID = json_load(prefix + "_" + ID_name, ID_path) + track_names = ID["tracks"]["ATL03"] else: - track_names = ['ATL03_'+ID_name] - ID = ID_name + track_names = ["ATL03_" + ID_name] + ID = ID_name return ID, track_names, hemis, batch + def ID_to_str(ID_name): from datetime import datetime - IDs = ID_name.split('_') - date = datetime.strptime(IDs[1], '%Y%m%d').strftime('%Y-%m-%d')#.strftime('%m/%d/%Y') + IDs = ID_name.split("_") + date = datetime.strptime(IDs[1], "%Y%m%d").strftime( + "%Y-%m-%d" + ) # .strftime('%m/%d/%Y') date - return IDs[0] +' ' +date +' granule: ' + IDs[2] + return IDs[0] + " " + date + " granule: " + IDs[2] + class case_ID: """docstring for case_ID""" + def __init__(self, track_name): import re - track_name_pattern = r'(\D{2}|\d{2})_?(\d{4})(\d{2})(\d{2})(\d{2})?(\d{2})?(\d{2})?_(\d{4})(\d{2})(\d{2})_?(\d{3})?_?(\d{2})?' - case_ID_pattern = r'(\d{4})(\d{2})(\d{2})_(\d{4})(\d{2})(\d{2})' - track_name_rx = re.compile(track_name_pattern) - self.hemis,self.YY,self.MM,self.DD,self.HH,self.MN,self.SS,self.TRK,self.CYC,self.GRN,self.RL,self.VRS = track_name_rx.findall(track_name).pop() + track_name_pattern = r"(\D{2}|\d{2})_?(\d{4})(\d{2})(\d{2})(\d{2})?(\d{2})?(\d{2})?_(\d{4})(\d{2})(\d{2})_?(\d{3})?_?(\d{2})?" 
+ case_ID_pattern = r"(\d{4})(\d{2})(\d{2})_(\d{4})(\d{2})(\d{2})" - if self.hemis == '01': - self.hemis = 'NH' - elif self.hemis == '02': - self.hemis = 'SH' + track_name_rx = re.compile(track_name_pattern) + ( + self.hemis, + self.YY, + self.MM, + self.DD, + self.HH, + self.MN, + self.SS, + self.TRK, + self.CYC, + self.GRN, + self.RL, + self.VRS, + ) = track_name_rx.findall(track_name).pop() + + if self.hemis == "01": + self.hemis = "NH" + elif self.hemis == "02": + self.hemis = "SH" else: self.hemis = self.hemis - #self.hemis = hemis + # self.hemis = hemis self.set() self.track_name_init = track_name def set(self): - block1 = (self.YY,self.MM,self.DD) - block2 = (self.TRK,self.CYC,self.GRN) + block1 = (self.YY, self.MM, self.DD) + block2 = (self.TRK, self.CYC, self.GRN) - self.ID = self.hemis+'_'+''.join(block1) +'_'+ ''.join(block2) + self.ID = self.hemis + "_" + "".join(block1) + "_" + "".join(block2) return self.ID def get_granule(self): - return ''.join((self.TRK,self.CYC,self.GRN)) + return "".join((self.TRK, self.CYC, self.GRN)) def set_dummy(self): - block1 = (self.YY,self.MM,self.DD) - block2 = (self.TRK,self.CYC,self.GRN) + block1 = (self.YY, self.MM, self.DD) + block2 = (self.TRK, self.CYC, self.GRN) - self.ID_dummy = ''.join(block1) +'_'+ ''.join(block2) + self.ID_dummy = "".join(block1) + "_" + "".join(block2) return self.ID_dummy def set_ATL03_trackname(self): - - block1 = (self.YY,self.MM,self.DD) - block1b = (self.HH,self.MN,self.SS) - block2 = (self.TRK,self.CYC,self.GRN) - if self.RL is '': + block1 = (self.YY, self.MM, self.DD) + block1b = (self.HH, self.MN, self.SS) + block2 = (self.TRK, self.CYC, self.GRN) + if self.RL is "": raise ValueError("RL not set") - if self.VRS is '': + if self.VRS is "": raise ValueError("VRS not set") - block3 = (self.RL,self.VRS) + block3 = (self.RL, self.VRS) - self.ID_ATL03 = ''.join(block1) +''.join(block1b) +'_'+ ''.join(block2) +'_'+ '_'.join(block3) + self.ID_ATL03 = ( + "".join(block1) + + "".join(block1b) + + "_" + + "".join(block2) + + "_" + + "_".join(block3) + ) return self.ID_ATL03 def set_ATL10_trackname(self): - - block1 = (self.YY,self.MM,self.DD) - block1b = (self.HH,self.MN,self.SS) - block2 = (self.TRK,self.CYC, '01') # granule is alwasy '01' for ATL10 - if self.RL is '': + block1 = (self.YY, self.MM, self.DD) + block1b = (self.HH, self.MN, self.SS) + block2 = (self.TRK, self.CYC, "01") # granule is alwasy '01' for ATL10 + if self.RL is "": raise ValueError("RL not set") - if self.VRS is '': + if self.VRS is "": raise ValueError("VRS not set") - block3 = (self.RL,self.VRS) + block3 = (self.RL, self.VRS) - if self.hemis == 'NH': - hemis = '01' - elif self.hemis == 'SH': - hemis = '02' + if self.hemis == "NH": + hemis = "01" + elif self.hemis == "SH": + hemis = "02" else: hemis = self.hemis - self.ID_ATL10 = hemis+'_'+''.join(block1) +''.join(block1b) +'_'+ ''.join(block2) +'_'+ '_'.join(block3) + self.ID_ATL10 = ( + hemis + + "_" + + "".join(block1) + + "".join(block1b) + + "_" + + "".join(block2) + + "_" + + "_".join(block3) + ) return self.ID_ATL10 -def nsidc_icesat2_get_associated_file(file_list, product, build=True, username=None, password=None): +def nsidc_icesat2_get_associated_file( + file_list, product, build=True, username=None, password=None +): """ THis method returns assocociated files names and paths for files given in file_list for the "product" ICEsat2 product @@ -163,34 +193,39 @@ def nsidc_icesat2_get_associated_file(file_list, product, build=True, username=N import posixpath import os import 
icesat2_toolkit.utilities - AUXILIARY=False - #product='ATL03' - DIRECTORY= None - FLATTEN=False - TIMEOUT=120 - MODE=0o775 - #file_list = ['ATL07-01_20210301023054_10251001_005_01'] + + AUXILIARY = False + # product='ATL03' + DIRECTORY = None + FLATTEN = False + TIMEOUT = 120 + MODE = 0o775 + # file_list = ['ATL07-01_20210301023054_10251001_005_01'] if build and not (username or password): - urs = 'urs.earthdata.nasa.gov' - username,login,password = netrc.netrc().authenticators(urs) - #-- build urllib2 opener and check credentials + urs = "urs.earthdata.nasa.gov" + username, login, password = netrc.netrc().authenticators(urs) + # -- build urllib2 opener and check credentials if build: - #-- build urllib2 opener with credentials + # -- build urllib2 opener with credentials icesat2_toolkit.utilities.build_opener(username, password) - #-- check credentials + # -- check credentials icesat2_toolkit.utilities.check_credentials() parser = lxml.etree.HTMLParser() - #-- remote https server for ICESat-2 Data - HOST = 'https://n5eil01u.ecs.nsidc.org' - #-- regular expression operator for extracting information from files - rx = re.compile(r'(processed_)?(ATL\d{2})(-\d{2})?_(\d{4})(\d{2})(\d{2})' - r'(\d{2})(\d{2})(\d{2})_(\d{4})(\d{2})(\d{2})_(\d{3})_(\d{2})') - #-- regular expression pattern for finding specific files - regex_suffix = '(.*?)$' if AUXILIARY else '(h5)$' - remote_regex_pattern = (r'{0}(-\d{{2}})?_(\d{{4}})(\d{{2}})(\d{{2}})' - r'(\d{{2}})(\d{{2}})(\d{{2}})_({1})({2})({3})_({4})_(\d{{2}})(.*?).{5}') + # -- remote https server for ICESat-2 Data + HOST = "https://n5eil01u.ecs.nsidc.org" + # -- regular expression operator for extracting information from files + rx = re.compile( + r"(processed_)?(ATL\d{2})(-\d{2})?_(\d{4})(\d{2})(\d{2})" + r"(\d{2})(\d{2})(\d{2})_(\d{4})(\d{2})(\d{2})_(\d{3})_(\d{2})" + ) + # -- regular expression pattern for finding specific files + regex_suffix = "(.*?)$" if AUXILIARY else "(h5)$" + remote_regex_pattern = ( + r"{0}(-\d{{2}})?_(\d{{4}})(\d{{2}})(\d{{2}})" + r"(\d{{2}})(\d{{2}})(\d{{2}})_({1})({2})({3})_({4})_(\d{{2}})(.*?).{5}" + ) # rx = re.compile(r'(processed_)?(ATL\d{2})(-\d{2})?_(\d{4})(\d{2})(\d{2})' # r'(\d{2})(\d{2})(\d{2})_(\d{4})(\d{2})(\d{2})_(\d{3})_(\d{2})(.*?).h5$') @@ -199,69 +234,70 @@ def nsidc_icesat2_get_associated_file(file_list, product, build=True, username=N # remote_regex_pattern = (r'{0}(-\d{{2}})?_(\d{{4}})(\d{{2}})(\d{{2}})' # r'(\d{{2}})(\d{{2}})(\d{{2}})_({1})({2})({3})_({4})_(\d{{2}})(.*?).{5}') - #-- build list of remote files, remote modification times and local files + # -- build list of remote files, remote modification times and local files original_files = [] remote_files = [] remote_mtimes = [] local_files = [] - remote_names =[] + remote_names = [] for input_file in file_list: - #print(input_file) - #-- extract parameters from ICESat-2 ATLAS HDF5 file name - SUB,PRD,HEM,YY,MM,DD,HH,MN,SS,TRK,CYC,GRN,RL,VRS = \ - rx.findall(input_file).pop() - #-- get directories from remote directory - product_directory = '{0}.{1}'.format(product,RL) - sd = '{0}.{1}.{2}'.format(YY,MM,DD) - PATH = [HOST,'ATLAS',product_directory,sd] - #-- local and remote data directories - remote_dir=posixpath.join(*PATH) - temp=os.path.dirname(input_file) if (DIRECTORY is None) else DIRECTORY - local_dir=os.path.expanduser(temp) if FLATTEN else os.path.join(temp,sd) - #-- create output directory if not currently existing + # print(input_file) + # -- extract parameters from ICESat-2 ATLAS HDF5 file name + SUB, PRD, HEM, YY, MM, DD, HH, MN, SS, TRK, 
CYC, GRN, RL, VRS = rx.findall( + input_file + ).pop() + # -- get directories from remote directory + product_directory = "{0}.{1}".format(product, RL) + sd = "{0}.{1}.{2}".format(YY, MM, DD) + PATH = [HOST, "ATLAS", product_directory, sd] + # -- local and remote data directories + remote_dir = posixpath.join(*PATH) + temp = os.path.dirname(input_file) if (DIRECTORY is None) else DIRECTORY + local_dir = os.path.expanduser(temp) if FLATTEN else os.path.join(temp, sd) + # -- create output directory if not currently existing # if not os.access(local_dir, os.F_OK): # os.makedirs(local_dir, MODE) - #-- compile regular expression operator for file parameters - args = (product,TRK,CYC,GRN,RL,regex_suffix) + # -- compile regular expression operator for file parameters + args = (product, TRK, CYC, GRN, RL, regex_suffix) R1 = re.compile(remote_regex_pattern.format(*args), re.VERBOSE) - #-- find associated ICESat-2 data file - #-- find matching files (for granule, release, version, track) - colnames,collastmod,colerror=icesat2_toolkit.utilities.nsidc_list(PATH, - build=False, - timeout=TIMEOUT, - parser=parser, - pattern=R1, - sort=True) + # -- find associated ICESat-2 data file + # -- find matching files (for granule, release, version, track) + colnames, collastmod, colerror = icesat2_toolkit.utilities.nsidc_list( + PATH, build=False, timeout=TIMEOUT, parser=parser, pattern=R1, sort=True + ) print(colnames) - #-- print if file was not found + # -- print if file was not found if not colnames: print(colerror) continue - #-- add to lists - for colname,remote_mtime in zip(colnames,collastmod): - #-- save original file to list (expands if getting auxiliary files) + # -- add to lists + for colname, remote_mtime in zip(colnames, collastmod): + # -- save original file to list (expands if getting auxiliary files) original_files.append(input_file) - #-- remote and local versions of the file - remote_files.append(posixpath.join(remote_dir,colname)) - local_files.append(os.path.join(local_dir,colname)) + # -- remote and local versions of the file + remote_files.append(posixpath.join(remote_dir, colname)) + local_files.append(os.path.join(local_dir, colname)) remote_mtimes.append(remote_mtime) remote_names.append(colname) - return original_files, remote_files, remote_names #product_directory, sd, + return original_files, remote_files, remote_names # product_directory, sd, + def json_load(name, path, verbose=False): import json import os - full_name= (os.path.join(path,name+ '.json')) - with open(full_name, 'r') as ifile: - data=json.load(ifile) + full_name = os.path.join(path, name + ".json") + + with open(full_name, "r") as ifile: + data = json.load(ifile) if verbose: - print('loaded from: ',full_name) + print("loaded from: ", full_name) return data -def ATL03_download(username,password, dpath, product_directory, sd, file_name): + +def ATL03_download(username, password, dpath, product_directory, sd, file_name): """ inputs: username: username for https://urs.earthdata.nasa.gov @@ -273,101 +309,119 @@ def ATL03_download(username,password, dpath, product_directory, sd, file_name): """ import icesat2_toolkit.utilities from icesat2_toolkit.read_ICESat2_ATL03 import read_HDF5_ATL03 - HOST = ['https://n5eil01u.ecs.nsidc.org','ATLAS',product_directory,sd, file_name] + + HOST = ["https://n5eil01u.ecs.nsidc.org", "ATLAS", product_directory, sd, file_name] # HOST = ['https://n5eil01u.ecs.nsidc.org','ATLAS','ATL03.003','2018.10.14', # 'ATL03_20181014000347_02350101_003_01.h5'] - print('download to:', dpath+'/'+HOST[-1]) - 
buffer,error=icesat2_toolkit.utilities.from_nsidc(HOST,username=username, - password=password,local=dpath+'/'+HOST[-1],verbose=True) - #-- raise exception if download error + print("download to:", dpath + "/" + HOST[-1]) + buffer, error = icesat2_toolkit.utilities.from_nsidc( + HOST, + username=username, + password=password, + local=dpath + "/" + HOST[-1], + verbose=True, + ) + # -- raise exception if download error if not buffer: raise Exception(error) -def save_pandas_table(table_dict, name , save_path): + +def save_pandas_table(table_dict, name, save_path): import os + if not os.path.exists(save_path): os.makedirs(save_path) import warnings from pandas import HDFStore from pandas.io.pytables import PerformanceWarning - warnings.filterwarnings('ignore',category=PerformanceWarning) - with HDFStore(save_path+'/'+name+'.h5') as store: - for name,table in table_dict.items(): - store[name]=table + warnings.filterwarnings("ignore", category=PerformanceWarning) -def load_pandas_table_dict(name , save_path): + with HDFStore(save_path + "/" + name + ".h5") as store: + for name, table in table_dict.items(): + store[name] = table + + +def load_pandas_table_dict(name, save_path): import warnings from pandas import HDFStore from pandas.io.pytables import PerformanceWarning - warnings.filterwarnings('ignore',category=PerformanceWarning) - return_dict=dict() - with HDFStore(save_path+'/'+name+'.h5') as store: - #print(store) - #print(store.keys()) + warnings.filterwarnings("ignore", category=PerformanceWarning) + + return_dict = dict() + with HDFStore(save_path + "/" + name + ".h5") as store: + # print(store) + # print(store.keys()) for k in store.keys(): - return_dict[k[1:]]=store.get(k) + return_dict[k[1:]] = store.get(k) return return_dict -def get_beam_hdf_store(ATL03_k): +def get_beam_hdf_store(ATL03_k): import pandas as pd - DD = pd.DataFrame()#columns = ATL03.keys()) + + DD = pd.DataFrame() # columns = ATL03.keys()) for ikey in ATL03_k.keys(): DD[ikey] = ATL03_k[ikey] return DD -def get_beam_var_hdf_store(ATL03_k, ikey): +def get_beam_var_hdf_store(ATL03_k, ikey): import pandas as pd - DD = pd.DataFrame()#columns = ATL03.keys()) + + DD = pd.DataFrame() # columns = ATL03.keys()) DD[ikey] = ATL03_k[ikey] return DD -def write_track_to_HDF5(data_dict, name, path, verbose=False, mode='w'): +def write_track_to_HDF5(data_dict, name, path, verbose=False, mode="w"): import os import h5py - mode = 'w' if mode is None else mode + + mode = "w" if mode is None else mode if not os.path.exists(path): os.makedirs(path) - full_name= (os.path.join(path,name+ '.h5')) + full_name = os.path.join(path, name + ".h5") store = h5py.File(full_name, mode) for k in data_dict.keys(): store1 = store.create_group(k) for kk, I in list(data_dict[k].items()): - store1[kk]=I - #store1.close() + store1[kk] = I + # store1.close() store.close() if verbose: - print('saved at: ' +full_name) + print("saved at: " + full_name) def get_time_for_track(delta_time, atlas_epoch): "returns pandas dataframe" import pandas as pd import convert_GPS_time as cGPS + # Conversion of delta_time to a calendar date temp = cGPS.convert_GPS_time(atlas_epoch[0] + delta_time, OFFSET=0.0) - year = temp['year'][:].astype('int') - month = temp['month'][:].astype('int') - day = temp['day'][:].astype('int') - hour = temp['hour'][:].astype('int') - minute = temp['minute'][:].astype('int') - second = temp['second'][:].astype('int') + year = temp["year"][:].astype("int") + month = temp["month"][:].astype("int") + day = temp["day"][:].astype("int") + hour = 
temp["hour"][:].astype("int") + minute = temp["minute"][:].astype("int") + second = temp["second"][:].astype("int") + + return pd.DataFrame( + {"year": year, "month": month, "day": day, "hour": hour, "second": second} + ) - return pd.DataFrame({'year':year, 'month':month, 'day':day, 'hour':hour, 'second':second}) -def getATL03_beam(fileT, numpy=False, beam='gt1l', maxElev=1e6): +def getATL03_beam(fileT, numpy=False, beam="gt1l", maxElev=1e6): """ returns 'beam' from fileT as pandas table. fillT path of file @@ -379,38 +433,38 @@ def getATL03_beam(fileT, numpy=False, beam='gt1l', maxElev=1e6): import h5py import convert_GPS_time as cGPS import pandas as pd + # Open the file - ATL03 = h5py.File(fileT, 'r') - lons = ATL03[beam+'/heights/lon_ph'][:] - lats = ATL03[beam+'/heights/lat_ph'][:] + ATL03 = h5py.File(fileT, "r") + lons = ATL03[beam + "/heights/lon_ph"][:] + lats = ATL03[beam + "/heights/lat_ph"][:] # Along track distance from equator i think. - along_track_distance=ATL03[beam+'/heights/dist_ph_along'][:] - across_track_distance=ATL03[beam+'/heights/dist_ph_across'][:] - #dem_h = ATL03[beam+'/geophys_corr/dem_h'][:] - #delta_time_dem_h = ATL03[beam+'/geophys_corr/delta_time'][:] - segment_dist_x=ATL03[beam+'/geolocation/segment_dist_x'][:] - segment_length=ATL03[beam+'/geolocation/segment_length'][:] - segment_id = ATL03[beam+'/geolocation/segment_id'][:] - - delta_time_geolocation = ATL03[beam+'/geolocation/delta_time'][:] - reference_photon_index= ATL03[beam+'/geolocation/reference_photon_index'][:] - ph_index_beg= ATL03[beam+'/geolocation/ph_index_beg'][:] - - - ph_id_count = ATL03[beam+'/heights/ph_id_count'][:] + along_track_distance = ATL03[beam + "/heights/dist_ph_along"][:] + across_track_distance = ATL03[beam + "/heights/dist_ph_across"][:] + # dem_h = ATL03[beam+'/geophys_corr/dem_h'][:] + # delta_time_dem_h = ATL03[beam+'/geophys_corr/delta_time'][:] + segment_dist_x = ATL03[beam + "/geolocation/segment_dist_x"][:] + segment_length = ATL03[beam + "/geolocation/segment_length"][:] + segment_id = ATL03[beam + "/geolocation/segment_id"][:] + + delta_time_geolocation = ATL03[beam + "/geolocation/delta_time"][:] + reference_photon_index = ATL03[beam + "/geolocation/reference_photon_index"][:] + ph_index_beg = ATL03[beam + "/geolocation/ph_index_beg"][:] + + ph_id_count = ATL03[beam + "/heights/ph_id_count"][:] # Nathan says it's the number of seconds since the GPS epoch on midnight Jan. 
6, 1980 - delta_time = ATL03[beam+'/heights/delta_time'][:] - #podppd_flag=ATL03[beam+'/geolocation/podppd_flag'][:] + delta_time = ATL03[beam + "/heights/delta_time"][:] + # podppd_flag=ATL03[beam+'/geolocation/podppd_flag'][:] # #Add this value to delta time parameters to compute full gps_seconds - atlas_epoch = ATL03['/ancillary_data/atlas_sdp_gps_epoch'][:] + atlas_epoch = ATL03["/ancillary_data/atlas_sdp_gps_epoch"][:] # Conversion of delta_time to a calendar date temp = cGPS.convert_GPS_time(atlas_epoch[0] + delta_time, OFFSET=0.0) # Express delta_time relative to start time of granule - #delta_time_granule=delta_time-delta_time[0] + # delta_time_granule=delta_time-delta_time[0] # year = temp['year'][:].astype('int') # month = temp['month'][:].astype('int') @@ -422,33 +476,41 @@ def getATL03_beam(fileT, numpy=False, beam='gt1l', maxElev=1e6): # Primary variables of interest # Photon height - heights=ATL03[beam+'/heights/h_ph'][:] - #print(heights.shape) + heights = ATL03[beam + "/heights/h_ph"][:] + # print(heights.shape) # Flag for signal confidence # column index: 0=Land; 1=Ocean; 2=SeaIce; 3=LandIce; 4=InlandWater # values: - #-- -1: Events not associated with a specific surface type - #-- 0: noise - #-- 1: buffer but algorithm classifies as background - #-- 2: low - #-- 3: medium - #-- 4: high - - mask_ocean = ATL03[beam+'/heights/signal_conf_ph'][:, 1] > 2 # ocean points medium or high quality - mask_seaice = ATL03[beam+'/heights/signal_conf_ph'][:, 2] > 2 # sea ice points medium or high quality - mask_total = (mask_seaice | mask_ocean) - - if sum(~mask_total) == (ATL03[beam+'/heights/signal_conf_ph'][:, 1]).size: - print('zero photons, lower photon quality to 2 or higher') - mask_ocean = ATL03[beam+'/heights/signal_conf_ph'][:, 1] > 1 # ocean points medium or high quality - mask_seaice = ATL03[beam+'/heights/signal_conf_ph'][:, 2] > 1 # sea ice points medium or high quality - mask_total = (mask_seaice | mask_ocean) - - signal_confidence = ATL03[beam+'/heights/signal_conf_ph'][:, 1:3].max(1) - #print(signal_confidence.shape) - - #return signal_confidence + # -- -1: Events not associated with a specific surface type + # -- 0: noise + # -- 1: buffer but algorithm classifies as background + # -- 2: low + # -- 3: medium + # -- 4: high + + mask_ocean = ( + ATL03[beam + "/heights/signal_conf_ph"][:, 1] > 2 + ) # ocean points medium or high quality + mask_seaice = ( + ATL03[beam + "/heights/signal_conf_ph"][:, 2] > 2 + ) # sea ice points medium or high quality + mask_total = mask_seaice | mask_ocean + + if sum(~mask_total) == (ATL03[beam + "/heights/signal_conf_ph"][:, 1]).size: + print("zero photons, lower photon quality to 2 or higher") + mask_ocean = ( + ATL03[beam + "/heights/signal_conf_ph"][:, 1] > 1 + ) # ocean points medium or high quality + mask_seaice = ( + ATL03[beam + "/heights/signal_conf_ph"][:, 2] > 1 + ) # sea ice points medium or high quality + mask_total = mask_seaice | mask_ocean + + signal_confidence = ATL03[beam + "/heights/signal_conf_ph"][:, 1:3].max(1) + # print(signal_confidence.shape) + + # return signal_confidence # Add photon rate and background rate to the reader here ATL03.close() @@ -458,26 +520,45 @@ def getATL03_beam(fileT, numpy=False, beam='gt1l', maxElev=1e6): return along_track_dist, elev else: - dF = pd.DataFrame({'heights':heights, 'lons':lons, 'lats':lats, 'signal_confidence':signal_confidence, 'mask_seaice':mask_seaice, - 'delta_time':delta_time, 'along_track_distance':along_track_distance, #'delta_time_granule':delta_time_granule, - 
'across_track_distance':across_track_distance,'ph_id_count':ph_id_count})#, - #'year':year, 'month':month, 'day':day, 'hour':hour,'minute':minute , 'second':second}) - - dF_seg = pd.DataFrame({'delta_time':delta_time_geolocation, 'segment_dist_x':segment_dist_x, 'segment_length':segment_length, 'segment_id':segment_id, - 'reference_photon_index':reference_photon_index, 'ph_index_beg':ph_index_beg}) + dF = pd.DataFrame( + { + "heights": heights, + "lons": lons, + "lats": lats, + "signal_confidence": signal_confidence, + "mask_seaice": mask_seaice, + "delta_time": delta_time, + "along_track_distance": along_track_distance, #'delta_time_granule':delta_time_granule, + "across_track_distance": across_track_distance, + "ph_id_count": ph_id_count, + } + ) # , + #'year':year, 'month':month, 'day':day, 'hour':hour,'minute':minute , 'second':second}) + + dF_seg = pd.DataFrame( + { + "delta_time": delta_time_geolocation, + "segment_dist_x": segment_dist_x, + "segment_length": segment_length, + "segment_id": segment_id, + "reference_photon_index": reference_photon_index, + "ph_index_beg": ph_index_beg, + } + ) # Filter out high elevation values - print('seg_dist shape ', segment_dist_x.shape) - print('df shape ',dF.shape) + print("seg_dist shape ", segment_dist_x.shape) + print("df shape ", dF.shape) dF = dF[mask_total] - #dF_seg = dF_seg[mask_total] - #print('df[mask] shape ',dF.shape) + # dF_seg = dF_seg[mask_total] + # print('df[mask] shape ',dF.shape) # Reset row indexing - #dF=dF#.reset_index(drop=True) + # dF=dF#.reset_index(drop=True) return dF, dF_seg -def getATL03_height_correction(fileT, beam='gt1r'): + +def getATL03_height_correction(fileT, beam="gt1r"): """ This method returns relevant data for wave estimates from ALT 07 tracks. returns: Pandas data frame @@ -486,25 +567,27 @@ def getATL03_height_correction(fileT, beam='gt1r'): import h5py import pandas as pd + # Open the file - ATL03 = h5py.File(fileT, 'r') + ATL03 = h5py.File(fileT, "r") ### bulk positions and statistics vars_bulk = [ - 'delta_time', # referenc time since equator crossing - 'dem_h', # best giod approxiamtion - ] + "delta_time", # referenc time since equator crossing + "dem_h", # best giod approxiamtion + ] - D_bulk= dict() + D_bulk = dict() for var in vars_bulk: - D_bulk[var] = ATL03[beam+'/geophys_corr/'+var][:] + D_bulk[var] = ATL03[beam + "/geophys_corr/" + var][:] dF_bulk = pd.DataFrame(D_bulk) ATL03.close() return dF_bulk -def getATL07_beam(fileT, beam='gt1r', maxElev=1e6): + +def getATL07_beam(fileT, beam="gt1r", maxElev=1e6): """ This method returns relevant data for wave estimates from ALT 07 tracks. returns: Pandas data frame @@ -513,81 +596,82 @@ def getATL07_beam(fileT, beam='gt1r', maxElev=1e6): import h5py import pandas as pd + # Open the file - ATL07 = h5py.File(fileT, 'r') + ATL07 = h5py.File(fileT, "r") ### bulk positions and statistics vars_bulk = [ - 'longitude', - 'latitude', - 'height_segment_id',# Height segment ID (10 km segments) - 'seg_dist_x' # Along track distance from the equator crossing to the segment center. - ] + "longitude", + "latitude", + "height_segment_id", # Height segment ID (10 km segments) + "seg_dist_x", # Along track distance from the equator crossing to the segment center. + ] - D_bulk= dict() + D_bulk = dict() for var in vars_bulk: - D_bulk[var] = ATL07[beam+'/sea_ice_segments/'+var] + D_bulk[var] = ATL07[beam + "/sea_ice_segments/" + var] dF_bulk = pd.DataFrame(D_bulk) # Nathan says it's the number of seconds since the GPS epoch on midnight Jan. 
6, 1980 - delta_time=ATL07[beam+'/sea_ice_segments/delta_time'][:] + delta_time = ATL07[beam + "/sea_ice_segments/delta_time"][:] # #Add this value to delta time parameters to compute full gps_seconds - atlas_epoch=ATL07['/ancillary_data/atlas_sdp_gps_epoch'][:] - dF_time = get_time_for_track(delta_time,atlas_epoch) - dF_time['delta_time'] = delta_time + atlas_epoch = ATL07["/ancillary_data/atlas_sdp_gps_epoch"][:] + dF_time = get_time_for_track(delta_time, atlas_epoch) + dF_time["delta_time"] = delta_time ### Primary variables of interest - vars = [ - 'across_track_distance', #Across track distance of photons averaged over the sea ice height segment. - 'height_segment_asr_calc', #Computed apparent surface reflectance for the sea ice segment. - 'height_segment_confidence',# # Height segment confidence flag - 'height_segment_fit_quality_flag', # Flag Values: ['-1', '1', '2', '3', '4', '5'] - #Flag Meanings: ['invalid', 'best', 'high', 'med', 'low', 'poor'] - 'height_segment_height', # Beam segment height - 'height_segment_length_seg', # Along track length of segment - 'height_segment_ssh_flag', #Flag for potential leads, 0=sea ice, 1 = sea surface - 'height_segment_surface_error_est', #Error estimate of the surface height - 'height_segment_type',# Flag Values: ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9'] - # Flag Meanings: ['cloud_covered', 'other', 'specular_lead_low_w_bkg', 'specular_lead_low', 'specular_lead_high_w_bkg', 'specular_lead_high', 'dark_lead_smooth_w_bkg', 'dark_lead_smooth' - 'height_segment_w_gaussian', # Width of Gaussian fit - 'height_segment_quality', # Height quality flag, 1 for good fit, 0 for bad - ] - #vars = ['beam_fb_height', 'beam_fb_sigma' , 'beam_fb_confidence' , 'beam_fb_quality_flag'] - - D_heights=dict() + "across_track_distance", # Across track distance of photons averaged over the sea ice height segment. + "height_segment_asr_calc", # Computed apparent surface reflectance for the sea ice segment. + "height_segment_confidence", # # Height segment confidence flag + "height_segment_fit_quality_flag", # Flag Values: ['-1', '1', '2', '3', '4', '5'] + # Flag Meanings: ['invalid', 'best', 'high', 'med', 'low', 'poor'] + "height_segment_height", # Beam segment height + "height_segment_length_seg", # Along track length of segment + "height_segment_ssh_flag", # Flag for potential leads, 0=sea ice, 1 = sea surface + "height_segment_surface_error_est", # Error estimate of the surface height + "height_segment_type", # Flag Values: ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9'] + # Flag Meanings: ['cloud_covered', 'other', 'specular_lead_low_w_bkg', 'specular_lead_low', 'specular_lead_high_w_bkg', 'specular_lead_high', 'dark_lead_smooth_w_bkg', 'dark_lead_smooth' + "height_segment_w_gaussian", # Width of Gaussian fit + "height_segment_quality", # Height quality flag, 1 for good fit, 0 for bad + ] + # vars = ['beam_fb_height', 'beam_fb_sigma' , 'beam_fb_confidence' , 'beam_fb_quality_flag'] + + D_heights = dict() for var in vars: - D_heights[var] = ATL07[beam+'/sea_ice_segments/heights/' +var][:] + D_heights[var] = ATL07[beam + "/sea_ice_segments/heights/" + var][:] dF_heights = pd.DataFrame(D_heights) - vars_env = { - 'mss':'geophysical/height_segment_mss', # Mean sea surface height above WGS-84 reference ellipsoid (range: -105 to 87m), based on the DTU13 model. 
- 't2m':'geophysical/height_segment_t2m',#Temperature at 2m above the displacement height (K) - 'u2m':'geophysical/height_segment_u2m',#Eastward wind at 2m above the displacement height (m/s-1) - 'v2m':'geophysical/height_segment_v2m',#Northward wind at 2m above the displacement height (m/s-1) - 'n_photons_actual':'stats/n_photons_actual', # Number of photons gathered - 'photon_rate':'stats/photon_rate', #photon_rate + "mss": "geophysical/height_segment_mss", # Mean sea surface height above WGS-84 reference ellipsoid (range: -105 to 87m), based on the DTU13 model. + "t2m": "geophysical/height_segment_t2m", # Temperature at 2m above the displacement height (K) + "u2m": "geophysical/height_segment_u2m", # Eastward wind at 2m above the displacement height (m/s-1) + "v2m": "geophysical/height_segment_v2m", # Northward wind at 2m above the displacement height (m/s-1) + "n_photons_actual": "stats/n_photons_actual", # Number of photons gathered + "photon_rate": "stats/photon_rate", # photon_rate } - D_env=dict() - for var,I in vars_env.items(): - D_env[ var] = ATL07[beam+'/sea_ice_segments/' +I][:] + D_env = dict() + for var, I in vars_env.items(): + D_env[var] = ATL07[beam + "/sea_ice_segments/" + I][:] dF_env = pd.DataFrame(D_env) - - #Df = pd.concat({k: pd.DataFrame(v).T for k, v in data.items()}, axis=0) - DF = pd.concat({ 'time': dF_time, 'ref': dF_bulk, 'heights': dF_heights, 'env': dF_env }, axis=1) + # Df = pd.concat({k: pd.DataFrame(v).T for k, v in data.items()}, axis=0) + DF = pd.concat( + {"time": dF_time, "ref": dF_bulk, "heights": dF_heights, "env": dF_env}, axis=1 + ) ATL07.close() # Filter out high elevation values - DF = DF[(DF['heights']['height_segment_height'] 0) | np.isnan(G_beam.ice) - - lats = list(ice_mask.latitude.data) - lats.sort(reverse=True) - - # find 1st latitude that is completely full with sea ice. 
- ice_lat_pos = next( - ( - i - for i, j in enumerate( - (ice_mask.sum("longitude") == ice_mask.longitude.size).sel( - latitude=lats - ) - ) - if j - ), - None, - ) - # recreate lat mask based on this criteria - lat_mask = lats < lats[ice_lat_pos] - lat_mask = xr.DataArray( - lat_mask.repeat(ice_mask.longitude.size).reshape(ice_mask.shape), - dims=ice_mask.dims, - coords=ice_mask.coords, + # DEFINE SEARCH REGION AND SIZE OF BOXES FOR AVERAGES + dlon_deg = 1 # degree range aroud 1st point + dlat_deg = 30, 5 # degree range aroud 1st point + dlat_deg_prior = 2, 1 # degree range aroud 1st point + + dtime = 4 # in hours + + lon_range = G1["lons"].min() - dlon_deg, G1["lons"].max() + dlon_deg + lat_range = np.sign(G1["lats"].min()) * 78, G1["lats"].max() + dlat_deg[1] + lat_range_prior = ( + G1["lats"].min() - dlat_deg_prior[0], + G1["lats"].max() + dlat_deg_prior[1], ) - lat_mask["latitude"] = lats - - # combine ice mask and new lat mask - ice_mask = ice_mask + lat_mask else: - ice_mask = np.isnan(G_beam.ice) - lats = ice_mask.latitude - - # find closed latituyde with with non-nan data - ice_lat_pos = ( - abs( - lats.where(ice_mask.sum("longitude") > 4, np.nan) - - np.array(lat_range).mean() - ) - .argmin() - .data + # DEFINE SEARCH REGION AND SIZE OF BOXES FOR AVERAGES + dlon_deg = 2 # lon degree range aroud 1st point + dlat_deg = 20, 20 # lat degree range aroud 1st point + dlat_deg_prior = 2, 1 # degree range aroud 1st point + + dtime = 4 # in hours + + lon_range = G1["lons"].min() - dlon_deg, G1["lons"].max() + dlon_deg + lat_range = G1["lats"].min() - dlat_deg[0], G1["lats"].max() + dlat_deg[1] + lat_range_prior = ( + G1["lats"].min() - dlat_deg_prior[0], + G1["lats"].max() + dlat_deg_prior[1], ) - # redefine lat-range - lat_range = lats[ice_lat_pos].data - 2, lats[ice_lat_pos].data + 2 - lat_flag2 = (lat_range[0] < lats.data) & (lats.data < lat_range[1]) - - lat_mask = xr.DataArray( - lat_flag2.repeat(ice_mask.longitude.size).reshape(ice_mask.shape), - dims=ice_mask.dims, - coords=ice_mask.coords, - ) - lat_mask["latitude"] = lats - - # plot 1st figure - def draw_range(lon_range, lat_range, *args, **kargs): - plt.plot( - [lon_range[0], lon_range[1], lon_range[1], lon_range[0], lon_range[0]], - [lat_range[0], lat_range[0], lat_range[1], lat_range[1], lat_range[0]], - *args, - **kargs, - ) - - dir_clev = np.arange(0, 380, 20) - f_clev = np.arange(1 / 40, 1 / 5, 0.01) - fvar = ["ice", "dir", "dp", "spr", "fp", "hs"] - fcmap = [ - plt.cm.Blues_r, - color_schemes.circle_medium_triple, - color_schemes.circle_medium_triple, - plt.cm.Blues, - plt.cm.Blues, - plt.cm.Blues, - ] - fpos = [0, 1, 2, 3, 4, 5] - clevs = [ - np.arange(0, 1, 0.2), - dir_clev, - dir_clev, - np.arange(0, 90, 10), - f_clev, - np.arange(0.5, 9, 0.5), + timestamp = pd.to_datetime(ID["pars"]["start"]["delta_time"], unit="s") + time_range = np.datetime64(timestamp) - np.timedelta64(dtime, "h"), np.datetime64( + timestamp + ) + np.timedelta64(dtime, "h") + + ## load WW3 data + # ECMWF hindcast + # data_url = 'https://tds3.ifremer.fr/thredds/IOWAGA-WW3-HINDCAST/IOWAGA-GLOBAL_ECMWF-WW3-HINDCAST_FULL_TIME_SERIE.xml' + + # CFSR hindcast + # data_url = 'https://tds3.ifremer.fr/thredds/IOWAGA-WW3-HINDCAST/IOWAGA-GLOBAL_CFSR-WW3-HINDCAST_FULL_TIME_SERIE.xml' + + # ECMWF forecast + data_url = "https://tds3.ifremer.fr/thredds/IOWAGA-WW3-FORECAST/IOWAGA-WW3-FORECAST_GLOBMULTI_GLOB-30M.xml" + + cat = TDSCatalog(data_url) + + ncss = cat.datasets[ + "IOWAGA-WW3-FORECAST_GLOBMULTI_GLOB-30M_FIELD_NC_MARC_WW3-GLOB-30M" + 
].remote_access(use_xarray=True) + + var_list = [ + "dir", + "dp", + "fp", + "hs", + "ice", + "spr", + "t01", + "t02", + "plp0", + "pdir0", + "pdir1", + "pdir2", + "pdir3", + "pdir4", + "pdir5", + "pspr0", + "pspr1", + "pspr2", + "pspr3", + "pspr4", + "pspr5", + "ptp0", + "ptp1", + "ptp2", + "ptp3", + "ptp4", + "ptp5", + "phs0", + "phs1", + "phs2", + "phs3", + "phs4", + "phs5", ] - font_for_print() - - F = M.figure_axis_xy(4, 3.5, view_scale=0.9, container=True) - plt.suptitle( - track_name_short + " | " + file_name_base[0:-1].replace("_", " "), y=1.3 + # chunk data + IOWAGA = ncss[var_list] + IOWAGA["time"] = np.array([np.datetime64(k0) for k0 in IOWAGA.time.data]).astype( + "M8[h]" + ) + IOWAGA = IOWAGA.rename( + name_dict={ + "pdir0": "pdp0", + "pdir1": "pdp1", + "pdir2": "pdp2", + "pdir3": "pdp3", + "pdir4": "pdp4", + "pdir5": "pdp5", + } ) - lon, lat = G_beam.longitude, G_beam.latitude - - gs = GridSpec(9, 6, wspace=0.1, hspace=0.4) - - for fv, fp, fc, cl in zip(fvar, fpos, fcmap, clevs): - ax1 = F.fig.add_subplot(gs[0:7, fp]) - if fp == 0: - ax1.spines["bottom"].set_visible(False) - ax1.spines["left"].set_visible(False) - ax1.tick_params(labelbottom=True, bottom=True) - - else: - ax1.axis("off") - plt.plot(G1["lons"], G1["lats"], ".r", markersize=5) - draw_range(lon_range, lat_range_prior, c="red", linewidth=1, zorder=12) - draw_range(lon_range, lat_range, c="blue", linewidth=0.7, zorder=10) + def sel_data(Ibeam, lon_range, lat_range, timestamp=None): + """ + this method returns the selected data in the lon-lat box at an interpolated timestamp + """ + # TODO: refactor to avoid code duplication + lon_flag = (lon_range[0] < Ibeam.longitude.data) & ( + Ibeam.longitude.data < lon_range[1] + ) + lat_flag = (lat_range[0] < Ibeam.latitude.data) & ( + Ibeam.latitude.data < lat_range[1] + ) + time_flag = (time_range[0] < Ibeam.time.data) & ( + Ibeam.time.data < time_range[1] + ) - if fv != "ice": - cm = plt.pcolor(lon, lat, G_beam[fv], vmin=cl[0], vmax=cl[-1], cmap=fc) - if G_beam.ice.shape[0] > 1: - plt.contour(lon, lat, G_beam.ice, colors="black", linewidths=0.6) + if timestamp is None: + Ibeam = Ibeam.isel(latitude=lat_flag, longitude=lon_flag) else: - cm = plt.pcolor(lon, lat, G_beam[fv], vmin=cl[0], vmax=cl[-1], cmap=fc) - - plt.title(G_beam[fv].long_name.replace(" ", "\n") + "\n" + fv, loc="left") - ax1.axis("equal") - - ax2 = F.fig.add_subplot(gs[-1, fp]) - cbar = plt.colorbar(cm, cax=ax2, orientation="horizontal", aspect=1, fraction=1) - cl_ticks = np.linspace(cl[0], cl[-1], 3) - - cbar.set_ticks(np.round(cl_ticks, 3)) - cbar.set_ticklabels(np.round(cl_ticks, 2)) - - F.save_pup(path=plot_path, name=plot_name + "_hindcast_data") - - G_beam_masked = G_beam.where(~ice_mask, np.nan) - ice_mask_prior = ice_mask.sel(latitude=G_prior.latitude) - G_prior_masked = G_prior.where(~ice_mask_prior, np.nan) - - def test_nan_frac(imask): - "test if False is less then 0.3" - return ((~imask).sum() / imask.size).data < 0.3 + Ibeam = ( + Ibeam.isel(latitude=lat_flag, longitude=lon_flag, time=time_flag) + .sortby("time") + .interp(time=np.datetime64(timestamp)) + ) + return Ibeam - while test_nan_frac(ice_mask_prior): - print(lat_range_prior) - lat_range_prior = lat_range_prior[0] + 0.5, lat_range_prior[1] + 0.5 + try: + G_beam = sel_data(IOWAGA, lon_range, lat_range, timestamp).load() G_prior = sel_data(G_beam, lon_range, lat_range_prior) - ice_mask_prior = ice_mask.sel(latitude=G_prior.latitude) - G_prior_masked = G_prior.where(~ice_mask_prior, np.nan) - - ### make pandas table with obs track end 
postitions - - key_list = list(G_prior_masked.keys()) - # define directional and amplitude pairs - # pack as (amp, angle) - key_list_pairs = { - "mean": ("hs", "dir"), - "peak": ("hs", "dp"), - "partion0": ("phs0", "pdp0"), - "partion1": ("phs1", "pdp1"), - "partion2": ("phs2", "pdp2"), - "partion3": ("phs3", "pdp3"), - "partion4": ("phs4", "pdp4"), - } + if hemis == "SH": + # create Ice mask + ice_mask = (G_beam.ice > 0) | np.isnan(G_beam.ice) + + lats = list(ice_mask.latitude.data) + lats.sort(reverse=True) + + # find 1st latitude that is completely full with sea ice. + ice_lat_pos = next( + ( + i + for i, j in enumerate( + (ice_mask.sum("longitude") == ice_mask.longitude.size).sel( + latitude=lats + ) + ) + if j + ), + None, + ) + # recreate lat mask based on this criteria + lat_mask = lats < lats[ice_lat_pos] + lat_mask = xr.DataArray( + lat_mask.repeat(ice_mask.longitude.size).reshape(ice_mask.shape), + dims=ice_mask.dims, + coords=ice_mask.coords, + ) + lat_mask["latitude"] = lats - key_list_pairs2 = list() - for k in key_list_pairs.values(): - key_list_pairs2.append(k[0]) - key_list_pairs2.append(k[1]) + # combine ice mask and new lat mask + ice_mask = ice_mask + lat_mask + + else: + ice_mask = np.isnan(G_beam.ice) + lats = ice_mask.latitude + + # find closed latituyde with with non-nan data + ice_lat_pos = ( + abs( + lats.where(ice_mask.sum("longitude") > 4, np.nan) + - np.array(lat_range).mean() + ) + .argmin() + .data + ) - key_list_scaler = set(key_list) - set(key_list_pairs2) + # redefine lat-range + lat_range = lats[ice_lat_pos].data - 2, lats[ice_lat_pos].data + 2 + lat_flag2 = (lat_range[0] < lats.data) & (lats.data < lat_range[1]) - ### derive angle avearge - Tend = pd.DataFrame(index=key_list, columns=["mean", "std", "name"]) + lat_mask = xr.DataArray( + lat_flag2.repeat(ice_mask.longitude.size).reshape(ice_mask.shape), + dims=ice_mask.dims, + coords=ice_mask.coords, + ) + lat_mask["latitude"] = lats + + # plot 1st figure + def draw_range(lon_range, lat_range, *args, **kargs): + plt.plot( + [lon_range[0], lon_range[1], lon_range[1], lon_range[0], lon_range[0]], + [lat_range[0], lat_range[0], lat_range[1], lat_range[1], lat_range[0]], + *args, + **kargs, + ) - for k, pair in key_list_pairs.items(): - ave_amp, ave_deg, std_amp, std_deg = waves.get_ave_amp_angle( - G_prior_masked[pair[0]].data, G_prior_masked[pair[1]].data - ) - Tend.loc[pair[0]] = ave_amp, std_amp, G_prior_masked[pair[0]].long_name - Tend.loc[pair[1]] = ave_deg, std_deg, G_prior_masked[pair[1]].long_name - - for k in key_list_scaler: - Tend.loc[k] = ( - G_prior_masked[k].mean().data, - G_prior_masked[k].std().data, - G_prior_masked[k].long_name, + dir_clev = np.arange(0, 380, 20) + f_clev = np.arange(1 / 40, 1 / 5, 0.01) + fvar = ["ice", "dir", "dp", "spr", "fp", "hs"] + fcmap = [ + plt.cm.Blues_r, + color_schemes.circle_medium_triple, + color_schemes.circle_medium_triple, + plt.cm.Blues, + plt.cm.Blues, + plt.cm.Blues, + ] + fpos = [0, 1, 2, 3, 4, 5] + clevs = [ + np.arange(0, 1, 0.2), + dir_clev, + dir_clev, + np.arange(0, 90, 10), + f_clev, + np.arange(0.5, 9, 0.5), + ] + + font_for_print() + + F = M.figure_axis_xy(4, 3.5, view_scale=0.9, container=True) + plt.suptitle( + track_name_short + " | " + file_name_base[0:-1].replace("_", " "), y=1.3 ) + lon, lat = G_beam.longitude, G_beam.latitude - Tend = Tend.T - Tend["lon"] = [ - ice_mask_prior.longitude.mean().data, - ice_mask_prior.longitude.std().data, - "lontigude", - ] - Tend["lat"] = [ - ice_mask_prior.latitude[ice_mask_prior.sum("longitude") == 
0].mean().data, - ice_mask_prior.latitude[ice_mask_prior.sum("longitude") == 0].std().data, - "latitude", - ] - Tend = Tend.T + gs = GridSpec(9, 6, wspace=0.1, hspace=0.4) - Prior = dict() - Prior["incident_angle"] = { - "value": Tend["mean"]["dp"].astype("float"), - "name": Tend["name"]["dp"], - } - Prior["spread"] = { - "value": Tend["mean"]["spr"].astype("float"), - "name": Tend["name"]["spr"], - } - Prior["Hs"] = { - "value": Tend["mean"]["hs"].astype("float"), - "name": Tend["name"]["hs"], - } - Prior["peak_period"] = { - "value": 1 / Tend["mean"]["fp"].astype("float"), - "name": "1/" + Tend["name"]["fp"], - } + for fv, fp, fc, cl in zip(fvar, fpos, fcmap, clevs): + ax1 = F.fig.add_subplot(gs[0:7, fp]) + if fp == 0: + ax1.spines["bottom"].set_visible(False) + ax1.spines["left"].set_visible(False) + ax1.tick_params(labelbottom=True, bottom=True) - Prior["center_lon"] = { - "value": Tend["mean"]["lon"].astype("float"), - "name": Tend["name"]["lon"], - } - Prior["center_lat"] = { - "value": Tend["mean"]["lat"].astype("float"), - "name": Tend["name"]["lat"], - } + else: + ax1.axis("off") - target_name = "A02_" + track_name + "_hindcast_success" - - MT.save_pandas_table({"priors_hindcast": Tend}, save_name, save_path) -except: - target_name = "A02_" + track_name + "_hindcast_fail" - -def plot_prior(Prior, axx): - angle = Prior["incident_angle"][ - "value" - ] # incident direction in degrees from North clockwise (Meerological convention) - # use - angle_plot = -angle - 90 - axx.quiver( - Prior["center_lon"]["value"], - Prior["center_lat"]["value"], - -np.cos(angle_plot * np.pi / 180), - -np.sin(angle_plot * np.pi / 180), - scale=4.5, - zorder=12, - width=0.1, - headlength=4.5, - minshaft=2, - alpha=0.6, - color="black", - ) - axx.plot( - Prior["center_lon"]["value"], - Prior["center_lat"]["value"], - ".", - markersize=6, - zorder=12, - alpha=1, - color="black", - ) - tstring = ( - " " - + str(np.round(Prior["peak_period"]["value"], 1)) - + "sec \n " - + str(np.round(Prior["Hs"]["value"], 1)) - + "m\n " - + str(np.round(angle, 1)) - + "deg" - ) - plt.text(lon_range[1], Prior["center_lat"]["value"], tstring) + plt.plot(G1["lons"], G1["lats"], ".r", markersize=5) + draw_range(lon_range, lat_range_prior, c="red", linewidth=1, zorder=12) + draw_range(lon_range, lat_range, c="blue", linewidth=0.7, zorder=10) + if fv != "ice": + cm = plt.pcolor(lon, lat, G_beam[fv], vmin=cl[0], vmax=cl[-1], cmap=fc) + if G_beam.ice.shape[0] > 1: + plt.contour(lon, lat, G_beam.ice, colors="black", linewidths=0.6) + else: + cm = plt.pcolor(lon, lat, G_beam[fv], vmin=cl[0], vmax=cl[-1], cmap=fc) -try: - # plot 2nd figure + plt.title(G_beam[fv].long_name.replace(" ", "\n") + "\n" + fv, loc="left") + ax1.axis("equal") - font_for_print() - F = M.figure_axis_xy(2, 4.5, view_scale=0.9, container=False) + ax2 = F.fig.add_subplot(gs[-1, fp]) + cbar = plt.colorbar( + cm, cax=ax2, orientation="horizontal", aspect=1, fraction=1 + ) + cl_ticks = np.linspace(cl[0], cl[-1], 3) - ax1 = F.ax - lon, lat = G_beam.longitude, G_beam.latitude - ax1.spines["bottom"].set_visible(False) - ax1.spines["left"].set_visible(False) - ax1.tick_params(labelbottom=True, bottom=True) + cbar.set_ticks(np.round(cl_ticks, 3)) + cbar.set_ticklabels(np.round(cl_ticks, 2)) - plot_prior(Prior, ax1) + F.save_pup(path=plot_path, name=plot_name + "_hindcast_data") - str_list = list() - for i in np.arange(0, 6): - str_list.append( - " " - + str(np.round(Tend.loc["ptp" + str(i)]["mean"], 1)) - + "sec\n " - + str(np.round(Tend.loc["phs" + str(i)]["mean"], 1)) - 
+ "m " - + str(np.round(Tend.loc["pdp" + str(i)]["mean"], 1)) - + "d" - ) - - plt.text(lon_range[1], lat_range[0], "\n ".join(str_list)) + ice_mask_prior = ice_mask.sel(latitude=G_prior.latitude) + G_prior_masked = G_prior.where(~ice_mask_prior, np.nan) + + def test_nan_frac(imask): + "test if False is less then 0.3" + return ((~imask).sum() / imask.size).data < 0.3 + + while test_nan_frac(ice_mask_prior): + print(lat_range_prior) + lat_range_prior = lat_range_prior[0] + 0.5, lat_range_prior[1] + 0.5 + G_prior = sel_data(G_beam, lon_range, lat_range_prior) + ice_mask_prior = ice_mask.sel(latitude=G_prior.latitude) + + G_prior_masked = G_prior.where(~ice_mask_prior, np.nan) + + ### make pandas table with obs track end postitions + + key_list = list(G_prior_masked.keys()) + # define directional and amplitude pairs + # pack as (amp, angle) + key_list_pairs = { + "mean": ("hs", "dir"), + "peak": ("hs", "dp"), + "partion0": ("phs0", "pdp0"), + "partion1": ("phs1", "pdp1"), + "partion2": ("phs2", "pdp2"), + "partion3": ("phs3", "pdp3"), + "partion4": ("phs4", "pdp4"), + } + + key_list_pairs2 = list() + for k in key_list_pairs.values(): + key_list_pairs2.append(k[0]) + key_list_pairs2.append(k[1]) + + key_list_scaler = set(key_list) - set(key_list_pairs2) + + ### derive angle avearge + Tend = pd.DataFrame(index=key_list, columns=["mean", "std", "name"]) + + for k, pair in key_list_pairs.items(): + ave_amp, ave_deg, std_amp, std_deg = waves.get_ave_amp_angle( + G_prior_masked[pair[0]].data, G_prior_masked[pair[1]].data + ) + Tend.loc[pair[0]] = ave_amp, std_amp, G_prior_masked[pair[0]].long_name + Tend.loc[pair[1]] = ave_deg, std_deg, G_prior_masked[pair[1]].long_name + + for k in key_list_scaler: + Tend.loc[k] = ( + G_prior_masked[k].mean().data, + G_prior_masked[k].std().data, + G_prior_masked[k].long_name, + ) - for vv in zip( - ["pdp0", "pdp1", "pdp2", "pdp3", "pdp4", "pdp5"], - ["phs0", "phs1", "phs3", "phs4", "phs5"], - ): - angle_plot = -Tend.loc[vv[0]]["mean"] - 90 - vsize = (1 / Tend.loc[vv[1]]["mean"]) ** (1 / 2) * 5 - print(vsize) - ax1.quiver( + Tend = Tend.T + Tend["lon"] = [ + ice_mask_prior.longitude.mean().data, + ice_mask_prior.longitude.std().data, + "lontigude", + ] + Tend["lat"] = [ + ice_mask_prior.latitude[ice_mask_prior.sum("longitude") == 0].mean().data, + ice_mask_prior.latitude[ice_mask_prior.sum("longitude") == 0].std().data, + "latitude", + ] + Tend = Tend.T + + Prior = dict() + Prior["incident_angle"] = { + "value": Tend["mean"]["dp"].astype("float"), + "name": Tend["name"]["dp"], + } + Prior["spread"] = { + "value": Tend["mean"]["spr"].astype("float"), + "name": Tend["name"]["spr"], + } + Prior["Hs"] = { + "value": Tend["mean"]["hs"].astype("float"), + "name": Tend["name"]["hs"], + } + Prior["peak_period"] = { + "value": 1 / Tend["mean"]["fp"].astype("float"), + "name": "1/" + Tend["name"]["fp"], + } + + Prior["center_lon"] = { + "value": Tend["mean"]["lon"].astype("float"), + "name": Tend["name"]["lon"], + } + Prior["center_lat"] = { + "value": Tend["mean"]["lat"].astype("float"), + "name": Tend["name"]["lat"], + } + + target_name = "A02_" + track_name + "_hindcast_success" + MT.save_pandas_table({"priors_hindcast": Tend}, save_name, save_path) + except Exception: + target_name = "A02_" + track_name + "_hindcast_fail" + + def plot_prior(Prior, axx): + angle = Prior["incident_angle"][ + "value" + ] # incident direction in degrees from North clockwise (Meerological convention) + # use + angle_plot = -angle - 90 + axx.quiver( Prior["center_lon"]["value"], 
Prior["center_lat"]["value"], -np.cos(angle_plot * np.pi / 180), -np.sin(angle_plot * np.pi / 180), - scale=vsize, - zorder=5, + scale=4.5, + zorder=12, width=0.1, headlength=4.5, - minshaft=4, + minshaft=2, alpha=0.6, - color="green", + color="black", + ) + axx.plot( + Prior["center_lon"]["value"], + Prior["center_lat"]["value"], + ".", + markersize=6, + zorder=12, + alpha=1, + color="black", ) + tstring = ( + " " + + str(np.round(Prior["peak_period"]["value"], 1)) + + "sec \n " + + str(np.round(Prior["Hs"]["value"], 1)) + + "m\n " + + str(np.round(angle, 1)) + + "deg" + ) + plt.text(lon_range[1], Prior["center_lat"]["value"], tstring) + + try: + # plot 2nd figure + + font_for_print() + F = M.figure_axis_xy(2, 4.5, view_scale=0.9, container=False) + + ax1 = F.ax + lon, lat = G_beam.longitude, G_beam.latitude + ax1.spines["bottom"].set_visible(False) + ax1.spines["left"].set_visible(False) + ax1.tick_params(labelbottom=True, bottom=True) + + plot_prior(Prior, ax1) + + str_list = list() + for i in np.arange(0, 6): + str_list.append( + " " + + str(np.round(Tend.loc["ptp" + str(i)]["mean"], 1)) + + "sec\n " + + str(np.round(Tend.loc["phs" + str(i)]["mean"], 1)) + + "m " + + str(np.round(Tend.loc["pdp" + str(i)]["mean"], 1)) + + "d" + ) - plt.plot(G1["lons"], G1["lats"], ".r", markersize=5) - draw_range(lon_range, lat_range_prior, c="red", linewidth=1, zorder=11) - draw_range(lon_range, lat_range, c="blue", linewidth=0.7, zorder=10) + plt.text(lon_range[1], lat_range[0], "\n ".join(str_list)) + + for vv in zip( + ["pdp0", "pdp1", "pdp2", "pdp3", "pdp4", "pdp5"], + ["phs0", "phs1", "phs3", "phs4", "phs5"], + ): + angle_plot = -Tend.loc[vv[0]]["mean"] - 90 + vsize = (1 / Tend.loc[vv[1]]["mean"]) ** (1 / 2) * 5 + ax1.quiver( + Prior["center_lon"]["value"], + Prior["center_lat"]["value"], + -np.cos(angle_plot * np.pi / 180), + -np.sin(angle_plot * np.pi / 180), + scale=vsize, + zorder=5, + width=0.1, + headlength=4.5, + minshaft=4, + alpha=0.6, + color="green", + ) - fv = "ice" - if fv != "ice": - plt.pcolor(lon, lat, G_beam[fv].where(~ice_mask, np.nan), cmap=fc) - plt.contour(lon, lat, G_beam.ice, colors="black", linewidths=0.6) - else: - plt.pcolor(lon, lat, G_beam[fv], cmap=fc) - - plt.title( - "Prior\n" - + file_name_base[0:-1].replace("_", " ") - + "\n" - + track_name_short - + "\nIncident angle", - loc="left", + plt.plot(G1["lons"], G1["lats"], ".r", markersize=5) + draw_range(lon_range, lat_range_prior, c="red", linewidth=1, zorder=11) + draw_range(lon_range, lat_range, c="blue", linewidth=0.7, zorder=10) + + fv = "ice" + if fv != "ice": + plt.pcolor(lon, lat, G_beam[fv].where(~ice_mask, np.nan), cmap=fc) + plt.contour(lon, lat, G_beam.ice, colors="black", linewidths=0.6) + else: + plt.pcolor(lon, lat, G_beam[fv], cmap=fc) + + plt.title( + "Prior\n" + + file_name_base[0:-1].replace("_", " ") + + "\n" + + track_name_short + + "\nIncident angle", + loc="left", + ) + ax1.axis("equal") + + F.save_pup(path=plot_path, name=plot_name + "_hindcast_prior") + except Exception as e: + print(e) + echo("print 2nd figure failed", "red") + + MT.json_save( + target_name, save_path, str(datetime.datetime.now().strftime("%Y-%m-%d %H:%M")) ) - ax1.axis("equal") - F.save_pup(path=plot_path, name=plot_name + "_hindcast_prior") -except Exception as e: - print(e) - print("print 2nd figure failed") + echo("done") -MT.json_save( - target_name, save_path, str(datetime.datetime.now().strftime("%Y-%m-%d %H:%M")) -) -print("done") +step4app = makeapp(run_A02c_IOWAGA_thredds_prior, name="threads-prior") + +if __name__ 
== "__main__": + step4app() diff --git a/src/icesat2_tracks/analysis_db/B01_SL_load_single_file.py b/src/icesat2_tracks/analysis_db/B01_SL_load_single_file.py index b558fd3f..e5929183 100644 --- a/src/icesat2_tracks/analysis_db/B01_SL_load_single_file.py +++ b/src/icesat2_tracks/analysis_db/B01_SL_load_single_file.py @@ -1,13 +1,16 @@ +#!/usr/bin/env python """ This file open a ICEsat2 tbeam_stats.pyrack applied filters and corections and returns smoothed photon heights on a regular grid in an .nc file. This is python 3.11 """ -import sys import datetime import copy +from pathlib import Path import xarray as xr -from sliderule import sliderule, icesat2 +from sliderule import icesat2 +import matplotlib +import typer from icesat2_tracks.config.IceSAT2_startup import ( mconfig, @@ -21,91 +24,18 @@ import icesat2_tracks.local_modules.m_tools_ph3 as MT from icesat2_tracks.local_modules import m_general_ph3 as M - -xr.set_options(display_style="text") - - -# Select region and retrive batch of tracks - -track_name, batch_key, ID_flag = io.init_from_input( - sys.argv -) # loads standard experiment - -plot_flag = True -hemis = batch_key.split("_")[0] - - -save_path = mconfig["paths"]["work"] + "/" + batch_key + "/B01_regrid/" -MT.mkdirs_r(save_path) - -save_path_json = mconfig["paths"]["work"] + "/" + batch_key + "/A01b_ID/" -MT.mkdirs_r(save_path_json) - -ATL03_track_name = "ATL03_" + track_name + ".h5" - -# Configure SL Session -icesat2.init("slideruleearth.io") - - -# plot the ground tracks in geographic location -# Generate ATL06-type segments using the ATL03-native photon classification -# Use the ocean classification for photons with a confidence parmeter to 2 or higher (low confidence or better) - -params = { - "srt": 1, # Ocean classification - "len": 25, # 10-meter segments - "ats": 3, # require that each segment contain photons separated by at least 5 m - "res": 10, # return one photon every 10 m - "dist_in_seg": False, # if False units of len and res are in meters - "track": 0, # return all ground tracks - "pass_invalid": False, - "cnt": 20, - "sigma_r_max": 4, # maximum standard deviation of photon in extend - "cnf": 2, # require classification confidence of 2 or more - "atl03_geo_fields": ["dem_h"], -} - - -# YAPC alternatibe -params_yapc = { - "srt": 1, - "len": 20, - "ats": 3, - "res": 10, - "dist_in_seg": False, # if False units of len and res are in meters - "track": 0, - "pass_invalid": False, - "cnf": 2, - "cnt": 20, - "sigma_r_max": 4, # maximum standard deviation of photon in extend - "maxi": 10, - "yapc": dict( - knn=0, win_h=6, win_x=11, min_ph=4, score=100 - ), # use the YAPC photon classifier; these are the recommended parameters, but the results might be more specific with a smaller win_h value, or a higher score cutoff - # "yapc": dict(knn=0, win_h=3, win_x=11, min_ph=4, score=50), # use the YAPC photon classifier; these are the recommended parameters, but the results might be more specific with a smaller win_h value, or a higher score cutoff - "atl03_geo_fields": ["dem_h"], -} - -maximum_height = 30 # (meters) maximum height past dem_h correction -print("STARTS") -gdf = icesat2.atl06p(params_yapc, resources=[ATL03_track_name]) -print("ENDS") -gdf = sct.correct_and_remove_height(gdf, maximum_height) - - -cdict = dict() -for s, b in zip(gdf["spot"].unique(), ["gt1l", "gt1r", "gt2l", "gt2r", "gt3l", "gt3r"]): - cdict[s] = color_schemes.rels[b] - - -font_for_pres() -F_atl06 = M.figure_axis_xy(6.5, 5, view_scale=0.6) -F_atl06.fig.suptitle(track_name) - 
-beam_stats.plot_ATL06_track_data(gdf, cdict) +from icesat2_tracks.clitools import ( + validate_track_name, + validate_batch_key, + validate_output_dir, + suppress_stdout, + update_paths_mconfig, + echo, + echoparam, + makeapp, +) -# main routine for defining the x coordinate and sacing table data def make_B01_dict(table_data, split_by_beam=True, to_hdf5=False): """ converts a GeoDataFrame from Sliderule to GeoDataFrames for each beam witht the correct columns and names @@ -150,97 +80,192 @@ def make_B01_dict(table_data, split_by_beam=True, to_hdf5=False): return table_data -# define reference point and then define 'x' -table_data = copy.copy(gdf) +def run_B01_SL_load_single_file( + track_name: str = typer.Option(..., callback=validate_track_name), + batch_key: str = typer.Option(..., callback=validate_batch_key), + ID_flag: bool = True, + plot_flag: bool = True, + output_dir: str = typer.Option(None, callback=validate_output_dir), +): + """ + Open an ICEsat2 tbeam_stats.pyrack, apply filters and corections, and output smoothed photon heights on a regular grid in an .nc file. + """ + # report input parameters + echo("** Input parameters:") + echoparam("track_name", track_name) + echoparam("batch_key", batch_key) + echoparam("ID_flag", ID_flag) + echoparam("plot_flag", plot_flag) + echoparam("output_dir", output_dir) + + xr.set_options(display_style="text") + matplotlib.use("Agg") # prevent plot windows from opening -# the reference point is defined as the most equatorward point of the polygon. -# It's distance from the equator is subtracted from the distance of each photon. -table_data = sct.define_x_coordinate_from_data(table_data) -table_time = table_data["time"] -table_data.drop(columns=["time"], inplace=True) + # Select region and retrive batch of tracks + with suppress_stdout(): + track_name, batch_key, ID_flag = io.init_from_input( + [ + None, + track_name, + batch_key, + ID_flag, + ] # init_from_input expects sys.argv with 4 elements + ) # loads standard experiment -# renames columns and splits beams -Ti = make_B01_dict(table_data, split_by_beam=True, to_hdf5=True) + hemis = batch_key.split("_")[0] -for kk in Ti.keys(): - Ti[kk]["dist"] = Ti[kk]["x"].copy() - Ti[kk]["heights_c_weighted_mean"] = Ti[kk]["h_mean"].copy() - Ti[kk]["heights_c_std"] = Ti[kk]["h_sigma"].copy() + workdir, plotsdir = update_paths_mconfig(output_dir, mconfig) -segment = track_name.split("_")[1][-2:] -ID_name = sct.create_ID_name(gdf.iloc[0], segment=segment) -print(ID_name) -io.write_track_to_HDF5(Ti, ID_name + "_B01_binned", save_path) # regridding heights + save_path = Path(workdir, batch_key, "B01_regrid") + save_path.mkdir(parents=True, exist_ok=True) -# plot the ground tracks in geographic location + save_path_json = Path(workdir, batch_key, "A01b_ID") + save_path_json.mkdir(parents=True, exist_ok=True) -all_beams = mconfig["beams"]["all_beams"] -high_beams = mconfig["beams"]["high_beams"] -low_beams = mconfig["beams"]["low_beams"] + ATL03_track_name = "ATL03_" + track_name + ".h5" -D = beam_stats.derive_beam_statistics(Ti, all_beams, Lmeter=12.5e3, dx=10) + # Configure SL Session + icesat2.init("slideruleearth.io") -# save figure from above: -plot_path = ( - mconfig["paths"]["plot"] + "/" + hemis + "/" + batch_key + "/" + ID_name + "/" -) -MT.mkdirs_r(plot_path) -F_atl06.save_light(path=plot_path, name="B01b_ATL06_corrected.png") -plt.close() + # plot the ground tracks in geographic location + # Generate ATL06-type segments using the ATL03-native photon classification + # Use the ocean classification 
for photons with a confidence parmeter to 2 or higher (low confidence or better) + + # YAPC alternatibe + params_yapc = { + "srt": 1, + "len": 20, + "ats": 3, + "res": 10, + "dist_in_seg": False, # if False units of len and res are in meters + "track": 0, + "pass_invalid": False, + "cnf": 2, + "cnt": 20, + "sigma_r_max": 4, # maximum standard deviation of photon in extend + "maxi": 10, + "yapc": dict( + knn=0, win_h=6, win_x=11, min_ph=4, score=100 + ), # use the YAPC photon classifier; these are the recommended parameters, but the results might be more specific with a smaller win_h value, or a higher score cutoff + # "yapc": dict(knn=0, win_h=3, win_x=11, min_ph=4, score=50), # use the YAPC photon classifier; these are the recommended parameters, but the results might be more specific with a smaller win_h value, or a higher score cutoff + "atl03_geo_fields": ["dem_h"], + } + + maximum_height = 30 # (meters) maximum height past dem_h correction + echo("STARTS", "white") + echo("Fetching ATL03 data from sliderule", "white") + gdf = icesat2.atl06p(params_yapc, resources=[ATL03_track_name]) + echo("ENDS", "white") + gdf = sct.correct_and_remove_height(gdf, maximum_height) + + cdict = dict() + for s, b in zip( + gdf["spot"].unique(), ["gt1l", "gt1r", "gt2l", "gt2r", "gt3l", "gt3r"] + ): + cdict[s] = color_schemes.rels[b] -if plot_flag: font_for_pres() - F = M.figure_axis_xy(8, 4.3, view_scale=0.6) - beam_stats.plot_beam_statistics( - D, - high_beams, - low_beams, - color_schemes.rels, - track_name=track_name - + "| ascending =" - + str(sct.ascending_test_distance(gdf)), - ) + F_atl06 = M.figure_axis_xy(6.5, 5, view_scale=0.6) + F_atl06.fig.suptitle(track_name) - F.save_light(path=plot_path, name="B01b_beam_statistics.png") - plt.close() + beam_stats.plot_ATL06_track_data(gdf, cdict) - # plot the ground tracks in geographic location - gdf[::100].plot(markersize=0.1, figsize=(4, 6)) - plt.title( - track_name + "\nascending =" + str(sct.ascending_test_distance(gdf)), loc="left" - ) - M.save_anyfig(plt.gcf(), path=plot_path, name="B01_track.png") - plt.close() + # main routine for defining the x coordinate and sacing table data + # define reference point and then define 'x' + table_data = copy.copy(gdf) -print("write A01b .json") -DD = {"case_ID": ID_name, "tracks": {}} + # the reference point is defined as the most equatorward point of the polygon. + # It's distance from the equator is subtracted from the distance of each photon. 
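The two comments below describe the x-coordinate convention: zero at the most equatorward point, everything else measured relative to it. A rough sketch of that idea on a toy table follows; the column names and helper function are illustrative only, and the actual logic lives in sct.define_x_coordinate_from_data:

import pandas as pd

def define_x_from_most_equatorward_point(df, dist_col="seg_dist", lat_col="lats"):
    """Zero the along-track coordinate at the row closest to the equator (toy version)."""
    ref = df[lat_col].abs().idxmin()                    # most equatorward segment
    return df.assign(x=df[dist_col] - df.loc[ref, dist_col])

toy = pd.DataFrame(
    {"lats": [-62.1, -62.0, -61.9, -61.8], "seg_dist": [1000.0, 1500.0, 2000.0, 2500.0]}
)
print(define_x_from_most_equatorward_point(toy)["x"].tolist())   # [-1500.0, -1000.0, -500.0, 0.0]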
+ table_data = sct.define_x_coordinate_from_data(table_data) + table_time = table_data["time"] + table_data.drop(columns=["time"], inplace=True) -DD["tracks"]["ATL03"] = "ATL10-" + track_name + # renames columns and splits beams + Ti = make_B01_dict(table_data, split_by_beam=True, to_hdf5=True) + for kk in Ti.keys(): + Ti[kk]["dist"] = Ti[kk]["x"].copy() + Ti[kk]["heights_c_weighted_mean"] = Ti[kk]["h_mean"].copy() + Ti[kk]["heights_c_std"] = Ti[kk]["h_sigma"].copy() + + segment = track_name.split("_")[1][-2:] + ID_name = sct.create_ID_name(gdf.iloc[0], segment=segment) + echoparam("ID_name", ID_name) + io.write_track_to_HDF5(Ti, ID_name + "_B01_binned", save_path) # regridding heights + + # plot the ground tracks in geographic location + + all_beams = mconfig["beams"]["all_beams"] + high_beams = mconfig["beams"]["high_beams"] + low_beams = mconfig["beams"]["low_beams"] + + D = beam_stats.derive_beam_statistics(Ti, all_beams, Lmeter=12.5e3, dx=10) + + # save figure from above: + plot_path = Path(plotsdir, hemis, batch_key, ID_name) + plot_path.mkdir(parents=True, exist_ok=True) + + F_atl06.save_light(path=plot_path, name="B01b_ATL06_corrected.png") + plt.close() + + if plot_flag: + font_for_pres() + F = M.figure_axis_xy(8, 4.3, view_scale=0.6) + beam_stats.plot_beam_statistics( + D, + high_beams, + low_beams, + color_schemes.rels, + track_name=track_name + + "| ascending =" + + str(sct.ascending_test_distance(gdf)), + ) + + F.save_light(path=plot_path, name="B01b_beam_statistics.png") + plt.close() + + # plot the ground tracks in geographic location + gdf[::100].plot(markersize=0.1, figsize=(4, 6)) + plt.title( + track_name + "\nascending =" + str(sct.ascending_test_distance(gdf)), + loc="left", + ) + M.save_anyfig(plt.gcf(), path=plot_path, name="B01_track.png") + plt.close() + + echo("write A01b .json") + DD = {"case_ID": ID_name, "tracks": {}} + + DD["tracks"]["ATL03"] = "ATL10-" + track_name + + start_pos = abs(table_data.lats).argmin() + end_pos = abs(table_data.lats).argmax() + + # add other pars: + DD["pars"] = { + "poleward": sct.ascending_test(gdf), + "region": "0", + "start": { + "longitude": table_data.lons[start_pos], + "latitude": table_data.lats[start_pos], + "seg_dist_x": float(table_data.x[start_pos]), + "delta_time": datetime.datetime.timestamp(table_time[start_pos]), + }, + "end": { + "longitude": table_data.lons[end_pos], + "latitude": table_data.lats[end_pos], + "seg_dist_x": float(table_data.x[end_pos]), + "delta_time": datetime.datetime.timestamp(table_time[end_pos]), + }, + } -start_pos = abs(table_data.lats).argmin() -end_pos = abs(table_data.lats).argmax() + MT.json_save2(name="A01b_ID_" + ID_name, path=save_path_json, data=DD) + echo("done") -# add other pars: -DD["pars"] = { - "poleward": sct.ascending_test(gdf), - "region": "0", - "start": { - "longitude": table_data.lons[start_pos], - "latitude": table_data.lats[start_pos], - "seg_dist_x": float(table_data.x[start_pos]), - "delta_time": datetime.datetime.timestamp(table_time[start_pos]), - }, - "end": { - "longitude": table_data.lons[end_pos], - "latitude": table_data.lats[end_pos], - "seg_dist_x": float(table_data.x[end_pos]), - "delta_time": datetime.datetime.timestamp(table_time[end_pos]), - }, -} -MT.json_save2(name="A01b_ID_" + ID_name, path=save_path_json, data=DD) +step1app = makeapp(run_B01_SL_load_single_file, name="load-file") -print("done") +if __name__ == "__main__": + step1app() diff --git a/src/icesat2_tracks/analysis_db/B02_make_spectra_gFT.py 
b/src/icesat2_tracks/analysis_db/B02_make_spectra_gFT.py index 00da0ff5..86021ab1 100644 --- a/src/icesat2_tracks/analysis_db/B02_make_spectra_gFT.py +++ b/src/icesat2_tracks/analysis_db/B02_make_spectra_gFT.py @@ -1,37 +1,49 @@ +#!/usr/bin/env python """ This file open a ICEsat2 track applied filters and corections and returns smoothed photon heights on a regular grid in an .nc file. This is python 3 """ -import sys - -import matplotlib.pyplot as plt -from icesat2_tracks.config.IceSAT2_startup import mconfig +import copy +import datetime +import h5py +import time +from pathlib import Path -from threadpoolctl import threadpool_info, threadpool_limits -from pprint import pprint import numpy as np import xarray as xr +from pprint import pprint +from scipy.ndimage import label +from threadpoolctl import threadpool_info, threadpool_limits +import matplotlib +import typer -import h5py +import icesat2_tracks.ICEsat2_SI_tools.generalized_FT as gFT import icesat2_tracks.ICEsat2_SI_tools.io as io import icesat2_tracks.ICEsat2_SI_tools.spectral_estimates as spec - -import time -import imp -import copy -import icesat2_tracks.ICEsat2_SI_tools.spicke_remover as spicke_remover -import datetime -import icesat2_tracks.ICEsat2_SI_tools.generalized_FT as gFT -from scipy.ndimage import label +import icesat2_tracks.local_modules.m_general_ph3 as M +import icesat2_tracks.local_modules.m_spectrum_ph3 as spicke_remover import icesat2_tracks.local_modules.m_tools_ph3 as MT -from icesat2_tracks.local_modules import m_general_ph3 as M - +from icesat2_tracks.config.IceSAT2_startup import mconfig, plt + +from icesat2_tracks.clitools import ( + echo, + validate_batch_key, + validate_output_dir, + suppress_stdout, + update_paths_mconfig, + report_input_parameters, + validate_track_name_steps_gt_1, + makeapp, +) import tracemalloc -def linear_gap_fill(F,key_lead, key_int): +matplotlib.use("Agg") # prevent plot windows from opening + + +def linear_gap_fill(F, key_lead, key_int): """ F pd.DataFrame key_lead key in F that determined the independent coordindate @@ -45,468 +57,508 @@ def linear_gap_fill(F,key_lead, key_int): return y_g -track_name, batch_key, test_flag = io.init_from_input( - sys.argv -) # loads standard experiment -hemis, batch = batch_key.split("_") -ATlevel = "ATL03" +def run_B02_make_spectra_gFT( + track_name: str = typer.Option(..., callback=validate_track_name_steps_gt_1), + batch_key: str = typer.Option(..., callback=validate_batch_key), + ID_flag: bool = True, + output_dir: str = typer.Option(None, callback=validate_output_dir), +): + """ + TODO: add docstring + """ + with suppress_stdout(): + track_name, batch_key, _ = io.init_from_input( + [ + None, + track_name, + batch_key, + ID_flag, + ] # init_from_input expects sys.argv with 4 elements + ) -load_path = mconfig["paths"]["work"] + "/" + batch_key + "/B01_regrid/" -load_file = load_path + "processed_" + ATlevel + "_" + track_name + ".h5" + kargs = { + "track_name": track_name, + "batch_key": batch_key, + "ID_flag": ID_flag, + "output_dir": output_dir, + } + report_input_parameters(**kargs) -save_path = mconfig["paths"]["work"] + "/" + batch_key + "/B02_spectra/" -save_name = "B02_" + track_name + hemis, batch = batch_key.split("_") -plot_path = ( - mconfig["paths"]["plot"] - + "/" - + hemis - + "/" - + batch_key - + "/" - + track_name - + "/B03_spectra/" -) -MT.mkdirs_r(plot_path) -MT.mkdirs_r(save_path) -bad_track_path = mconfig["paths"]["work"] + "bad_tracks/" + batch_key + "/" + workdir, plotsdir = update_paths_mconfig(output_dir, mconfig) 
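The pattern applied to B02 here is the same one used for B01 and A02c above: module-level script code becomes a function whose arguments are typer options, and the function is wrapped into a small CLI app. A standalone sketch using plain typer follows; the real scripts go through the project's makeapp and validate_* helpers instead, so treat this only as the shape of the refactor:

import typer

def run_step(
    track_name: str = typer.Option(...),
    batch_key: str = typer.Option(...),
    output_dir: str = typer.Option(None),
):
    # stand-in body; the real steps do their processing here
    typer.echo(f"processing {track_name} in batch {batch_key} -> {output_dir}")

app = typer.Typer(add_completion=False)
app.command()(run_step)

if __name__ == "__main__":
    app()   # e.g. python this_script.py --track-name ... --batch-key ...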
-all_beams = mconfig["beams"]["all_beams"] -high_beams = mconfig["beams"]["high_beams"] -low_beams = mconfig["beams"]["low_beams"] + load_path = Path(workdir, batch_key, "B01_regrid") -N_process = 4 -print("N_process=", N_process) + save_path = Path(workdir, batch_key, "B02_spectra") + save_name = f"B02_{track_name}" -Gd = h5py.File(load_path + "/" + track_name + "_B01_binned.h5", "r") + plot_path = Path(plotsdir, hemis, batch_key, track_name, "B_spectra") + plot_path.mkdir(parents=True, exist_ok=True) + save_path.mkdir(parents=True, exist_ok=True) + bad_track_path = Path(workdir, "bad_tracks", batch_key) -# test amount of nans in the data -nan_fraction = list() -for k in all_beams: - heights_c_std = io.get_beam_var_hdf_store(Gd[k], "x") - nan_fraction.append(np.sum(np.isnan(heights_c_std)) / heights_c_std.shape[0]) + all_beams = mconfig["beams"]["all_beams"] -del heights_c_std + N_process = 4 -# test if beam pairs have bad ratio -bad_ratio_flag = False -for group in mconfig["beams"]["groups"]: - Ia = Gd[group[0]] - Ib = Gd[group[1]] - ratio = Ia["x"][:].size / Ib["x"][:].size - if (ratio > 10) | (ratio < 0.1): - print("bad data ratio ", ratio, 1 / ratio) - bad_ratio_flag = True + # open with hdf5 + Gd = h5py.File(Path(load_path) / (track_name + "_B01_binned.h5"), "r") -if (np.array(nan_fraction).mean() > 0.95) | bad_ratio_flag: - print( - "nan fraction > 95%, or bad ratio of data, pass this track, add to bad tracks" - ) - MT.json_save( - track_name, - bad_track_path, - { - "nan_fraction": np.array(nan_fraction).mean(), - "date": str(datetime.date.today()), - }, - ) - print("exit.") - exit() + # test amount of nans in the data + nan_fraction = list() + for k in all_beams: + heights_c_std = io.get_beam_var_hdf_store(Gd[k], "dist") + nan_fraction.append(np.sum(np.isnan(heights_c_std)) / heights_c_std.shape[0]) -# test LS with an even grid where missing values are set to 0 -imp.reload(spec) -print(Gd.keys()) -Gi = Gd[list(Gd.keys())[0]] # to select a test beam -dist = io.get_beam_var_hdf_store(Gd[list(Gd.keys())[0]], "x") + del heights_c_std -# make dataframe form hdf5 -# derive spectal limits -# Longest deserved period: -T_max = 40 # sec -k_0 = (2 * np.pi / T_max) ** 2 / 9.81 -x = np.array(dist).squeeze() -dx = np.round(np.median(np.diff(x)), 1) -min_datapoint = 2 * np.pi / k_0 / dx + # test if beam pairs have bad ratio + bad_ratio_flag = False + for group in mconfig["beams"]["groups"]: + Ia = Gd[group[0]] + Ib = Gd[group[1]] + ratio = Ia["x"][:].size / Ib["x"][:].size + if (ratio > 10) | (ratio < 0.1): + # print("bad data ratio ", ratio, 1 / ratio) # TODO: add logger + bad_ratio_flag = True -Lpoints = int(np.round(min_datapoint) * 10) -Lmeters = Lpoints * dx + if (np.array(nan_fraction).mean() > 0.95) | bad_ratio_flag: + print( + "nan fraction > 95%, or bad ratio of data, pass this track, add to bad tracks" + ) + MT.json_save( + track_name, + bad_track_path, + { + "nan_fraction": np.array(nan_fraction).mean(), + "date": str(datetime.date.today()), + }, + ) + print("exit.") + exit() + # test LS with an even grid where missing values are set to 0 + print(Gd.keys()) + Gi = Gd[list(Gd.keys())[0]] # to select a test beam + dist = io.get_beam_var_hdf_store(Gd[list(Gd.keys())[0]], "dist") -print("L number of gridpoint:", Lpoints) -print("L length in km:", Lmeters / 1e3) -print("approx number windows", 2 * dist.iloc[-1] / Lmeters - 1) + # derive spectal limits + # Longest deserved period: + T_max = 40 # sec + k_0 = (2 * np.pi / T_max) ** 2 / 9.81 + x = np.array(dist).squeeze() + dx = 
np.round(np.median(np.diff(x)), 1) + min_datapoint = 2 * np.pi / k_0 / dx -T_min = 6 -lambda_min = 9.81 * T_min**2 / (2 * np.pi) -flim = 1 / T_min + Lpoints = int(np.round(min_datapoint) * 10) + Lmeters = Lpoints * dx + print("L number of gridpoint:", Lpoints) + print("L length in km:", Lmeters / 1e3) + print("approx number windows", 2 * dist.iloc[-1] / Lmeters - 1) -oversample = 2 -dlambda = Lmeters * oversample -dk = 2 * np.pi / dlambda -kk = np.arange(0, 1 / lambda_min, 1 / dlambda) * 2 * np.pi -kk = kk[k_0 <= kk] + T_min = 6 + lambda_min = 9.81 * T_min**2 / (2 * np.pi) -print("2 M = ", kk.size * 2) + oversample = 2 + dlambda = Lmeters * oversample + kk = np.arange(0, 1 / lambda_min, 1 / dlambda) * 2 * np.pi + kk = kk[k_0 <= kk] -print("define global xlims") -dist_list = np.array([np.nan, np.nan]) -for k in all_beams: - print(k) - x = Gd[k + "/x"][:] - print(x[0], x[-1]) - dist_list = np.vstack([dist_list, [x[0], x[-1]]]) + print("2 M = ", kk.size * 2) -xlims = np.nanmin(dist_list[:, 0]) - dx, np.nanmin(dist_list[:, 1]) + print("define global xlims") + dist_list = np.array([np.nan, np.nan]) + for k in all_beams: + # print(k) # TODO: add logger + hkey = "heights_c_weighted_mean" + x = Gd[k + "/dist"][:] + # print(x[0], x[-1]) # TODO: add logger + dist_list = np.vstack([dist_list, [x[0], x[-1]]]) + xlims = np.nanmin(dist_list[:, 0]) - dx, np.nanmin(dist_list[:, 1]) -for k in all_beams: - dist_i = io.get_beam_var_hdf_store(Gd[k], "x") - x_mask = (dist_i > xlims[0]) & (dist_i < xlims[1]) - print(k, sum(x_mask["x"]) / (xlims[1] - xlims[0])) + dist_lim = 2000e3 # maximum distanc in the sea ice tha tis analysed: + if (xlims[1] - xlims[0]) > dist_lim: + xlims = xlims[0], xlims[0] + dist_lim + print("-reduced xlims length to ", xlims[0] + dist_lim, "m") -print("-reduced frequency resolution") -kk = kk[::2] -print("set xlims: ", xlims) -print( - "Loop start: ", - tracemalloc.get_traced_memory()[0] / 1e6, - tracemalloc.get_traced_memory()[1] / 1e6, -) + for k in all_beams: + dist_i = io.get_beam_var_hdf_store(Gd[k], "dist") + x_mask = (dist_i > xlims[0]) & (dist_i < xlims[1]) + # print(k, sum(x_mask["dist"]) / (xlims[1] - xlims[0])) # TODO: add logger -G_gFT = dict() -G_gFT_x = dict() -G_rar_fft = dict() -Pars_optm = dict() - - -k = all_beams[1] -# sliderule version -hkey = "h_mean" -hkey_sigma = "h_sigma" -for k in all_beams: - tracemalloc.start() - # ------------------------------- use gridded data - Gi = io.get_beam_hdf_store(Gd[k]) - x_mask = (Gi["x"] > xlims[0]) & (Gi["x"] < xlims[1]) - if sum(x_mask) / (xlims[1] - xlims[0]) < 0.005: - print("------------------- not data in beam found; skip") - - Gd_cut = Gi[x_mask] - x = Gd_cut["x"] - del Gi - # cut data: - x_mask = (x >= xlims[0]) & (x <= xlims[1]) - x = x[x_mask] - dd = np.copy(Gd_cut[hkey]) - - dd_error = np.copy(Gd_cut[hkey_sigma]) - - dd_error[np.isnan(dd_error)] = 100 - - # compute slope spectra !! 
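For readers checking the spectral limits defined above, the numbers work out as follows when dx = 10 m (an assumption here; the real script takes the grid spacing from the data):

import numpy as np

g, dx = 9.81, 10.0                                      # assumed 10 m along-track grid
T_max = 40.0                                            # longest resolved wave period (s)
k_0 = (2 * np.pi / T_max) ** 2 / g                      # deep-water dispersion k = (2*pi/T)^2 / g
longest_wavelength = 2 * np.pi / k_0                    # ~2498 m
Lpoints = int(np.round(longest_wavelength / dx) * 10)   # window of 10 longest waves -> 2500 points
Lmeters = Lpoints * dx                                  # 25 km stencil

T_min = 6.0
lambda_min = g * T_min**2 / (2 * np.pi)                 # ~56 m shortest resolved wavelength
dlambda = Lmeters * 2                                   # oversample = 2
kk = np.arange(0, 1 / lambda_min, 1 / dlambda) * 2 * np.pi
kk = kk[k_0 <= kk]                                      # drop wavenumbers longer than T_max allows
print(Lpoints, int(Lmeters), kk.size)                   # 2500, 25000, ~870 wavenumbers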
- dd = np.gradient(dd) - dd, _ = spicke_remover.spicke_remover(dd, spreed=10, verbose=False) - dd_nans = (np.isnan(dd)) + (Gd_cut["N_photos"] <= 5) - - # using gappy data - dd_no_nans = dd[~dd_nans] # windowing is applied here - x_no_nans = x[~dd_nans] - dd_error_no_nans = dd_error[~dd_nans] - - print("gFT") - with threadpool_limits(limits=N_process, user_api="blas"): - pprint(threadpool_info()) - - S = gFT.wavenumber_spectrogram_gFT( - np.array(x_no_nans), - np.array(dd_no_nans), - Lmeters, - dx, - kk, - data_error=dd_error_no_nans, - ov=None, - ) - GG, GG_x, Params = S.cal_spectrogram( - xlims=xlims, max_nfev=8000, plot_flag=False - ) + print("-reduced frequency resolution") + kk = kk[::2] + print("set xlims: ", xlims) print( - "after ", - k, + "Loop start: ", tracemalloc.get_traced_memory()[0] / 1e6, tracemalloc.get_traced_memory()[1] / 1e6, ) - plot_data_model = False - if plot_data_model: - for i in np.arange(0, 16, 2): - c1 = "blue" - c2 = "red" - - GGi = GG.isel(x=i) - - xi_1 = GG_x.x[i] - xi_2 = GG_x.x[i + 1] - - F = M.figure_axis_xy(16, 2) - eta = GG_x.eta - - y_model = GG_x.y_model[:, i] - plt.plot(eta + xi_1, y_model, "-", c=c1, linewidth=0.8, alpha=1, zorder=12) - y_model = GG_x.y_model[:, i + 1] - plt.plot(eta + xi_2, y_model, "-", c=c2, linewidth=0.8, alpha=1, zorder=12) - - FT = gFT.generalized_Fourier(eta + xi_1, None, GG.k) - _ = FT.get_H() - FT.p_hat = np.concatenate([GGi.gFT_cos_coeff, GGi.gFT_sin_coeff]) - plt.plot( - eta + xi_1, - FT.model(), - "-", - c="orange", - linewidth=0.8, - alpha=1, - zorder=2, - ) + G_gFT = dict() + G_gFT_x = dict() + G_rar_fft = dict() + Pars_optm = dict() + + k = all_beams[0] + for k in all_beams: + tracemalloc.start() + # ------------------------------- use gridded data + hkey = "heights_c_weighted_mean" + Gi = io.get_beam_hdf_store(Gd[k]) + x_mask = (Gi["dist"] > xlims[0]) & (Gi["dist"] < xlims[1]) + if sum(x_mask) / (xlims[1] - xlims[0]) < 0.005: + # print("------------------- not data in beam found; skip") # TODO: add logger + continue + + Gd_cut = Gi[x_mask] + x = Gd_cut["dist"] + del Gi + # cut data: + x_mask = (x >= xlims[0]) & (x <= xlims[1]) + x = x[x_mask] + + dd = np.copy(Gd_cut[hkey]) + + dd_error = np.copy(Gd_cut["heights_c_std"]) + dd_error[np.isnan(dd_error)] = 100 + # plt.hist(1/dd_weight, bins=40) + F = M.figure_axis_xy(6, 3) + plt.subplot(2, 1, 1) + + # compute slope spectra !! 
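The "compute slope spectra" step differentiates the heights before spectral estimation. One standard motivation for working with slopes rather than heights is that, for a wave of wavenumber k, the slope amplitude is k times the height amplitude, so slope spectra are k^2 times height spectra and considerably flatter across k. A quick numerical check on a synthetic sinusoid (not ICESat-2 data) is:

import numpy as np

dx = 10.0
x = np.arange(0.0, 25000.0, dx)
k_wave, amp = 2 * np.pi / 500.0, 0.5                    # 500 m wave, 0.5 m height amplitude
height = amp * np.sin(k_wave * x)
slope = np.gradient(height, dx)                         # same operation as np.gradient(dd) below

print(round(slope.max(), 4), round(amp * k_wave, 4))    # both ~0.0063 m/m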
+ dd = np.gradient(dd) + dd, _ = spicke_remover.spicke_remover(dd, spreed=10, verbose=False) + dd_nans = (np.isnan(dd)) + (Gd_cut["N_photos"] <= 5) + + # using gappy data + dd_no_nans = dd[~dd_nans] # windowing is applied here + x_no_nans = x[~dd_nans] + dd_error_no_nans = dd_error[~dd_nans] - FT = gFT.generalized_Fourier(eta + xi_2, None, GG.k) - _ = FT.get_H() - FT.p_hat = np.concatenate([GGi.gFT_cos_coeff, GGi.gFT_sin_coeff]) - plt.plot( - eta + xi_2, - FT.model(), - "-", - c="orange", - linewidth=0.8, - alpha=1, - zorder=2, - ) - - # oringial data - plt.plot(x, dd, "-", c="k", linewidth=2, alpha=0.6, zorder=11) - - F.ax.axvline(xi_1 + eta[0].data, linewidth=4, color=c1, alpha=0.5) - F.ax.axvline(xi_1 + eta[-1].data, linewidth=4, color=c1, alpha=0.5) - F.ax.axvline(xi_2 + eta[0].data, linewidth=4, color=c2, alpha=0.5) - F.ax.axvline(xi_2 + eta[-1].data, linewidth=4, color=c2, alpha=0.5) - - ylims = -np.nanstd(dd) * 2, np.nanstd(dd) * 2 - plt.text( - xi_1 + eta[0].data, - ylims[-1], - " N=" - + str(GG.sel(x=xi_1, method="nearest").N_per_stancil.data) - + " N/2M= " - + str( - GG.sel(x=xi_1, method="nearest").N_per_stancil.data / 2 / kk.size - ), + plt.plot( + x_no_nans, dd_no_nans, ".", color="black", markersize=1, label="slope (m/m)" + ) + plt.legend() + plt.show() + + with threadpool_limits(limits=N_process, user_api="blas"): + pprint(threadpool_info()) + + S = gFT.wavenumber_spectrogram_gFT( + np.array(x_no_nans), + np.array(dd_no_nans), + Lmeters, + dx, + kk, + data_error=dd_error_no_nans, + ov=None, ) - plt.text( - xi_2 + eta[0].data, - ylims[-1], - " N=" - + str(GG.sel(x=xi_2, method="nearest").N_per_stancil.data) - + " N/2M= " - + str( - GG.sel(x=xi_2, method="nearest").N_per_stancil.data / 2 / kk.size - ), + GG, GG_x, Params = S.cal_spectrogram( + xlims=xlims, max_nfev=8000, plot_flag=False ) - plt.xlim(xi_1 + eta[0].data * 1.2, xi_2 + eta[-1].data * 1.2) - - plt.ylim(ylims[0], ylims[-1]) - plt.show() - - # add x-mean spectal error estimate to xarray - S.parceval(add_attrs=True, weight_data=False) - - # assign beam coordinate - GG.coords["beam"] = GG_x.coords["beam"] = str(k) - GG, GG_x = GG.expand_dims(dim="beam", axis=1), GG_x.expand_dims(dim="beam", axis=1) - # repack such that all coords are associated with beam - GG.coords["N_per_stancil"] = (("x", "beam"), np.expand_dims(GG["N_per_stancil"], 1)) - GG.coords["spec_adjust"] = (("x", "beam"), np.expand_dims(GG["spec_adjust"], 1)) - - # add more coodindates to the Dataset - x_coord_no_gaps = linear_gap_fill(Gd_cut, "x", "x") - y_coord_no_gaps = linear_gap_fill(Gd_cut, "x", "y") - mapped_coords = spec.sub_sample_coords( - Gd_cut["x"], x_coord_no_gaps, y_coord_no_gaps, S.stancil_iter, map_func=None - ) - - GG.coords["x_coord"] = GG_x.coords["x_coord"] = ( - ("x", "beam"), - np.expand_dims(mapped_coords[:, 1], 1), - ) - GG.coords["y_coord"] = GG_x.coords["y_coord"] = ( - ("x", "beam"), - np.expand_dims(mapped_coords[:, 2], 1), - ) - - # if data staarts with nans replace coords with nans again - if (GG.coords["N_per_stancil"] == 0).squeeze()[0].data: - nlabel = label((GG.coords["N_per_stancil"] == 0).squeeze())[0] - nan_mask = nlabel == nlabel[0] - GG.coords["x_coord"][nan_mask] = np.nan - GG.coords["y_coord"][nan_mask] = np.nan - - lons_no_gaps = linear_gap_fill(Gd_cut, "x", "lons") - lats_no_gaps = linear_gap_fill(Gd_cut, "x", "lats") - mapped_coords = spec.sub_sample_coords( - Gd_cut["x"], lons_no_gaps, lats_no_gaps, S.stancil_iter, map_func=None - ) - - GG.coords["lon"] = GG_x.coords["lon"] = ( - ("x", "beam"), - 
np.expand_dims(mapped_coords[:, 1], 1), - ) - GG.coords["lat"] = GG_x.coords["lat"] = ( - ("x", "beam"), - np.expand_dims(mapped_coords[:, 2], 1), - ) - - # calculate number data points - def get_stancil_nans(stancil): - x_mask = (stancil[0] < x) & (x <= stancil[-1]) - idata = Gd_cut["N_photos"][x_mask] - return stancil[1], idata.sum() - - photon_list = np.array( - list(dict(map(get_stancil_nans, copy.copy(S.stancil_iter))).values()) - ) - GG.coords["N_photons"] = (("x", "beam"), np.expand_dims(photon_list, 1)) - - # Save to dict - G_gFT[k] = GG - G_gFT_x[k] = GG_x - Pars_optm[k] = Params - - # plot - plt.subplot(2, 1, 2) - G_gFT_power = GG.gFT_PSD_data.squeeze() - plt.plot( - G_gFT_power.k, np.nanmean(G_gFT_power, 1), "gray", label="mean gFT power data " - ) - G_gFT_power = GG.gFT_PSD_model.squeeze() - plt.plot(GG.k, np.nanmean(S.G, 1), "k", label="mean gFT power model") - # standard FFT - print("FFT") - dd[dd_nans] = 0 + print( + "after ", + k, + tracemalloc.get_traced_memory()[0] / 1e6, + tracemalloc.get_traced_memory()[1] / 1e6, + ) - S = spec.wavenumber_spectrogram(x, dd, Lpoints) - G = S.cal_spectrogram() - S.mean_spectral_error() # add x-mean spectal error estimate to xarray - S.parceval(add_attrs=True) + plot_data_model = False + if plot_data_model: + for i in np.arange(0, 16, 2): + c1 = "blue" + c2 = "red" + + GGi = GG.isel(x=i) + + xi_1 = GG_x.x[i] + xi_2 = GG_x.x[i + 1] + + F = M.figure_axis_xy(16, 2) + eta = GG_x.eta + + y_model = GG_x.y_model[:, i] + plt.plot( + eta + xi_1, y_model, "-", c=c1, linewidth=0.8, alpha=1, zorder=12 + ) + y_model = GG_x.y_model[:, i + 1] + plt.plot( + eta + xi_2, y_model, "-", c=c2, linewidth=0.8, alpha=1, zorder=12 + ) + + FT = gFT.generalized_Fourier(eta + xi_1, None, GG.k) + _ = FT.get_H() + FT.p_hat = np.concatenate([GGi.gFT_cos_coeff, GGi.gFT_sin_coeff]) + plt.plot( + eta + xi_1, + FT.model(), + "-", + c="orange", + linewidth=0.8, + alpha=1, + zorder=2, + ) + + FT = gFT.generalized_Fourier(eta + xi_2, None, GG.k) + _ = FT.get_H() + FT.p_hat = np.concatenate([GGi.gFT_cos_coeff, GGi.gFT_sin_coeff]) + plt.plot( + eta + xi_2, + FT.model(), + "-", + c="orange", + linewidth=0.8, + alpha=1, + zorder=2, + ) + + # oringial data + plt.plot(x, dd, "-", c="k", linewidth=2, alpha=0.6, zorder=11) + + F.ax.axvline(xi_1 + eta[0].data, linewidth=4, color=c1, alpha=0.5) + F.ax.axvline(xi_1 + eta[-1].data, linewidth=4, color=c1, alpha=0.5) + F.ax.axvline(xi_2 + eta[0].data, linewidth=4, color=c2, alpha=0.5) + F.ax.axvline(xi_2 + eta[-1].data, linewidth=4, color=c2, alpha=0.5) + + ylims = -np.nanstd(dd) * 2, np.nanstd(dd) * 2 + plt.text( + xi_1 + eta[0].data, + ylims[-1], + " N=" + + str(GG.sel(x=xi_1, method="nearest").N_per_stancil.data) + + " N/2M= " + + str( + GG.sel(x=xi_1, method="nearest").N_per_stancil.data + / 2 + / kk.size + ), + ) + plt.text( + xi_2 + eta[0].data, + ylims[-1], + " N=" + + str(GG.sel(x=xi_2, method="nearest").N_per_stancil.data) + + " N/2M= " + + str( + GG.sel(x=xi_2, method="nearest").N_per_stancil.data + / 2 + / kk.size + ), + ) + plt.xlim(xi_1 + eta[0].data * 1.2, xi_2 + eta[-1].data * 1.2) + + plt.ylim(ylims[0], ylims[-1]) + plt.show() + + S.parceval(add_attrs=True, weight_data=False) + + # assign beam coordinate + GG.coords["beam"] = GG_x.coords["beam"] = str(k) + GG, GG_x = GG.expand_dims(dim="beam", axis=1), GG_x.expand_dims( + dim="beam", axis=1 + ) + # repack such that all coords are associated with beam + GG.coords["N_per_stancil"] = ( + ("x", "beam"), + np.expand_dims(GG["N_per_stancil"], 1), + ) + GG.coords["spec_adjust"] = 
(("x", "beam"), np.expand_dims(GG["spec_adjust"], 1)) + + # add more coodindates to the Dataset + x_coord_no_gaps = linear_gap_fill(Gd_cut, "dist", "x") + y_coord_no_gaps = linear_gap_fill(Gd_cut, "dist", "y") + mapped_coords = spec.sub_sample_coords( + Gd_cut["dist"], + x_coord_no_gaps, + y_coord_no_gaps, + S.stancil_iter, + map_func=None, + ) - # assign beam coordinate - G.coords["beam"] = str(k) - G = G.expand_dims(dim="beam", axis=2) - G.coords["mean_El"] = (("k", "beam"), np.expand_dims(G["mean_El"], 1)) - G.coords["mean_Eu"] = (("k", "beam"), np.expand_dims(G["mean_Eu"], 1)) - G.coords["x"] = G.coords["x"] * dx + GG.coords["x_coord"] = GG_x.coords["x_coord"] = ( + ("x", "beam"), + np.expand_dims(mapped_coords[:, 1], 1), + ) + GG.coords["y_coord"] = GG_x.coords["y_coord"] = ( + ("x", "beam"), + np.expand_dims(mapped_coords[:, 2], 1), + ) - stancil_iter = spec.create_chunk_boundaries(int(Lpoints), dd_nans.size) + # if data staarts with nans replace coords with nans again + if (GG.coords["N_per_stancil"] == 0).squeeze()[0].data: + nlabel = label((GG.coords["N_per_stancil"] == 0).squeeze())[0] + nan_mask = nlabel == nlabel[0] + GG.coords["x_coord"][nan_mask] = np.nan + GG.coords["y_coord"][nan_mask] = np.nan + + lons_no_gaps = linear_gap_fill(Gd_cut, "dist", "lons") + lats_no_gaps = linear_gap_fill(Gd_cut, "dist", "lats") + mapped_coords = spec.sub_sample_coords( + Gd_cut["dist"], lons_no_gaps, lats_no_gaps, S.stancil_iter, map_func=None + ) - def get_stancil_nans(stancil): - idata = dd_nans[stancil[0] : stancil[-1]] - return stancil[1], idata.size - idata.sum() + GG.coords["lon"] = GG_x.coords["lon"] = ( + ("x", "beam"), + np.expand_dims(mapped_coords[:, 1], 1), + ) + GG.coords["lat"] = GG_x.coords["lat"] = ( + ("x", "beam"), + np.expand_dims(mapped_coords[:, 2], 1), + ) - N_list = np.array(list(dict(map(get_stancil_nans, stancil_iter)).values())) + # calculate number data points + def get_stancil_nans(stancil): + x_mask = (stancil[0] < x) & (x <= stancil[-1]) + idata = Gd_cut["N_photos"][x_mask] # noqa: F821 + return stancil[1], idata.sum() - # repack such that all coords are associated with beam - G.coords["N_per_stancil"] = (("x", "beam"), np.expand_dims(N_list, 1)) + photon_list = np.array( + list(dict(map(get_stancil_nans, copy.copy(S.stancil_iter))).values()) + ) + GG.coords["N_photons"] = (("x", "beam"), np.expand_dims(photon_list, 1)) - # save to dict and cut to the same size gFT - try: - G_rar_fft[k] = G.sel(x=slice(GG.x[0], GG.x[-1].data)) - except: - G_rar_fft[k] = G.isel(x=(GG.x[0].data < G.x.data) & (G.x.data < GG.x[-1].data)) + # Save to dict + G_gFT[k] = GG + G_gFT_x[k] = GG_x + Pars_optm[k] = Params - # for plotting - try: - G_rar_fft_p = G.squeeze() + # plot + plt.subplot(2, 1, 2) + G_gFT_power = GG.gFT_PSD_data.squeeze() plt.plot( - G_rar_fft_p.k, - G_rar_fft_p[:, G_rar_fft_p["N_per_stancil"] > 10].mean("x"), - "darkblue", - label="mean FFT", + G_gFT_power.k, + np.nanmean(G_gFT_power, 1), + "gray", + label="mean gFT power data ", ) - plt.legend() - - except: - pass - time.sleep(3) - plt.close("all") - - -del Gd_cut -Gd.close() + G_gFT_power = GG.gFT_PSD_model.squeeze() + plt.plot(GG.k, np.nanmean(S.G, 1), "k", label="mean gFT power model") + + # standard FFT + print("FFT") + dd[dd_nans] = 0 + + S = spec.wavenumber_spectrogram(x, dd, Lpoints) + G = S.cal_spectrogram() + S.mean_spectral_error() # add x-mean spectal error estimate to xarray + S.parceval(add_attrs=True) + + # assign beam coordinate + G.coords["beam"] = str(k) + G = G.expand_dims(dim="beam", axis=2) + 
G.coords["mean_El"] = (("k", "beam"), np.expand_dims(G["mean_El"], 1)) + G.coords["mean_Eu"] = (("k", "beam"), np.expand_dims(G["mean_Eu"], 1)) + G.coords["x"] = G.coords["x"] * dx + + stancil_iter = spec.create_chunk_boundaries(int(Lpoints), dd_nans.size) + + def get_stancil_nans(stancil): + idata = dd_nans[stancil[0] : stancil[-1]] + return stancil[1], idata.size - idata.sum() + + N_list = np.array(list(dict(map(get_stancil_nans, stancil_iter)).values())) + + # repack such that all coords are associated with beam + G.coords["N_per_stancil"] = (("x", "beam"), np.expand_dims(N_list, 1)) + + # save to dict and cut to the same size gFT + try: + G_rar_fft[k] = G.sel(x=slice(GG.x[0], GG.x[-1].data)) + except: + G_rar_fft[k] = G.isel( + x=(GG.x[0].data < G.x.data) & (G.x.data < GG.x[-1].data) + ) -# save fitting parameters -MT.save_pandas_table(Pars_optm, save_name + "_params", save_path) + # for plotting + try: + G_rar_fft_p = G.squeeze() + plt.plot( + G_rar_fft_p.k, + G_rar_fft_p[:, G_rar_fft_p["N_per_stancil"] > 10].mean("x"), + "darkblue", + label="mean FFT", + ) + plt.legend() + plt.show() + except: + pass + time.sleep(3) + del Gd_cut + Gd.close() -# repack data -def repack_attributes(DD): - attr_dim_list = list(DD.keys()) - for k in attr_dim_list: - for ka in list(DD[k].attrs.keys()): - I = DD[k] - I.coords[ka] = ("beam", np.expand_dims(I.attrs[ka], 0)) - return DD + # save fitting parameters + MT.save_pandas_table(Pars_optm, save_name + "_params", str(save_path)) + # repack data + def repack_attributes(DD): + attr_dim_list = list(DD.keys()) + for k in attr_dim_list: + for ka in list(DD[k].attrs.keys()): + I = DD[k] + I.coords[ka] = ("beam", np.expand_dims(I.attrs[ka], 0)) + return DD -beams_missing = set(all_beams) - set(G_gFT.keys()) + beams_missing = set(all_beams) - set(G_gFT.keys()) + def make_dummy_beam(GG, beam): + dummy = GG.copy(deep=True) + for var in list(dummy.var()): + dummy[var] = dummy[var] * np.nan + dummy["beam"] = [beam] + return dummy -def make_dummy_beam(GG, beam): - dummy = GG.copy(deep=True) - for var in list(dummy.var()): - dummy[var] = dummy[var] * np.nan - dummy["beam"] = [beam] - return dummy + for beam in beams_missing: + GG = list(G_gFT.values())[0] + dummy = make_dummy_beam(GG, beam) + dummy["N_photons"] = dummy["N_photons"] * 0 + dummy["N_per_stancil"] = dummy["N_per_stancil"] * 0 + G_gFT[beam] = dummy + GG = list(G_gFT_x.values())[0] + G_gFT_x[beam] = make_dummy_beam(GG, beam) -for beam in beams_missing: - GG = list(G_gFT.values())[0] - dummy = make_dummy_beam(GG, beam) - dummy["N_photons"] = dummy["N_photons"] * 0 - dummy["N_per_stancil"] = dummy["N_per_stancil"] * 0 - G_gFT[beam] = dummy + GG = list(G_rar_fft.values())[0].copy(deep=True) + GG.data = GG.data * np.nan + GG["beam"] = [beam] + G_rar_fft[beam] = GG - GG = list(G_gFT_x.values())[0] - G_gFT_x[beam] = make_dummy_beam(GG, beam) + G_rar_fft.keys() - GG = list(G_rar_fft.values())[0].copy(deep=True) - GG.data = GG.data * np.nan - GG["beam"] = [beam] - G_rar_fft[beam] = GG + G_gFT = repack_attributes(G_gFT) + G_gFT_x = repack_attributes(G_gFT_x) + G_rar_fft = repack_attributes(G_rar_fft) -G_rar_fft.keys() + # save results + G_gFT_DS = xr.merge(G_gFT.values()) + G_gFT_DS["Z_hat_imag"] = G_gFT_DS.Z_hat.imag + G_gFT_DS["Z_hat_real"] = G_gFT_DS.Z_hat.real + G_gFT_DS = G_gFT_DS.drop_vars("Z_hat") + G_gFT_DS.attrs["name"] = "gFT_estimates" -G_gFT = repack_attributes(G_gFT) -G_gFT_x = repack_attributes(G_gFT_x) -G_rar_fft = repack_attributes(G_rar_fft) + savepathname = str(save_path / save_name) + 
G_gFT_DS.to_netcdf(savepathname + "_gFT_k.nc") + G_gFT_x_DS = xr.merge(G_gFT_x.values()) + G_gFT_x_DS.attrs["name"] = "gFT_estimates_real_space" + G_gFT_x_DS.to_netcdf(savepathname + "_gFT_x.nc") -# save results -G_gFT_DS = xr.merge(G_gFT.values()) -G_gFT_DS["Z_hat_imag"] = G_gFT_DS.Z_hat.imag -G_gFT_DS["Z_hat_real"] = G_gFT_DS.Z_hat.real -G_gFT_DS = G_gFT_DS.drop("Z_hat") -G_gFT_DS.attrs["name"] = "gFT_estimates" -G_gFT_DS.to_netcdf(save_path + save_name + "_gFT_k.nc") + G_fft_DS = xr.merge(G_rar_fft.values()) + G_fft_DS.attrs["name"] = "FFT_power_spectra" + G_fft_DS.to_netcdf(savepathname + "_FFT.nc") -G_gFT_x_DS = xr.merge(G_gFT_x.values()) -G_gFT_x_DS.attrs["name"] = "gFT_estimates_real_space" -G_gFT_x_DS.to_netcdf(save_path + save_name + "_gFT_x.nc") + echo("saved and done") -G_fft_DS = xr.merge(G_rar_fft.values()) -G_fft_DS.attrs["name"] = "FFT_power_spectra" -G_fft_DS.to_netcdf(save_path + save_name + "_FFT.nc") +step2app = makeapp(run_B02_make_spectra_gFT, name="makespectra") -print("saved and done") +if __name__ == "__main__": + step2app() diff --git a/src/icesat2_tracks/analysis_db/B03_plot_spectra_ov.py b/src/icesat2_tracks/analysis_db/B03_plot_spectra_ov.py index ca7984d2..146e423c 100644 --- a/src/icesat2_tracks/analysis_db/B03_plot_spectra_ov.py +++ b/src/icesat2_tracks/analysis_db/B03_plot_spectra_ov.py @@ -1,134 +1,36 @@ +#!/usr/bin/env python3 """ This file open a ICEsat2 track applied filters and corections and returns smoothed photon heights on a regular grid in an .nc file. This is python 3 """ -import sys +from pathlib import Path +import matplotlib import numpy as np import xarray as xr from matplotlib.gridspec import GridSpec +import typer + import icesat2_tracks.ICEsat2_SI_tools.io as io import icesat2_tracks.ICEsat2_SI_tools.generalized_FT as gFT import icesat2_tracks.local_modules.m_tools_ph3 as MT from icesat2_tracks.local_modules import m_general_ph3 as M -from icesat2_tracks.config.IceSAT2_startup import mconfig, color_schemes, plt, font_for_print - -track_name, batch_key, test_flag = io.init_from_input( - sys.argv # TODO: Handle via CLI -) # loads standard experiment -hemis, batch = batch_key.split("_") - -load_path = mconfig["paths"]["work"] + batch_key + "/B02_spectra/" -load_file = load_path + "B02_" + track_name -plot_path = ( - mconfig["paths"]["plot"] + "/" + hemis + "/" + batch_key + "/" + track_name + "/" -) # TODO: Update with pathlib -MT.mkdirs_r(plot_path) - -Gk = xr.open_dataset(load_file + "_gFT_k.nc") -Gx = xr.open_dataset(load_file + "_gFT_x.nc") - -Gfft = xr.open_dataset(load_file + "_FFT.nc") - -all_beams = mconfig["beams"]["all_beams"] -high_beams = mconfig["beams"]["high_beams"] -low_beams = mconfig["beams"]["low_beams"] -color_schemes.colormaps2(21) - -col_dict = color_schemes.rels -F = M.figure_axis_xy(9, 3, view_scale=0.5) - -plt.subplot(1, 3, 1) -plt.title(track_name, loc="left") -for k in all_beams: - I = Gk.sel(beam=k) - I2 = Gx.sel(beam=k) - plt.plot(I["lon"], I["lat"], ".", c=col_dict[k], markersize=0.7, linewidth=0.3) - plt.plot(I2["lon"], I2["lat"], "|", c=col_dict[k], markersize=0.7) - - -plt.xlabel("lon") -plt.ylabel("lat") - -plt.subplot(1, 3, 2) - -xscale = 1e3 -for k in all_beams: - I = Gk.sel(beam=k) - plt.plot( - I["x_coord"] / xscale, - I["y_coord"] / xscale, - ".", - c=col_dict[k], - linewidth=0.8, - markersize=0.8, - ) - -plt.xlabel("x_coord (km)") -plt.ylabel("y_coord (km)") - -plt.subplot(1, 3, 3) - -xscale = 1e3 -for k in all_beams: - I = Gk.sel(beam=k) - plt.plot( - I["x_coord"] / xscale, - (I["y_coord"] - 
I["y_coord"][0]), - ".", - c=col_dict[k], - linewidth=0.8, - markersize=0.8, - ) - -plt.xlabel("x_coord (km)") -plt.ylabel("y_coord deviation (m)") - - -F.save_light(path=plot_path, name="B03_specs_coord_check") - - -def dict_weighted_mean(Gdict, weight_key): - """ - returns the weighted meean of a dict of xarray, data_arrays - weight_key must be in the xr.DataArrays - """ - - akey = list(Gdict.keys())[0] - GSUM = Gdict[akey].copy() - GSUM.data = np.zeros(GSUM.shape) - N_per_stancil = GSUM.N_per_stancil * 0 - N_photons = np.zeros(GSUM.N_per_stancil.size) - - counter = 0 - for I in Gdict.items(): - I = I.squeeze() - if len(I.x) != 0: - GSUM += I.where(~np.isnan(I), 0) * I[weight_key] - N_per_stancil += I[weight_key] - if "N_photons" in GSUM.coords: - N_photons += I["N_photons"] - counter += 1 - - GSUM = GSUM / N_per_stancil - - if "N_photons" in GSUM.coords: - GSUM.coords["N_photons"] = (("x", "beam"), np.expand_dims(N_photons, 1)) - - GSUM["beam"] = ["weighted_mean"] - GSUM.name = "power_spec" - - return GSUM - - -G_gFT_wmean = ( - Gk["gFT_PSD_data"].where(~np.isnan(Gk["gFT_PSD_data"]), 0) * Gk["N_per_stancil"] -).sum("beam") / Gk["N_per_stancil"].sum("beam") -G_gFT_wmean["N_per_stancil"] = Gk["N_per_stancil"].sum("beam") +from icesat2_tracks.config.IceSAT2_startup import ( + mconfig, + color_schemes, + plt, + font_for_print, +) -G_fft_wmean = (Gfft.where(~np.isnan(Gfft), 0) * Gfft["N_per_stancil"]).sum( - "beam" -) / Gfft["N_per_stancil"].sum("beam") -G_fft_wmean["N_per_stancil"] = Gfft["N_per_stancil"].sum("beam") +from icesat2_tracks.clitools import ( + echo, + validate_batch_key, + validate_output_dir, + suppress_stdout, + update_paths_mconfig, + report_input_parameters, + validate_track_name_steps_gt_1, + makeapp, +) def plot_wavenumber_spectrogram(ax, Gi, clev, title=None, plot_photon_density=True): @@ -162,402 +64,501 @@ def plot_wavenumber_spectrogram(ax, Gi, clev, title=None, plot_photon_density=Tr plt.title(title, loc="left") -Gmean = G_gFT_wmean.rolling(k=5, center=True).mean() +def plot_data_eta(D, offset=0, **kargs): + eta_1 = D.eta # + D.x + y_data = D.y_model + offset + plt.plot(eta_1, y_data, **kargs) + return eta_1 -try: - k_max = Gmean.k[Gmean.isel(x=slice(0, 5)).mean("x").argmax().data].data -except: - k_max = Gmean.k[Gmean.isel(x=slice(0, 20)).mean("x").argmax().data].data -k_max_range = (k_max * 0.75, k_max * 1, k_max * 1.25) -font_for_print() -F = M.figure_axis_xy(6.5, 5.6, container=True, view_scale=1) -Lmeters = Gk.L.data[0] +def plot_model_eta(D, ax, offset=0, **kargs): + eta = D.eta # + D.x + y_data = D.y_model + offset + plt.plot(eta, y_data, **kargs) -plt.suptitle("gFT Slope Spectrograms\n" + track_name, y=0.98) -gs = GridSpec(3, 3, wspace=0.2, hspace=0.5) + ax.axvline(eta[0].data, linewidth=0.1, color=kargs["color"], alpha=0.5) + ax.axvline(eta[-1].data, linewidth=0.1, color=kargs["color"], alpha=0.5) -Gplot = ( - G_gFT_wmean.squeeze() - .rolling(k=10, min_periods=1, center=True) - .median() - .rolling(x=3, min_periods=1, center=True) - .median() -) -dd = 10 * np.log10(Gplot) -dd = dd.where(~np.isinf(dd), np.nan) -clev_log = M.clevels([dd.quantile(0.01).data, dd.quantile(0.98).data * 1.2], 31) * 1 -xlims = Gmean.x[0] / 1e3, Gmean.x[-1] / 1e3 +matplotlib.use("Agg") # prevent plot windows from opening -k = high_beams[0] -for pos, k, pflag in zip( - [gs[0, 0], gs[0, 1], gs[0, 2]], high_beams, [True, False, False] -): - ax0 = F.fig.add_subplot(pos) - Gplot = Gk.sel(beam=k).gFT_PSD_data.squeeze() - dd2 = 10 * np.log10(Gplot) - dd2 = dd2.where(~np.isinf(dd2), np.nan) 
- plot_wavenumber_spectrogram( - ax0, dd2, clev_log, title=k + " unsmoothed", plot_photon_density=True - ) - plt.xlim(xlims) - if pflag: - plt.ylabel("Wave length\n(meters)") - plt.legend() -for pos, k, pflag in zip( - [gs[1, 0], gs[1, 1], gs[1, 2]], low_beams, [True, False, False] +def run_B03_plot_spectra_ov( + track_name: str = typer.Option(..., callback=validate_track_name_steps_gt_1), + batch_key: str = typer.Option(..., callback=validate_batch_key), + ID_flag: bool = True, + output_dir: str = typer.Option(None, callback=validate_output_dir), ): - ax0 = F.fig.add_subplot(pos) - Gplot = Gk.sel(beam=k).gFT_PSD_data.squeeze() - dd2 = 10 * np.log10(Gplot) - dd2 = dd2.where(~np.isinf(dd2), np.nan) - plot_wavenumber_spectrogram( - ax0, dd2, clev_log, title=k + " unsmoothed", plot_photon_density=True - ) - plt.xlim(xlims) - if pflag: - plt.ylabel("Wave length\n(meters)") - plt.legend() - -ax0 = F.fig.add_subplot(gs[2, 0]) + """ + TODO: add docstring + """ + with suppress_stdout(): + track_name, batch_key, _ = io.init_from_input( + [ + None, + track_name, + batch_key, + ID_flag, + ] # init_from_input expects sys.argv with 4 elements + ) -plot_wavenumber_spectrogram( - ax0, - dd, - clev_log, - title="smoothed weighted mean \n10 $\log_{10}( (m/m)^2 m )$", - plot_photon_density=True, -) -plt.xlim(xlims) + kargs = { + "track_name": track_name, + "batch_key": batch_key, + "ID_flag": ID_flag, + "output_dir": output_dir, + } + report_input_parameters(**kargs) -line_styles = ["--", "-", "--"] -for k_max, style in zip(k_max_range, line_styles): - ax0.axhline(2 * np.pi / k_max, color="red", linestyle=style, linewidth=0.5) + hemis, batch = batch_key.split("_") -if pflag: - plt.ylabel("Wave length\n(meters)") - plt.legend() + workdir, plotsdir = update_paths_mconfig(output_dir, mconfig) -pos = gs[2, 1] -ax0 = F.fig.add_subplot(pos) -plt.title("Photons density ($m^{-1}$)", loc="left") - -for k in all_beams: - I = Gk.sel(beam=k)["gFT_PSD_data"] - plt.plot(Gplot.x / 1e3, I.N_photons / I.L.data, label=k, linewidth=0.8) -plt.plot( - Gplot.x / 1e3, - G_gFT_wmean.N_per_stancil / 3 / I.L.data, - c="black", - label="ave Photons", - linewidth=0.8, -) -plt.xlim(xlims) -plt.xlabel("Distance from the Ice Edge (km)") - -pos = gs[2, 2] - -ax0 = F.fig.add_subplot(pos) -ax0.set_yscale("log") - -plt.title("Peak Spectal Power", loc="left") - -x0 = Gk.x[0].data -for k in all_beams: - I = Gk.sel(beam=k)["gFT_PSD_data"] - plt.scatter( - I.x.data / 1e3, - I.sel(k=slice(k_max_range[0], k_max_range[2])).integrate("k").data, - s=0.5, - marker=".", - color="red", - alpha=0.3, - ) - I = Gfft.sel(beam=k) - plt.scatter( - (x0 + I.x.data) / 1e3, - I.power_spec.sel(k=slice(k_max_range[0], k_max_range[2])).integrate("k"), - s=0.5, - marker=".", - c="blue", - alpha=0.3, - ) + load_path = Path(workdir, batch_key, "B02_spectra") + load_file = str(load_path / ("B02_" + track_name)) + plot_path = Path(plotsdir, hemis, batch_key, track_name) + MT.mkdirs_r(plot_path) + Gk = xr.open_dataset(load_file + "_gFT_k.nc") + Gx = xr.open_dataset(load_file + "_gFT_x.nc") -Gplot = G_fft_wmean.squeeze() -Gplot = Gplot.power_spec[:, Gplot.N_per_stancil >= Gplot.N_per_stancil.max().data * 0.9] -plt.plot( - (x0 + Gplot.x) / 1e3, - Gplot.sel(k=slice(k_max_range[0], k_max_range[2])).integrate("k"), - ".", - markersize=1.5, - c="blue", - label="FFT", -) + Gfft = xr.open_dataset(load_file + "_FFT.nc") -Gplot = G_gFT_wmean.squeeze() -plt.plot( - Gplot.x / 1e3, - Gplot.sel(k=slice(k_max_range[0], k_max_range[2])).integrate("k"), - ".", - markersize=1.5, - c="red", 
- label="gFT", -) + all_beams = mconfig["beams"]["all_beams"] + high_beams = mconfig["beams"]["high_beams"] + low_beams = mconfig["beams"]["low_beams"] + color_schemes.colormaps2(21) -plt.ylabel("1e-3 $(m)^2~m$") -plt.legend() + col_dict = color_schemes.rels + F = M.figure_axis_xy(9, 3, view_scale=0.5) -F.save_light(path=plot_path, name="B03_specs_L" + str(Lmeters)) + plt.subplot(1, 3, 1) + plt.title(track_name, loc="left") + for k in all_beams: + I = Gk.sel(beam=k) + I2 = Gx.sel(beam=k) + plt.plot(I["lon"], I["lat"], ".", c=col_dict[k], markersize=0.7, linewidth=0.3) + plt.plot(I2["lon"], I2["lat"], "|", c=col_dict[k], markersize=0.7) -Gk.sel(beam=k).gFT_PSD_data.plot() + plt.xlabel("lon") + plt.ylabel("lat") + plt.subplot(1, 3, 2) -def plot_model_eta(D, ax, offset=0, xscale=1e3, **kargs): - eta = D.eta + D.x - y_data = D.y_model + offset - plt.plot(eta / xscale, y_data, **kargs) - - ax.axvline(eta[0].data / xscale, linewidth=2, color=kargs["color"], alpha=0.5) - ax.axvline(eta[-1].data / xscale, linewidth=2, color=kargs["color"], alpha=0.5) - - -def add_info(D, Dk, ylims): - eta = D.eta + D.x - N_per_stancil, ksize = Dk.N_per_stancil.data, Dk.k.size - plt.text( - eta[0].data, - ylims[-1], - " N=" - + numtostr(N_per_stancil) - + " N/2M= " - + fltostr(N_per_stancil / 2 / ksize, 1), - ) + xscale = 1e3 + for k in all_beams: + I = Gk.sel(beam=k) + plt.plot( + I["x_coord"] / xscale, + I["y_coord"] / xscale, + ".", + c=col_dict[k], + linewidth=0.8, + markersize=0.8, + ) + plt.xlabel("x_coord (km)") + plt.ylabel("y_coord (km)") -def plot_data_eta(D, offset=0, xscale=1e3, **kargs): - eta_1 = D.eta + D.x - y_data = D.y_model + offset - plt.plot(eta_1 / xscale, y_data, **kargs) - return eta_1 + plt.subplot(1, 3, 3) + xscale = 1e3 + for k in all_beams: + I = Gk.sel(beam=k) + plt.plot( + I["x_coord"] / xscale, + (I["y_coord"] - I["y_coord"][0]), + ".", + c=col_dict[k], + linewidth=0.8, + markersize=0.8, + ) -def plot_data_eta(D, offset=0, **kargs): - eta_1 = D.eta # + D.x - y_data = D.y_model + offset - plt.plot(eta_1, y_data, **kargs) - return eta_1 + plt.xlabel("x_coord (km)") + plt.ylabel("y_coord deviation (m)") + + F.save_light(path=plot_path, name="B03_specs_coord_check") + + G_gFT_wmean = ( + Gk["gFT_PSD_data"].where(~np.isnan(Gk["gFT_PSD_data"]), 0) * Gk["N_per_stancil"] + ).sum("beam") / Gk["N_per_stancil"].sum("beam") + G_gFT_wmean["N_per_stancil"] = Gk["N_per_stancil"].sum("beam") + + G_fft_wmean = (Gfft.where(~np.isnan(Gfft), 0) * Gfft["N_per_stancil"]).sum( + "beam" + ) / Gfft["N_per_stancil"].sum("beam") + G_fft_wmean["N_per_stancil"] = Gfft["N_per_stancil"].sum("beam") + Gmean = G_gFT_wmean.rolling(k=5, center=True).mean() + + try: + k_max = Gmean.k[Gmean.isel(x=slice(0, 5)).mean("x").argmax().data].data + except: + k_max = Gmean.k[Gmean.isel(x=slice(0, 20)).mean("x").argmax().data].data + + k_max_range = (k_max * 0.75, k_max * 1, k_max * 1.25) + font_for_print() + F = M.figure_axis_xy(6.5, 5.6, container=True, view_scale=1) + Lmeters = Gk.L.data[0] + + plt.suptitle("gFT Slope Spectrograms\n" + track_name, y=0.98) + gs = GridSpec(3, 3, wspace=0.2, hspace=0.5) + + Gplot = ( + G_gFT_wmean.squeeze() + .rolling(k=10, min_periods=1, center=True) + .median() + .rolling(x=3, min_periods=1, center=True) + .median() + ) + dd = 10 * np.log10(Gplot) + dd = dd.where(~np.isinf(dd), np.nan) + clev_log = M.clevels([dd.quantile(0.01).data, dd.quantile(0.98).data * 1.2], 31) * 1 + xlims = Gmean.x[0] / 1e3, Gmean.x[-1] / 1e3 -def plot_model_eta(D, ax, offset=0, **kargs): - eta = D.eta # + D.x - y_data = 
D.y_model + offset - plt.plot(eta, y_data, **kargs) + k = high_beams[0] + for pos, k, pflag in zip( + [gs[0, 0], gs[0, 1], gs[0, 2]], high_beams, [True, False, False] + ): + ax0 = F.fig.add_subplot(pos) + Gplot = Gk.sel(beam=k).gFT_PSD_data.squeeze() + dd2 = 10 * np.log10(Gplot) + dd2 = dd2.where(~np.isinf(dd2), np.nan) + plot_wavenumber_spectrogram( + ax0, dd2, clev_log, title=k + " unsmoothed", plot_photon_density=True + ) + plt.xlim(xlims) + if pflag: + plt.ylabel("Wave length\n(meters)") + plt.legend() - ax.axvline(eta[0].data, linewidth=0.1, color=kargs["color"], alpha=0.5) - ax.axvline(eta[-1].data, linewidth=0.1, color=kargs["color"], alpha=0.5) + for pos, k, pflag in zip( + [gs[1, 0], gs[1, 1], gs[1, 2]], low_beams, [True, False, False] + ): + ax0 = F.fig.add_subplot(pos) + Gplot = Gk.sel(beam=k).gFT_PSD_data.squeeze() + dd2 = 10 * np.log10(Gplot) + dd2 = dd2.where(~np.isinf(dd2), np.nan) + plot_wavenumber_spectrogram( + ax0, dd2, clev_log, title=k + " unsmoothed", plot_photon_density=True + ) + plt.xlim(xlims) + if pflag: + plt.ylabel("Wave length\n(meters)") + plt.legend() + ax0 = F.fig.add_subplot(gs[2, 0]) -if "y_data" in Gx.sel(beam="gt3r").keys(): - print("ydata is ", ("y_data" in Gx.sel(beam="gt3r").keys())) -else: - print("ydata is ", ("y_data" in Gx.sel(beam="gt3r").keys())) - MT.json_save("B03_fail", plot_path, {"reason": "no y_data"}) - print("failed, exit") - exit() + plot_wavenumber_spectrogram( + ax0, + dd, + clev_log, + title="smoothed weighted mean \n10 $\log_{10}( (m/m)^2 m )$", + plot_photon_density=True, + ) + plt.xlim(xlims) -fltostr, numtostr = MT.float_to_str, MT.num_to_str + line_styles = ["--", "-", "--"] + for k_max, style in zip(k_max_range, line_styles): + ax0.axhline(2 * np.pi / k_max, color="red", linestyle=style, linewidth=0.5) -font_for_print() + if pflag: + plt.ylabel("Wave length\n(meters)") + plt.legend() -MT.mkdirs_r(plot_path + "B03_spectra/") + pos = gs[2, 1] + ax0 = F.fig.add_subplot(pos) + plt.title("Photons density ($m^{-1}$)", loc="left") -x_pos_sel = np.arange(Gk.x.size)[~np.isnan(Gk.mean("beam").mean("k").gFT_PSD_data.data)] -x_pos_max = ( - Gk.mean("beam") - .mean("k") - .gFT_PSD_data[~np.isnan(Gk.mean("beam").mean("k").gFT_PSD_data)] - .argmax() - .data -) -xpp = x_pos_sel[[int(i) for i in np.round(np.linspace(0, x_pos_sel.size - 1, 4))]] -xpp = np.insert(xpp, 0, x_pos_max) + for k in all_beams: + I = Gk.sel(beam=k)["gFT_PSD_data"] + plt.plot(Gplot.x / 1e3, I.N_photons / I.L.data, label=k, linewidth=0.8) + plt.plot( + Gplot.x / 1e3, + G_gFT_wmean.N_per_stancil / 3 / I.L.data, + c="black", + label="ave Photons", + linewidth=0.8, + ) + plt.xlim(xlims) + plt.xlabel("Distance from the Ice Edge (km)") -for i in xpp: - F = M.figure_axis_xy(6, 8, container=True, view_scale=0.8) + pos = gs[2, 2] - plt.suptitle( - "gFT Model and Spectrograms | x=" + str(Gk.x[i].data) + " \n" + track_name, - y=0.95, - ) - gs = GridSpec(5, 6, wspace=0.2, hspace=0.7) + ax0 = F.fig.add_subplot(pos) + ax0.set_yscale("log") - ax0 = F.fig.add_subplot(gs[0:2, :]) - col_d = color_schemes.__dict__["rels"] + plt.title("Peak Spectal Power", loc="left") - neven = True - offs = 0 + x0 = Gk.x[0].data for k in all_beams: - Gx_1 = Gx.isel(x=i).sel(beam=k) - Gk_1 = Gk.isel(x=i).sel(beam=k) - - plot_model_eta( - Gx_1, - ax0, - offset=offs, - linestyle="-", - color=col_d[k], - linewidth=0.4, - alpha=1, - zorder=12, + I = Gk.sel(beam=k)["gFT_PSD_data"] + plt.scatter( + I.x.data / 1e3, + I.sel(k=slice(k_max_range[0], k_max_range[2])).integrate("k").data, + s=0.5, + marker=".", + 
color="red", + alpha=0.3, ) - ylims = -np.nanstd(Gx_1.y_data) * 3, np.nanstd(Gx_1.y_data) * 3 - - # oringial data - eta_1 = plot_data_eta( - Gx_1, offset=offs, linestyle="-", c="k", linewidth=1, alpha=0.5, zorder=11 + I = Gfft.sel(beam=k) + plt.scatter( + (x0 + I.x.data) / 1e3, + I.power_spec.sel(k=slice(k_max_range[0], k_max_range[2])).integrate("k"), + s=0.5, + marker=".", + c="blue", + alpha=0.3, ) - # reconstruct in gaps - FT = gFT.generalized_Fourier(Gx_1.eta + Gx_1.x, None, Gk_1.k) - _ = FT.get_H() - FT.p_hat = np.concatenate([Gk_1.gFT_cos_coeff, Gk_1.gFT_sin_coeff]) - plt.plot( - Gx_1.eta, - FT.model() + offs, - "-", - c="orange", - linewidth=0.3, - alpha=1, - zorder=2, - ) + Gplot = G_fft_wmean.squeeze() + Gplot = Gplot.power_spec[ + :, Gplot.N_per_stancil >= Gplot.N_per_stancil.max().data * 0.9 + ] + plt.plot( + (x0 + Gplot.x) / 1e3, + Gplot.sel(k=slice(k_max_range[0], k_max_range[2])).integrate("k"), + ".", + markersize=1.5, + c="blue", + label="FFT", + ) + + Gplot = G_gFT_wmean.squeeze() + plt.plot( + Gplot.x / 1e3, + Gplot.sel(k=slice(k_max_range[0], k_max_range[2])).integrate("k"), + ".", + markersize=1.5, + c="red", + label="gFT", + ) + + plt.ylabel("1e-3 $(m)^2~m$") + plt.legend() - if neven: - neven = False - offs += 0.3 - else: - neven = True - offs += 0.6 + F.save_light(path=plot_path, name="B03_specs_L" + str(Lmeters)) - dx = eta_1.diff("eta").mean().data + Gk.sel(beam=k).gFT_PSD_data.plot() - eta_ticks = np.linspace(Gx_1.eta.data[0], Gx_1.eta.data[-1], 11) + if "y_data" in Gx.sel(beam="gt3r").keys(): + print("ydata is ", ("y_data" in Gx.sel(beam="gt3r").keys())) + else: + print("ydata is ", ("y_data" in Gx.sel(beam="gt3r").keys())) + MT.json_save("B03_fail", plot_path, {"reason": "no y_data"}) + echo("failed, exit", "red") + exit() - ax0.set_xticks(eta_ticks) - ax0.set_xticklabels(eta_ticks / 1e3) - plt.xlim(eta_1[0].data - 40 * dx, eta_1[-1].data + 40 * dx) - plt.title("Model reconst.", loc="left") + fltostr, _ = MT.float_to_str, MT.num_to_str - plt.ylabel("relative slope (m/m)") - plt.xlabel( - "segment distance $\eta$ (km) @ x=" + fltostr(Gx_1.x.data / 1e3, 2) + "km" + font_for_print() + + MT.mkdirs_r(plot_path / "B03_spectra/") + + x_pos_sel = np.arange(Gk.x.size)[ + ~np.isnan(Gk.mean("beam").mean("k").gFT_PSD_data.data) + ] + x_pos_max = ( + Gk.mean("beam") + .mean("k") + .gFT_PSD_data[~np.isnan(Gk.mean("beam").mean("k").gFT_PSD_data)] + .argmax() + .data ) + xpp = x_pos_sel[[int(i) for i in np.round(np.linspace(0, x_pos_sel.size - 1, 4))]] + xpp = np.insert(xpp, 0, x_pos_max) - # spectra - # define threshold - k_thresh = 0.085 - ax1_list = list() - dd_max = list() - for pos, kgroup, lflag in zip( - [gs[2, 0:2], gs[2, 2:4], gs[2, 4:]], - [["gt1l", "gt1r"], ["gt2l", "gt2r"], ["gt3l", "gt3r"]], - [True, False, False], - ): - ax11 = F.fig.add_subplot(pos) - ax11.tick_params(labelleft=lflag) - ax1_list.append(ax11) - for k in kgroup: - Gx_1 = Gx.isel(x=i).sel(beam=k) - Gk_1 = Gk.isel(x=i).sel(beam=k) + for i in xpp: + F = M.figure_axis_xy(6, 8, container=True, view_scale=0.8) - klim = Gk_1.k[0], Gk_1.k[-1] + plt.suptitle( + "gFT Model and Spectrograms | x=" + str(Gk.x[i].data) + " \n" + track_name, + y=0.95, + ) + gs = GridSpec(5, 6, wspace=0.2, hspace=0.7) - if "l" in k: - dd = Gk_1.gFT_PSD_data - plt.plot(Gk_1.k, dd, color="gray", linewidth=0.5, alpha=0.5) + ax0 = F.fig.add_subplot(gs[0:2, :]) + col_d = color_schemes.__dict__["rels"] - dd = Gk_1.gFT_PSD_data.rolling(k=10, min_periods=1, center=True).mean() - plt.plot(Gk_1.k, dd, color=col_d[k], linewidth=0.8) - # 
handle the 'All-NaN slice encountered' warning - if np.all(np.isnan(dd.data)): - dd_max.append(np.nan) + neven = True + offs = 0 + for k in all_beams: + Gx_1 = Gx.isel(x=i).sel(beam=k) + Gk_1 = Gk.isel(x=i).sel(beam=k) + + plot_model_eta( + Gx_1, + ax0, + offset=offs, + linestyle="-", + color=col_d[k], + linewidth=0.4, + alpha=1, + zorder=12, + ) + + # oringial data + eta_1 = plot_data_eta( + Gx_1, + offset=offs, + linestyle="-", + c="k", + linewidth=1, + alpha=0.5, + zorder=11, + ) + + # reconstruct in gaps + FT = gFT.generalized_Fourier(Gx_1.eta + Gx_1.x, None, Gk_1.k) + _ = FT.get_H() + FT.p_hat = np.concatenate([Gk_1.gFT_cos_coeff, Gk_1.gFT_sin_coeff]) + plt.plot( + Gx_1.eta, + FT.model() + offs, + "-", + c="orange", + linewidth=0.3, + alpha=1, + zorder=2, + ) + + if neven: + neven = False + offs += 0.3 else: - dd_max.append(np.nanmax(dd.data)) - - plt.xlim(klim) - if lflag: - plt.ylabel("$(m/m)^2/k$") - plt.title("Energy Spectra", loc="left") - - plt.xlabel("wavenumber k (2$\pi$ m$^{-1}$)") - - ax11.axvline(k_thresh, linewidth=1, color="gray", alpha=1) - ax11.axvspan(k_thresh, klim[-1], color="gray", alpha=0.5, zorder=12) - - if not np.all(np.isnan(dd_max)): - max_vale = np.nanmax(dd_max) - for ax in ax1_list: - ax.set_ylim(0,max_vale * 1.1) + neven = True + offs += 0.6 - ax0 = F.fig.add_subplot(gs[-2:, :]) + dx = eta_1.diff("eta").mean().data - neven = True - offs = 0 - for k in all_beams: - Gx_1 = Gx.isel(x=i).sel(beam=k) - Gk_1 = Gk.isel(x=i).sel(beam=k) + eta_ticks = np.linspace(Gx_1.eta.data[0], Gx_1.eta.data[-1], 11) - ylims = -np.nanstd(Gx_1.y_data) * 3, np.nanstd(Gx_1.y_data) * 3 + ax0.set_xticks(eta_ticks) + ax0.set_xticklabels(eta_ticks / 1e3) + plt.xlim(eta_1[0].data - 40 * dx, eta_1[-1].data + 40 * dx) + plt.title("Model reconst.", loc="left") - # oringial data - eta_1 = plot_data_eta( - Gx_1, offset=offs, linestyle="-", c="k", linewidth=1.5, alpha=0.5, zorder=11 + plt.ylabel("relative slope (m/m)") + plt.xlabel( + "segment distance $\eta$ (km) @ x=" + fltostr(Gx_1.x.data / 1e3, 2) + "km" ) - # reconstruct in gaps - FT = gFT.generalized_Fourier(Gx_1.eta + Gx_1.x, None, Gk_1.k) - _ = FT.get_H() - FT.p_hat = np.concatenate([Gk_1.gFT_cos_coeff, Gk_1.gFT_sin_coeff]) + # spectra + # define threshold + k_thresh = 0.085 + ax1_list = list() + dd_max = list() + for pos, kgroup, lflag in zip( + [gs[2, 0:2], gs[2, 2:4], gs[2, 4:]], + [["gt1l", "gt1r"], ["gt2l", "gt2r"], ["gt3l", "gt3r"]], + [True, False, False], + ): + ax11 = F.fig.add_subplot(pos) + ax11.tick_params(labelleft=lflag) + ax1_list.append(ax11) + for k in kgroup: + Gx_1 = Gx.isel(x=i).sel(beam=k) + Gk_1 = Gk.isel(x=i).sel(beam=k) + + klim = Gk_1.k[0], Gk_1.k[-1] + + if "l" in k: + dd = Gk_1.gFT_PSD_data + plt.plot(Gk_1.k, dd, color="gray", linewidth=0.5, alpha=0.5) + + dd = Gk_1.gFT_PSD_data.rolling(k=10, min_periods=1, center=True).mean() + plt.plot(Gk_1.k, dd, color=col_d[k], linewidth=0.8) + # handle the 'All-NaN slice encountered' warning + if np.all(np.isnan(dd.data)): + dd_max.append(np.nan) + else: + dd_max.append(np.nanmax(dd.data)) + + plt.xlim(klim) + if lflag: + plt.ylabel("$(m/m)^2/k$") + plt.title("Energy Spectra", loc="left") + + plt.xlabel("wavenumber k (2$\pi$ m$^{-1}$)") + + ax11.axvline(k_thresh, linewidth=1, color="gray", alpha=1) + ax11.axvspan(k_thresh, klim[-1], color="gray", alpha=0.5, zorder=12) + + if not np.all(np.isnan(dd_max)): + max_vale = np.nanmax(dd_max) + for ax in ax1_list: + ax.set_ylim(0, max_vale * 1.1) - p_hat_k = np.concatenate([Gk_1.k, Gk_1.k]) - k_mask = p_hat_k < k_thresh - 
FT.p_hat[~k_mask] = 0 + ax0 = F.fig.add_subplot(gs[-2:, :]) - plt.plot( - Gx_1.eta, - FT.model() + offs, - "-", - c=col_d[k], - linewidth=0.8, - alpha=1, - zorder=12, - ) + neven = True + offs = 0 + for k in all_beams: + Gx_1 = Gx.isel(x=i).sel(beam=k) + Gk_1 = Gk.isel(x=i).sel(beam=k) + + # oringial data + eta_1 = plot_data_eta( + Gx_1, + offset=offs, + linestyle="-", + c="k", + linewidth=1.5, + alpha=0.5, + zorder=11, + ) + + # reconstruct in gaps + FT = gFT.generalized_Fourier(Gx_1.eta + Gx_1.x, None, Gk_1.k) + _ = FT.get_H() + FT.p_hat = np.concatenate([Gk_1.gFT_cos_coeff, Gk_1.gFT_sin_coeff]) + + p_hat_k = np.concatenate([Gk_1.k, Gk_1.k]) + k_mask = p_hat_k < k_thresh + FT.p_hat[~k_mask] = 0 + + plt.plot( + Gx_1.eta, + FT.model() + offs, + "-", + c=col_d[k], + linewidth=0.8, + alpha=1, + zorder=12, + ) + + if neven: + neven = False + offs += 0.3 + else: + neven = True + offs += 0.6 - if neven: - neven = False - offs += 0.3 - else: - neven = True - offs += 0.6 + dx = eta_1.diff("eta").mean().data - dx = eta_1.diff("eta").mean().data + eta_ticks = np.linspace(Gx_1.eta.data[0], Gx_1.eta.data[-1], 11) - eta_ticks = np.linspace(Gx_1.eta.data[0], Gx_1.eta.data[-1], 11) + ax0.set_xticks(eta_ticks) + ax0.set_xticklabels(eta_ticks / 1e3) + plt.xlim(eta_1[1000].data - 40 * dx, eta_1[-1000].data + 40 * dx) + plt.title("Low-Wavenumber Model reconst.", loc="left") - ax0.set_xticks(eta_ticks) - ax0.set_xticklabels(eta_ticks / 1e3) - plt.xlim(eta_1[1000].data - 40 * dx, eta_1[-1000].data + 40 * dx) - plt.title("Low-Wavenumber Model reconst.", loc="left") + plt.ylabel("relative slope (m/m)") + plt.xlabel( + "segment distance $\eta$ (km) @ x=" + fltostr(Gx_1.x.data / 1e3, 2) + "km" + ) - plt.ylabel("relative slope (m/m)") - plt.xlabel( - "segment distance $\eta$ (km) @ x=" + fltostr(Gx_1.x.data / 1e3, 2) + "km" + F.save_pup( + path=str(plot_path) + "B03_spectra/", name="B03_freq_reconst_x" + str(i) + ) + + MT.json_save( + "B03_success", + plot_path, + {"time": "time.asctime( time.localtime(time.time()) )"}, ) - F.save_pup(path=plot_path + "B03_spectra/", name="B03_freq_reconst_x" + str(i)) -MT.json_save( - "B03_success", plot_path, {"time": "time.asctime( time.localtime(time.time()) )"} -) +if __name__ == "__main__": + step3app = makeapp(run_B03_plot_spectra_ov, name="plotspectra") + step3app() diff --git a/src/icesat2_tracks/app.py b/src/icesat2_tracks/app.py new file mode 100755 index 00000000..b47ee9eb --- /dev/null +++ b/src/icesat2_tracks/app.py @@ -0,0 +1,89 @@ +#!/usr/bin/env python +""" +Main CLI for icesat2waves. 
+""" +from typer import Typer, Option +from icesat2_tracks.analysis_db.B01_SL_load_single_file import ( + run_B01_SL_load_single_file as _loadfile, +) +from icesat2_tracks.analysis_db.B02_make_spectra_gFT import ( + run_B02_make_spectra_gFT as _makespectra, +) +from icesat2_tracks.analysis_db.B03_plot_spectra_ov import ( + run_B03_plot_spectra_ov as _plotspectra, +) +from icesat2_tracks.analysis_db.A02c_IOWAGA_thredds_prior import ( + run_A02c_IOWAGA_thredds_prior as _threddsprior, +) + +from icesat2_tracks.clitools import ( + validate_track_name, + validate_batch_key, + validate_output_dir, + validate_track_name_steps_gt_1, +) + + +app = Typer(add_completion=False) +validate_track_name_gt_1_opt = Option(..., callback=validate_track_name_steps_gt_1) +validate_batch_key_opt = Option(..., callback=validate_batch_key) +validate_output_dir_opt = Option(None, callback=validate_output_dir) + + +def run_job( + analysis_func, + track_name: str, + batch_key: str, + ID_flag: bool = True, + output_dir: str = validate_output_dir_opt, +): + analysis_func( + track_name, + batch_key, + ID_flag, + output_dir, + ) + + +@app.command(help=_loadfile.__doc__) +def loadfile( + track_name: str = Option(..., callback=validate_track_name), + batch_key: str = validate_batch_key_opt, + ID_flag: bool = True, + output_dir: str = validate_output_dir_opt, +): + run_job(_loadfile, track_name, batch_key, ID_flag, output_dir) + + +@app.command(help=_makespectra.__doc__) +def makespectra( + track_name: str = validate_track_name_gt_1_opt, + batch_key: str = validate_batch_key_opt, + ID_flag: bool = True, + output_dir: str = validate_output_dir_opt, +): + run_job(_makespectra, track_name, batch_key, ID_flag, output_dir) + + +@app.command(help=_plotspectra.__doc__) +def plotspectra( + track_name: str = validate_track_name_gt_1_opt, + batch_key: str = validate_batch_key_opt, + ID_flag: bool = True, + output_dir: str = validate_output_dir_opt, +): + run_job(_plotspectra, track_name, batch_key, ID_flag, output_dir) + + +@app.command(help=_threddsprior.__doc__) +def threddsprior( + track_name: str = validate_track_name_gt_1_opt, + batch_key: str = validate_batch_key_opt, + ID_flag: bool = True, + output_dir: str = validate_output_dir_opt, +): + run_job(_threddsprior, track_name, batch_key, ID_flag, output_dir) + + +if __name__ == "__main__": + app() diff --git a/src/icesat2_tracks/clitools.py b/src/icesat2_tracks/clitools.py new file mode 100644 index 00000000..b13f3ede --- /dev/null +++ b/src/icesat2_tracks/clitools.py @@ -0,0 +1,119 @@ +import os +import re +import sys +from contextlib import contextmanager +from pathlib import Path + +import typer +from termcolor import colored + + +@contextmanager +def suppress_stdout(): + with open(os.devnull, "w") as devnull: + old_stdout = sys.stdout + sys.stdout = devnull + try: + yield + finally: + sys.stdout = old_stdout + + +# Callbacks for typer +def validate_pattern_wrapper( + ctx: typer.Context, + param: typer.CallbackParam, + value: str, + pattern: str, + error_message: str, +) -> str: + if not re.match(pattern, value): + raise typer.BadParameter(error_message) + return value + + +def validate_track_name( + ctx: typer.Context, param: typer.CallbackParam, value: str +) -> str: + pattern = r"\d{4}(0[1-9]|1[0-2])(0[1-9]|[12][0-9]|3[01])([01][0-9]|2[0-3])([0-5][0-9]){2}_\d{8}_\d{3}_\d{2}" + error_message = "track_name must be in the format: YYYYMMDDHHMMSS_XXXXXXXX_XXX_XX" + return validate_pattern_wrapper( + ctx, + param, + value, + pattern, + error_message, + ) + + +def 
validate_batch_key( + ctx: typer.Context, param: typer.CallbackParam, value: str +) -> str: + pattern = r".*_.*" + error_message = "batch_key must be in the format 'SH_testSLsinglefile2'" + return validate_pattern_wrapper( + ctx, + param, + value, + pattern, + error_message, + ) + + +def validate_output_dir( + ctx: typer.Context, param: typer.CallbackParam, value: str +) -> str: + path = Path(value).resolve() + if not path.is_dir(): + raise typer.BadParameter(f"{path} does not exist") + return str(path) + + +def echo(text: str, color: str = "green"): + typer.echo(colored(text, color)) + + +def echoparam(text: str, value, textcolor: str = "green", valuecolor: str = "white"): + # add tab to text and center around the : + text = "\t" + text + text = f"{text:<12}" + echo(f"{colored(text,textcolor)}: {colored(value, valuecolor)}") + + +def report_input_parameters(heading: str = "** Input parameters:", **kargs): + echo(heading) + for key in kargs: + if key != "args": + echoparam(key, kargs[key]) + + +def update_paths_mconfig(output_dir, mconfig): + if output_dir: + workdir, plotsdir = [ + Path(output_dir, mconfig["paths"][key]) for key in ["work", "plot"] + ] + + return workdir, plotsdir + + +def validate_track_name_steps_gt_1( + ctx: typer.Context, param: typer.CallbackParam, value: str +) -> str: + pattern = r".*_\d{4}(0[1-9]|1[0-2])(0[1-9]|[12][0-9]|3[01])_\d{8}" + error_message = "track_name must be in the format: any_characters_YYYYMMDD_12345678" + return validate_pattern_wrapper( + ctx, + param, + value, + pattern, + error_message, + ) + + +def makeapp(f, name): + """ + Make a typer app from a function. + """ + app = typer.Typer(add_completion=False, name=name) + app.command()(f) + return app diff --git a/src/icesat2_tracks/local_modules/m_colormanager_ph3.py b/src/icesat2_tracks/local_modules/m_colormanager_ph3.py index 17163538..2daddf4f 100644 --- a/src/icesat2_tracks/local_modules/m_colormanager_ph3.py +++ b/src/icesat2_tracks/local_modules/m_colormanager_ph3.py @@ -44,7 +44,8 @@ def json_load(name, path, verbose=False): with open(full_name, 'r') as ifile: data=json.load(ifile) if verbose: - print('loaded from: ',full_name) + # print('loaded from: ',full_name) # removed for cli usage, CP. TODO: delete later + pass return data @@ -54,7 +55,7 @@ class color: def __init__(self, path=None, name=None): self.white=(1,1,1) if (path is not None) & (name is not None): - print('color theme: '+name) + # print('color theme: '+name) # removed for cli usage, CP. 
TODO: delete later try: theme=json_load(name, path, verbose=True) for k, v in theme.items(): diff --git a/tests/plots.tar.gz b/tests/plots.tar.gz new file mode 100644 index 00000000..51e25d7a --- /dev/null +++ b/tests/plots.tar.gz @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:87562959d28ac1e03e1597be80c41c33878315c7097ca57e4b4c16e2be48902b +size 1615170 diff --git a/tests/test_steps.py b/tests/test_steps.py new file mode 100644 index 00000000..53d62b96 --- /dev/null +++ b/tests/test_steps.py @@ -0,0 +1,309 @@ +#!/usr/bin/env python +from datetime import datetime +from pathlib import Path +import shutil +import subprocess +import tarfile +from tempfile import mkdtemp + + +def checkpaths(paths): + result = [Path(pth).is_file() for pth in paths] + print("\n") + # report which files are missing (in red) and which are present (in green) + for pth, res in zip(paths, result): + if res: + print(f"\033[92m{pth} is present\033[0m") + else: + print(f"\033[91m{pth} is missing\033[0m") + return all(result) + + +def get_all_filenames(directory): + """ + Get a list of all file names in a directory + """ + return [p.name for p in Path(directory).rglob("*")] + + +def check_file_exists(directory, prefix, stepno: int = 4): + # Get a list of all files in the directory + files = get_all_filenames(targetdirs[str(stepno)] / directory) + # Check if there is a file with the specified prefix + file_exists = any(file.startswith(prefix) for file in files) + return file_exists + + +def delete_pdf_files(directory): + files = [file for file in Path(directory).iterdir() if file.suffix == ".pdf"] + delete_files(files) + + +def delete_files(file_paths): + for file_path in file_paths: + delete_file(file_path) + + +def delete_file(file_path): + path = Path(file_path) + if path.exists(): + path.unlink() + + +def getoutputdir(script): + outputdir = script.index("--output-dir") + 1 + return script[outputdir] + + +def extract_tarball(outputdir, tarball_path): + tar = tarfile.open(Path(tarball_path)) + tar.extractall(Path(outputdir), filter="data") + tar.close() + + +def run_test(script, paths, delete_paths=True, suppress_output=True): + # configuration + outputdir = getoutputdir(script) + + # update paths to check + paths = [Path(outputdir, pth) for pth in paths] + + if delete_paths: + delete_files(paths) + + kwargs = {"check": True} + if suppress_output: + kwargs["stdout"] = subprocess.DEVNULL + kwargs["stderr"] = subprocess.DEVNULL + + # run the script + subprocess.run(script, **kwargs) + + # run the tests + result = checkpaths(paths) + + return result + + +def makepathlist(dir, files): + return [Path(dir, f) for f in files] + + +# The scriptx variables are the scripts to be tested. The pathsx variables are the paths to the files that should be produced by the scripts. The scripts are run and the paths are checked to see whether the expected files were produced. If the files were produced, the test passes. If not, the test fails. 
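+# For example (illustrative only), after setup_module has appended an output
+# directory to a script list, a call such as run_test(script2, paths2) runs
+# B02_make_spectra_gFT.py in a subprocess with that "--output-dir" value and
+# returns True only if every file listed in paths2 exists under that directory.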
+ + +script1 = [ + "python", + "src/icesat2_tracks/analysis_db/B01_SL_load_single_file.py", + "--track-name", + "20190502052058_05180312_005_01", + "--batch-key", + "SH_testSLsinglefile2", + "--output-dir", +] +paths1 = [ + "plots/SH/SH_testSLsinglefile2/SH_20190502_05180312/B01_track.png.png", + "plots/SH/SH_testSLsinglefile2/SH_20190502_05180312/B01b_ATL06_corrected.png.png", + "plots/SH/SH_testSLsinglefile2/SH_20190502_05180312/B01b_beam_statistics.png.png", + "work/SH_testSLsinglefile2/A01b_ID/A01b_ID_SH_20190502_05180312.json", + "work/SH_testSLsinglefile2/B01_regrid/SH_20190502_05180312_B01_binned.h5", +] + + +script2 = [ + "python", + "src/icesat2_tracks/analysis_db/B02_make_spectra_gFT.py", + "--track-name", + "SH_20190502_05180312", + "--batch-key", + "SH_testSLsinglefile2", + "--output-dir", +] +paths2 = [ + "work/SH_testSLsinglefile2/B02_spectra/B02_SH_20190502_05180312_params.h5", + "work/SH_testSLsinglefile2/B02_spectra/B02_SH_20190502_05180312_gFT_x.nc", + "work/SH_testSLsinglefile2/B02_spectra/B02_SH_20190502_05180312_gFT_k.nc", + "work/SH_testSLsinglefile2/B02_spectra/B02_SH_20190502_05180312_FFT.nc", +] + +script3 = [ + "python", + "src/icesat2_tracks/analysis_db/B03_plot_spectra_ov.py", + "--track-name", + "SH_20190502_05180312", + "--batch-key", + "SH_testSLsinglefile2", + "--id-flag", + "--output-dir", +] + +_root = "plots/SH/SH_testSLsinglefile2/SH_20190502_05180312" +_paths3 = [ + "B03_specs_L25000.0.png", + "B03_specs_coord_check.png", + "B03_success.json", +] +paths3 = makepathlist(_root, _paths3) + +script4 = [ + "python", + "src/icesat2_tracks/analysis_db/A02c_IOWAGA_thredds_prior.py", + "--track-name", + "SH_20190502_05180312", + "--batch-key", + "SH_testSLsinglefile2", + "--id-flag", + "--output-dir", +] +paths4 = [ + "plots/SH/SH_testSLsinglefile2/SH_20190502_05180312/A02_SH_2_hindcast_data.pdf", + "plots/SH/SH_testSLsinglefile2/SH_20190502_05180312/A02_SH_2_hindcast_prior.pdf", +] +dir4, prefix4 = ( + "work/SH_testSLsinglefile2/A02_prior/", + "A02_SH_20190502_05180312_hindcast", +) + +# TODO: step 5 +script5 = [ + "python", + "src/icesat2_tracks/analysis_db/B04_angle.py", + "SH_20190502_05180312", + "SH_testSLsinglefile2", + "True", +] +paths5 = [ + "plots/SH/SH_testSLsinglefile2/SH_20190502_05180312/B04_success.json", + "plots/SH/SH_testSLsinglefile2/SH_20190502_05180312/B04_prior_angle.png", + "plots/SH/SH_testSLsinglefile2/SH_20190502_05180312/B04_marginal_distributions.pdf", + "plots/SH/SH_testSLsinglefile2/SH_20190502_05180312/B04_data_avail.pdf", + "work/SH_testSLsinglefile2/B04_angle/B04_SH_20190502_05180312_res_table.h5", + "work/SH_testSLsinglefile2/B04_angle/B04_SH_20190502_05180312_marginals.nc", +] + +scripts = [script1, script2, script3, script4, script5] +targetdirs = ( + dict() +) # to be populated in setup_module with the target dirs for each step +__outdir = [] # to be populated in setup_module with the temp dir for all steps + + +def setup_module(): + """ + Set up the module for testing. + + This function makes a temporary directory with subdirectories with the required input data for each step. + + ```shell + $ tree -L 1 tests/tmpcpn_6tne + tests/tmpcpn_6tne + ├── step1 + ├── step2 + ├── step3 + ├── step4 + └── step5 + + 5 directories, 0 files + ``` + + When running in parallel using the xdist plugin, each worker will have its own copy of all the input data. This is necessary because the tests are run in parallel and the input data is modified by the tests. 
This way, the teardown function can delete the temporary directory for each worker without affecting the other workers. + """ + + homedir = Path(__file__).parent + input_data_dir = homedir / "testdata" + timestamp = datetime.now().strftime("%Y%m%d%H%M%S") + _outdir = mkdtemp(dir=homedir, suffix=timestamp) # temp dir for all steps + __outdir.append(_outdir) + + # make temp dirs for each step in _outdir + # the one for step1 with no input data + tmpstep1 = Path(_outdir) / "step1" + tmpstep1.mkdir() + script1.append(tmpstep1) + + # make temp dirs for steps 2,... with input data + for tarball in input_data_dir.glob("*.tar.gz"): + source_script, for_step_num = tarball.stem.split("-for-step-") + for_step_num = for_step_num.split(".")[0] + targetdir = Path(_outdir) / f"step{for_step_num}" + targetdirs[for_step_num] = targetdir + extract_tarball(targetdir, tarball) + + # Extracted files are in targetdir/script_name. Move them to its parent targetdir. Delete the script_name dir. + parent = targetdir / "work" + + # Rename and update parent to targetdir / script_name + new_parent = Path(targetdir, source_script) + parent.rename(new_parent) + + for child in new_parent.iterdir(): + if child.is_dir(): + shutil.move(str(child), Path(child.parent).parent) + shutil.rmtree(new_parent) + + # add the target dirs to the scripts + for i, script in enumerate(scripts[1:], start=2): + script.append(targetdirs[str(i)]) + + # throw in tmpstep1 in targetdirs to clean up later + targetdirs["1"] = tmpstep1 + + +def teardown_module(): + """ + Clean up after testing is complete. + """ + shutil.rmtree(__outdir[-1]) + + +def test_step1(): + # Step 1: B01_SL_load_single_file.py ~ 2 minutes + assert run_test(script1, paths1, delete_paths=False) # passing + + +def test_step2(): + # Step 2: B02_make_spectra_gFT.py ~ 2 min + assert run_test(script2, paths2) # passing + + +def check_B03_freq_reconst_x(): + outputdir = getoutputdir(script3) + directory = Path( + outputdir, "plots/SH/SH_testSLsinglefile2/SH_20190502_05180312B03_spectra/" + ) + files = get_all_filenames(directory) + + # Check there are 5 pdf files + return len([f for f in files if f.endswith("pdf")]) == 5 + + +def test_step3(): + # Step 3: B03_plot_spectra_ov.py ~ 11 sec + # This script has stochastic behavior, so the files produced don't always have the same names but the count of pdf files is constant for the test input data. 
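+    # paths3 holds the deterministically named outputs; the per-segment
+    # "B03_freq_reconst_x*" figures in B03_spectra/ get varying index suffixes,
+    # so check_B03_freq_reconst_x below only asserts that five pdfs were written.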
+ t1 = run_test(script3, paths3) + t2 = check_B03_freq_reconst_x() + assert t1 + assert t2 + + +def test_step4(): + # Step 4: A02c_IOWAGA_thredds_prior.py ~ 23 sec + t1 = run_test(script4, paths4) + t2 = check_file_exists(dir4, prefix4) + assert t1 + assert t2 + + +# TODO: step 5 after first merge of PR +# def test_step5(): +# # Step 5: B04_angle.py ~ 9 min +# assert run_test(script5, paths5) + +if __name__ == "__main__": + setup_module() + # test_step1() # passing + # test_step2() # passing + test_step3() # passing + test_step4() # passing + # test_step5() + teardown_module() diff --git a/tests/testdata/A02c_IOWAGA_thredds_prior-for-step-5.tar.gz b/tests/testdata/A02c_IOWAGA_thredds_prior-for-step-5.tar.gz new file mode 100644 index 00000000..02358666 --- /dev/null +++ b/tests/testdata/A02c_IOWAGA_thredds_prior-for-step-5.tar.gz @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:025d7d31692273fec660fe9402e34dc62ba2ded613e19616dda7c8e87b4bcef1 +size 23512596 diff --git a/tests/testdata/B01_SL_load_single_file-for-step-2.tar.gz b/tests/testdata/B01_SL_load_single_file-for-step-2.tar.gz new file mode 100644 index 00000000..b6b48cce --- /dev/null +++ b/tests/testdata/B01_SL_load_single_file-for-step-2.tar.gz @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:dac3b06877b9301191d5eeb30eaad69ebc4cde26d8f96da1ed3c99e41d0a3b0e +size 11682469 diff --git a/tests/testdata/B02_make_spectra_gFT-for-step-3.tar.gz b/tests/testdata/B02_make_spectra_gFT-for-step-3.tar.gz new file mode 100644 index 00000000..6b5041f4 --- /dev/null +++ b/tests/testdata/B02_make_spectra_gFT-for-step-3.tar.gz @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:327110408aea28e61a691a7e7e5df0db13f8e73801c7ac9ddec118c27e66e0e3 +size 22822613 diff --git a/tests/testdata/B03_plot_spectra_ov-for-step-4.tar.gz b/tests/testdata/B03_plot_spectra_ov-for-step-4.tar.gz new file mode 100644 index 00000000..a168e8e1 --- /dev/null +++ b/tests/testdata/B03_plot_spectra_ov-for-step-4.tar.gz @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1ef492d4b31f5ef22e33fc6831d255bbfdfac70ce504f4f09a031c8e7a041fde +size 23456276 diff --git a/tests/work.tar.gz b/tests/work.tar.gz new file mode 100644 index 00000000..b41173ff --- /dev/null +++ b/tests/work.tar.gz @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e5bd795d8f3bf949f43ee4b13b2682cd3430e51ce0e6324951eef03704d8e2df +size 27797713