From 45cf1f4659a8fc3ca32e40ea4d84404a24b8e467 Mon Sep 17 00:00:00 2001 From: Carlos Paniagua Date: Mon, 4 Mar 2024 14:08:36 -0500 Subject: [PATCH] chore: delete analyis_publish/ from root Closes #149 --- analyis_publish/PB01_regrid_filter.py | 607 ----------- .../PB03_plot_reconstruction_vid.py | 377 ------- analyis_publish/PB03_plot_spectra_3x3_ov.py | 333 ------ analyis_publish/PB03_plot_spectra_case_v1.py | 412 -------- analyis_publish/PB03_plot_spectra_case_v2.py | 374 ------- analyis_publish/PB04_angle.py | 930 ----------------- analyis_publish/PB04_angle_paper.py | 964 ------------------ analyis_publish/PB04_angle_priors.py | 560 ---------- analyis_publish/PB05_define_angle.py | 503 --------- analyis_publish/PB05_define_angle_supl.py | 453 -------- analyis_publish/PB06_plot_reconstruction.py | 495 --------- .../PB06_plot_reconstruction_simple.py | 496 --------- analyis_publish/PB07_plot_N_and_data.py | 331 ------ .../SB02_directional_distortion.py | 140 --- .../SB05_2d_wavefield_emulator_puplish.py | 375 ------- 15 files changed, 7350 deletions(-) delete mode 100644 analyis_publish/PB01_regrid_filter.py delete mode 100644 analyis_publish/PB03_plot_reconstruction_vid.py delete mode 100644 analyis_publish/PB03_plot_spectra_3x3_ov.py delete mode 100644 analyis_publish/PB03_plot_spectra_case_v1.py delete mode 100644 analyis_publish/PB03_plot_spectra_case_v2.py delete mode 100644 analyis_publish/PB04_angle.py delete mode 100644 analyis_publish/PB04_angle_paper.py delete mode 100644 analyis_publish/PB04_angle_priors.py delete mode 100644 analyis_publish/PB05_define_angle.py delete mode 100644 analyis_publish/PB05_define_angle_supl.py delete mode 100644 analyis_publish/PB06_plot_reconstruction.py delete mode 100644 analyis_publish/PB06_plot_reconstruction_simple.py delete mode 100644 analyis_publish/PB07_plot_N_and_data.py delete mode 100644 analyis_publish/SB02_directional_distortion.py delete mode 100644 analyis_publish/SB05_2d_wavefield_emulator_puplish.py diff --git a/analyis_publish/PB01_regrid_filter.py b/analyis_publish/PB01_regrid_filter.py deleted file mode 100644 index d6d2cd54..00000000 --- a/analyis_publish/PB01_regrid_filter.py +++ /dev/null @@ -1,607 +0,0 @@ -# %% -import os, sys -#execfile(os.environ['PYTHONSTARTUP']) - -""" -This file open a ICEsat2 track applied filters and corections and returns smoothed photon heights on a regular grid in an .nc file. 
-This is python 3 -""" - -exec(open(os.environ['PYTHONSTARTUP']).read()) -exec(open(STARTUP_2021_IceSAT2).read()) - -#%matplotlib inline - -import ICEsat2_SI_tools.convert_GPS_time as cGPS -import h5py -import ICEsat2_SI_tools.io as io -import ICEsat2_SI_tools.spectral_estimates as spec - -import time -import imp -import copy -import spicke_remover -import datetime -import generalized_FT as gFT -from scipy.ndimage.measurements import label - -fig_sizes = mconfig['fig_sizes']['Cryosphere'] - - -#import s3fs -# %% -ID_name, batch_key, test_flag = io.init_from_input(sys.argv) # loads standard experiment -#ID_name, batch_key, test_flag = '20190605061807_10380310_004_01', 'SH_batch01', False -#ID_name, batch_key, test_flag = '20190601094826_09790312_004_01', 'SH_batch01', False -#ID_name, batch_key, test_flag = '20190207111114_06260210_004_01', 'SH_batch02', False -#ID_name, batch_key, test_flag = '20190208152826_06440210_004_01', 'SH_batch01', False -#ID_name, batch_key, test_flag = '20190213133330_07190212_004_01', 'SH_batch02', False -#ID_name, batch_key, test_flag = '20190210143705_06740210_004_01', 'SH_batch02', False - -#ID_name, batch_key, test_flag = '20190215184558_07530210_004_01', 'SH_batch02', False - -# local best case: -#ID_name, batch_key, test_flag = '20190219073735_08070210_004_01', 'SH_batch02', False - - -# ID_name, batch_key, test_flag = 'SH_20190219_08070212', 'SH_publish', True -# ID_name, batch_key, test_flag = 'SH_20190502_05160312', 'SH_publish', True -# ID_name, batch_key, test_flag = 'SH_20190502_05180312', 'SH_publish', True - -ID_name, batch_key, ID_flag = 'SH_20190224_08800210', 'SH_publish', True - -ID, _, hemis, batch = io.init_data(ID_name, batch_key, ID_flag, mconfig['paths']['work'], ) - -#print(ID_name, batch_key, test_flag) -#ID_name= '20190605061807_10380310_004_01' -ATlevel= 'ATL03' - - - -load_path_scratch = mconfig['paths']['scratch'] +'/'+ batch_key +'/' -load_path_work = mconfig['paths']['work'] +'/'+ batch_key +'/' - -#load_file = 'A01c_'+ATlevel+'_'+ID_name -#load_file_str = load_path + load_file+'.h5' - -# load_path = mconfig['paths']['work'] +'/B01_regrid_'+hemis+'/' -# load_file = load_path + 'processed_' + ATlevel + '_' + ID_name + '.h5' - -plot_path = mconfig['paths']['plot'] + '/'+hemis+'/'+batch_key+'/publish/' + ID_name + '/' -MT.mkdirs_r(plot_path) -# %% - -all_beams = mconfig['beams']['all_beams'] -high_beams = mconfig['beams']['high_beams'] -low_beams = mconfig['beams']['low_beams'] - -#B00 = io.getATL03_beam(load_path_scratch +'/'+ID['tracks']['ATL03'][0]+ '.h5') - -#B00_hdf5 = h5py.File(load_path_scratch +'/A01c_ATL03_'+ID_name+ '_corrected.h5', 'r') - -B0_hdf5 = h5py.File(load_path_scratch +'/A01c_ATL03_'+ID_name+ '_corrected.h5', 'r') -B2_hdf5 = h5py.File(load_path_work +'B01_regrid'+'/'+ID_name + '_B01_regridded.h5', 'r') -B3_hdf5 = h5py.File(load_path_work +'B01_regrid'+'/'+ID_name + '_B01_binned.h5', 'r') - -B0, B2, B3 = dict(), dict(), dict() -for b in all_beams: - B0[b] = io.get_beam_hdf_store(B0_hdf5[b]) - B2[b] = io.get_beam_hdf_store(B2_hdf5[b]) - B3[b] = io.get_beam_hdf_store(B3_hdf5[b]) - -B0_hdf5.close(), B2_hdf5.close(), B2_hdf5.close() - -# B0 = io.load_pandas_table_dict(ID_name + '_B01_corrected' , load_path) -#B1 = io.load_pandas_table_dict(ID_name + '_B01_new_coords' , load_path) -#B2 = io.load_pandas_table_dict(ID_name + '_B01_regridded', load_path) # rhis is the rar photon data -#B3 = io.load_pandas_table_dict(ID_name + '_B01_binned' , load_path) # - - -load_file_ATL10 = 'processed_' + 'ATL07-02' + 
'_20190219063727_08070201_005_01.h5' - -# %% - -B07, B07_c = dict(), dict() -for k in all_beams: - B07[k] = io.getATL07_beam(load_path_scratch + ID['tracks']['ATL07']+'.h5', beam= k) - #B07_c[k] = io.getATL07_height_corrections(load_path_scratch + load_file_ATL10, beam= k) - - -dist_list = np.array([np.nan, np.nan]) -for I in B2.values(): - #B2[k]['dist'] = B2[k]['x'] - dist_list = np.vstack([dist_list, [I['x'].min(), I['x'].max()] ] ) - -track_dist_bounds = [ np.nanmin(dist_list[:, 0], 0) , np.nanmax(dist_list[:, 1], 0) ] - - - -# %% -xscale= 1e3 -F= M.figure_axis_xy(fig_sizes['two_column'][0], fig_sizes['two_column'][1]* 0.8, view_scale= 0.6) - -# for k,I in B2.items(): -# dist= I['x'] - track_dist_bounds[0] -# plt.plot( dist/xscale , I['across_track_distance']/xscale , '.', color= col.black , alpha = 0.7, markersize = 0.3) - - -ATL07min, ATL07max = list(),list() -for k,I in B2.items(): - dist= I['x'] - track_dist_bounds[0] - plt.plot( dist/xscale , I['across_track_distance']/xscale , '.' , color = col.rels[k] , markersize = 0.1) - I2= B07[k] - I2['dist'] = np.interp(I2['time']['delta_time'], I['delta_time'][::-1] , dist[::-1]) - I2['across_track_distance'] = np.interp(I2['time']['delta_time'], I['delta_time'][::-1] , I['across_track_distance'][::-1]) - plt.plot( I2['dist']/xscale , I2['across_track_distance']/xscale , '.' , color = 'black' , markersize = 0.2, alpha =0.3) - - ATL07min.append(I2['dist'].min()), ATL07max.append(I2['dist'].max()) - -F.ax.axvline(np.array(ATL07min).min()/xscale , color='gray', zorder= 2, linewidth = 0.8) -F.ax.axvline(np.array(ATL07max).max()/xscale , color='gray', zorder= 2, linewidth = 0.8) -F.ax.axvspan(np.array(ATL07min).min()/xscale, np.array(ATL07max).max()/xscale, color='gray', zorder= 0, alpha= 0.4) - -#TT = Ii[[ 'lats', 'lons'] ] -def pstr(TT): - return str(np.round(TT['lons'], 3)) + '$^{\circ}$E\n' + str(np.round(TT['lats'], 3)) + '$^{\circ}$N' - -#print(pstr(TT)) - -for k in high_beams: - - Ii = B2[k].iloc[0] - dist= track_dist_bounds[1] - track_dist_bounds[0] - plt.text(dist/xscale+ 5, Ii.across_track_distance/xscale , pstr(Ii[[ 'lats', 'lons'] ]), ha ='left', va ='center' ) - - Ii = B2[k].iloc[-1] - dist= Ii.x - track_dist_bounds[0] - plt.text(0- 8, Ii.across_track_distance/xscale , pstr(Ii[[ 'lats', 'lons'] ]), ha ='right', va ='center' ) - -plt.text(np.mean([np.array(ATL07min).min()/xscale,np.array(ATL07max).max()/xscale]) - 8, 1.5 +(B2['gt2l']['across_track_distance'].mean()/xscale) , 'ATL07', ha ='center', va ='center', fontsize=12 ) - - -F.ax.axvline(0/xscale , color='black', zorder= 2, linewidth = 0.8) -F.ax.axvline((track_dist_bounds[1] - track_dist_bounds[0])/xscale , color='gray', zorder= 2, linewidth = 0.8) -F.ax.axhline(0, color='black', zorder= 2, linewidth = 0.8) - -plt.text(0-5, 0+0.2 , 'origin', horizontalalignment ='right', zorder= 12 ) -plt.plot(0, 0, '.', color = 'black', markersize= 10 , zorder= 12) - - -plt.xlim(-200, (track_dist_bounds[1] - track_dist_bounds[0])/xscale +50 ) -#plt.ylim(-3, 5 ) - -plt.title('Beams in the along-track coordinate system\n' + io.ID_to_str(ID_name), loc='left') -plt.xlabel('along track distance x (km)') -plt.ylabel('across track distance y (km)') - -#F.save_pup(path= plot_path , name='B01_ALT03_'+ID_name+'_regridded_tracks') -F.save_light(path= plot_path, name='B01_ALT03_'+ID_name+'_regridded_tracks') - -# %% - -key = 'gt2r' -#lead_color= col.rels[key] -lead_color= col.rels['group2'] -ALT07_color= col.rels['aug1'] -MT.mkdirs_r(plot_path) -T2 = B2[key].copy() -T2['dist'] = T2['x'] - 
track_dist_bounds[0] -T3 = B3[key].copy() - -T07 = B07[key] - -# # %% -T07['dist'] = np.interp(T07['time']['delta_time'], T3['delta_time'] , T3['dist']) - -# T07.T -# T3.T -# -#font_for_pres() -# -# plt.plot(T07['time']['delta_time'] , T07['ref']['longitude'], '.b', markersize= 2) -# plt.plot( T3['delta_time'], T3['lons'], '.', color='red', markersize= 6, zorder=0) -# -# # regrid T07 on T03 dist -# -# plt.plot(T2_large['lons'], T2_large['heights_c'], '.', color='gray', markersize= 0.8) -# plt.plot(T3_large['lons'], T3_large['heights_c_weighted_mean'], '.r', markersize= 1) -# plt.plot(T07['ref']['longitude'],T07['heights']['height_segment_height'], '.b', markersize= 0.9) -# plt.xlim(T3_large['lons'].min(), T3_large['lons'].max()) -# #plt.ylim(T3_large['lats'].min(), T3_large['lats'].max()) -# -# -# - -x_key= 'dist' -latlims = (T3['dist'].iloc[0] , T3['dist'].iloc[-1] ) -dl = 2500 -#chunk_list = np.arange(latlims[0],latlims[1], dl ) -#chunk_list = sample( list(np.arange(latlims[0],latlims[1],dl )[0:80]) ,10) -chunk_list = np.arange( latlims[0], latlims[1], dl )[::1] - - -#for ll in chunk_list: -font_for_print() -xscale=1e3 - -# %% -for chunk_list_i in np.arange(chunk_list.size)[0:1]: - - chunk_list_i= 252 - # for chunk_list_i in np.arange(chunk_list_i-5, chunk_list_i+5, 1): - fn = copy.copy(lstrings) - F = M.figure_axis_xy(fig_sizes['two_column'][0], fig_sizes['two_column'][1], view_scale=0.8, container =True) - - gs = GridSpec(3,8, wspace=0.1, hspace=0.7)#figure=fig, - - ax1 = F.fig.add_subplot(gs[0, :]) #plt.subplot(1, 6, fp) - #ax1.tick_params(labelbottom=False) - ll_large = chunk_list[chunk_list_i]+2000 - tt_large = ll_large + 12000 - - T2_large = T2[ (T2['dist'] > ll_large) & (T2['dist'] < tt_large) ] - T3_large = T3[ (T3['dist'] > ll_large) & (T3['dist'] < tt_large) ] - T07_large= T07[ (T07['dist'] > ll_large) & (T07['dist'] < tt_large) ] - - - plt.plot( T2_large[x_key]/xscale, T2_large['heights_c'], 'k.', markersize= 0.5, alpha =0.8 ) - plt.plot( T3_large[x_key]/xscale, T3_large['heights_c_weighted_mean'] , '.', color=lead_color, linewidth=0.5, markersize=1,alpha=0.9, label='x-gauss weighted mean') - - - grad_y_offset = -1.25 - uncertainty_y_offset =-1 - AT07_bool_offset = -0.75 - AT07_cat_offset = 0 - - - col.colormaps2(5) - #set(T07_large['heights']['height_segment_type']) - - htype_cmap = [col.orange, col.cascade3, col.cascade2, col.cascade1] - htype_list = ['cloud_covered','other', 'specular_lead_low_w_bkg', 'specular_lead_low','specular_lead_high_w_bkg', 'specular_lead_high', 'dark_lead_smooth_w_bkg', 'dark_lead_smooth', 'dark_lead_rough_w_bkg' ,'dark_lead_rough', 'off_pointing'] - - htype_list= ['cloud_covered','other', 'specular_lead', 'dark_lead' , 'other'] - htype_list - for htype, hcolor, htype_str in zip( [0, 1, (2, 5), (6, 9)] , htype_cmap , htype_list ): - - if type(htype) is tuple: - imask = (T07_large['heights']['height_segment_type'] >= htype[0]) & (T07_large['heights']['height_segment_type'] <= htype[1]) - else: - imask = T07_large['heights']['height_segment_type'] == htype - pdata = T07_large[imask] - plt.plot( pdata[x_key]/xscale, pdata['heights']['height_segment_height'] + AT07_cat_offset, '.', color =hcolor, markersize=0.8,alpha=0.9, label='ATL07 height | ' +htype_str) - - for htype, hcolor, htype_str, hsize in zip( [0, 1] , [col.gridcolor, col.red] , ['sea ice', 'ssh'] , [1, 5]): - - pdata = T07_large[T07_large['heights']['height_segment_ssh_flag'] == htype] - plt.plot( pdata[x_key]/xscale, pdata['heights']['height_segment_height']*0+AT07_bool_offset, '.', color 
=hcolor, markersize=0.8,alpha=0.9, label='ATL07 height | ' +htype_str) - - #plt.xlabel('Meters from the Sea Ice Edge') - plt.ylabel('Photon height (m)') - - ax2 = F.fig.add_subplot(gs[1:, 0:6]) #plt.subplot(1, 6, fp) - #ax1.tick_params(labelbottom=False) - - ll = chunk_list[chunk_list_i]+ 4000 - tt = ll+ 3000 - - T2_small = T2[ (T2['dist'] > ll) & (T2['dist'] < tt) ] - T3_small = T3[ (T3['dist'] > ll) & (T3['dist'] < tt) ] - T07_small= T07[ (T07['dist'] > ll) & (T07['dist'] < tt) ] - - plt.plot( T2_small[x_key]/xscale, T2_small['heights_c'], 'k.', markersize= 0.5, alpha =0.8 , label='ATL03 photon heights' ) - plt.plot( T3_small[x_key]/xscale, T3_small['heights_c_weighted_mean'] , '.-', color=lead_color, linewidth=0.5, markersize=2,alpha=0.9, label='Gaussian-weighted mean') - - for htype, hcolor, htype_str in zip( [0, 1, (2, 5), (6, 9), -1] , htype_cmap , htype_list ): - - if type(htype) is tuple: - imask = (T07_small['heights']['height_segment_type'] >= htype[0]) & (T07_small['heights']['height_segment_type'] <= htype[1]) - else: - imask = T07_small['heights']['height_segment_type'] == htype - - pdata = T07_small[ imask ] - plt.plot( pdata[x_key]/xscale, pdata['heights']['height_segment_height'] +AT07_cat_offset , '.', color =hcolor, markersize=1.7,alpha=1, label= str(htype) +' '+ htype_str) - - #plt.plot( T07_small[x_key]/xscale, T07_small['heights']['height_segment_height'] +AT07_cat_offset+0.1 , '.', color ='green', markersize=1.7,alpha=1, label= str(htype) +' '+ htype_str) - - for htype, hcolor, htype_str, hsize in zip( [0, 1] , [col.gridcolor, col.red] , ['sea ice', 'ssh'] , [1.5, 5]): - - pdata = T07_small[T07_small['heights']['height_segment_ssh_flag'] == htype] - plt.plot( pdata[x_key]/xscale, pdata['heights']['height_segment_height']*0+AT07_bool_offset, '.', color =hcolor, markersize=hsize, alpha=1, label= 'ATL07 ' + htype_str ) - - - - #plt.plot(T3_small[x_key], T3_small['heights_c_std'] - 1.8, 'k-', linewidth=0.5,alpha=1) - #uncertainty_y_offset - - - box_lims= T3_small['dist'].min()+30, T3_small['dist'].max() - ax1.axvspan(box_lims[0]/xscale, box_lims[-1]/xscale , color =col.gridcolor, alpha = 0.4, zorder= 0) - ax2.axvspan(box_lims[0]/xscale, box_lims[-1]/xscale ,color =col.gridcolor, alpha = 0.4, zorder= 0) - - #ax2.set_xlim( T3_small['dist'].min(), T3_small['dist'].max() ) - - - hkey = 'heights_c_weighted_mean' - x = T3['dist'] - #xlims = x.iloc[0], x.iloc[-1] - dd = np.copy(T3[hkey]) - - - dd_error = np.copy(T3['heights_c_std']) - dd_error[np.isnan(dd_error)] = 100 - #plt.hist(1/dd_weight, bins=40) - #plt.plot(x, dd, 'gray', label='displacement (m) ') - - # compute slope spectra !! 
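(The gradient/despike/mask sequence that follows is the slope preprocessing every downstream spectral script in this directory repeats: differentiate the binned heights along track, remove spikes, and turn low-photon bins into gaps. A minimal self-contained sketch of the same idea, with a median-based despiker standing in for the repo-internal spicke_remover module; spike_thresh and min_photons are hypothetical names playing the roles of its spreed argument and the N_photos <= 5 cut:

    import numpy as np

    def slope_series(heights, n_photons, spike_thresh=10.0, min_photons=5):
        """Differentiate binned heights, despike, and mask data gaps."""
        dd = np.gradient(np.asarray(heights, dtype=float))
        # crude despiking: reject samples far from the local median
        med = np.nanmedian(dd)
        mad = np.nanmedian(np.abs(dd - med)) + 1e-12
        dd = np.where(np.abs(dd - med) > spike_thresh * mad, np.nan, dd)
        # bins with too few photons are treated as gaps, as in the lines below
        dd[np.asarray(n_photons) <= min_photons] = np.nan
        return dd

The NaNs are then either zero-filled for windowing or dropped for the gappy-data path, mirroring the dd_filled / dd_no_nans split in the deleted lines below.)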
- dd = np.gradient(dd) - dd, _ = spicke_remover.spicke_remover(dd, spreed=10, verbose=False) - dd_nans = (np.isnan(dd) ) + (T3['N_photos'] <= 5) - - dd_filled = np.copy(dd) - dd_filled[dd_nans] = 0 - #win = create_weighted_window(dd_filled) - - # using gappy data - dd_no_nans = dd[~dd_nans] # windowing is applied here - x_no_nans = x[~dd_nans] - dd_error_no_nans = dd_error[~dd_nans] - - # using gappy data - T2_grad_x = x[ (x > ll_large) & (x< tt_large) ] - T2_grad_dd = dd[ (x > ll_large) & (x< tt_large) ] - - T3_grad_x = x_no_nans[ (x_no_nans > ll) & (x_no_nans< tt) ] - T3_grad_dd = dd_no_nans[ (x_no_nans > ll) & (x_no_nans< tt) ] - - T3_grad_x = x[ (x > ll) & (x< tt) ] - T3_grad_dd = dd[ (x > ll) & (x< tt) ] - - - #T3_grad_dd[dd_nans] - ax1.plot(T2_grad_x/xscale, T2_grad_dd +grad_y_offset, '-', color= col.cascade1, linewidth = 0.6, label='slope data (m/m)') - ax1.fill_between( T3_grad_x/xscale, T3_grad_dd +grad_y_offset, y2=grad_y_offset, color=col.cascade3,alpha=0.4) - ax1.axhline(grad_y_offset, linewidth= 0.5 , color = 'black') - - ax2.plot( T3_grad_x/xscale, T3_grad_dd +grad_y_offset, '-', color= col.cascade1, linewidth = 0.6, label='slope data (m/m)') - ax2.fill_between( T3_grad_x/xscale, T3_grad_dd+ grad_y_offset, y2=grad_y_offset, color=col.cascade3,alpha=0.5) - ax2.axhline(grad_y_offset, linewidth= 0.5 , color = 'black') - - - #plt.fill_between( T3_large[x_key]/xscale, uncertainty_y_offset+ T3_large['heights_c_std']/2 , y2=uncertainty_y_offset -T3_large['heights_c_std']/2, color=col.cascade2,alpha=1) - #ax1.axhline(uncertainty_y_offset, linewidth= 0.5 , color = 'black') - dx = np.median(np.diff(T3_small['dist'])) - #sum(np.isnan(T3_small['heights_c_std'])) - plt.fill_between( T3_small[x_key]/xscale, uncertainty_y_offset + T3_small['heights_c_std'] /dx , y2=uncertainty_y_offset-T3_small['heights_c_std']/dx, color=col.cascade2,alpha=1, label='uncertrainty') - ax2.axhline(uncertainty_y_offset, linewidth= 0.5 , color = 'black') - - ax1.spines['bottom'].set_visible(False) - ax1.spines['top'].set_visible(True) - ax1.spines['top'].set_linewidth(0.2) - - ax1.xaxis.set_ticks_position('top') - ax1.xaxis.set_label_position('top') - ax1.set_xlabel('Along track distance x (km)') - - xlims_large = T2_large['dist'].min()/xscale, T2_large['dist'].max()/xscale - ax1.set_xlim(xlims_large[0], xlims_large[1] ) - - ax1.spines['left'].set_position(('outward', 10)) - y_ticks = MT.tick_formatter( np.arange(-2, 1.4, 1), interval=1, rounder=1, expt_flag=False, shift=0 ) - ax1.set_yticks(y_ticks[1]) - ax1.set_yticklabels(y_ticks[0]) - - #ax1.xaxis.label.set_color(col.gray) - ax1.tick_params(axis='both', colors=col.gray) - ax1.spines['left'].set_color(col.gray) - - - x_ticks = MT.tick_formatter( np.arange(np.round(xlims_large[0]) , np.round(xlims_large[1]), 1), interval=2, rounder=1, expt_flag=False, shift=1 ) - ax1.set_xticks(x_ticks[1]) - ax1.set_xticklabels(x_ticks[0]) - - - ax2.spines['bottom'].set_visible(False) - ax2.spines['top'].set_visible(True) - ax2.spines['top'].set_linewidth(0.2) - ax2.xaxis.set_ticks_position('top') - ax2.xaxis.set_label_position('top') - - ax2.spines['left'].set_position(('outward', 10)) - - - plt.ylabel('Photon height (m)') - - - ax2.set_yticks(y_ticks[1]) - ax2.set_yticklabels(y_ticks[0]) - - x_ticks = MT.tick_formatter( np.arange(np.round(xlims_large[0]) , np.round(xlims_large[1]), 1), interval=1, rounder=1, expt_flag=False, shift=1 ) - - ax2.set_xticks(x_ticks[1]) - ax2.set_xticklabels(x_ticks[0]) - ax2.set_xlim( box_lims[0]/xscale, box_lims[-1]/xscale ) - - 
ax2.tick_params(axis='both', colors=col.gray) - ax2.spines['left'].set_color(col.gray) - ax2.set_xlabel('Along track distance x (km)') - - ax2.legend(loc='center left', bbox_to_anchor=(1, 0.5)) - ax1.set_title(next(fn), loc= 'left', y =1.2) - ax2.set_title(next(fn), loc= 'left', y =1.1) - plt.suptitle('Example Photon Heights in the Marginal Ice Zone\n' + io.ID_to_str(ID_name), x=0.125, y = 1.03, ha='left') - - F.save_light(path= plot_path, name='B01_ALT03_signal_process_'+ID_name +'_'+ str(chunk_list_i)) - F.save_pup(path= plot_path, name='B01_ALT03_signal_process_'+ID_name) - - - -# %% - -# Introductionary figure -font_for_print() -F = M.figure_axis_xy(8, 1.1, view_scale=0.8, container =True) - -gs = GridSpec(3,1, wspace=0.1, hspace=0.7)#figure=fig, -ax1 = F.fig.add_subplot(gs[0:2, :]) #plt.subplot(1, 6, fp) -#ax1 = F.ax - -key = 'gt2r' -T2 = B2[key].copy() -T2['dist'] = T2['x'] - track_dist_bounds[0] -T2_large = T2[ (T2['dist'] > ll_large) & (T2['dist'] < tt_large) ] - - -#plt.title('Beam ' + str(key), loc='left') -#, s= 1, marker='o', color='black', alpha =0.02, edgecolors= 'none' ) -plt.plot( T2_large[x_key]/xscale, T2_large['heights_c'], 'o', color= col.rels[key], markersize= 0.4, alpha =0.04 ) -#plt.plot( T3_large[x_key]/xscale, T3_large['heights_c_weighted_mean'] , '.', color=lead_color, linewidth=0.5, markersize=1,alpha=0.9, label='x-gauss weighted mean +1') - -y_ticks = MT.tick_formatter( np.arange(0, 2+1, 1), interval=1, rounder=1, expt_flag=False, shift=0 ) -ax1.spines['bottom'].set_visible(False) -# ax1.spines['top'].set_visible(True) -ax1.spines['bottom'].set_linewidth(0.5) -ax1.spines['left'].set_linewidth(0.5) -ax1.xaxis.set_ticks_position('bottom') -ax1.xaxis.set_label_position('bottom') -ax1.tick_params(bottom=False, labelbottom= False) -#ax1.axhline(0, linewidth= 0.5 , color = 'black') -# ax1.set_yticks(y_ticks[1]) -# ax1.set_yticklabels(y_ticks[0]) -ax1.set_ylim(0, 2) -ax1.set_xlim(xlims_large[0], xlims_large[1] ) -plt.ylabel('meters') - - -ax1 = F.fig.add_subplot(gs[1:3, :]) #plt.subplot(1, 6, fp) - -key = 'gt2l' -T2 = B2[key].copy() -T2['dist'] = T2['x'] - track_dist_bounds[0] -T2_large = T2[ (T2['dist'] > ll_large) & (T2['dist'] < tt_large) ] - -#plt.title('Beam ' + str(key), loc='left') -#, s= 1, marker='o', color='black', alpha =0.02, edgecolors= 'none' ) -plt.plot( T2_large[x_key]/xscale, T2_large['heights_c'], 'o', color= col.rels[key], markersize= 0.4, alpha =0.04 ) -#plt.plot( T3_large[x_key]/xscale, T3_large['heights_c_weighted_mean'] , '.', color=lead_color, linewidth=0.5, markersize=1,alpha=0.9, label='x-gauss weighted mean +1') - - -xlims_large = T2_large['dist'].min()/xscale, T2_large['dist'].max()/xscale -ax1.set_xlim(xlims_large[0], xlims_large[1] ) -#plt.xlim(T2_large[x_key][0]/xscale, T2_large[x_key][-1]/xscale) -#plt.xlabel('Meters from the Sea Ice Edge') -ax1.set_xlabel('Along track distance x (km)') -#ax2.xaxis.set_ticks_position('top') -#ax2.xaxis.set_label_position('top') - -ax1.set_facecolor((1.0, 1.00, 1.00, 0)) -ax1.spines['bottom'].set_visible(False) -ax1.spines['right'].set_visible(True) -ax1.spines['left'].set_visible(False) -ax1.spines['bottom'].set_linewidth(0.5) -ax1.spines['right'].set_linewidth(0.5) -ax1.xaxis.set_ticks_position('bottom') -ax1.xaxis.set_label_position('bottom') -ax1.tick_params(bottom=False, right = True, left = False, labelright= True, labelleft= False, labelbottom= True) -#ax1.axhline(0, linewidth= 0.5 , color = 'black') -ax1.set_yticks(y_ticks[1]) -ax1.set_yticklabels(y_ticks[0]) -ax1.set_ylim(-2, 2) -# 
plt.ylabel('meters') - -F.save_light(path= plot_path, name='B01_ALT03_intro_'+ID_name) -F.save_pup(path= plot_path, name='B01_ALT03_intro_'+ID_name) - - -# %% -# GFT alternation figure - -dl = 25e3 -chunk_list = np.arange( np.floor(latlims[0]/1e3/10) *1e4 , latlims[1], dl )[::1] - -chunk_list_i= 1 -ll_large = chunk_list[chunk_list_i]#+2000 -tt_large = ll_large + 25e3 *3 - - -font_for_print() - -F = M.figure_axis_xy(fig_sizes['two_column'][0], fig_sizes['two_column'][1]*0.25, view_scale=0.8, container =True) - -gs = GridSpec(3,1, wspace=0.1, hspace=0.7)#figure=fig, -ax1 = F.fig.add_subplot(gs[0:2, :]) #plt.subplot(1, 6, fp) -#ax1 = F.ax - -key = 'gt2l' -T2 = B2[key].copy() -T2['dist'] = T2['x'] - track_dist_bounds[0] -T2_large = T2[ (T2['dist'] > ll_large) & (T2['dist'] < tt_large) ] -xlims_large = T2_large['dist'].min()/xscale, T2_large['dist'].max()/xscale - - -#plt.title('Beam ' + str(key), loc='left') -#, s= 1, marker='o', color='black', alpha =0.02, edgecolors= 'none' ) -plt.plot( T2_large[x_key]/xscale, T2_large['heights_c'], 'o', color= col.rels[key], markersize= 0.4, alpha =0.04 ) -#plt.plot( T3_large[x_key]/xscale, T3_large['heights_c_weighted_mean'] , '.', color=lead_color, linewidth=0.5, markersize=1,alpha=0.9, label='x-gauss weighted mean +1') - -y_ticks = MT.tick_formatter( np.arange(0, 2+1, 1), interval=1, rounder=1, expt_flag=False, shift=0 ) -ax1.spines['bottom'].set_visible(False) -# ax1.spines['top'].set_visible(True) -ax1.spines['bottom'].set_linewidth(0.5) -ax1.spines['left'].set_linewidth(0.5) -ax1.xaxis.set_ticks_position('bottom') -ax1.xaxis.set_label_position('bottom') -#ax1.tick_params(bottom=False, labelbottom= False) -#ax1.axhline(0, linewidth= 0.5 , color = 'black') -# ax1.set_yticks(y_ticks[1]) -# ax1.set_yticklabels(y_ticks[0]) -ax1.set_ylim(-2, 2) -ax1.set_xlim(xlims_large[0], xlims_large[1] ) -plt.ylabel('meters') - -#plt.grid('horizontal') - -x_ticks = MT.tick_formatter( np.arange(ll_large/1e3, tt_large/1e3, 12.5) , interval=1, rounder=1, expt_flag=False, shift=0 ) -ax1.set_xticks(x_ticks[1]) -ax1.set_xticklabels(x_ticks[0]) -ax1.axhline(0, linewidth= 0.5 , color = 'black') -ax1.grid(which='major', axis='x', linestyle='-', color='black', linewidth= 0.5) - -# ax1 = F.fig.add_subplot(gs[1:3, :]) #plt.subplot(1, 6, fp) -# -# key = 'gt2l' -# T2 = B2[key].copy() -# T2['dist'] = T2['x'] - track_dist_bounds[0] -# T2_large = T2[ (T2['dist'] > ll_large) & (T2['dist'] < tt_large) ] -# -# #plt.title('Beam ' + str(key), loc='left') -# #, s= 1, marker='o', color='black', alpha =0.02, edgecolors= 'none' ) -# plt.plot( T2_large[x_key]/xscale, T2_large['heights_c'], 'o', color= col.rels[key], markersize= 0.4, alpha =0.04 ) -# #plt.plot( T3_large[x_key]/xscale, T3_large['heights_c_weighted_mean'] , '.', color=lead_color, linewidth=0.5, markersize=1,alpha=0.9, label='x-gauss weighted mean +1') - - -# xlims_large = T2_large['dist'].min()/xscale, T2_large['dist'].max()/xscale -#ax1.set_xlim(xlims_large[0], xlims_large[1] ) -#plt.xlim(T2_large[x_key][0]/xscale, T2_large[x_key][-1]/xscale) -#plt.xlabel('Meters from the Sea Ice Edge') -ax1.set_xlabel('Along track distance x (km)') -#ax2.xaxis.set_ticks_position('top') -#ax2.xaxis.set_label_position('top') - -# ax1.set_facecolor((1.0, 1.00, 1.00, 0)) -# ax1.spines['bottom'].set_visible(False) -# ax1.spines['right'].set_visible(True) -# ax1.spines['left'].set_visible(False) -# ax1.spines['bottom'].set_linewidth(0.5) -# ax1.spines['right'].set_linewidth(0.5) -# ax1.xaxis.set_ticks_position('bottom') -# 
ax1.xaxis.set_label_position('bottom') -#ax1.tick_params(bottom=False, right = True, left = False, labelright= True, labelleft= False, labelbottom= True) -#ax1.axhline(0, linewidth= 0.5 , color = 'black') -#ax1.set_yticks(y_ticks[1]) -#ax1.set_yticklabels(y_ticks[0]) -#ax1.set_ylim(-2, 2) -# plt.ylabel('meters') - -F.save_light(path= plot_path, name='B01_ALT03_GFT_alter_'+ID_name) -F.save_pup(path= plot_path, name='B01_ALT03_GFT_alter_'+ID_name) diff --git a/analyis_publish/PB03_plot_reconstruction_vid.py b/analyis_publish/PB03_plot_reconstruction_vid.py deleted file mode 100644 index e3f09f18..00000000 --- a/analyis_publish/PB03_plot_reconstruction_vid.py +++ /dev/null @@ -1,377 +0,0 @@ - -import os, sys -#execfile(os.environ['PYTHONSTARTUP']) - -""" -This file open a ICEsat2 track applied filters and corections and returns smoothed photon heights on a regular grid in an .nc file. -This is python 3 -""" - -exec(open(os.environ['PYTHONSTARTUP']).read()) -exec(open(STARTUP_2021_IceSAT2).read()) - -#%matplotlib inline - -import ICEsat2_SI_tools.convert_GPS_time as cGPS -import h5py -import ICEsat2_SI_tools.io as io -import ICEsat2_SI_tools.spectral_estimates as spec - -import time -import imp -import copy -import spicke_remover -import datetime -import generalized_FT as gFT -from scipy.ndimage.measurements import label - -xr.set_options(display_style='text') -#import s3fs -# %% -ID_name, batch_key, ID_flag = io.init_from_input(sys.argv) # loads standard experiment -#ID_name, batch_key, ID_flag = '20190605061807_10380310_004_01', 'SH_batch01', False -#ID_name, batch_key, ID_flag = '20190601094826_09790312_004_01', 'SH_batch01', False -#ID_name, batch_key, ID_flag = '20190207111114_06260210_004_01', 'SH_batch02', False -#ID_name, batch_key, ID_flag = '20190208152826_06440210_004_01', 'SH_batch01', False -#ID_name, batch_key, ID_flag = '20190213133330_07190212_004_01', 'SH_batch02', False -#ID_name, batch_key, ID_flag = '20190207002436_06190212_004_01', 'SH_batch02', False -#ID_name, batch_key, ID_flag = '20190206022433_06050212_004_01', 'SH_batch02', False - - -#ID_name, batch_key, ID_flag = '20190215184558_07530210_004_01', 'SH_batch02', False -ID_name, batch_key, ID_flag = '20190219073735_08070210_004_01', 'SH_batch02', False -ID_name, batch_key, ID_flag = 'SH_20190219_08070210', 'SH_publish', True - - -ID, _, hemis, batch = io.init_data(ID_name, batch_key, ID_flag, mconfig['paths']['work'], ) -#print(ID_name, batch_key, ID_flag) -hemis, batch = batch_key.split('_') - -ATlevel= 'ATL03' - -load_path_scratch = mconfig['paths']['scratch'] +'/'+ batch_key +'/' -load_path_work = mconfig['paths']['work'] +'/'+ batch_key +'/' - - -#B0_hdf5 = h5py.File(load_path_scratch +'/A01c_ATL03_'+ID_name+ '_corrected.h5', 'r') -B2_hdf5 = h5py.File(load_path_work +'B01_regrid'+'/'+ID_name + '_B01_regridded.h5', 'r') -B3_hdf5 = h5py.File(load_path_work +'B01_regrid'+'/'+ID_name + '_B01_binned.h5', 'r') - -B0, B2, B3 = dict(), dict(), dict() -for b in all_beams: - #B0[b] = io.get_beam_hdf_store(B0_hdf5[b]) - B2[b] = io.get_beam_hdf_store(B2_hdf5[b]) - B3[b] = io.get_beam_hdf_store(B3_hdf5[b]) - -B2_hdf5.close(), B2_hdf5.close() - -load_path = mconfig['paths']['work']+ batch_key +'/B02_spectra/' -load_file = load_path + 'B02_' + ID_name #+ '.nc' -#MT.mkdirs_r(plot_path) - -Gk = xr.open_dataset(load_file+'_gFT_k.nc') -Gx = xr.open_dataset(load_file+'_gFT_x.nc') -Gfft = xr.open_dataset(load_file+'_FFT.nc') -# print(Gk) -# print(Gx) - -all_beams = mconfig['beams']['all_beams'] -high_beams = 
mconfig['beams']['high_beams'] -low_beams = mconfig['beams']['low_beams'] - -# %% check paths (again) -col.colormaps2(21) -col_dict= col.rels - -# define simple routines -def add_info(D, Dk, ylims): - eta = D.eta + D.x - N_per_stancil, ksize = Dk.N_per_stancil.data , Dk.k.size - plt.text(eta[0].data, ylims[-1], ' N='+numtostr(N_per_stancil) + ' N/2M= '+ fltostr(N_per_stancil/2/ksize, 1)) - -# Single views -def plot_data_eta(D, offset = 0 , **kargs ): - eta_1 = D.eta + D.x - y_data = D.y_model +offset - plt.plot(eta_1,y_data , **kargs) - return eta_1 - -def plot_model_eta(D, ax, offset = 0, **kargs ): - eta = D.eta + D.x - y_data = D.y_model+offset - plt.plot(eta ,y_data , **kargs) - - ax.axvline(eta[0].data, linewidth=0.1, color=kargs['color'], alpha=0.5) - ax.axvline(eta[-1].data, linewidth=0.1, color=kargs['color'], alpha=0.5) - -# %% -fltostr = MT.float_to_str -numtostr = MT.num_to_str -font_for_print() - -#for i in x_pos_sel[::2]: -#i =x_pos_sel[20] -#MT.mkdirs_r(plot_path+'B03_spectra/') - -x_pos_sel = np.arange(Gk.x.size)[~np.isnan(Gk.mean('beam').mean('k').gFT_PSD_data.data)] -x_pos_max = Gk.mean('beam').mean('k').gFT_PSD_data[~np.isnan(Gk.mean('beam').mean('k').gFT_PSD_data)].argmax().data -xpp = x_pos_sel[ [int(i) for i in np.round(np.linspace(0, x_pos_sel.size-1, 4))]] -xpp = np.insert(xpp, 0, x_pos_max) - -#for i in xpp[2:3]: -# %% -i = 5 -#i=6 - -k = all_beams[0] -#k = 'gt2l' - -plot_path = mconfig['paths']['plot'] + '/vids/'+batch_key+'/' + ID_name + '_'+k+'_x'+str(i)+'_B03/' -MT.mkdirs_r(plot_path) - -num_count=1 -k_list = np.concatenate([ np.arange(0.005, 0.14, 0.001)[::-1], np.arange(0.005, 0.14, 0.001) ]) -for k_thresh in k_list: - -# %% - print(num_count) - k_thresh = 0.12 * 1 - F = M.figure_axis_xy(5.5, 6.5, container =True, view_scale= 0.8) - - plt.suptitle('ALT03 Decomposition\nID: '+ ID_name, y = 0.93, x = 0.13, horizontalalignment ='left') - #Photon height reconstruction | x='+str(Gk.x[i].data)+' \n' + ID_name, y = 0.95) - gs = GridSpec(12+4,6, wspace=0, hspace=0.2)#figure=fig, - - ax0 = F.fig.add_subplot(gs[0:6, :]) - col_d = col.__dict__['rels'] - - dx = Gx.eta.diff('eta').mean().data - neven = True - offs = 0 - Gx_1 = Gx.isel(x= i).sel(beam = k) - Gk_1 = Gk.isel(x= i).sel(beam = k) - - dist_stencil = Gx_1.eta + Gx_1.x - dist_stencil_lims = dist_stencil[0].data, dist_stencil[-1].data - - # cutting Table data - mask_x_bin = ( (B3[k]['dist'] >= dist_stencil_lims[0]) & (B3[k]['dist'] <= dist_stencil_lims[1]) ) - T3_sel = B3[k].loc[mask_x_bin] - #T3_sel.shape - mask_x_true = (B2[k]['x_true'] >= T3_sel['x_true'].min()) & (B2[k]['x_true'] <= T3_sel['x_true'].max()) - T2_sel = B2[k].loc[mask_x_true] - - ### slope data - T3 = B3[k]#.loc[mask_x_bin] - dd = np.copy(T3['heights_c_weighted_mean']) - dd = np.gradient(dd) - dd, _ = spicke_remover.spicke_remover(dd, spreed=10, verbose=False) - dd_nans = (np.isnan(dd) ) + (T3['N_photos'] <= 5) - # dd_no_nans = dd[~dd_nans] # windowing is applied here - # x_no_nans = T3['dist'][~dd_nans] - dd[dd_nans] = np.nan# windowing is applied here - xx = T3['dist'] - xx[dd_nans] = np.nan - - #plt.plot( xx , dd, color=col.green,alpha=0.8, linewidth =0.3) - #B3[k]['dist'] - - #plot_model_eta(Gx_1, ax0, offset= offs, linestyle='-', color=col_d[k], linewidth=0.4, alpha=1, zorder=12 , label = 'GFT model') - # ylims= -np.nanstd(Gx_1.y_data)*3, np.nanstd(Gx_1.y_data)*3 - # #add_info(Gx_1, Gk_1 , ylims ) - - lead_color = col.cascade1#col_d[k] - - # oringial data - eta_1= plot_data_eta(Gx_1 , offset= offs , linestyle= '-', c=col.gray,linewidth=2, 
alpha =1, zorder=11, label = 'Mean photon height slope') - - # reconstruct slope model - # introduce frequency filter: - gFT_cos_coeff_sel = np.copy(Gk_1.gFT_cos_coeff) - gFT_sin_coeff_sel = np.copy(Gk_1.gFT_sin_coeff) - gFT_cos_coeff_sel[Gk_1.k > k_thresh] = 0 - gFT_sin_coeff_sel[Gk_1.k > k_thresh] = 0 - - - FT = gFT.generalized_Fourier(Gx_1.eta + Gx_1.x, None,Gk_1.k ) - _ = FT.get_H() - FT.b_hat=np.concatenate([ gFT_cos_coeff_sel, gFT_sin_coeff_sel ]) - plt.plot(Gx_1.eta + Gx_1.x, FT.model()+offs ,'-', c=lead_color, linewidth=0.5, alpha=1,zorder= 12, label = 'GFT slope model') - plt.legend(loc=1) - - ax1 = F.fig.add_subplot(gs[6:10, :]) - - ### height decomposition - # plotting observed datazx - #T3_sel['heights_c_weighted_mean'] - plt.plot( T3_sel['dist'] , T3_sel['heights_c_weighted_mean'], '-' , color =col_d[k], linewidth = 0.8, label = 'observed $h_c$ mean') - - T2_sel['dist'] = np.interp(T2_sel['x_true'], T3_sel['x_true'], T3_sel['dist'] ) - plt.scatter( T2_sel['dist'] , T2_sel['heights_c'], s= 1, marker='o', color='black', alpha =0.02, edgecolors= 'none' ) - - # reconstructued data by integration - #height_model = np.cumsum(FT.model()) + T3_sel['heights_c_weighted_mean'].iloc[0] - #plt.plot( Gx_1.eta + Gx_1.x, height_model, linewidth = 0.6 , color = 'red', label = 'real space integral') - - FT_int = gFT.generalized_Fourier(Gx_1.eta + Gx_1.x, None,Gk_1.k ) - _ = FT_int.get_H() - FT_int.b_hat = np.concatenate([ -gFT_sin_coeff_sel /Gk_1.k, gFT_cos_coeff_sel/Gk_1.k ]) - - height_model2 = FT_int.model() /dx# + T3_sel['heights_c_weighted_mean'].iloc[0] - - - dist_nanmask = np.isnan(Gx_1.y_data) - height_data = np.interp(dist_stencil, T3_sel['dist'], T3_sel['heights_c_weighted_mean']) #[~np.isnan(Gx_1.y_data)] - - def fit_offset(x, data, model, nan_mask, deg): - - #x, data, model, nan_mask, deg = dist_stencil, height_data, height_model2, dist_nanmask, 1 - p_offset = np.polyfit(x[~nan_mask], data[~nan_mask] - model[~nan_mask], deg) - p_offset[-1] = 0 - poly_offset = np.polyval(p_offset,x ) - return poly_offset - - poly_offset = fit_offset(dist_stencil, height_data, height_model2, dist_nanmask, 1) - - #plt.plot(dist_stencil, height_model2 ,'-', c='orange', linewidth=0.6, alpha=1,zorder= 12, label = 'spectral int model') - #plt.plot(dist_stencil, poly_offset ,'-', c=col.gridcolor, linewidth=0.6, alpha=1,zorder= 12, label = 'offset') - plt.plot(dist_stencil, height_model2 + poly_offset ,'-', c=lead_color, linewidth=0.8, alpha=1,zorder= 12, label = 'GFT height model + correction') - plt.legend(loc = 1, ncol =3) - - - ax2 = F.fig.add_subplot(gs[10:13, :]) - - height_residual = T2_sel['heights_c'] - np.interp(T2_sel['dist'], dist_stencil, height_model2 + poly_offset) - plt.scatter(T2_sel['dist'], height_residual, s= 1, marker='o', color='black', alpha =0.02, edgecolors= 'none' ) - - heights_c_weighted_mean_stancil = np.interp(dist_stencil, T3_sel['dist'], T3_sel['heights_c_weighted_mean'] ) - height_residual_mean = (heights_c_weighted_mean_stancil - height_model2) - poly_offset - height_residual_mean[dist_nanmask] = np.nan - - plt.plot( dist_stencil , height_residual_mean , color =col.rascade1, linewidth = 0.5, label = 'residual $h_c$') - plt.fill_between(dist_stencil , height_residual_mean, color= col.cascade2, edgecolor = None, alpha = 0.4, zorder= 0) - plt.legend(loc = 1) - - - # for pos, kgroup, lflag in zip([ gs[2, 0:2], gs[2, 2:4], gs[2, 4:]], [, ['gt2l', 'gt2r'], ['gt3l', 'gt3r']], [True, False, False] ): - - ax41 = F.fig.add_subplot(gs[3:5, 4:]) - #ax41.tick_params(labelleft=lflag) - - 
dd = Gk_1.gFT_PSD_data#.rolling(k=10, min_periods= 1, center=True).mean() - plt.plot(Gk_1.k, dd, color='gray', linewidth=.5 ,alpha= 0.5 ) - - dd = Gk_1.gFT_PSD_data.rolling(k=10, min_periods= 1, center=True).mean() - plt.plot(Gk_1.k, dd, color=lead_color, linewidth=.8 ) - - klim= Gk_1.k[0], Gk_1.k[-1] - plt.xlim(klim) - - plt.ylabel('$(m/m)^2/k$') - #plt.title('Spectra', loc ='left')s - #plt.xlabel('k (2$\pi$ m$^{-1}$)') - plt.ylim(dd.min(),np.nanmax(dd.data) * 1.5) - - ax41.axvline(k_thresh, linewidth=1, color='black', alpha=1) - ax41.axvspan(k_thresh , klim[-1], color='black', alpha=0.5, zorder=12) - ax41.set_facecolor((1.0, 1.00, 1.00, 0.8)) - - #plt.show() - - #F.save_light(path=plot_path+'B03_spectra/', name = 'B03_freq_reconst_x'+str(i)) - #MT.json_save('B03_success', plot_path, {'time':'time.asctime( time.localtime(time.time()) )'}) - - stencil_pos = spec.create_chunk_boundaries_unit_lengths(1000, (dist_stencil[0], dist_stencil[-1]), ov=0, iter_flag=False).T - - V0_list, V1_list, V2_list, V3_list = list(), list(), list(), list() - no_nan_sum= list() - for s in stencil_pos: - V0_list.append( T2_sel['heights_c'].loc[M.cut_nparray( np.array(T2_sel['dist']), s[0], s[-1]) ].var() ) - V1_list.append( T3_sel['heights_c_weighted_mean'].loc[M.cut_nparray( np.array(T3_sel['dist']), s[0], s[-1]) ].var() ) - V2_list.append( np.nanvar(height_model2[M.cut_nparray( dist_stencil, s[0], s[-1])]) ) - V3_list.append( np.nanvar(height_residual_mean[M.cut_nparray( dist_stencil, s[0], s[-1])]) ) - - no_nan_sum.append( (~dist_nanmask[M.cut_nparray( dist_stencil, s[0], s[-1])].data).sum()) - - - ax3 = F.fig.add_subplot(gs[-2:, :]) - - plt.title('Variance Decomposition', loc='left') - V0_list, V1_list, V2_list = np.array(V0_list),np.array(V1_list),np.array(V2_list), - no_nan_sum = np.array(no_nan_sum) - no_nan_sum = no_nan_sum/no_nan_sum.max() - - edge_pos = np.insert(stencil_pos[:,0], stencil_pos[:,0].size, stencil_pos[:,-1][-1]) - plt.stairs(no_nan_sum * V0_list/V0_list, edge_pos, baseline=0, fill=True, color= col.black, alpha=0.6, label = 'photon variance') - plt.stairs(no_nan_sum * V1_list/V0_list, edge_pos, baseline=0, fill=True, color= col_d[k] , label = 'mean photon variance') - plt.stairs(no_nan_sum * V2_list/V0_list, edge_pos, baseline=0, fill=True, color= lead_color, label = 'wave variance') - plt.stairs(no_nan_sum * (V3_list/V0_list+ V2_list/V0_list) , edge_pos, baseline=no_nan_sum * V2_list/V0_list, fill=True, color= col.green, label = 'residual variance') - - plt.legend(ncol= 4, bbox_to_anchor=(-0.02, 0), loc= 2) - - # residual - #ax0.set_xticks(eta_ticks) - #ax0.set_xticklabels(eta_ticks/1e3) - #ax0.set_ylabel('Slope (m/m)') - #ax1.spines['top'].set_visible(True) - #ax1.spines['top'].set_linewidth(0.2) - - #ax1.xaxis.set_ticks_position('top') - #ax1.xaxis.set_label_position('top') - ax0.set_ylabel('Slope (m/m)') - ax1.set_ylabel('Photon Height (m)') - ax2.set_ylabel('Photon Height (m)') - - #ax2.spines['bottom'].set_visible(True) - ax2.tick_params(labelbottom=True, bottom=True) - ax2.set_xlabel('Distance from the ice Edge (km)') - - eta_ticks = np.arange(dist_stencil[0], dist_stencil[-1]+ 500, 500) - eta_tick_labels, eta_ticks = MT.tick_formatter(eta_ticks[1::4]/1e3, interval= 3, expt_flag= False, shift=0) - ax2.set_xticks(eta_ticks*1e3) - ax2.set_xticklabels(eta_tick_labels) - ax2.set_ylim(-0.01, max(height_residual)) - - - y_tick_labels, y_ticks = MT.tick_formatter(np.arange(-0.1, 0.1+ 0.05, 0.05), interval= 2, expt_flag= False, shift=0) - ax0.set_yticks(y_ticks) - 
ax0.set_yticklabels(y_tick_labels) - ylim_slope= np.round(Gx_1.y_data.std().data*4 * 10)/10 - ax0.set_ylim(-1* ylim_slope ,ylim_slope) - - y_tick_labels, y_ticks = MT.tick_formatter(np.arange(-0.5, 3, 0.5), interval= 2, expt_flag= False, shift=1) - ax1.set_yticks(y_ticks) - ax1.set_yticklabels(y_tick_labels) - ax1.set_ylim(-0.4, 1.5) - - ax2.set_yticks(y_ticks) - ax2.set_yticklabels(y_tick_labels) - ax2.set_ylim(0, 2.8) - - ax3.set_yticks(y_ticks) - ax3.set_yticklabels(y_tick_labels) - ax3.set_ylim(0, 1) - - xlims= eta_1[0].data+ 0 * dx, eta_1[-1].data- 500 * dx - #xlims= eta_1[0].data+ 0 * dx, eta_1[-1].data- 0 * dx - - for axx in [ax0, ax1, ax2, ax3]: - - axx.set_xlim(xlims ) - axx.axhline(0, linewidth =0.5, color=col.black) - axx.spines['bottom'].set_visible(False) - axx.tick_params(labelbottom=False, bottom=False) - - - F.save_light(path= plot_path, name='B03_decomposition_'+str(num_count).zfill(4)) - #F.save_pup(path= plot_path, name='B02_decomposition_'+k+'_x'+str(i)+'_'+ID_name) - num_count +=1 -# %% - -V0_photon_var = T2_sel['heights_c'].var() -V1_mean_photon_var = T3_sel['heights_c_weighted_mean'].var() -V2_wave_model_var = np.nanvar(height_residual_mean) - -V0_photon_var/ V0_photon_var -V1_mean_photon_var/V0_photon_var -V2_wave_model_var/V0_photon_var diff --git a/analyis_publish/PB03_plot_spectra_3x3_ov.py b/analyis_publish/PB03_plot_spectra_3x3_ov.py deleted file mode 100644 index 7cd52025..00000000 --- a/analyis_publish/PB03_plot_spectra_3x3_ov.py +++ /dev/null @@ -1,333 +0,0 @@ - -# %% -import os, sys -#execfile(os.environ['PYTHONSTARTUP']) - -""" -This file open a ICEsat2 track applied filters and corections and returns smoothed photon heights on a regular grid in an .nc file. -This is python 3 -""" - -exec(open(os.environ['PYTHONSTARTUP']).read()) -exec(open(STARTUP_2021_IceSAT2).read()) - -#%matplotlib inline - -import ICEsat2_SI_tools.convert_GPS_time as cGPS -import h5py -import ICEsat2_SI_tools.io as io -import ICEsat2_SI_tools.spectral_estimates as spec - -import time -import imp -import copy -import spicke_remover -import datetime -import generalized_FT as gFT -from scipy.ndimage.measurements import label - -# %% -track_name, batch_key, test_flag = io.init_from_input(sys.argv) # loads standard experiment -#track_name, batch_key, test_flag = '20190605061807_10380310_004_01', 'SH_batch01', False -#track_name, batch_key, test_flag = '20190601094826_09790312_004_01', 'SH_batch01', False -#track_name, batch_key, test_flag = '20190207111114_06260210_004_01', 'SH_batch02', False -#track_name, batch_key, test_flag = '20190208152826_06440210_004_01', 'SH_batch01', False -#track_name, batch_key, test_flag = '20190213133330_07190212_004_01', 'SH_batch02', False -#track_name, batch_key, test_flag = '20190207002436_06190212_004_01', 'SH_batch02', False -#track_name, batch_key, test_flag = '20190206022433_06050212_004_01', 'SH_batch02', False - - -#track_name, batch_key, test_flag = '20190219073735_08070210_004_01', 'SH_batch02', False -#track_name, batch_key, test_flag = '20190502021224_05160312_004_01', 'SH_batch02', False - -#track_name, batch_key, test_flag = 'SH_20190224_08800210', 'SH_publish', False -#track_name, batch_key, test_flag = 'SH_20190219_08070210', 'SH_publish', False -track_name, batch_key, test_flag = 'SH_20190502_05160312', 'SH_publish', False -#track_name, batch_key, test_flag = 'SH_20190502_05180312', 'SH_publish', False - - -#print(track_name, batch_key, test_flag) -hemis, batch = batch_key.split('_') - -load_path = mconfig['paths']['work'] +batch_key 
+'/B02_spectra/' -load_file = load_path + 'B02_' + track_name #+ '.nc' -#plot_path = mconfig['paths']['plot'] + '/'+hemis+'/'+batch_key+'/' + track_name + '/' -plot_path = mconfig['paths']['plot'] + '/'+hemis+'/'+batch_key+'/publish/' + track_name + '/' -MT.mkdirs_r(plot_path) - -Gk = xr.open_dataset(load_file+'_gFT_k.nc') -Gx = xr.open_dataset(load_file+'_gFT_x.nc') -Gfft = xr.open_dataset(load_file+'_FFT.nc') - -# %% -all_beams = mconfig['beams']['all_beams'] -high_beams = mconfig['beams']['high_beams'] -low_beams = mconfig['beams']['low_beams'] -#Gfilt = io.load_pandas_table_dict(track_name + '_B01_regridded', load_path) # rhis is the rar photon data -#Gd = io.load_pandas_table_dict(track_name + '_B01_binned' , load_path) # - -col.colormaps2(31, gamma=1) -col_dict= col.rels - - -# %% -def dict_weighted_mean(Gdict, weight_key): - """ - returns the weighted meean of a dict of xarray, data_arrays - weight_key must be in the xr.DataArrays - """ - #Gdict = G_rar_fft - #weight_key='N_per_stancil' - - akey = list( Gdict.keys() )[0] - GSUM = Gdict[akey].copy() - GSUM.data = np.zeros(GSUM.shape) - N_per_stancil = GSUM.N_per_stancil * 0 - N_photons = np.zeros(GSUM.N_per_stancil.size) - - counter= 0 - for k,I in Gdict.items(): - #print(k) - I =I.squeeze() - print(len(I.x) ) - if len(I.x) !=0: - GSUM += I.where( ~np.isnan(I), 0) * I[weight_key] #.sel(x=GSUM.x) - N_per_stancil += I[weight_key] - if 'N_photons' in GSUM.coords: - N_photons += I['N_photons'] - counter+=1 - - GSUM = GSUM / N_per_stancil - - if 'N_photons' in GSUM.coords: - GSUM.coords['N_photons'] = (('x', 'beam'), np.expand_dims(N_photons, 1) ) - - GSUM['beam'] = ['weighted_mean'] - GSUM.name='power_spec' - - return GSUM - - -#G_gFT_wmean = (Gk['gFT_PSD_model'].where( ~np.isnan(Gk['gFT_PSD_model']), 0) * Gk['N_per_stancil']).sum('beam')/ Gk['N_per_stancil'].sum('beam') - -G_gFT_wmean = (Gk.where( ~np.isnan(Gk['gFT_PSD_model']), 0) * Gk['N_per_stancil']).sum('beam')/ Gk['N_per_stancil'].sum('beam') -G_gFT_wmean['N_photons'] = Gk['N_photons'].sum('beam') - -G_fft_wmean = (Gfft.where( ~np.isnan(Gfft), 0) * Gfft['N_per_stancil']).sum('beam')/ Gfft['N_per_stancil'].sum('beam') -G_fft_wmean['N_per_stancil'] = Gfft['N_per_stancil'].sum('beam') - - -# %% plot -#col.colormaps2(31, gamma=1) -#col.plot() - -def plot_wavenumber_spectrogram(ax, Gi, clev, plot_photon_density=True , cmap=None ): - - if Gi.k[0] ==0: - Gi= Gi.sel(k=Gi.k[1:]) - x_lambda= 2 * np.pi/Gi.k - if cmap is None: - cmap = col.white_base_blgror #plt.cm. 
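(The helper continues below. Its core — convert wavenumber k to wavelength 2*pi/k and draw distance-vs-wavelength power on a log axis — survives without the repo-specific colormap col.white_base_blgror. A minimal matplotlib sketch under those assumptions, with viridis substituted for the custom colormap and power shaped (k, x):

    import numpy as np
    import matplotlib.pyplot as plt

    def wavenumber_spectrogram(ax, x, k, power, clev=(None, None), cmap='viridis'):
        x, k = np.asarray(x), np.asarray(k)
        if k[0] == 0:                    # drop the k=0 bin, as the helper does
            k, power = k[1:], power[1:, :]
        wavelength = 2 * np.pi / k       # wavenumber -> wavelength (m)
        mesh = ax.pcolormesh(x / 1e3, wavelength, power,
                             cmap=cmap, vmin=clev[0], vmax=clev[-1])
        ax.set_yscale('log')
        ax.set_ylim(wavelength[-1], wavelength[0])
        return mesh

The returned mesh is what the deleted script later hands to plt.colorbar for the horizontal colorbars.)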
- if clev is None: - clev = None, None - - css= plt.pcolormesh(Gi.x/1e3, x_lambda , Gi, cmap=cmap , vmin = clev[0], vmax = clev[-1]) - ax.set_yscale('log') - # plt.colorbar(orientation='vertical', pad=0.06, label='Spectral Power (m^2/m)') - - if plot_photon_density: - - plt.plot(Gi.x/1e3, x_lambda[-1] + (Gi.N_per_stancil/Gi.N_per_stancil.max() ) * 10 , c='black', linewidth= 0.8, label='NAN-density' ) - plt.fill_between(Gi.x/1e3, x_lambda[-1] + (Gi.N_per_stancil/Gi.N_per_stancil.max() ) * 10, 0, color='gray', alpha = 0.3) - ax.axhline(30, color='black', linewidth=0.3) - - #plt.xlabel('Distance from the Ice Edge (km)') - plt.ylim(x_lambda[-1], x_lambda[0]) - - return css - -#Gplot = G.rolling(x=5, min_periods= 1, center=True).mean() -#Gmean = G_gFT_wmean.rolling(x=2, min_periods= 1, center=True).mean() - -#Gmean = G_gFT_wmean['gFT_PSD_data'].rolling(k=5, center=True).mean() - - -Gmean,_ = gFT.rebin(G_gFT_wmean['gFT_PSD_data'], 10) - -#Gmean = Gmean.where(~np.isnan(Gmean), 0) - -# define mean first for colorbar -#Gplot = G_gFT_wmean.squeeze()#.rolling(k=10, min_periods= 1, center=True).median().rolling(x=3, min_periods= 1, center=True).median() -dd = 10 * np.log10(Gmean) -dd= dd.where(~np.isinf(dd), np.nan ) -clev_log = M.clevels( [dd.quantile(0.01).data*1, dd.quantile(0.98).data * 1], 31)* 1 - - -try: - k_max_range = Gmean.k[Gmean.isel(x= slice(0, 5)).mean('x').argmax().data].data* 0.75, Gmean.k[Gmean.isel(x= slice(0, 5)).mean('x').argmax().data].data* 1, Gmean.k[Gmean.isel(x= slice(0, 5)).mean('x').argmax().data].data* 1.25 -except: - k_max_range = Gmean.k[Gmean.isel(x= slice(0, 20)).mean('x').argmax().data].data* 0.75, Gmean.k[Gmean.isel(x= slice(0, 20)).mean('x').argmax().data].data* 1, Gmean.k[Gmean.isel(x= slice(0, 20)).mean('x').argmax().data].data* 1.25 - - -# % -font_for_print() -fn = copy.copy(lstrings) - -F = M.figure_axis_xy(6.5, 5.6, container= True, view_scale =1) -Lmeters = Gk.L.data[0] - - -plt.suptitle('Generalized Fourier Transform Slope Spectral Power\n' + io.ID_to_str(track_name), y = 0.98) -gs = GridSpec(7,3, wspace=0.25, hspace=1.2)#figure=fig, -#clev=np.arange(0, 6, 0.1)*3 - -#%matplotlib inline - - - -#clev = M.clevels( [Gmean.quantile(0.6).data * 1e4, Gmean.quantile(0.99).data * 1e4], 31)/ 1e4 - -xlims= Gmean.x[0]/1e3, Gmean.x[-1]/1e3 - -k =high_beams[0] -for pos, k, pflag in zip([gs[0:2, 0],gs[0:2, 1],gs[0:2, 2] ], high_beams, [True, False, False] ): - ax0 = F.fig.add_subplot(pos) - ax0.tick_params(labelbottom=False) - Gplot = Gk.sel(beam = k).gFT_PSD_model.squeeze()#rolling(k=5, x=2, min_periods= 1, center=True).mean() - #Gplot.mean('x').plot() - - dd2 = 10 * np.log10(Gplot) - dd2= dd2.where(~np.isinf(dd2), np.nan ) - plot_wavenumber_spectrogram(ax0, dd2, clev_log, plot_photon_density=False ) - plt.title(next(fn)+k, color= col_dict[k], loc= 'left') - plt.xlim(xlims) - # - if pflag: - plt.ylabel('Wave length (m)') - plt.legend() - -for pos, k, pflag in zip([gs[2:4, 0],gs[2:4, 1],gs[2:4, 2] ], low_beams, [True, False, False] ): - ax0 = F.fig.add_subplot(pos) - ax0.tick_params(labelbottom=False) - Gplot = Gk.sel(beam = k).gFT_PSD_model.squeeze()#.rolling(k=10, x=2, min_periods= 1, center=True).mean() - #Gplot.mean('x').plot() - dd2 = 10 * np.log10(Gplot) - dd2= dd2.where(~np.isinf(dd2), np.nan ) - plot_wavenumber_spectrogram(ax0, dd2, clev_log, plot_photon_density=False ) - plt.title(next(fn)+k, color= col_dict[k], loc= 'left') - plt.xlim(xlims) - # - if pflag: - plt.ylabel('Wave length (m)') - plt.legend() - - -pos = gs[4:6, 0] -ax0 = F.fig.add_subplot(pos) 
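(Both the dict_weighted_mean helper and the G_gFT_wmean one-liner above implement the same reduction: a weighted average over the beam dimension in which NaN samples are zeroed while the weights are summed in full, so stencils with a heavily masked beam are pulled toward zero rather than renormalized. A compact sketch of that reduction, with the dimension and weight names taken from the deleted code:

    import numpy as np

    def beam_weighted_mean(da, weights):
        """da: xarray.DataArray with a 'beam' dim; weights: matching weights.

        NaNs contribute zero, and the weight sum is NOT reduced for them,
        matching the behavior of the deleted one-liner.
        """
        clean = da.where(~np.isnan(da), 0)
        return (clean * weights).sum('beam') / weights.sum('beam')

Called as beam_weighted_mean(Gk['gFT_PSD_model'], Gk['N_per_stancil']), this reproduces the gFT_PSD_model component of G_gFT_wmean, which the deleted code computes over the full dataset at once.)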
-plt.title(next(fn)+'Photons density ($m^{-1}$)', loc='left') - -max_list = list() -for k in all_beams: - I = Gk.sel(beam = k)['gFT_PSD_model'] - plt.plot(Gplot.x/1e3, I.N_photons/I.L.data, color= col_dict[k], linewidth=0.9) - max_list.append((I.N_photons/I.L.data).max()) - -plt.plot(Gplot.x/1e3, G_gFT_wmean['N_photons']/I.L.data/6, c='black', label='mean photons density' , linewidth=0.8) -plt.xlim(xlims) -plt.ylim(2,max(max_list)*1.3) -plt.legend(ncol= 3,loc=1) -plt.xlabel('Distance from the ice edge (km)') - - -ax0 = F.fig.add_subplot(gs[4:6, 1]) - -css = plot_wavenumber_spectrogram(ax0, dd, clev_log , plot_photon_density= False) -plt.title(next(fn)+'Beam weighted mean', loc= 'left') #\n10 $\log_{10}( (m/m)^2 m )$ -plt.xlim(xlims) - -# plt.plot(Gplot.x/1e3, 10* nan_list +20 , c='black', label='NAN-density' ) -# ax0.axhline(30, color='black', linewidth=0.5) - -# ax0.axhline(2* np.pi/k_max_range[0], color='black', linestyle= '--', linewidth= 0.5) -# ax0.axhline(2* np.pi/k_max_range[1], color='black', linestyle= '-', linewidth= 0.8) -# ax0.axhline(2* np.pi/k_max_range[2], color='black', linestyle= '--', linewidth= 0.5) -# ax0.axhspan(2* np.pi/k_max_range[0], 2* np.pi/k_max_range[2], color='gray', alpha = 0.4) - - -plt.xlabel('Distance from the ice edge (km)') -#plt.ylabel('Wave length (m)') -plt.legend(loc=1) - - - -cbaxes = F.fig.add_subplot(gs[-1, 1]) -cbaxes.axis('off') -cbpos = cbaxes.get_position() -#cbaxes2 = F.fig.add_axes([cbpos.x0,cbpos.y0,cbpos.width/5,cbpos.height]) -cbaxes2 = F.fig.add_axes([cbpos.x0,cbpos.y0+ 2*cbpos.height/4,cbpos.width,cbpos.height/3]) -cb = plt.colorbar(css , cax =cbaxes2, orientation= 'horizontal') - -cb.set_label('Power($(m/m)^2/k$)') -cb.outline.set_visible(False) -#cbaxes2.tick_params(axis='both', colors=col.gray) - - - -Lpoints= Gk.Lpoints.mean('beam').data -N_per_stancil = Gk.N_per_stancil.mean('beam').data#[0:-2] - -G_error_model =dict() -G_error_data =dict() - -for bb in Gk.beam.data: - I = Gk.sel(beam= bb) - b_bat_error = np.concatenate([ I.model_error_k_cos.data , I.model_error_k_sin.data ]) - Z_error = gFT.complex_represenation(b_bat_error, Gk.k.size, Lpoints) - PSD_error_data, PSD_error_model = gFT.Z_to_power_gFT(Z_error, np.diff(Gplot.k)[0],N_per_stancil , Lpoints ) - - #np.expand_dims(PSD_error_model, axis =) - G_error_model[bb] = xr.DataArray(data = PSD_error_model, coords = I.drop('N_per_stancil').coords, name='gFT_PSD_model_error' ).expand_dims('beam') - G_error_data[bb] = xr.DataArray(data = PSD_error_data, coords = I.drop('N_per_stancil').coords, name='gFT_PSD_data_error' ).expand_dims('beam') - -gFT_PSD_model_error_mean = xr.merge(G_error_model.values(),compat='override').gFT_PSD_model_error -gFT_PSD_data_error_mean = xr.merge(G_error_data.values(),compat='override').gFT_PSD_data_error - -PSD_model_error_mean = ( gFT_PSD_model_error_mean.where( ~np.isnan(gFT_PSD_model_error_mean), 0) * Gk['N_per_stancil']).sum('beam')/Gk['N_per_stancil'].sum('beam') -PSD_data_error_mean = ( gFT_PSD_data_error_mean.where( ~np.isnan(gFT_PSD_data_error_mean), 0) * Gk['N_per_stancil']).sum('beam')/Gk['N_per_stancil'].sum('beam') - - -dd2 = 10 * np.log10(PSD_data_error_mean) -#dd2 = PSD_data_error_mean -#dd= np.where(~np.isinf(dd), dd, np.nan ) -dd2= dd2.where(~np.isinf(dd2), np.nan ) - -pos = gs[4:6, 2] -ax0 = F.fig.add_subplot(pos) -ax0.set_yscale('log') -#clev_log - -clev_log = M.clevels( [dd2.quantile(0.01).data*0.8, dd2.quantile(0.98).data*0.9 ], 31) -plt.cm.OrRd -css_err = plot_wavenumber_spectrogram(ax0, dd2, clev_log, plot_photon_density= False, cmap= 
plt.cm.OrRd) -plt.title(next(fn)+'Mean error', loc= 'left') #\n10 $\log_{10}( (m/m)^2 m )$ -plt.xlabel('Distance from the ice edge (km)') -plt.xlim(xlims) -#plt.colorbar() - - -cbaxes = F.fig.add_subplot(gs[-1, 2]) -cbaxes.axis('off') -cbpos = cbaxes.get_position() -#cbaxes2 = F.fig.add_axes([cbpos.x0,cbpos.y0,cbpos.width/5,cbpos.height]) -cbaxes2 = F.fig.add_axes([cbpos.x0,cbpos.y0+ 2*cbpos.height/4,cbpos.width,cbpos.height/3]) -cb = plt.colorbar(css_err , cax =cbaxes2, orientation= 'horizontal') - -cb.set_label('Power($(m/m)^2/k$)') -cb.outline.set_visible(False) -#cbaxes2.tick_params(axis='both', colors=col.gray) - - -F.save_light(path=plot_path, name = 'PB03_specs_ov_'+str(track_name)) -F.save_pup(path=plot_path, name = 'PB03_specs_ov_'+str(track_name)) - -# %% diff --git a/analyis_publish/PB03_plot_spectra_case_v1.py b/analyis_publish/PB03_plot_spectra_case_v1.py deleted file mode 100644 index 7d6a681c..00000000 --- a/analyis_publish/PB03_plot_spectra_case_v1.py +++ /dev/null @@ -1,412 +0,0 @@ - -import os, sys -#execfile(os.environ['PYTHONSTARTUP']) - -""" -This file open a ICEsat2 track applied filters and corections and returns smoothed photon heights on a regular grid in an .nc file. -This is python 3 -""" - -exec(open(os.environ['PYTHONSTARTUP']).read()) -exec(open(STARTUP_2021_IceSAT2).read()) - -#%matplotlib inline - -import ICEsat2_SI_tools.convert_GPS_time as cGPS -import h5py -import ICEsat2_SI_tools.io as io -import ICEsat2_SI_tools.spectral_estimates as spec - -import time -import imp -import copy -import spicke_remover -import datetime -import generalized_FT as gFT -from scipy.ndimage.measurements import label - -imp.reload(M_color) -col=M_color.color(path=mconfig['paths']['analysis']+'../config/', name='color_def') - -xr.set_options(display_style='text') -#import s3fs -# %% -track_name, batch_key, test_flag = io.init_from_input(sys.argv) # loads standard experiment -#track_name, batch_key, test_flag = '20190605061807_10380310_004_01', 'SH_batch01', False -#track_name, batch_key, test_flag = '20190601094826_09790312_004_01', 'SH_batch01', False -#track_name, batch_key, test_flag = '20190207111114_06260210_004_01', 'SH_batch02', False -#track_name, batch_key, test_flag = '20190208152826_06440210_004_01', 'SH_batch01', False -#track_name, batch_key, test_flag = '20190213133330_07190212_004_01', 'SH_batch02', False -#track_name, batch_key, test_flag = '20190207002436_06190212_004_01', 'SH_batch02', False -#track_name, batch_key, test_flag = '20190206022433_06050212_004_01', 'SH_batch02', False - - -#track_name, batch_key, test_flag = '20190502050734_05180310_004_01', 'SH_batch02', False - -# local track -#track_name, batch_key, test_flag = '20190219073735_08070210_004_01', 'SH_batch02', False - -track_name, batch_key, test_flag = 'SH_20190219_08070210', 'SH_publish', True - -x_pos= 3 - -#print(track_name, batch_key, test_flag) -hemis, batch = batch_key.split('_') - -load_path = mconfig['paths']['work'] +batch_key +'/B02_spectra/' -load_file = load_path + 'B02_' + track_name #+ '.nc' -#plot_path = mconfig['paths']['plot'] + '/'+hemis+'/'+batch_key+'/' + track_name + '/' -plot_path = mconfig['paths']['plot'] + '/'+hemis+'/'+batch_key+'/publish/' + track_name + '/' -MT.mkdirs_r(plot_path) - -Gk = xr.open_dataset(load_file+'_gFT_k.nc') -Gx = xr.open_dataset(load_file+'_gFT_x.nc') - -Gfft = xr.open_dataset(load_file+'_FFT.nc') - - -# %% -all_beams = mconfig['beams']['all_beams'] -high_beams = mconfig['beams']['high_beams'] -low_beams = mconfig['beams']['low_beams'] -#Gfilt = 
io.load_pandas_table_dict(track_name + '_B01_regridded', load_path) # rhis is the rar photon data -#Gd = io.load_pandas_table_dict(track_name + '_B01_binned' , load_path) # - -col.colormaps2(21) - -# %% check paths (again) -# G_gFT_wmean = (Gk['gFT_PSD_model'].where( ~np.isnan(Gk['gFT_PSD_model']), 0) * Gk['N_per_stancil']).sum('beam')/ Gk['N_per_stancil'].sum('beam') -# G_gFT_wmean['N_per_stancil'] = Gk['N_per_stancil'].sum('beam') - -# G_fft_wmean = (Gfft.where( ~np.isnan(Gfft), 0) * Gfft['N_per_stancil']).sum('beam')/ Gfft['N_per_stancil'].sum('beam') -# G_fft_wmean['N_per_stancil'] = Gfft['N_per_stancil'].sum('beam') - - -# %% define simple routines -def add_info(D, Dk, ylims): - eta = D.eta + D.x - N_per_stancil, ksize = Dk.N_per_stancil.data , Dk.k.size - plt.text(eta[0].data, ylims[-1], ' N='+numtostr(N_per_stancil) + ' N/2M= '+ fltostr(N_per_stancil/2/ksize, 1) ) - - -# %% Single views - -def plot_data_eta(D, offset = 0 , **kargs ): - eta_1 = D.eta# + D.x - y_data = D.y_model +offset - plt.plot(eta_1,y_data , **kargs) - return eta_1 - -def plot_model_eta(D, ax, offset = 0, **kargs ): - eta = D.eta #+ D.x - y_data = D.y_model+offset - plt.plot(eta ,y_data , **kargs) - - # ax.axvline(eta[0].data, linewidth=0.1, color=kargs['color'], alpha=0.5) - # ax.axvline(eta[-1].data, linewidth=0.1, color=kargs['color'], alpha=0.5) - -if ('y_data' in Gx.sel(beam = 'gt3r').keys()): - print('ydata is ', ('y_data' in Gx.sel(beam = 'gt3r').keys()) ) -else: - print('ydata is ', ('y_data' in Gx.sel(beam = 'gt3r').keys()) ) - MT.json_save('B03_fail', plot_path, {'reason':'no y_data'}) - print('failed, exit') - exit() - - - -# %% - -# derive spectral errors: -Lpoints= Gk.Lpoints.mean('beam').data -N_per_stancil = Gk.N_per_stancil.mean('beam').data#[0:-2] - -G_error_model =dict() -G_error_data =dict() - -for bb in Gk.beam.data: - I = Gk.sel(beam= bb) - b_bat_error = np.concatenate([ I.model_error_k_cos.data , I.model_error_k_sin.data ]) - Z_error = gFT.complex_represenation(b_bat_error, Gk.k.size, Lpoints) - PSD_error_data, PSD_error_model = gFT.Z_to_power_gFT(Z_error, np.diff(Gk.k)[0],N_per_stancil , Lpoints ) - - #np.expand_dims(PSD_error_model, axis =) - G_error_model[bb] = xr.DataArray(data = PSD_error_model, coords = I.drop('N_per_stancil').coords, name='gFT_PSD_model_error' ).expand_dims('beam') - G_error_data[bb] = xr.DataArray(data = PSD_error_data, coords = I.drop('N_per_stancil').coords, name='gFT_PSD_data_error' ).expand_dims('beam') - - -Gk['gFT_PSD_model_err'] = xr.concat(G_error_model.values(), dim='beam') -Gk['gFT_PSD_data_err'] = xr.concat(G_error_data.values(), dim='beam') - - - -# %% -fltostr = MT.float_to_str -numtostr = MT.num_to_str - -font_for_print() - - -#for i in x_pos_sel[::2]: -#i =x_pos_sel[20] -MT.mkdirs_r(plot_path+'B03_spectra/') - -x_pos_sel = np.arange(Gk.x.size)[~np.isnan(Gk.mean('beam').mean('k').gFT_PSD_data.data)] -x_pos_max = Gk.mean('beam').mean('k').gFT_PSD_data[~np.isnan(Gk.mean('beam').mean('k').gFT_PSD_data)].argmax().data -xpp = x_pos_sel[ [int(i) for i in np.round(np.linspace(0, x_pos_sel.size-1, 10))]] -xpp = np.insert(xpp, 0, x_pos_max) - -key = 'gt2r' -#lead_color= col.rels[key] -lead_color= col.rels['group2'] - -x_pos =2 -i =xpp[x_pos] -#for i in xpp: - -#i = xpp[0] -font_for_print() -xscale=1e3 -F = M.figure_axis_xy(fig_sizes['two_column_square'][0], fig_sizes['two_column_square'][1], view_scale=0.8, container =True) - -#plt.suptitle('gFT Model and Spectrograms | x='+str(Gk.x[i].data)+' \n' + track_name, y = 0.95) -gs = GridSpec(14,6, wspace=0.1, 
hspace=5)#figure=fig, - - -col_d = col.__dict__['rels'] -beam_group = ['gt2l', 'gt2r'] -for k, gss in zip(beam_group, [gs[0:3, :], gs[2:5, :]] ): - - - ax0 = F.fig.add_subplot(gss) - - Gx_1 = Gx.isel(x= i).sel(beam = k) - Gk_1 = Gk.isel(x= i).sel(beam = k) - x_sel= Gx_1.x - - plot_model_eta(Gx_1, ax0, offset= 0, linestyle='-', color=col_d[k], linewidth=0.8, alpha=1, zorder=12 ) - ylims= -np.nanstd(Gx_1.y_data)*3, np.nanstd(Gx_1.y_data)*3 - #add_info(Gx_1, Gk_1 , ylims ) - - # oringial data - eta_1= plot_data_eta(Gx_1 , offset= 0.05 , linestyle= '-', c='k',linewidth=1, alpha =0.5, zorder=11) - - # reconstruct in gaps - FT = gFT.generalized_Fourier(Gx_1.eta + Gx_1.x, None,Gk_1.k ) - _ = FT.get_H() - FT.b_hat=np.concatenate([ Gk_1.gFT_cos_coeff, Gk_1.gFT_sin_coeff ]) - plt.plot(Gx_1.eta, FT.model() ,'-', c='orange', linewidth=0.3, alpha=1,zorder= 2) - - if 'l' in k: - ax0.spines['left'].set_visible(True) - ax0.spines['right'].set_visible(False) - ax0.spines['top'].set_linewidth(0.2) - ax0.spines['left'].set_color(col.gray) - - ax0.spines['bottom'].set_visible(False) - ax0.tick_params(labelbottom=False, bottom=False) - - elif 'r' in k: - ax0.spines['left'].set_visible(False) - ax0.spines['right'].set_visible(True) - ax0.yaxis.set_ticks_position('right') - ax0.yaxis.set_label_position('right') - ax0.spines['right'].set_color(col.gray) - - ax0.tick_params(axis='both', colors=col.gray) - - ax0.set_facecolor((1.0, 1, 1, 0)) - - ax0.axhline(0.05, linewidth=0.5, color=col.black, alpha=0.5) - ax0.axhline(0, linewidth=0.5, color=col_d[k], alpha=1) - - ax0.spines['bottom'].set_linewidth(0.2) - ax0.spines['bottom'].set_visible(False) - #ax0.spines['bottom'].set_position(('data', 0)) - #plt.grid() - dx = eta_1.diff('eta').mean().data - plt.xlim( eta_1[0].data+0 * dx, eta_1[-1].data+ 0 * dx ) - plt.ylabel('relative slope (m/m)') - plt.ylim(-0.12, 0.12) - -#eta_ticks = np.linspace(Gx_1.eta.data[0], Gx_1.eta.data[-1], 11) -eta_ticks_labels, eta_ticks = MT.tick_formatter(np.arange(-12000, 12000+1500, 4*1500)/1e3, interval= 1, rounder=0, expt_flag= False, shift=0) - -ax0.set_xticks(eta_ticks*1e3) -ax0.set_xticklabels(eta_ticks_labels) - -plt.xlim( eta_1[0].data - 40 * dx, eta_1[-1].data+ 40 * dx ) -plt.xlabel('segment distance $\eta$ (km) @ X='+ numtostr(Gx_1.x.data/1e3)+' km') - - -# make spectral error -# Lpoints= Gk.Lpoints.sel(beam = beam_group).mean('beam').data -# N_per_stancil = Gk.N_per_stancil.sel(beam = beam_group).mean('beam').data[0:-2] -# b_bat_error = np.concatenate([Gplot.model_error_k_cos.data, Gplot.model_error_k_sin.data ]) -# Z_error = gFT.complex_represenation(b_bat_error, Gplot.k.size, Lpoints) -# PSD_error_data, PSD_error_model = gFT.Z_to_power_gFT(Z_error,np.diff(Gplot.k)[0],N_per_stancil , Lpoints ) -#PSD_error_data.shape -#Gk['PSD_error_data'] = ( ('x', 'k'), PSD_error_data) - - -# spectra -# define threshold -k_thresh = 0.085 -ax1_list = list() -dd_max=list() - -for pos, k, lflag in zip([ gs[6:9, 0:3], gs[9:12, 0:3] ], beam_group, [True, False] ): - - ax11 = F.fig.add_subplot(pos) - ax11.tick_params(labelleft=True) - ax1_list.append(ax11) - - Gx_1 = Gx.isel(x= i).sel(beam = k) - Gk_1 = Gk.isel(x= i).sel(beam = k) - Gfft_1 = Gfft.isel(x= i).sel(beam = k) - - klim= Gk_1.k[0], Gk_1.k[-1] - - dd = Gk_1.gFT_PSD_data#.rolling(k=10, min_periods= 1, center=True).mean() - plt.plot(Gk_1.k, dd, color=col_d[k], linewidth=.5 ,alpha= 0.5, zorder=5 ) - - dd = Gk_1.gFT_PSD_data.rolling(k=10, min_periods= 1, center=True).mean() - plt.plot(Gk_1.k, dd, color=col_d[k], linewidth=1, zorder=8 , label='GFT 
Spec') - #plt.plot(Gk_1.k, dd, color=col.gridcolor, linewidth=2.4, zorder=6 ) - - # dd_err = Gk_1.gFT_PSD_model_err.rolling(k=10, min_periods= 1, center=True).mean() - # plt.fill_between(Gk_1.k, dd- 20*dd_err, dd+ 20*dd_err, color=col_d[k], linewidth=1, zorder=8 , label='GFT Spec') - - dd_fft = Gfft_1.power_spec.rolling(k=10, min_periods= 1, center=True).mean() - plt.plot(Gfft_1.k, dd_fft, color=col.gray, linewidth=0.5, zorder=5, label='FFT Spec' ) - - dd_max.append(np.nanmax(dd.data)) - plt.xlim(klim) - plt.title('Beam ' + k + ' Single Estimate', loc='left') - - if lflag: - #plt.title('Energy Spectra', loc ='left') - ax11.tick_params(labelbottom=False, bottom=True) - else: - plt.xlabel('wavenumber k (2$\pi$ m$^{-1}$)') - - plt.ylabel('10$^{-2}$ $(m/m)^2/k$') - - #plt.ylim(dd.min(), max(dd_max) * 1.1) - # ax11.axvline(k_thresh, linewidth=1, color='gray', alpha=1) - # ax11.axvspan(k_thresh , klim[-1], color='gray', alpha=0.5, zorder=12) - - ax11.spines['left'].set_color(col.gray) - ax11.spines['bottom'].set_color(col.gray) - ax11.tick_params(axis='both', colors=col.gray) - - - x_ticks_labels, x_ticks = MT.tick_formatter(np.arange(0.02, 0.12, 0.02), interval= 2, rounder=0, expt_flag= False, shift=0) - ax11.set_xticks(x_ticks) - ax11.set_xticklabels(x_ticks_labels) - -plt.legend() - -if ~np.isnan(np.nanmax(dd_max)): - for ax in ax1_list: - ax.set_ylim(0, np.nanmax(dd_max) * 1.1) - - y_ticks_labels, y_ticks = MT.tick_formatter(np.arange(0, np.nanmax(dd_max)*1e2, 0.5), interval= 2, rounder=0, expt_flag= False, shift=0) - ax.set_yticks(y_ticks/1e2) - ax.set_yticklabels(y_ticks_labels) - -# plot k-x data - -Gplot= Gk.sel(beam = beam_group, x=Gk.x[0:-2]).mean('beam').rolling(k=10, x=2, min_periods= 1, center=True).median() - -# define 2nd part of the axis -ax1 = F.fig.add_subplot(gs[6:9, 3:]) -ax2 = F.fig.add_subplot(gs[9:12, 3:]) -cbaxes = F.fig.add_subplot(gs[-2:, 3:]) - - -F.fig.add_subplot(ax1) -dd = 10 * np.log10(Gplot.gFT_PSD_data) -dd = dd.where(~np.isinf(dd), np.nan ) - -clev_log = M.clevels( [dd.quantile(0.01).data * 0.5, dd.quantile(0.98).data * 2.5], 31)* 1 - -#plot_wavenumber_spectrogram(ax1, dd, clev_log, title =k + ' unsmoothed', plot_photon_density=False) - - -col.colormaps2(31, gamma=1) -#col.plot() -col.white_base_blgror -x_lambda= 2 * np.pi/dd.k -css = plt.pcolormesh(dd.x/1e3, x_lambda , dd, cmap=col.white_base_blgror , vmin = clev_log[0], vmax = clev_log[-1]) - -#plt.xlabel('Distance from the Ice Edge (km)') -plt.ylim(x_lambda[-1], x_lambda[0]) - - -xlims= Gplot.x[0]/1e3, Gplot.x[-1]/1e3 -plt.xlim(xlims) -plt.ylabel('Wave length\n(meters)') -plt.title('Mean Spectrogram', loc='left') - - - -F.fig.add_subplot(ax2) - -dd = 10 * np.log10(Gplot.gFT_PSD_data_err) -dd= np.where(~np.isinf(dd), dd, np.nan ) - -#clev = M.clevels( [np.percentile(dd, 0.01)* 0.75,np.percentile(dd, 0.98) * 1], 31)* 1 - -x_lambda= 2 * np.pi/Gplot.k -plt.pcolormesh(Gplot.x/1e3, x_lambda , dd, cmap=col.white_base_blgror , vmin = clev_log[0], vmax = clev_log[-1]) - -plt.ylim(x_lambda[-1], x_lambda[0]) - -xlims= Gplot.x[0]/1e3, Gplot.x[-1]/1e3 -plt.xlim(xlims) -plt.ylabel('Wave length\n(meters)') -plt.xlabel('Distance from the Ice Edge (km)') -plt.title('Mean Error', loc='left') - -for axx in [ax1, ax2]: - axx.set_yscale('log') - axx.spines['left'].set_visible(False) - axx.spines['right'].set_visible(True) - axx.yaxis.set_ticks_position('right') - axx.yaxis.set_label_position('right') - axx.spines['right'].set_color(col.gray) - - axx.tick_params(axis='both', colors=col.gray) - axx.set_facecolor((1.0, 1, 1, 0)) - 
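The two panels styled in the loop above share one pattern: power becomes 10 log10(PSD) and is drawn against wavelength 2 pi / k on a logarithmic axis. A minimal standalone sketch of that transform on synthetic arrays; the names x, k, and psd here are illustrative, not repository variables:

import numpy as np
import matplotlib.pyplot as plt

x = np.linspace(0, 100e3, 50)                         # along-track distance (m), synthetic
k = np.linspace(2 * np.pi / 500, 2 * np.pi / 20, 80)  # wavenumber (rad/m)
psd = np.outer(np.exp(-x / 50e3), 1.0 / k)            # toy power spectral density, shape (x, k)

dd = 10 * np.log10(psd)                               # power in dB, as in the script above
dd = np.where(np.isinf(dd), np.nan, dd)               # mask -inf produced by log10(0)

x_lambda = 2 * np.pi / k                              # wavenumber -> wavelength (m)
fig, ax = plt.subplots()
pc = ax.pcolormesh(x / 1e3, x_lambda, dd.T, shading='auto')
ax.set_yscale('log')                                  # long waves on top, log spacing
ax.set_ylim(x_lambda[-1], x_lambda[0])
ax.set_xlabel('Distance from the ice edge (km)')
ax.set_ylabel('Wave length (m)')
fig.colorbar(pc, orientation='horizontal', label='Power (dB)')
plt.show()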
axx.axhline(0, linewidth=0.1, color=col.gray, alpha=0.8) - axx.spines['bottom'].set_linewidth(0.2) - axx.axvline(x_sel/1e3, linewidth= 0.8, color= col_d[k], zorder=12) - axx.axvline(x_sel/1e3, linewidth= 2, color= col.black, zorder=10) - - x_ticks_labels, x_ticks = MT.tick_formatter(np.arange( (Gplot.x[0]/1e4).round(0)*10 ,(Gplot.x[-1]/1e4).round(0)*10, 20), interval= 2, rounder=0, expt_flag= False, shift=1) - axx.set_xticks(x_ticks) - axx.set_xticklabels(x_ticks_labels) - - - - -ax1.tick_params(labelbottom=False, bottom=True) - -# cbaxes.spines['left'].set_visible(False) - -cbaxes.axis('off') -cbpos = cbaxes.get_position() -cbaxes2 = F.fig.add_axes([cbpos.x0,cbpos.y0+ 1.5*cbpos.height/6,cbpos.width,cbpos.height/6]) -cb = plt.colorbar(css , cax =cbaxes2, orientation= 'horizontal') - -cb.set_label('Power($(m/m)^2/k$)') -cb.outline.set_visible(False) -cbaxes2.tick_params(axis='both', colors=col.gray) - -#plt.gca().spines['top'].set_visible(False) -# cbaxes2.spines['bottom'].set_visible(False) -# cbaxes2.spines['top'].set_visible(False) - -# F.save_light(path= plot_path, name='B03_gFT_exmpl_x'+str(i)+'_'+track_name) -# F.save_pup(path= plot_path, name='B03_gFT_exmpl_x'+str(i)+'_'+track_name) diff --git a/analyis_publish/PB03_plot_spectra_case_v2.py b/analyis_publish/PB03_plot_spectra_case_v2.py deleted file mode 100644 index 22426c45..00000000 --- a/analyis_publish/PB03_plot_spectra_case_v2.py +++ /dev/null @@ -1,374 +0,0 @@ -# %% -import os, sys -#execfile(os.environ['PYTHONSTARTUP']) - -""" -This file open a ICEsat2 track applied filters and corections and returns smoothed photon heights on a regular grid in an .nc file. -This is python 3 -""" - -exec(open(os.environ['PYTHONSTARTUP']).read()) -exec(open(STARTUP_2021_IceSAT2).read()) - -#%matplotlib inline - -import ICEsat2_SI_tools.convert_GPS_time as cGPS -import h5py -import ICEsat2_SI_tools.io as io -import ICEsat2_SI_tools.spectral_estimates as spec - -import time -import imp -import copy -import spicke_remover -import datetime -import generalized_FT as gFT -from scipy.ndimage.measurements import label - -imp.reload(M_color) -col=M_color.color(path=mconfig['paths']['analysis']+'../config/', name='color_def') - -xr.set_options(display_style='text') -#import s3fs -# %% -track_name, batch_key, test_flag = io.init_from_input(sys.argv) # loads standard experiment -#track_name, batch_key, test_flag = '20190605061807_10380310_004_01', 'SH_batch01', False -#track_name, batch_key, test_flag = '20190601094826_09790312_004_01', 'SH_batch01', False -#track_name, batch_key, test_flag = '20190207111114_06260210_004_01', 'SH_batch02', False -#track_name, batch_key, test_flag = '20190208152826_06440210_004_01', 'SH_batch01', False -#track_name, batch_key, test_flag = '20190213133330_07190212_004_01', 'SH_batch02', False -#track_name, batch_key, test_flag = '20190207002436_06190212_004_01', 'SH_batch02', False -#track_name, batch_key, test_flag = '20190206022433_06050212_004_01', 'SH_batch02', False - - -#track_name, batch_key, test_flag = '20190502050734_05180310_004_01', 'SH_batch02', False - -# local track -#track_name, batch_key, test_flag = '20190219073735_08070210_004_01', 'SH_batch02', False - -track_name, batch_key, test_flag = 'SH_20190219_08070210', 'SH_publish', True - -x_pos= 3 - -#print(track_name, batch_key, test_flag) -hemis, batch = batch_key.split('_') - -load_path = mconfig['paths']['work'] +batch_key +'/B02_spectra/' -load_file = load_path + 'B02_' + track_name #+ '.nc' -#plot_path = mconfig['paths']['plot'] + 
'/'+hemis+'/'+batch_key+'/' + track_name + '/' -plot_path = mconfig['paths']['plot'] + '/'+hemis+'/'+batch_key+'/publish/' + track_name + '/' -MT.mkdirs_r(plot_path) - -Gk = xr.open_dataset(load_file+'_gFT_k.nc') -Gx = xr.open_dataset(load_file+'_gFT_x.nc') - -Gfft = xr.open_dataset(load_file+'_FFT.nc') - - -# %% -all_beams = mconfig['beams']['all_beams'] -high_beams = mconfig['beams']['high_beams'] -low_beams = mconfig['beams']['low_beams'] -#Gfilt = io.load_pandas_table_dict(track_name + '_B01_regridded', load_path) # rhis is the rar photon data -#Gd = io.load_pandas_table_dict(track_name + '_B01_binned' , load_path) # - -col.colormaps2(21) - -# %% check paths (again) -# G_gFT_wmean = (Gk['gFT_PSD_model'].where( ~np.isnan(Gk['gFT_PSD_model']), 0) * Gk['N_per_stancil']).sum('beam')/ Gk['N_per_stancil'].sum('beam') -# G_gFT_wmean['N_per_stancil'] = Gk['N_per_stancil'].sum('beam') - -# G_fft_wmean = (Gfft.where( ~np.isnan(Gfft), 0) * Gfft['N_per_stancil']).sum('beam')/ Gfft['N_per_stancil'].sum('beam') -# G_fft_wmean['N_per_stancil'] = Gfft['N_per_stancil'].sum('beam') - - -# %% define simple routines -def add_info(D, Dk, ylims): - eta = D.eta + D.x - N_per_stancil, ksize = Dk.N_per_stancil.data , Dk.k.size - plt.text(eta[0].data, ylims[-1], ' N='+numtostr(N_per_stancil) + ' N/2M= '+ fltostr(N_per_stancil/2/ksize, 1) ) - - -# %% Single views - -def plot_data_eta(D, offset = 0 , **kargs ): - eta_1 = D.eta# + D.x - y_data = D.y_model +offset - plt.plot(eta_1,y_data , **kargs) - return eta_1 - -def plot_model_eta(D, ax, offset = 0, **kargs ): - eta = D.eta #+ D.x - y_data = D.y_model+offset - plt.plot(eta ,y_data , **kargs) - - # ax.axvline(eta[0].data, linewidth=0.1, color=kargs['color'], alpha=0.5) - # ax.axvline(eta[-1].data, linewidth=0.1, color=kargs['color'], alpha=0.5) - -if ('y_data' in Gx.sel(beam = 'gt3r').keys()): - print('ydata is ', ('y_data' in Gx.sel(beam = 'gt3r').keys()) ) -else: - print('ydata is ', ('y_data' in Gx.sel(beam = 'gt3r').keys()) ) - MT.json_save('B03_fail', plot_path, {'reason':'no y_data'}) - print('failed, exit') - exit() - - - -# %% - -# derive spectral errors: -Lpoints= Gk.Lpoints.mean('beam').data -N_per_stancil = Gk.N_per_stancil.mean('beam').data#[0:-2] - -G_error_model =dict() -G_error_data =dict() - -for bb in Gk.beam.data: - I = Gk.sel(beam= bb) - b_bat_error = np.concatenate([ I.model_error_k_cos.data , I.model_error_k_sin.data ]) - Z_error = gFT.complex_represenation(b_bat_error, Gk.k.size, Lpoints) - PSD_error_data, PSD_error_model = gFT.Z_to_power_gFT(Z_error, np.diff(Gk.k)[0],N_per_stancil , Lpoints ) - - #np.expand_dims(PSD_error_model, axis =) - G_error_model[bb] = xr.DataArray(data = PSD_error_model, coords = I.drop('N_per_stancil').coords, name='gFT_PSD_model_error' ).expand_dims('beam') - G_error_data[bb] = xr.DataArray(data = PSD_error_data, coords = I.drop('N_per_stancil').coords, name='gFT_PSD_data_error' ).expand_dims('beam') - - -Gk['gFT_PSD_model_err'] = xr.concat(G_error_model.values(), dim='beam') -Gk['gFT_PSD_data_err'] = xr.concat(G_error_data.values(), dim='beam') - - - -# %% -fltostr = MT.float_to_str -numtostr = MT.num_to_str - -font_for_print() - - -#for i in x_pos_sel[::2]: -#i =x_pos_sel[20] -MT.mkdirs_r(plot_path+'B03_spectra/') - -x_pos_sel = np.arange(Gk.x.size)[~np.isnan(Gk.mean('beam').mean('k').gFT_PSD_data.data)] -x_pos_max = Gk.mean('beam').mean('k').gFT_PSD_data[~np.isnan(Gk.mean('beam').mean('k').gFT_PSD_data)].argmax().data -xpp = x_pos_sel[ [int(i) for i in np.round(np.linspace(0, x_pos_sel.size-1, 10))]] -xpp = 
np.insert(xpp, 0, x_pos_max) - -key = 'gt2r' -#lead_color= col.rels[key] -lead_color= col.rels['group2'] - -x_pos =2 -i =xpp[x_pos] -#for i in xpp: -fn = copy.copy(lstrings) -#i = xpp[0] -font_for_print() -xscale=1e3 -F = M.figure_axis_xy(fig_sizes['two_column_square'][0], fig_sizes['two_column_square'][1], view_scale=0.8, container =True) - -#plt.suptitle('gFT Model and Spectrograms | x='+str(Gk.x[i].data)+' \n' + track_name, y = 0.95) -gs = GridSpec(14,6, wspace=0.6, hspace=1)#figure=fig, - - -col_d = col.__dict__['rels'] -beam_group = ['gt2l', 'gt2r'] -for k, gss in zip(beam_group, [gs[0:3, :], gs[2:5, :]] ): - - - ax0 = F.fig.add_subplot(gss) - - Gx_1 = Gx.isel(x= i).sel(beam = k) - Gk_1 = Gk.isel(x= i).sel(beam = k) - x_sel= Gx_1.x - - plot_model_eta(Gx_1, ax0, offset= 0, linestyle='-', color=col_d[k], linewidth=0.8, alpha=1, zorder=12 ) - ylims= -np.nanstd(Gx_1.y_data)*3, np.nanstd(Gx_1.y_data)*3 - #add_info(Gx_1, Gk_1 , ylims ) - - # oringial data - eta_1= plot_data_eta(Gx_1 , offset= 0.05 , linestyle= '-', c='k',linewidth=1, alpha =0.5, zorder=11) - - # reconstruct in gaps - FT = gFT.generalized_Fourier(Gx_1.eta + Gx_1.x, None,Gk_1.k ) - _ = FT.get_H() - FT.p_hat=np.concatenate([ Gk_1.gFT_cos_coeff, Gk_1.gFT_sin_coeff ]) - plt.plot(Gx_1.eta, FT.model() ,'-', c='orange', linewidth=0.3, alpha=1,zorder= 2) - - if 'l' in k: - ax0.spines['left'].set_visible(True) - ax0.spines['right'].set_visible(False) - ax0.spines['top'].set_linewidth(0.2) - ax0.spines['left'].set_color(col.gray) - - ax0.spines['bottom'].set_visible(False) - ax0.tick_params(labelbottom=False, bottom=False) - ax0.set_title(next(fn) + 'Data and Model', loc= 'left') - elif 'r' in k: - ax0.spines['left'].set_visible(False) - ax0.spines['right'].set_visible(True) - ax0.yaxis.set_ticks_position('right') - ax0.yaxis.set_label_position('right') - ax0.spines['right'].set_color(col.gray) - - ax0.tick_params(axis='both', colors=col.gray) - - ax0.set_facecolor((1.0, 1, 1, 0)) - - ax0.axhline(0.05, linewidth=0.5, color=col.black, alpha=0.5) - ax0.axhline(0, linewidth=0.5, color=col_d[k], alpha=0.4) - - ax0.spines['bottom'].set_linewidth(0.2) - ax0.spines['bottom'].set_visible(False) - #ax0.spines['bottom'].set_position(('data', 0)) - #plt.grid() - dx = eta_1.diff('eta').mean().data - plt.xlim( eta_1[0].data+0 * dx, eta_1[-1].data+ 0 * dx ) - plt.ylabel('relative slope (m/m)') - plt.ylim(-0.12, 0.12) - -#eta_ticks = np.linspace(Gx_1.eta.data[0], Gx_1.eta.data[-1], 11) -eta_ticks_labels, eta_ticks = MT.tick_formatter(np.arange(-12000, 12000+1500, 4*1500)/1e3, interval= 1, rounder=0, expt_flag= False, shift=0) - -ax0.set_xticks(eta_ticks*1e3) -ax0.set_xticklabels(eta_ticks_labels) - -plt.xlim( eta_1[0].data - 40 * dx, eta_1[-1].data+ 40 * dx ) -plt.xlabel('segment distance $\eta$ (km) @ X='+ numtostr(Gx_1.x.data/1e3)+' km') - - -# make spectral error -# Lpoints= Gk.Lpoints.sel(beam = beam_group).mean('beam').data -# N_per_stancil = Gk.N_per_stancil.sel(beam = beam_group).mean('beam').data[0:-2] -# b_bat_error = np.concatenate([Gplot.model_error_k_cos.data, Gplot.model_error_k_sin.data ]) -# Z_error = gFT.complex_represenation(b_bat_error, Gplot.k.size, Lpoints) -# PSD_error_data, PSD_error_model = gFT.Z_to_power_gFT(Z_error,np.diff(Gplot.k)[0],N_per_stancil , Lpoints ) -#PSD_error_data.shape -#Gk['PSD_error_data'] = ( ('x', 'k'), PSD_error_data) - -# define 2nd part of the axis -ax1 = F.fig.add_subplot(gs[7:10, 3:]) -ax2 = F.fig.add_subplot(gs[11:14, 3:]) - - -# spectra -# define threshold -k_thresh = 0.085 -ax1_list = list() 
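The loop below replaces the earlier rolling-mean smoothing with block averages: groupby_bins collapses the raw gFT spectrum into coarse wavenumber bins, which plt.stairs then draws as steps. A small self-contained sketch of that binning pattern on a toy spectrum; all names here are illustrative:

import numpy as np
import xarray as xr
import matplotlib.pyplot as plt

k = np.linspace(0.005, 0.1, 200)                          # wavenumber coordinate
spec = xr.DataArray(1e-3 / k, dims='k', coords={'k': k})  # toy red spectrum

k_edges = spec.k[::10]                                    # every 10th grid point as bin edge
binned = spec.groupby_bins('k', k_edges).mean()           # mean PSD inside each bin

# plt.stairs expects len(values) == len(edges) - 1, which groupby_bins delivers
plt.stairs(binned.values, k_edges.values, label='binned spectrum')
plt.plot(k, spec, alpha=0.4, label='raw spectrum')
plt.xlabel('wavenumber k (rad/m)')
plt.legend()
plt.show()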
-dd_max=list() -err_max = 0 - -for pos, k, lflag in zip([ gs[7:10, 0:3], gs[11:14, 0:3] ], beam_group, [True, False] ): - - ax11 = F.fig.add_subplot(pos) - ax11.tick_params(labelleft=True) - ax1_list.append(ax11) - - Gx_1 = Gx.isel(x= i).sel(beam = k) - Gk_1 = Gk.isel(x= i).sel(beam = k) - Gfft_1 = Gfft.isel(x= i).sel(beam = k) - - - - - dd = Gk_1.gFT_PSD_data#.rolling(k=10, min_periods= 1, center=True).mean() - plt.plot(Gk_1.k, dd, color=col_d[k], linewidth=.5 ,alpha= 0.5, zorder=5 ) - - - #dd = Gk_1.gFT_PSD_data.rolling(k=10, min_periods= 1, center=True).mean() - k_low_limits =Gk_1.gFT_PSD_data.k[::10] - k_low = ( k_low_limits+ k_low_limits.diff('k')[0]/2).data[0:-1] - - dd = Gk_1.gFT_PSD_data.groupby_bins('k' , k_low_limits).mean() - #plt.plot(k_low, dd, color=col_d[k], linewidth=1, zorder=8 , label='GFT Spec') - plt.stairs(dd, k_low_limits, color=col_d[k], linewidth=1, zorder=8 , label='GFT Spec') - #plt.plot(Gk_1.k, dd, color=col.gridcolor, linewidth=2.4, zorder=6 ) - - dd_fft = Gfft_1.power_spec.groupby_bins('k' , k_low_limits).mean()#.rolling(k=10, min_periods= 1, center=True).mean() - #plt.plot(k_low, dd_fft, color=col.gray, linewidth=0.5, zorder=5, label='FFT Spec' ) - plt.stairs(dd_fft, k_low_limits, color=col.gray, linewidth=0.5, zorder=5, label='FFT Spec' ) - - klim= k_low[0], Gk_1.k[-1] - dd_max.append(np.nanmax(dd.data)) - plt.xlim(klim) - plt.title(next(fn) +'Beam ' + k + ' Spectral Estimate', loc='left', y= 1.1) - - if lflag: - #plt.title('Energy Spectra', loc ='left') - pass #ax11.tick_params(labelbottom=False, bottom=True) - else: - plt.xlabel('wavenumber k (2$\pi$ m$^{-1}$)') - - plt.ylabel('10$^{-2}$ $(m/m)^2/k$') - - #plt.ylim(dd.min(), max(dd_max) * 1.1) - # ax11.axvline(k_thresh, linewidth=1, color='gray', alpha=1) - # ax11.axvspan(k_thresh , klim[-1], color='gray', alpha=0.5, zorder=12) - - ax11.spines['left'].set_color(col.gray) - ax11.spines['bottom'].set_color(col.gray) - ax11.tick_params(axis='both', colors=col.gray) - - - x_ticks_labels, x_ticks = MT.tick_formatter(np.arange(0, 0.12, 0.02), interval= 2, rounder=0, expt_flag= False, shift=1) - ax11.set_xticks(x_ticks) - ax11.set_xticklabels(x_ticks_labels) - ax11.set_xlim(Gfft_1.k.min(), x_ticks[-1]) - - #dd_err = Gk_1.gFT_PSD_model_err.rolling(k=10, min_periods= 1, center=True).mean() - dd_err = Gk_1.gFT_PSD_model_err.groupby_bins('k' , k_low_limits).mean() - ax1.fill_between(k_low, dd_err, color=col_d[k], linewidth=1, zorder=8 , alpha = 0.5) - ax1.plot(k_low, dd_err, color=col_d[k], linewidth=1, zorder=8 , label=k) - err_max = dd_err.max() if dd_err.max() > err_max else err_max - - ax2.hist( 2 *(Gx_1.y_data - Gx_1.y_model)/ Gx_1.y_data.std(), bins= 40, color = col_d[k], alpha= 0.5, density=True, stacked=True) - -# from scipy import stats -# x2_data = np.arange(-5, 5, 0.001) -# y2_data = stats.norm.pdf(x2_data, 0, 1) -# ax2.plot(x2_data, y2_data) - - -plt.legend() -#help(plt.hist) -if ~np.isnan(np.nanmax(dd_max)): - for ax in ax1_list: - ax.set_ylim(0, np.nanmax(dd_max) * 1.1) - - y_ticks_labels, y_ticks = MT.tick_formatter(np.arange(0, np.nanmax(dd_max)*1e2, 0.5), interval= 2, rounder=0, expt_flag= False, shift=0) - ax.set_yticks(y_ticks/1e2) - ax.set_yticklabels(y_ticks_labels) - -# plot k-x data - -ax1.spines['left'].set_color(col.gray) -ax1.spines['bottom'].set_color(col.gray) -ax1.tick_params(axis='both', colors=col.gray) -ax1.legend() -ax1.set_xticks(x_ticks) -ax1.set_xticklabels(x_ticks_labels) -ax1.set_xlim(k_low.min(), x_ticks[-1]) -ax1.set_ylim(0, err_max*1.05) -ax1.set_title(next(fn) +'Spectral 
Error', loc='left', y= 1.1) - - -ax2.spines['left'].set_color(col.gray) -ax2.spines['bottom'].set_color(col.gray) -ax2.tick_params(axis='both', colors=col.gray) - -hist_x_ticks_labels, hist_x_ticks = MT.tick_formatter(np.arange(-3, 3.5, 0.5)* 2, interval= 2, rounder=0, expt_flag= False, shift=1) -ax2.set_xticks(hist_x_ticks) -ax2.set_xticklabels(hist_x_ticks_labels) -ax2.set_xlim(-4, 4) -ax2.set_title(next(fn) +'PDF of Residual $\mathbf{r}$ ', loc='left', y= 1.1) -ax2.set_xlabel('Normalized Error') -ax2.axvline(0, color=col.black, linewidth= 0.5, alpha = 0.5) - - - -F.save_light(path= plot_path, name='B03_gFT_exmpl_x_v2_'+str(i)+'_'+track_name) -F.save_pup(path= plot_path, name='B03_gFT_exmpl_x_v2_'+str(i)+'_'+track_name) - -# %% diff --git a/analyis_publish/PB04_angle.py b/analyis_publish/PB04_angle.py deleted file mode 100644 index dba69ef7..00000000 --- a/analyis_publish/PB04_angle.py +++ /dev/null @@ -1,930 +0,0 @@ - -import os, sys -#execfile(os.environ['PYTHONSTARTUP']) - -""" -This file open a ICEsat2 track applied filters and corections and returns smoothed photon heights on a regular grid in an .nc file. -This is python 3 -""" - -exec(open(os.environ['PYTHONSTARTUP']).read()) -exec(open(STARTUP_2021_IceSAT2).read()) - -#%matplotlib inline - -import ICEsat2_SI_tools.convert_GPS_time as cGPS -import h5py -import ICEsat2_SI_tools.io as io -import ICEsat2_SI_tools.spectral_estimates as spec - -import imp -import copy -import spicke_remover -import datetime -import concurrent.futures as futures - -from numba import jit - -from ICEsat2_SI_tools import angle_optimizer -import ICEsat2_SI_tools.wave_tools as waves -import concurrent.futures as futures - -import time - -from contextlib import contextmanager -col.colormaps2(21) - -@contextmanager -def suppress_stdout(): - with open(os.devnull, "w") as devnull: - old_stdout = sys.stdout - sys.stdout = devnull - try: - yield - finally: - sys.stdout = old_stdout - -col_dict = col.rels -#import s3fs -# %% -track_name, batch_key, test_flag = io.init_from_input(sys.argv) # loads standard experiment -#track_name, batch_key, test_flag = '20190605061807_10380310_004_01', 'SH_batch01', False -#track_name, batch_key, test_flag = '20190601094826_09790312_004_01', 'SH_batch01', False -#track_name, batch_key, test_flag = '20190207111114_06260210_004_01', 'SH_batch02', False -#track_name, batch_key, test_flag = '20190219073735_08070210_004_01', 'SH_batch02', False -#track_name, batch_key, test_flag = '20190215184558_07530210_004_01', 'SH_batch02', False - -# good track -#track_name, batch_key, test_flag = '20190502021224_05160312_004_01', 'SH_batch02', False -#track_name, batch_key, test_flag = '20190502050734_05180310_004_01', 'SH_batch02', False -#track_name, batch_key, test_flag = '20190210143705_06740210_004_01', 'SH_batch02', False - - -#track_name, batch_key, test_flag = '20190502021224_05160312_004_01', 'SH_batch02', False -track_name, batch_key, test_flag = '20190213133330_07190212_004_01', 'SH_batch02', False -#20190213133330_07190212_004_01 - -#print(track_name, batch_key, test_flag) -hemis, batch = batch_key.split('_') -#track_name= '20190605061807_10380310_004_01' -ATlevel= 'ATL03' - -save_path = mconfig['paths']['work'] + '/B04_angle_'+hemis+'/' -save_name = 'B04_'+track_name - -plot_path = mconfig['paths']['plot'] + '/'+hemis+'/'+batch_key+'/vids/' + track_name + '/' -MT.mkdirs_r(plot_path) -MT.mkdirs_r(save_path) -bad_track_path =mconfig['paths']['work'] +'bad_tracks/'+ batch_key+'/' -# %% - -all_beams = mconfig['beams']['all_beams'] 
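The suppress_stdout context manager defined a few lines above silences chatty sampler output by pointing sys.stdout at the null device while the block runs, then restoring it. A standalone usage sketch of the same pattern:

import os, sys
from contextlib import contextmanager

@contextmanager
def suppress_stdout():
    # temporarily redirect sys.stdout to the null device, restore on exit
    with open(os.devnull, "w") as devnull:
        old_stdout = sys.stdout
        sys.stdout = devnull
        try:
            yield
        finally:
            sys.stdout = old_stdout

print("visible")
with suppress_stdout():
    print("hidden")          # swallowed by the null device
print("visible again")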
-high_beams = mconfig['beams']['high_beams'] -low_beams = mconfig['beams']['low_beams'] -beam_groups = mconfig['beams']['groups'] - -#Gfilt = io.load_pandas_table_dict(track_name + '_B01_regridded', load_path) # rhis is the rar photon data - -load_path = mconfig['paths']['work'] +'/B01_regrid_'+hemis+'/' -G_binned = io.load_pandas_table_dict(track_name + '_B01_binned' , load_path) # - -load_path = mconfig['paths']['work'] +'/B02_spectra_'+hemis+'/' -Gx = xr.load_dataset(load_path+ '/B02_'+track_name + '_gFT_x.nc' ) # -Gk = xr.load_dataset(load_path+ '/B02_'+track_name + '_gFT_k.nc' ) # - - -# %% load prior information -load_path = mconfig['paths']['work'] +'/A02_prior_'+hemis+'/' -#track_name = '20190208104534_06410210_004_01' -try: - Prior = MT.load_pandas_table_dict('/A02b_'+track_name, load_path)['priors_hindcast'] -except: - print('Prior not founds exit') - MT.json_save('B04_fail', plot_path, {'time':time.asctime( time.localtime(time.time()) ) , 'reason': 'Prior not found'}) - print('exit()') - exit() - - -#### Define Prior -# Use partitions -# Prior2 = Prior.loc[['ptp0','ptp1','ptp2','ptp3','ptp4','ptp5']]['mean'] -# dominat_period = Prior2[Prior2.max() ==Prior2] -# aa = Prior.loc[['pdp0','pdp1','pdp2','pdp3','pdp4','pdp5']]['mean'].astype('float') -# dominant_dir = waves.get_ave_amp_angle(aa *0+1,aa )[1] -# dominant_dir_spread = Prior.loc[['pspr0','pspr1','pspr2','pspr3','pspr4','pspr5']]['mean'].median() -# -# prior_sel= {'alpha': ( dominant_dir *np.pi/180 , dominant_dir_spread *np.pi/180) } # to radiens -#prior_sel= {'alpha': ( -60 *np.pi/180 , dominant_dir_spread *np.pi/180) } # to radiens - - -Pperiod = Prior.loc[['ptp0','ptp1','ptp2','ptp3','ptp4','ptp5']]['mean'] -Pdir = Prior.loc[['pdp0','pdp1','pdp2','pdp3','pdp4','pdp5']]['mean'].astype('float') -Pspread = Prior.loc[['pspr0','pspr1','pspr2','pspr3','pspr4','pspr5']]['mean'] - -Pperiod = Pperiod[~np.isnan(list(Pspread))] -Pdir = Pdir[~np.isnan(list(Pspread))] -Pspread = Pspread[~np.isnan(list(Pspread))] - - -# reset dirs: -Pdir[Pdir > 180] = Pdir[Pdir > 180] - 360 -Pdir[Pdir < -180] = Pdir[Pdir < -180] + 360 - -# reorder dirs -dir_best = [0] -for dir in Pdir: - ip = np.argmin([ abs(dir_best[-1] - dir), abs(dir_best[-1] - (dir - 360 )), abs(dir_best[-1] - (dir + 360 )) ] ) - new_dir = np.array([ dir, (dir - 360 ) , (dir + 360 ) ])[ip] - dir_best.append(new_dir) -dir_best = np.array(dir_best[1:]) - -# %% - -Pwavenumber = (2 * np.pi / Pperiod )**2 / 9.81 -kk = Gk.k -dir_interp = np.interp(kk, Pwavenumber[Pwavenumber.argsort()] , dir_best[Pwavenumber.argsort()] ) -dir_interp_smth = M.runningmean(dir_interp, 30, tailcopy= True) -dir_interp_smth[-1] = dir_interp_smth[-2] - -spread_interp = np.interp(kk, Pwavenumber[Pwavenumber.argsort()] , Pspread[Pwavenumber.argsort()].astype('float') ) -spread_smth = M.runningmean(spread_interp, 30, tailcopy= True) -spread_smth[-1] = spread_smth[-2] - - -font_for_pres() - -F = M.figure_axis_xy(5, 4.5, view_scale= 0.5) -plt.subplot(2, 1, 1) -plt.title('Prior angle smoothed\n'+ track_name, loc ='left') - - -plt.plot( Pwavenumber , dir_best, '.r', markersize = 8) -plt.plot( kk , dir_interp, '-', color= 'red', linewidth = 0.8, zorder=11) -plt.plot( kk , dir_interp_smth , color=col.green1) - -plt.fill_between(kk, dir_interp_smth -spread_smth, dir_interp_smth +spread_smth, zorder= 1, color=col.green1, alpha = 0.2 ) -plt.ylabel('Angle (deg)') -#plt.xlabel('wavenumber ($2 \pi/\lambda$)') - -ax2 = plt.subplot(2, 1, 2) -plt.title('Prior angle adjusted ', loc ='left') - -# adjust angle def: 
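The dir_best loop above unwraps the prior wave directions: each partition direction is replaced by whichever of dir, dir - 360, or dir + 360 lies closest to the previously accepted value, so the sequence interpolates smoothly in wavenumber. A standalone sketch of that rule with toy inputs; the script's own re-wrapping of dir_interp_smth into the ±180 degree range continues directly below:

import numpy as np

Pdir = np.array([170.0, -175.0, 160.0, -150.0])  # partition directions (deg), toy values

dir_best = [0.0]                                  # seed value, as in the deleted script
for d in Pdir:
    candidates = np.array([d, d - 360.0, d + 360.0])
    # pick the representation closest to the previously accepted direction
    dir_best.append(candidates[np.argmin(np.abs(dir_best[-1] - candidates))])
dir_best = np.array(dir_best[1:])

print(dir_best)  # [170. 185. 160. 210.] : continuous, no 360-degree jumps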
-dir_interp_smth[dir_interp_smth> 180] = dir_interp_smth[dir_interp_smth> 180]- 360
-dir_interp_smth[dir_interp_smth< -180] = dir_interp_smth[dir_interp_smth< -180]+ 360
-
-plt.fill_between(kk, dir_interp_smth -spread_smth, dir_interp_smth +spread_smth, zorder= 1, color=col.green1, alpha = 0.2 )
-plt.plot( kk , dir_interp_smth , '.', markersize = 1 , color=col.green1)
-
-ax2.axhline(85, color='gray', linewidth= 2)
-ax2.axhline(-85, color='gray', linewidth= 2)
-
-plt.ylabel('Angle (deg)')
-plt.xlabel('wavenumber ($2 \pi/\lambda$)')
-
-F.save_light(path= plot_path, name = 'B04_prior_angle')
-
-
-# save
-dir_interp_smth = xr.DataArray(data=dir_interp_smth * np.pi/180 , dims='k', coords ={'k':kk}, name='Prior_direction')
-spread_smth = xr.DataArray(data=spread_smth* np.pi/180 , dims='k', coords ={'k':kk}, name='Prior_spread')
-Prior_smth = xr.merge([dir_interp_smth, spread_smth])
-
-# %%
-prior_angle =Prior_smth.Prior_direction * 180/np.pi
-if (abs(prior_angle) > 80).all():
-    print('Prior angle is ', prior_angle.min().data, prior_angle.max().data, '. quit.')
-    dd_save = {'time' : time.asctime( time.localtime(time.time()) ),
-        'angle': list([ float(prior_angle.min().data), float(prior_angle.max().data), float(prior_angle.median()) ]) }
-    MT.json_save('B04_fail', plot_path, dd_save)
-    print('exit()')
-    #exit()
-
-# Use fake
-#prior_sel= {'alpha': ( 0.6 , dominant_dir_spread *np.pi/180) } # to radians
-
-# Use mean direction
-#prior_sel= {'alpha': ( Prior.loc['dp']['mean'] *np.pi/180 , Prior.loc['spr']['mean'] *np.pi/180) }
-
-
-# define parameter range
-params_dict = {'alpha': [ -0.85 * np.pi /2, 0.85 * np.pi /2, 5],
-    'phase':[ 0 , 2*np.pi , 10]}
-
-alpha_dx = 0.04
-max_wavenumbers = 25
-
-sample_flag = True
-optimize_flag = False
-brute_flag = False
-
-plot_flag = True
-
-Nworkers = 6
-N_sample_chain = 300
-N_sample_chain_burn = 30
-
-max_x_pos = 8
-x_pos_jump = 2
-
-def make_fake_data(xi,group ):
-    ki= Gk.k[0:2]
-
-    bins = np.arange(params_dict['alpha'][0], params_dict['alpha'][1]+alpha_dx, alpha_dx)
-    bins_pos = (bins[0:-1] + np.diff(bins)/2)
-    marginal_stack = xr.DataArray( np.nan* np.vstack([bins_pos, bins_pos]).T, dims= ('angle', 'k'), coords = {'angle':bins_pos, 'k':ki.data } )
-
-    group_name = str('group' + group[0].split('gt')[1].split('l')[0])
-    marginal_stack.coords['beam_group'] = group_name
-    marginal_stack.coords['x'] = xi
-    marginal_stack.name = 'marginals'
-    marginal_stack.expand_dims(dim = 'x', axis = 2).expand_dims(dim = 'beam_group', axis = 3)
-    return marginal_stack
-
-def define_wavenumber_weights_tot_var(dd, m = 3, variance_frac = 0.33, k_upper_lim= None, verbose=False):
-
-    """
-    return peaks of a power spectrum dd in a format such that they can be used as weights for the frequency-based fitting
-
-    inputs:
-    dd      xarray with PSD as data and coordinate wavenumber k
-    m       running mean half-width in gridpoints
-    variance_frac  (0 to 1) how much variance should be explained by the returned peaks
-    verbose if True, plot some diagnostics
-
-
-    return:
-    mask    size of dd.
where True the data is identified as having significant amplitude - k wanumbers where mask is true - dd_rm smoothed version of dd - positions postions where of significant data in array - """ - - if len(dd.shape) == 2: - dd_use = dd.mean('beam') - - if m is None: - dd_rm = dd_use.data#M.runningmean(dd, m, tailcopy=True) - else: - dd_rm = M.runningmean(dd_use, m, tailcopy=True) - - k = dd_use.k[~np.isnan(dd_rm)].data - dd_rm = dd_rm[~np.isnan(dd_rm)] - - orders = dd_rm.argsort()[::-1] - var_mask = dd_rm[orders].cumsum()/dd_rm.sum() < variance_frac - pos_cumsum = orders[var_mask] - #k_list = k[pos_cumsum] - #dd_list = dd_rm[pos_cumsum] - mask = var_mask[orders.argsort()] - if k_upper_lim is not None: - mask = (k < k_upper_lim) & mask - - if verbose: - - plt.plot(dd.k, dd, '-', color = col_dict[str(amp_data.beam[0].data)], markersize= 20, alpha = 0.6) - plt.plot(k, dd_rm, '-k', markersize= 20) - #print(k_list, dd_list) - plt.plot(k[mask], dd_rm[mask], '.r', markersize= 10, zorder=12) - if k_upper_lim is not None: - plt.gca().axvline(k_upper_lim, color= 'black') - - return mask, k, dd_rm, pos_cumsum - -def define_wavenumber_weights_threshold(dd, m = 3, Nstd= 2, verbose=False): - - if m is None: - dd_rm = dd#M.runningmean(dd, m, tailcopy=True) - else: - dd_rm = M.runningmean(dd, m, tailcopy=True) - - k = dd.k[~np.isnan(dd_rm)] - dd_rm = dd_rm[~np.isnan(dd_rm)] - - treshold = np.nanmean(dd_rm) + np.nanstd(dd_rm) *Nstd - mask = dd_rm > treshold - - - if verbose: - plt.plot(dd.k, dd, '-k', markersize= 20) - plt.plot(k, dd_rm, '-b', markersize= 20) - - k_list = k[mask] - dd_list = dd_rm[mask] - #print(k_list, dd_list) - plt.plot(k_list, dd_list, '.r', markersize= 10, zorder=12) - - return mask, k, dd_rm, np.arange(0, mask.size)[mask] - - - -# isolate x positions with data -data_mask = Gk.gFT_PSD_data.mean('k') -data_mask.coords['beam_group'] = ('beam', ['beam_group'+g[2] for g in data_mask.beam.data]) -data_mask_group = data_mask.groupby('beam_group').mean(skipna=False) -# these stancils are actually used -data_sel_mask = data_mask_group.sum('beam_group') !=0 - -x_list = data_sel_mask.x[data_sel_mask] # iterate over these x posistions -x_list_flag = ~np.isnan(data_mask_group.sel(x = x_list) )# flag that is False if there is no data - -#### limit number of x coordinates - -x_list = x_list[::x_pos_jump] -if len(x_list) > max_x_pos: - x_list = x_list[0:max_x_pos] -x_list_flag= x_list_flag.sel(x =x_list) - -# plot -font_for_print() -F = M.figure_axis_xy(5.5, 3, view_scale= 0.8) -plt.suptitle(track_name) -ax1 = plt.subplot(2, 1, 1) -plt.title('Data in Beam', loc= 'left') -plt.pcolormesh(data_mask.x/1e3, data_mask.beam, data_mask, cmap= plt.cm.OrRd) -for i in np.arange(1.5, 6, 2): - ax1.axhline(i, color= 'black', linewidth =0.5) -plt.xlabel('Distance from Ice Edge') - -ax2 = plt.subplot(2, 1, 2) -plt.title('Data in Group', loc= 'left') -plt.pcolormesh(data_mask.x/1e3, data_mask_group.beam_group, data_mask_group, cmap= plt.cm.OrRd) - -for i in np.arange(0.5, 3, 1): - ax2.axhline(i, color= 'black', linewidth =0.5) - -plt.plot( x_list/1e3, x_list*0 +0, '.', markersize= 2, color= col.cascade1 ) -plt.plot( x_list/1e3, x_list*0 +1, '.', markersize= 2, color= col.cascade1 ) -plt.plot( x_list/1e3, x_list*0 +2, '.', markersize= 2, color= col.cascade1 ) - -plt.xlabel('Distance from Ice Edge') - -F.save_pup(path= plot_path, name = 'B04_data_avail') - - -# %% -Marginals = dict() -L_collect = dict() -marginal_stack = dict() - -group_number = np.arange(len(beam_groups)) -ggg, xxx = np.meshgrid(group_number , 
x_list.data[2:3] ) - -for gi in zip(ggg.flatten(), xxx.flatten()): - print(gi) - - group, xi = beam_groups[gi[0]], gi[1] - - if bool(x_list_flag.sel(x= xi).isel(beam_group= gi[0]).data) is False: - print('no data, fill with dummy') - ikey = str(xi) +'_' + '_'.join(group) - Marginals[ikey] = make_fake_data(xi, group) - #print(Marginals[ikey].angle.data[::20]) - continue - - GGx = Gx.sel(beam= group).sel(x = xi) - GGk = Gk.sel(beam= group).sel(x = xi) - - ### define data - # normalize data - key = 'y_data' - amp_Z = (GGx[key] - GGx[key].mean(['eta'])) /GGx[key].std(['eta']) - N = amp_Z.shape[0] - - # define x,y positions - eta_2d = GGx.eta + GGx.x_coord - GGx.x_coord.mean() - nu_2d = GGx.eta * 0 + GGx.y_coord - GGx.y_coord.mean() - - # repack as np arrays - x_concat = eta_2d.data.T.flatten() - y_concat = nu_2d.data.T.flatten() - z_concat = amp_Z.data.flatten() - - x_concat= x_concat[~np.isnan(z_concat)] - y_concat= y_concat[~np.isnan(z_concat)] - z_concat= z_concat[~np.isnan(z_concat)] - N_data = x_concat.size - - if np.isnan(z_concat).sum() != 0: - raise ValueError('There are still nans') - - mean_dist = (nu_2d.isel(beam= 0) - nu_2d.isel(beam= 1)).mean().data - k_upper_lim = 2 *np.pi / ( mean_dist *1 ) - - print('k_upper_lim ', k_upper_lim) - # threshold method - #mask, k, weights, positions = define_wavenumber_weights_threshold( Gi.mean('dist_y')['gFT_PSD_data'], 3 , verbose= True) - #plt.plot(k[mask], weights[mask], 'g*', markersize=20) - # plt.show() - - #variance method - amp_data = np.sqrt(GGk.gFT_cos_coeff**2 + GGk.gFT_sin_coeff**2) - mask, k, weights, positions = define_wavenumber_weights_tot_var(amp_data, m= 1, k_upper_lim= k_upper_lim, variance_frac = 0.20 , verbose= False) - #plt.xlim( k[mask].min()*0.8 ,max(k_upper_lim, k[mask].max()*1.2) ) - #plt.xlim( k[mask].min()*0.8 ,k[mask].max()*1.4 ) - #plt.show() - - if (len(k[mask]) ==0): - print('no good k found, fill with dummy') - ikey = str(xi) +'_' + '_'.join(group) - Marginals[ikey] = make_fake_data(xi, group) - continue - - - k_list, weight_list = k[mask], weights[mask] - print('# of wavenumber: ' , len(k_list)) - - #### prepare loop - #imp.reload(angle_optimizer) - - SM = angle_optimizer.sample_with_mcmc(params_dict) - SM.set_objective_func(angle_optimizer.objective_func) - - SM.fitting_args = fitting_args = (x_concat, y_concat, z_concat) - - - # test: - k_prime_max= 0.02 #[mask][0] # chose a test wavenumber - amp_Z= 1 - prior_sel= {'alpha': ( Prior_smth.sel(k =k_prime_max, method='nearest').Prior_direction.data, - Prior_smth.sel(k =k_prime_max, method='nearest').Prior_spread.data) } - SM.fitting_kargs = fitting_kargs = {'prior': prior_sel , 'prior_weight' : 3 } - # test if it works - SM.params.add('K_prime', k_prime_max , vary=False , min=k_prime_max*0.5, max=k_prime_max*1.5) - SM.params.add('K_amp', amp_Z , vary=False , min=amp_Z*.0 , max=amp_Z*5) - try: - SM.test_objective_func() - except: - raise ValueError('Objective function test fails') - - #k_prime_max, Z_max = k_list[-1], weight_list[-1] - pk= 0 - for k_prime_max, Z_max in zip(k_list, weight_list): - - brute_flag = True - prior_sel= {'alpha': ( Prior_smth.sel(k =k_prime_max, method='nearest').Prior_direction.data, - Prior_smth.sel(k =k_prime_max, method='nearest').Prior_spread.data) } - - SM = angle_optimizer.sample_with_mcmc(params_dict) - SM.set_objective_func(angle_optimizer.objective_func) - - SM.fitting_args = fitting_args = (x_concat, y_concat, z_concat) - #print(prior_sel) - SM.fitting_kargs = fitting_kargs = {'prior': prior_sel , 'prior_weight' : 3 } - 
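A few lines below, SM.get_marginal_dist reduces the MCMC chain to a one-dimensional marginal over the incidence angle. A sketch of what such a reduction plausibly amounts to, on a synthetic chain; this is an assumption about the helper, not its actual source:

import numpy as np

rng = np.random.default_rng(0)
chain = rng.normal(0.4, 0.2, size=(300, 12))     # toy MCMC chain: (steps, walkers)

alpha_dx = 0.04                                  # same bin width as in the script
burn = 30                                        # discard early, non-converged steps
samples = chain[burn:].ravel()                   # pool all walkers after burn-in

bins = np.arange(-np.pi / 2, np.pi / 2 + alpha_dx, alpha_dx)
y_hist, _ = np.histogram(samples, bins=bins, density=True)
bins_pos = bins[:-1] + np.diff(bins) / 2         # bin centers as the 'angle' coordinate

print(bins_pos[np.argmax(y_hist)])               # most probable incidence angle (rad)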
#SM.fitting_kargs = fitting_kargs = {'prior': None , 'prior_weight' : 1 } - - amp_Z = 1##z_concat.var()#Z_max#0.5 #amp_enhancement * abs(Z_max)**2 /N - - SM.params.add('K_prime', k_prime_max , vary=False , min=k_prime_max*0.5, max=k_prime_max*1.5) - SM.params.add('K_amp' , amp_Z , vary=False , min=amp_Z*.0 , max=amp_Z*5) - #print(SM.params.pretty_print()) - - with suppress_stdout(): - SM.sample(verbose= False, steps=N_sample_chain,progress= False, workers= None) - SM.optimize(verbose= False) - SM.brute(verbose= False) - - y_hist, bins, bins_pos = SM.get_marginal_dist('alpha', alpha_dx, burn = N_sample_chain_burn, plot_flag= False) - fitter = SM.fitter # MCMC results - z_model = SM.objective_func(fitter.params, *fitting_args , test_flag= True) - #cost = (fitter.residual**2).sum()/(z_concat**2).sum() - #cost_list.append( (fitter.residual**2).sum()/(z_concat**2).sum() ) - marginal_stack_i = xr.DataArray( y_hist, dims= ('angle'), coords = {'angle':bins_pos } ) - marginal_stack_i.coords['k'] = np.array(k_prime_max) #( ('k'), np.array(k_prime_max) ) - marginal_stack[k_prime_max] = marginal_stack_i - #marginal_stack_i.coords['weight'] = Z_max - - # no prior: - SM_nop = angle_optimizer.sample_with_mcmc(params_dict) - SM_nop.set_objective_func(angle_optimizer.objective_func) - - SM_nop.fitting_args = fitting_args = (x_concat, y_concat, z_concat) - - brute_flag = True - #print(prior_sel) - SM_nop.fitting_kargs = {'prior': None , 'prior_weight' : 0 } - #SM.fitting_kargs = fitting_kargs = {'prior': None , 'prior_weight' : 1 } - amp_Z = 1##z_concat.var()#Z_max#0.5 #amp_enhancement * abs(Z_max)**2 /N - - SM_nop.params.add('K_prime', k_prime_max , vary=False , min=k_prime_max*0.5, max=k_prime_max*1.5) - SM_nop.params.add('K_amp' , amp_Z , vary=False , min=amp_Z*.0 , max=amp_Z*5) - #print(SM.params.pretty_print()) - try: - SM_nop.test_objective_func() - except: - raise ValueError('Objective function test fails') - - with suppress_stdout(): - SM_nop.sample(verbose= False, steps=N_sample_chain,progress= False, workers= None) - SM_nop.optimize(verbose= False) - SM_nop.brute(verbose= False) - - y_hist_nop, bins_nop, bins_pos_nop = SM_nop.get_marginal_dist('alpha', alpha_dx, burn = N_sample_chain_burn, plot_flag= False) - fitter_nop = SM_nop.fitter # MCMC results - z_model_nop = SM.objective_func(fitter_nop.params, *fitting_args , test_flag= True) - - - z_model, fargs , key = z_model, fitting_args, 'y_data_normed' - brute = brute_flag - optimze= optimize_flag - sample= sample_flag - - view_scale = 0.6 - - # def plot_instance(z_model, fargs , key, SM, non_dim=False, title_str= None , brute=False, optimze= False, sample= False, view_scale = 0.3): - - x_concat, y_concat, z_concat = fargs - F = M.figure_axis_xy(fig_sizes['one_column_high'][0]*1.2,fig_sizes['one_column_high'][1], view_scale = view_scale, container = True) - title_str = '$\lambda=$'+ str(np.round(2 * np.pi/k_prime_max, 1)) + 'm' - plt.suptitle(title_str, y = 0.92) - gs = GridSpec(12, 5, wspace=0.3, hspace=5.5)#figure=fig, - F.gs = gs - # y_offset= 0.5 - # plt.plot(Gm.eta, Gm.y_model_normed+y_offset * Gm.dist_y/np.diff(Gm.dist_y), **model_plot_karg) - # plt.plot(Gm.eta, Gm.y_model_normed+y_offset * Gm.dist_y/np.diff(Gm.dist_y), **data_plot_karg) - # plt.xlim(-1000, 1000) - - beam_list = list(set(y_concat)) - - F.ax1 = F.fig.add_subplot(gs[0:2, :]) - y_pos, bcol = beam_list[0], col.rels['gt2l'] - #plt.title( str(y_pos) ) - plt.plot(x_concat[y_concat == y_pos]/1e3, z_concat[y_concat == y_pos] , c=bcol, linewidth = 1) - plt.plot(x_concat[y_concat == 
y_pos]/1e3, z_model[y_concat == y_pos] , '-', c='black', linewidth= 0.6) - #plt.xlim(x_concat[y_concat == y_pos][0]/1e3, x_concat[y_concat == y_pos][-1]) - #plt.xlim(-1000/1e3, 1000/1e3) - plt.ylabel('Slope (m/m)') - - F.ax2 = F.fig.add_subplot(gs[1:3, :]) - y_pos, bcol = beam_list[1], col.rels['gt2r'] - #plt.title( str(y_pos) ) - plt.plot(x_concat[y_concat == y_pos]/1e3, z_concat[y_concat == y_pos] , c=bcol, linewidth = 1) - plt.plot(x_concat[y_concat == y_pos]/1e3, z_model[y_concat == y_pos] , '-', c='black', linewidth= 0.6) - #plt.xlim(x_concat[y_concat == y_pos][0]/1e3, x_concat[y_concat == y_pos][-1]) - plt.ylabel('Slope (m/m)') - - - F.ax1.spines['left'].set_position(('outward', 5)) - F.ax2.spines['right'].set_position(('outward', 5)) - - y_ticks = MT.tick_formatter( np.arange(-10, 12, 1), interval=2, rounder=1, expt_flag=False, shift=0 ) - F.ax1.set_yticks(y_ticks[1]) - F.ax1.set_yticklabels(y_ticks[0]) - F.ax2.set_yticks(y_ticks[1]) - F.ax2.set_yticklabels(y_ticks[0]) - F.ax1.set_ylim(-3, 3) - F.ax2.set_ylim(-3, 3) - - F.ax1.tick_params(bottom=False, labelbottom= False) - F.ax1.spines['bottom'].set_visible(False) - #F.ax2.xaxis.set_ticks_position('top') - #F.ax2.xaxis.set_label_position('top') - F.ax1.spines['bottom'].set_linewidth(0.5) - F.ax1.spines['left'].set_linewidth(0.5) - #F.ax1.xaxis.set_ticks_position('bottom') - #F.ax1.xaxis.set_label_position('bottom') - - - F.ax2.tick_params(left= False, labelleft=False, right= True, labelright= True) - F.ax2.spines['right'].set_visible(True) - F.ax2.spines['left'].set_visible(False) - F.ax2.spines['bottom'].set_visible(False) - F.ax2.set_facecolor((1.0, 1.00, 1.00, 0)) - - F.ax1.axhline(0, color='gray', linewidth = 0.5, alpha = 1) - F.ax2.axhline(0, color='gray', linewidth = 0.5, alpha = 1) - - x_ticks = MT.tick_formatter( np.arange(-10, 10, 0.5), interval=2, rounder=1, expt_flag=False, shift=0 ) - F.ax1.set_xticks(x_ticks[1]) - F.ax1.set_xticklabels(x_ticks[0]) - F.ax1.set_xlim(-2, 2) - - F.ax2.set_xticks(x_ticks[1]) - F.ax2.set_xticklabels(x_ticks[0]) - F.ax2.set_xlim(-1.5, 1.5) - plt.xlabel('km') - - - # no prior - F.ax3 = F.fig.add_subplot(gs[4:7, 0:-1]) - F.ax3.tick_params(bottom= False, labelbottom=False) - F.ax3.spines['bottom'].set_visible(False) - if brute is True: - plt.title('no Prior', loc='left') - SM_nop.plot_brute() - # plt.colorbar(orientation='horizontal') - plt.plot(SM_nop.fitter_brute.brute_x0[1], SM_nop.fitter_brute.brute_x0[0], '.', color ='red', markersize=5, label= 'best fit', zorder=10) - if optimze is True: - SM_nop.plot_optimze(color= 'r', markersize=10, zorder=12, label= 'Dual Annealing') - - if sample is True: - SM_nop.plot_sample(markersize= 2, linewidth= 0.8, alpha= 0.2, color= 'black', zorder=8) - - F.ax4 = F.fig.add_subplot(gs[4:7, -1]) - F.ax4.tick_params(left= False, labelleft=False,labelbottom= False) - #return F - - # if (fitting_kargs['prior'] is not None): - # F.ax3.axhline(prior_sel['alpha'][0], color='green', linewidth = 2, label ='Prior') - # #F.ax3.axhspan(prior_sel['alpha'][0]- prior_sel['alpha'][1], prior_sel['alpha'][0]+ prior_sel['alpha'][1], color='gray', alpha=0.3) - # F.ax3.axhline(prior_sel['alpha'][0]- prior_sel['alpha'][1], color='green', linewidth = 0.7) - # F.ax3.axhline(prior_sel['alpha'][0]+ prior_sel['alpha'][1], color='green', linewidth = 0.7) - - F.ax3.axhline(fitter.params['alpha'].min, color='black', linewidth = 0.6, alpha = 1) - F.ax3.axhline(fitter.params['alpha'].max, color='black', linewidth = 0.6, alpha = 1) - - plt.sca(F.ax3) - #plt.legend(loc= 1) - plt.xlabel('') - 
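The marginal panels below use plt.stairs with orientation='horizontal', so the density extends along x while the angle stays on the shared y axis. A minimal sketch with a synthetic histogram:

import numpy as np
import matplotlib.pyplot as plt

rng = np.random.default_rng(1)
samples = rng.normal(0.3, 0.2, 2000)                  # toy angle samples (rad)

bins = np.arange(-np.pi / 2, np.pi / 2 + 0.04, 0.04)
y_hist, _ = np.histogram(samples, bins=bins, density=True)

# horizontal stairs: values grow along x, bin edges run along y
plt.stairs(y_hist, bins, orientation='horizontal', color='k')
plt.ylim(-np.pi / 2, np.pi / 2)
plt.xlabel('Density')
plt.show()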
plt.ylabel('Angle of Incidence') - plt.xlim(0, np.pi*2) - - plt.sca(F.ax4) - plt.title('Marginal', loc= 'left') - #plt.xlabel('Density') - plt.stairs(y_hist_nop, bins_nop, orientation='horizontal', color='k') - - # deg_ticks=np.arange(-180, 360+60, 60) - # deg_tick_label=[str(l)+'$^\circ$' for l in deg_ticks[:]] - # deg_ticks=deg_ticks * np.pi/180 - - #y_ticks = MT.tick_formatter( np.arange(, 12, 1), interval=2, rounder=1, expt_flag=False, shift=0 ) - F.ax3.set_yticks([-np.pi/2, 0, np.pi/2]) - F.ax3.set_yticklabels(['$-\pi$' ,'0', '$\pi$']) - - F.ax3.set_xticks([0, np.pi/2, np.pi, np.pi* 1.5, 2 *np.pi]) - F.ax3.set_xticklabels(['0', '', '$\pi$', '', '$2\pi$']) - - F.ax4.axhline(fitter.params['alpha'].min, color='black', linewidth = 0.6, alpha = 1) - F.ax4.axhline(fitter.params['alpha'].max, color='black', linewidth = 0.6, alpha = 1) - - # F.ax3.set_ylim(min( -np.pi /2, prior_sel['alpha'][0]- 0.2 ) , max(np.pi /2, prior_sel['alpha'][0] + 0.2 ) ) - # F.ax4.set_ylim(min( -np.pi /2, prior_sel['alpha'][0]- 0.2 ) , max(np.pi /2, prior_sel['alpha'][0]+ 0.2 ) ) - - plt.xlim(0, 20) - F.ax3.set_ylim(-np.pi/2, np.pi/2) - F.ax3.set_xlim(0, 2 *np.pi) - F.ax4.set_ylim(-np.pi/2, np.pi/2) - - # with prior - F.ax5 = F.fig.add_subplot(gs[7:, 0:-1]) - if brute is True: - plt.title('with Prior', loc='left') - SM.plot_brute(marker= '.', color ='blue', markersize=15, label= 'Brute', zorder=10) - plt.plot(SM.fitter_brute.brute_x0[1], SM.fitter_brute.brute_x0[0], '.', color ='red', markersize=5, zorder=10, label='best fit') - - # plt.colorbar(orientation='horizontal') - if optimze is True: - SM.plot_optimze(color= 'r', markersize=10, zorder=12, label= 'Dual Annealing') - - if sample is True: - SM.plot_sample(markersize= 2, linewidth= 0.8, alpha= 0.2, color= 'black', zorder=8) - - - if (fitting_kargs['prior'] is not None): - F.ax5.axhline(prior_sel['alpha'][0], color='orange', linewidth = 2, label ='Prior') - F.ax5.axhspan(prior_sel['alpha'][0]- prior_sel['alpha'][1], prior_sel['alpha'][0]+ prior_sel['alpha'][1], color='orange', alpha=0.2) - F.ax5.axhline(prior_sel['alpha'][0]- prior_sel['alpha'][1], color='orange', linewidth = 0.7) - F.ax5.axhline(prior_sel['alpha'][0]+ prior_sel['alpha'][1], color='orange', linewidth = 0.7) - - F.ax5.axhline(fitter.params['alpha'].min, color='black', linewidth = 0.6, alpha = 1) - F.ax5.axhline(fitter.params['alpha'].max, color='black', linewidth = 0.6, alpha = 1) - - # F.ax5.set_yticks(deg_ticks) - # F.ax5.set_yticklabels(deg_tick_label) - - F.ax5.set_yticks([-np.pi/2, 0, np.pi/2]) - F.ax5.set_yticklabels(['$-\pi$' ,'0', '$\pi$']) - F.ax5.set_xticks([0, np.pi/2, np.pi, np.pi* 1.5, 2 *np.pi]) - F.ax5.set_xticklabels(['0', '', '$\pi$', '', '$2\pi$']) - - plt.sca(F.ax5) - plt.legend(loc= 1) - plt.xlabel('Wave Phase') - plt.ylabel('Incident Angle') - plt.xlim(0, np.pi*2) - - F.ax51 = F.fig.add_subplot(gs[7:, -1]) - F.ax51.tick_params(left= False, labelleft=False)#, right= True, labelright= True) - - plt.title('Marginal', loc= 'left') - plt.xlabel('Density') - plt.stairs(y_hist, bins, orientation='horizontal', color=col.rels['group'+str(gi[0]+1)]) - - F.ax51.axhline(fitter.params['alpha'].min, color='black', linewidth = 0.6, alpha = 1) - F.ax51.axhline(fitter.params['alpha'].max, color='black', linewidth = 0.6, alpha = 1) - - F.ax5.set_ylim(-np.pi/1.1, np.pi/1.1) - F.ax51.set_ylim(-np.pi/1.1, np.pi/1.1) - - # F.ax5.set_ylim(min( -np.pi /2, prior_sel['alpha'][0]- 0.2 ) , max(np.pi /2, prior_sel['alpha'][0] + 0.2 ) ) - # F.ax51.set_ylim(min( -np.pi /2, prior_sel['alpha'][0]- 0.2 ) , 
max(np.pi /2, prior_sel['alpha'][0]+ 0.2 ) ) - plt.xlim(0, 20) - - marginal_stack_xr = xr.concat(marginal_stack.values(), dim='k' ).sortby('k') - #marginal_stack.mean('k').plot() - # F.ax52 = F.fig.add_subplot(gs[7:, -1]) - # F.ax52.tick_params(left= False, labelleft=False)#, right= True, labelright= True) - # - # plt.title('Sum', loc= 'left') - # plt.xlabel('Density') - # - # marginal_mean = (marginal_stack_xr * marginal_stack_xr.weight).sum('k') / marginal_stack_xr.weight.sum() - # - # plt.stairs(marginal_mean,bins, orientation='horizontal', color=col.rels['group'+str(gi[0]+1)], linewidth= 1.5) - # - # F.ax52.axhline(fitter.params['alpha'].min, color='black', linewidth = 0.6, alpha = 1) - # F.ax52.axhline(fitter.params['alpha'].max, color='black', linewidth = 0.6, alpha = 1) - # - # #F.ax52.set_ylim(min( -np.pi /2, prior_sel['alpha'][0]- 0.2 ) , max(np.pi /2, prior_sel['alpha'][0]+ 0.2 ) ) - # - # F.ax52.set_ylim(-np.pi/1.2, np.pi/1.2) - # plt.xlim(0, 5) - plt.show() - F.save_light(path= plot_path, name = 'MCMC_fit_' + group[0]+'_'+group[1]+'_'+str(int(xi)) +'_'+ str(pk).zfill(3) ) - pk+=1 -# -# -# # %% -# -# # %% -# # # A= dict() -# # # for k_pair in zip(k_list, weight_list): -# # # kk, I = get_instance(k_pair) -# # # A[kk] = I -# # -# # with futures.ProcessPoolExecutor(max_workers=Nworkers) as executor: -# # A = dict( executor.map(get_instance, zip(k_list, weight_list) )) -# # -# # cost_stack = dict() -# # marginal_stack =dict() -# # #fitting_kargs = {'size' :1} -# # L_sample = pd.DataFrame(index=['alpha', 'group_phase', 'K_prime', 'K_amp'] ) -# # L_optimize = pd.DataFrame(index=['alpha', 'group_phase', 'K_prime', 'K_amp'] ) -# # L_brute = pd.DataFrame(index=['alpha', 'group_phase', 'K_prime', 'K_amp'] ) -# # -# # for kk,I in A.items(): -# # L_sample[kk] = I['L_sample_i'] -# # L_optimize[kk] = I['L_optimize_i'] -# # L_brute[kk] = I['L_brute_i'] -# # -# # marginal_stack[kk] = I['marginal_stack_i'] -# # cost_stack[kk] = I['cost'] -# # -# # # ## add beam_group dimension -# # marginal_stack = xr.concat(marginal_stack.values(), dim='k' ).sortby('k') -# # L_sample = L_sample.T.sort_values('K_prime') -# # L_optimize = L_optimize.T.sort_values('K_prime') -# # L_brute = L_brute.T.sort_values('K_prime') -# # -# # #print(marginal_stack.angle.data[::20]) -# # -# # print('done with ', group, xi/1e3) -# # -# # # % collect -# # ikey = str(xi) +'_' + '_'.join(group) -# # -# # #marginal_stack.coords['cost'] = (('k'), np.expand_dims(np.expand_dims(list(cost_stack.values()), 1), 2) ) -# # marginal_stack.name = 'marginals' -# # marginal_stack = marginal_stack.to_dataset() -# # marginal_stack['cost'] = (('k'), list(cost_stack.values()) ) -# # marginal_stack['weight'] = (('k'), weight_list ) -# # -# # group_name = str('group' + group[0].split('gt')[1].split('l')[0]) -# # marginal_stack.coords['beam_group'] = group_name -# # marginal_stack.coords['x'] = xi -# # -# # Marginals[ikey] = marginal_stack.expand_dims(dim = 'x', axis = 0).expand_dims(dim = 'beam_group', axis = 1) -# # Marginals[ikey].coords['N_data'] = ( ('x', 'beam_group'), np.expand_dims(np.expand_dims(N_data, 0), 1) ) -# # # L_brute -# # # L_optimize -# # -# # L_sample['cost'] = cost_stack -# # L_sample['weight'] = weight_list -# # L_collect[group_name, str(int(xi))] = L_sample#pd.concat(L_collect_per_xi) -# # -# -# # %% -# #list(Marginals.values())[0] -# MM = xr.merge( Marginals.values()) -# MM =xr.merge([ MM, Prior_smth]) -# #MM.to_netcdf(save_path + save_name + '_marginals.nc') -# -# LL = pd.concat(L_collect) -# 
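The commented plotting block here averages marginals with their spectral weights, the same (data * w).sum() / w.sum() pattern used for the beam-weighted spectra earlier in this patch. A compact sketch of that weighted reduction in xarray, on toy data:

import numpy as np
import xarray as xr

rng = np.random.default_rng(2)
marginals = xr.DataArray(rng.random((5, 8, 20)), dims=('x', 'k', 'angle'))
weight = xr.DataArray(rng.random((5, 8)), dims=('x', 'k'))   # e.g. spectral power per (x, k)

# weighted average over x and k; broadcasting aligns the shared dimensions
weighted_margin = (marginals * weight).sum(['x', 'k']) / weight.sum(['x', 'k'])

print(weighted_margin.dims, weighted_margin.shape)           # ('angle',) (20,)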
#MT.save_pandas_table({'L_sample':LL} ,save_name+ '_res_table', save_path) -# -# # %% plot -# font_for_print() -# F = M.figure_axis_xy(6, 5.5, view_scale= 0.7, container = True) -# -# gs = GridSpec(4,6, wspace=0.2, hspace=.8)#figure=fig, -# -# ax0 = F.fig.add_subplot(gs[0:2, -1]) -# ax0.tick_params(labelleft=False) -# -# #klims = k_list.min()*0.2 , k_list.max()*1.2 -# -# klims = 0, LL['K_prime'].max()*1.2 -# -# -# for g in MM.beam_group: -# MMi = MM.sel(beam_group= g) -# plt.plot( MMi.weight.T,MMi.k, '.', color= col_dict[str(g.data)], markersize= 3, linewidth = 0.8) -# -# plt.xlabel('Power') -# plt.ylim(klims) -# -# ax1 = F.fig.add_subplot(gs[0:2 , 0:-1]) -# -# for g in MM.beam_group: -# Li = LL.loc[str(g.data)] -# -# angle_list = np.array(Li['alpha']) * 180 /np.pi -# kk_list = np.array(Li['K_prime']) -# weight_list_i = np.array(Li['weight']) -# -# plt.scatter( angle_list, kk_list, s= (weight_list_i*8e1)**2 , c=col_dict[str(g.data)], label ='mode ' + str(g.data) ) -# -# -# # lflag= 'paritions ww3' -# # for i in np.arange(6): -# # i_dir, i_period = Prior.loc['pdp'+ str(i)]['mean'], Prior.loc['ptp'+ str(i)]['mean'] -# # i_k = (2 * np.pi/ i_period)**2 / 9.81 -# # i_dir = [i_dir -360 if i_dir > 180 else i_dir][0] -# # i_dir = [i_dir +360 if i_dir < -180 else i_dir][0] -# # -# # plt.plot(i_dir, i_k, '.', markersize = 6, color= col.red, label= lflag) -# # plt.plot(i_dir, i_k, '-', linewidth = 0.8, color= col.red) -# # -# # lflag = None -# -# dir_best[dir_best> 180] = dir_best[dir_best> 180] -360 -# plt.plot(dir_best, Pwavenumber , '.r', markersize = 6) -# -# dir_interp[dir_interp> 180] = dir_interp[dir_interp> 180] -360 -# plt.plot(dir_interp, Gk.k, '-', color= 'red', linewidth = 0.3, zorder=11) -# -# -# #ax1.axvline( best_guess * 180/ np.pi , color=col.blue, linewidth = 1.5, label ='best guess fitting') -# -# # ax1.axvline( (prior_sel['alpha'][0]) * 180 /np.pi, color='k', linewidth = 1.5, label ='prior') -# # ax1.axvline( (prior_sel['alpha'][0]- prior_sel['alpha'][1]) * 180 /np.pi, color='k', linewidth = 0.7, label ='prior uncertrainty') -# # ax1.axvline( (prior_sel['alpha'][0]+ prior_sel['alpha'][1]) * 180 /np.pi , color='k', linewidth = 0.7) -# -# plt.fill_betweenx(Gk.k, (dir_interp_smth -spread_smth)* 180 /np.pi, (dir_interp_smth +spread_smth)* 180 /np.pi, zorder= 1, color=col.green1, alpha = 0.2 ) -# plt.plot(dir_interp_smth * 180 /np.pi, Gk.k , '.', markersize = 1 , color=col.green1) -# -# ax1.axvline(85, color='gray', linewidth= 2) -# ax1.axvline(-85, color='gray', linewidth= 2) -# -# -# plt.legend() -# plt.ylabel('wavenumber (deg)') -# plt.xlabel('Angle (deg)') -# -# #plt.xlim(- 170, 170) -# #plt.xlim(- 90, 90) -# plt.ylim(klims) -# -# prior_angle_str =str(np.round( (prior_sel['alpha'][0]) * 180 /np.pi)) -# plt.title(track_name + '\nprior=' + prior_angle_str + 'deg', loc= 'left' ) -# -# plt.xlim( min( [ -90, np.nanmin(dir_best)] ), max( [np.nanmax(dir_best), 90]) ) -# -# -# ax3 = F.fig.add_subplot(gs[2 , 0:-1]) -# -# for g in MM.beam_group: -# MMi = MM.sel(beam_group= g) -# wegihted_margins = ( (MMi.marginals * MMi.weight).sum(['x','k'] )/MMi.weight.sum(['x', 'k']) ) -# plt.plot( MMi.angle * 180/ np.pi, wegihted_margins , '.', color= col_dict[str(g.data)], markersize= 2, linewidth = 0.8) -# -# plt.ylabel('Density') -# plt.title('weight margins', loc='left') -# -# #plt.plot(marginal_stack.angle * 180 /np.pi, marginal_stack.T , c=col.gray, label ='weighted mean BF') -# -# #plt.plot(cost_wmean.angle * 180 /np.pi, cost_wmean , c=col.rascade3, label ='weighted mean BF') -# plt.xlim(- 90, 90) 
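The commented partition overlay converts WW3 peak periods to wavenumbers through the deep-water dispersion relation omega^2 = g k, the same formula used for Pwavenumber above. A worked example of that conversion with an illustrative period:

import numpy as np

g = 9.81                       # gravitational acceleration (m/s^2)
T = 12.0                       # peak wave period (s), illustrative value

omega = 2 * np.pi / T          # angular frequency (rad/s)
k = omega**2 / g               # deep-water dispersion: omega^2 = g * k
wavelength = 2 * np.pi / k

print(k, wavelength)           # ~0.0279 rad/m, ~224.8 m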
-# #plt.xlim(- 125, 125) -# -# ax3 = F.fig.add_subplot(gs[-1 , 0:-1]) -# -# for g in MM.beam_group: -# MMi = MM.sel(beam_group= g) -# wegihted_margins = MMi.marginals.mean(['x','k'] )# ( (MMi.marginals * MMi.weight).sum(['x','k'] )/MMi.weight.sum(['x', 'k']) ) -# plt.plot( MMi.angle * 180/ np.pi, wegihted_margins , '.', color= col_dict[str(g.data)], markersize= 2, linewidth = 0.8) -# -# plt.ylabel('Density') -# plt.xlabel('Angle (deg)') -# plt.title('unweighted margins', loc='left') -# -# #plt.plot(marginal_stack.angle * 180 /np.pi, marginal_stack.T , c=col.gray, label ='weighted mean BF') -# -# #plt.plot(cost_wmean.angle * 180 /np.pi, cost_wmean , c=col.rascade3, label ='weighted mean BF') -# plt.xlim(- 90, 90) -# #plt.xlim(- 125, 125) -# -# #F.save_pup(path= plot_path, name = 'B04_marginal_distributions') -# -# #MT.json_save('B04_success', plot_path, {'time':'time.asctime( time.localtime(time.time()) )'}) diff --git a/analyis_publish/PB04_angle_paper.py b/analyis_publish/PB04_angle_paper.py deleted file mode 100644 index 5f24acb5..00000000 --- a/analyis_publish/PB04_angle_paper.py +++ /dev/null @@ -1,964 +0,0 @@ - -import os, sys -#execfile(os.environ['PYTHONSTARTUP']) - -""" -This file open a ICEsat2 track applied filters and corections and returns smoothed photon heights on a regular grid in an .nc file. -This is python 3 -""" - -exec(open(os.environ['PYTHONSTARTUP']).read()) -exec(open(STARTUP_2021_IceSAT2).read()) - -#%matplotlib inline - -import ICEsat2_SI_tools.convert_GPS_time as cGPS -import h5py -import ICEsat2_SI_tools.io as io -import ICEsat2_SI_tools.spectral_estimates as spec - -import imp -import copy -import spicke_remover -import datetime -import concurrent.futures as futures - -from numba import jit - -from ICEsat2_SI_tools import angle_optimizer -import ICEsat2_SI_tools.wave_tools as waves -import concurrent.futures as futures - -import time - -from contextlib import contextmanager -col.colormaps2(21) - -@contextmanager -def suppress_stdout(): - with open(os.devnull, "w") as devnull: - old_stdout = sys.stdout - sys.stdout = devnull - try: - yield - finally: - sys.stdout = old_stdout - -col_dict = col.rels -#import s3fs -# %% -track_name, batch_key, test_flag = io.init_from_input(sys.argv) # loads standard experiment -#track_name, batch_key, test_flag = '20190605061807_10380310_004_01', 'SH_batch01', False -#track_name, batch_key, test_flag = '20190601094826_09790312_004_01', 'SH_batch01', False -#track_name, batch_key, test_flag = '20190207111114_06260210_004_01', 'SH_batch02', False -#track_name, batch_key, test_flag = '20190219073735_08070210_004_01', 'SH_batch02', False -#track_name, batch_key, test_flag = '20190215184558_07530210_004_01', 'SH_batch02', False - -# good track -#track_name, batch_key, test_flag = '20190502021224_05160312_004_01', 'SH_batch02', False -#track_name, batch_key, test_flag = '20190502050734_05180310_004_01', 'SH_batch02', False -#track_name, batch_key, test_flag = '20190210143705_06740210_004_01', 'SH_batch02', False - - -#track_name, batch_key, test_flag = '20190502021224_05160312_004_01', 'SH_batch02', False -track_name, batch_key, test_flag = '20190213133330_07190212_004_01', 'SH_batch02', False -#20190213133330_07190212_004_01 - -#print(track_name, batch_key, test_flag) -hemis, batch = batch_key.split('_') -#track_name= '20190605061807_10380310_004_01' -ATlevel= 'ATL03' - -save_path = mconfig['paths']['work'] + '/B04_angle_'+hemis+'/' -save_name = 'B04_'+track_name - -plot_path = mconfig['paths']['plot'] + 
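# Editorial sketch: the suppress_stdout() context manager defined above is a
# standard stdout-redirect pattern; a self-contained toy showing how it is used
# (names here are illustrative, not from this repo):
import os, sys
from contextlib import contextmanager

@contextmanager
def _suppress_stdout():
    # redirect sys.stdout to the null device; restore it on exit, even on error
    with open(os.devnull, "w") as devnull:
        old_stdout = sys.stdout
        sys.stdout = devnull
        try:
            yield
        finally:
            sys.stdout = old_stdout

with _suppress_stdout():
    print("noisy sampler chatter")   # swallowed
print("visible again")               # printed normally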
'/'+hemis+'/'+batch_key+'/publish/' + track_name + '/' -MT.mkdirs_r(plot_path) -MT.mkdirs_r(save_path) -bad_track_path =mconfig['paths']['work'] +'bad_tracks/'+ batch_key+'/' -# %% - -all_beams = mconfig['beams']['all_beams'] -high_beams = mconfig['beams']['high_beams'] -low_beams = mconfig['beams']['low_beams'] -beam_groups = mconfig['beams']['groups'] - -#Gfilt = io.load_pandas_table_dict(track_name + '_B01_regridded', load_path) # rhis is the rar photon data - -load_path = mconfig['paths']['work'] +'/B01_regrid_'+hemis+'/' -G_binned = io.load_pandas_table_dict(track_name + '_B01_binned' , load_path) # - -load_path = mconfig['paths']['work'] +'/B02_spectra_'+hemis+'/' -Gx = xr.load_dataset(load_path+ '/B02_'+track_name + '_gFT_x.nc' ) # -Gk = xr.load_dataset(load_path+ '/B02_'+track_name + '_gFT_k.nc' ) # - - -# %% load prior information -load_path = mconfig['paths']['work'] +'/A02_prior_'+hemis+'/' -#track_name = '20190208104534_06410210_004_01' -try: - Prior = MT.load_pandas_table_dict('/A02b_'+track_name, load_path)['priors_hindcast'] -except: - print('Prior not founds exit') - MT.json_save('B04_fail', plot_path, {'time':time.asctime( time.localtime(time.time()) ) , 'reason': 'Prior not found'}) - print('exit()') - exit() - - -#### Define Prior -# Use partitions -# Prior2 = Prior.loc[['ptp0','ptp1','ptp2','ptp3','ptp4','ptp5']]['mean'] -# dominat_period = Prior2[Prior2.max() ==Prior2] -# aa = Prior.loc[['pdp0','pdp1','pdp2','pdp3','pdp4','pdp5']]['mean'].astype('float') -# dominant_dir = waves.get_ave_amp_angle(aa *0+1,aa )[1] -# dominant_dir_spread = Prior.loc[['pspr0','pspr1','pspr2','pspr3','pspr4','pspr5']]['mean'].median() -# -# prior_sel= {'alpha': ( dominant_dir *np.pi/180 , dominant_dir_spread *np.pi/180) } # to radiens -#prior_sel= {'alpha': ( -60 *np.pi/180 , dominant_dir_spread *np.pi/180) } # to radiens - - -Pperiod = Prior.loc[['ptp0','ptp1','ptp2','ptp3','ptp4','ptp5']]['mean'] -Pdir = Prior.loc[['pdp0','pdp1','pdp2','pdp3','pdp4','pdp5']]['mean'].astype('float') -Pspread = Prior.loc[['pspr0','pspr1','pspr2','pspr3','pspr4','pspr5']]['mean'] - -Pperiod = Pperiod[~np.isnan(list(Pspread))] -Pdir = Pdir[~np.isnan(list(Pspread))] -Pspread = Pspread[~np.isnan(list(Pspread))] - - -# reset dirs: -Pdir[Pdir > 180] = Pdir[Pdir > 180] - 360 -Pdir[Pdir < -180] = Pdir[Pdir < -180] + 360 - -# reorder dirs -dir_best = [0] -for dir in Pdir: - ip = np.argmin([ abs(dir_best[-1] - dir), abs(dir_best[-1] - (dir - 360 )), abs(dir_best[-1] - (dir + 360 )) ] ) - new_dir = np.array([ dir, (dir - 360 ) , (dir + 360 ) ])[ip] - dir_best.append(new_dir) -dir_best = np.array(dir_best[1:]) - -# %% - -Pwavenumber = (2 * np.pi / Pperiod )**2 / 9.81 -kk = Gk.k -dir_interp = np.interp(kk, Pwavenumber[Pwavenumber.argsort()] , dir_best[Pwavenumber.argsort()] ) -dir_interp_smth = M.runningmean(dir_interp, 30, tailcopy= True) -dir_interp_smth[-1] = dir_interp_smth[-2] - -spread_interp = np.interp(kk, Pwavenumber[Pwavenumber.argsort()] , Pspread[Pwavenumber.argsort()].astype('float') ) -spread_smth = M.runningmean(spread_interp, 30, tailcopy= True) -spread_smth[-1] = spread_smth[-2] - - -font_for_pres() - -F = M.figure_axis_xy(5, 4.5, view_scale= 0.5) -plt.subplot(2, 1, 1) -plt.title('Prior angle smoothed\n'+ track_name, loc ='left') - - -plt.plot( Pwavenumber , dir_best, '.r', markersize = 8) -plt.plot( kk , dir_interp, '-', color= 'red', linewidth = 0.8, zorder=11) -plt.plot( kk , dir_interp_smth , color=col.green1) - -plt.fill_between(kk, dir_interp_smth -spread_smth, dir_interp_smth +spread_smth, zorder= 
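# Editorial sketch of the two prior-preparation steps above: (1) unwrap a
# sequence of wave directions so consecutive entries never jump across the
# +/-180 deg branch cut (same argmin logic as the "reorder dirs" loop), and
# (2) convert peak periods to wavenumbers with the deep-water dispersion
# relation k = (2*pi/T)**2 / g. Toy values only, not repo data.
import numpy as np

def unwrap_dirs(dirs_deg, start=0.0):
    # for each direction, pick the representative (d, d-360, d+360) closest to
    # the previously unwrapped value
    out, prev = [], start
    for d in dirs_deg:
        cands = np.array([d, d - 360.0, d + 360.0])
        prev = cands[np.argmin(np.abs(cands - prev))]
        out.append(prev)
    return np.array(out)

g = 9.81
Pperiod_demo = np.array([14.0, 11.0, 9.0])        # peak periods (s)
k_demo = (2 * np.pi / Pperiod_demo) ** 2 / g      # deep-water wavenumbers (rad/m)
print(unwrap_dirs([170.0, -175.0, -160.0]))       # -> [170. 185. 200.]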
1, color=col.green1, alpha = 0.2 ) -plt.ylabel('Angle (deg)') -#plt.xlabel('wavenumber ($2 \pi/\lambda$)') - -ax2 = plt.subplot(2, 1, 2) -plt.title('Prior angle adjusted ', loc ='left') - -# adjust angle def: -dir_interp_smth[dir_interp_smth> 180] = dir_interp_smth[dir_interp_smth> 180]- 360 -dir_interp_smth[dir_interp_smth< -180] = dir_interp_smth[dir_interp_smth< -180]+ 360 - -plt.fill_between(kk, dir_interp_smth -spread_smth, dir_interp_smth +spread_smth, zorder= 1, color=col.green1, alpha = 0.2 ) -plt.plot( kk , dir_interp_smth , '.', markersize = 1 , color=col.green1) - -ax2.axhline(85, color='gray', linewidth= 2) -ax2.axhline(-85, color='gray', linewidth= 2) - -plt.ylabel('Angle (deg)') -plt.xlabel('wavenumber ($2 \pi/\lambda$)') - -F.save_light(path= plot_path, name = 'B04_prior_angle') - - -# save -dir_interp_smth = xr.DataArray(data=dir_interp_smth * np.pi/180 , dims='k', coords ={'k':kk}, name='Prior_direction') -spread_smth = xr.DataArray(data=spread_smth* np.pi/180 , dims='k', coords ={'k':kk}, name='Prior_spread') -Prior_smth = xr.merge([dir_interp_smth, spread_smth]) - -# %% -prior_angle =Prior_smth.Prior_direction * 180/np.pi -if (abs(prior_angle) > 80).all(): - print('Prior angle is ', prior_angle.min().data, prior_angle.max().data, '. quit.') - dd_save = {'time' : time.asctime( time.localtime(time.time()) ), - 'angle': list([ float(prior_angle.min().data), float(prior_angle.max().data), float(prior_angle.median()) ]) } - MT.json_save('B04_fail', plot_path, dd_save) - print('exit()') - #exit() - -# Use fake -#prior_sel= {'alpha': ( 0.6 , dominant_dir_spread *np.pi/180) } # to radiens - -# Use mean direction -#prior_sel= {'alpha': ( Prior.loc['dp']['mean'] *np.pi/180 , Prior.loc['spr']['mean'] *np.pi/180) } - - -# define paramater range -params_dict = {'alpha': [ -0.85 * np.pi /2, 0.85 * np.pi /2, 5], - 'phase':[ 0 , 2*np.pi , 10]} - -alpha_dx = 0.04 -max_wavenumbers = 25 - -sample_flag = True -optimize_flag = False -brute_flag = False - -plot_flag = True - -Nworkers = 6 -N_sample_chain = 300 -N_sample_chain_burn = 30 - -max_x_pos = 8 -x_pos_jump = 2 - -def make_fake_data(xi,group ): - ki= Gk.k[0:2] - - bins = np.arange(params_dict['alpha'][0], params_dict['alpha'][1]+alpha_dx, alpha_dx) - bins_pos = (bins[0:-1] + np.diff(bins)/2) - marginal_stack = xr.DataArray( np.nan* np.vstack([bins_pos, bins_pos]).T, dims= ('angle', 'k'), coords = {'angle':bins_pos, 'k':ki.data } ) - - group_name = str('group' + group[0].split('gt')[1].split('l')[0]) - marginal_stack.coords['beam_group'] = group_name - marginal_stack.coords['x'] = xi - marginal_stack.name = 'marginals' - marginal_stack.expand_dims(dim = 'x', axis = 2).expand_dims(dim = 'beam_group', axis = 3) - return marginal_stack - -def define_wavenumber_weights_tot_var(dd, m = 3, variance_frac = 0.33, k_upper_lim= None, verbose=False): - - """ - return peaks of a power spectrum dd that in the format such that they can be used as weights for the frequencies based fitting - - inputs: - dd xarray with PSD as data amd coordindate wavenumber k - m running mean half-width in gridpoints - variance_frac (0 to 1) How much variance should be explained by the returned peaks - verbose if true it plots some stuff - - - return: - mask size of dd. 
where True the data is identified as having significant amplitude - k wanumbers where mask is true - dd_rm smoothed version of dd - positions postions where of significant data in array - """ - - if len(dd.shape) == 2: - dd_use = dd.mean('beam') - - if m is None: - dd_rm = dd_use.data#M.runningmean(dd, m, tailcopy=True) - else: - dd_rm = M.runningmean(dd_use, m, tailcopy=True) - - k = dd_use.k[~np.isnan(dd_rm)].data - dd_rm = dd_rm[~np.isnan(dd_rm)] - - orders = dd_rm.argsort()[::-1] - var_mask = dd_rm[orders].cumsum()/dd_rm.sum() < variance_frac - pos_cumsum = orders[var_mask] - #k_list = k[pos_cumsum] - #dd_list = dd_rm[pos_cumsum] - mask = var_mask[orders.argsort()] - if k_upper_lim is not None: - mask = (k < k_upper_lim) & mask - - if verbose: - - plt.plot(dd.k, dd, '-', color = col_dict[str(amp_data.beam[0].data)], markersize= 20, alpha = 0.6) - plt.plot(k, dd_rm, '-k', markersize= 20) - #print(k_list, dd_list) - plt.plot(k[mask], dd_rm[mask], '.r', markersize= 10, zorder=12) - if k_upper_lim is not None: - plt.gca().axvline(k_upper_lim, color= 'black') - - return mask, k, dd_rm, pos_cumsum - -def define_wavenumber_weights_threshold(dd, m = 3, Nstd= 2, verbose=False): - - if m is None: - dd_rm = dd#M.runningmean(dd, m, tailcopy=True) - else: - dd_rm = M.runningmean(dd, m, tailcopy=True) - - k = dd.k[~np.isnan(dd_rm)] - dd_rm = dd_rm[~np.isnan(dd_rm)] - - treshold = np.nanmean(dd_rm) + np.nanstd(dd_rm) *Nstd - mask = dd_rm > treshold - - - if verbose: - plt.plot(dd.k, dd, '-k', markersize= 20) - plt.plot(k, dd_rm, '-b', markersize= 20) - - k_list = k[mask] - dd_list = dd_rm[mask] - #print(k_list, dd_list) - plt.plot(k_list, dd_list, '.r', markersize= 10, zorder=12) - - return mask, k, dd_rm, np.arange(0, mask.size)[mask] - - - -# isolate x positions with data -data_mask = Gk.gFT_PSD_data.mean('k') -data_mask.coords['beam_group'] = ('beam', ['beam_group'+g[2] for g in data_mask.beam.data]) -data_mask_group = data_mask.groupby('beam_group').mean(skipna=False) -# these stancils are actually used -data_sel_mask = data_mask_group.sum('beam_group') !=0 - -x_list = data_sel_mask.x[data_sel_mask] # iterate over these x posistions -x_list_flag = ~np.isnan(data_mask_group.sel(x = x_list) )# flag that is False if there is no data - -#### limit number of x coordinates - -x_list = x_list[::x_pos_jump] -if len(x_list) > max_x_pos: - x_list = x_list[0:max_x_pos] -x_list_flag= x_list_flag.sel(x =x_list) - -# plot -font_for_print() -F = M.figure_axis_xy(5.5, 3, view_scale= 0.8) -plt.suptitle(track_name) -ax1 = plt.subplot(2, 1, 1) -plt.title('Data in Beam', loc= 'left') -plt.pcolormesh(data_mask.x/1e3, data_mask.beam, data_mask, cmap= plt.cm.OrRd) -for i in np.arange(1.5, 6, 2): - ax1.axhline(i, color= 'black', linewidth =0.5) -plt.xlabel('Distance from Ice Edge') - -ax2 = plt.subplot(2, 1, 2) -plt.title('Data in Group', loc= 'left') -plt.pcolormesh(data_mask.x/1e3, data_mask_group.beam_group, data_mask_group, cmap= plt.cm.OrRd) - -for i in np.arange(0.5, 3, 1): - ax2.axhline(i, color= 'black', linewidth =0.5) - -plt.plot( x_list/1e3, x_list*0 +0, '.', markersize= 2, color= col.cascade1 ) -plt.plot( x_list/1e3, x_list*0 +1, '.', markersize= 2, color= col.cascade1 ) -plt.plot( x_list/1e3, x_list*0 +2, '.', markersize= 2, color= col.cascade1 ) - -plt.xlabel('Distance from Ice Edge') - -F.save_pup(path= plot_path, name = 'B04_data_avail') - - -# %% -Marginals = dict() -L_collect = dict() -marginal_stack = dict() - -group_number = np.arange(len(beam_groups)) -ggg, xxx = np.meshgrid(group_number , 
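# Editorial sketch of the peak-selection rule inside
# define_wavenumber_weights_tot_var() above: keep the largest spectral values
# until they jointly explain `variance_frac` of the total variance, then map
# that selection back to the original wavenumber order. Note the strict `<`,
# mirroring the original: a single peak that alone exceeds the fraction is
# excluded. Toy spectrum only.
import numpy as np

def variance_frac_mask(psd, variance_frac=0.33):
    order = psd.argsort()[::-1]                    # indices, largest PSD first
    keep = psd[order].cumsum() / psd.sum() < variance_frac
    return keep[order.argsort()]                   # back to original k order

psd_demo = np.array([0.1, 2.0, 0.3, 5.0, 0.2])
print(variance_frac_mask(psd_demo, 0.8))           # -> only the 5.0 peak survives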
x_list.data[2:3] ) - -for gi in zip(ggg.flatten(), xxx.flatten()): - print(gi) - - group, xi = beam_groups[gi[0]], gi[1] - - if bool(x_list_flag.sel(x= xi).isel(beam_group= gi[0]).data) is False: - print('no data, fill with dummy') - ikey = str(xi) +'_' + '_'.join(group) - Marginals[ikey] = make_fake_data(xi, group) - #print(Marginals[ikey].angle.data[::20]) - continue - - GGx = Gx.sel(beam= group).sel(x = xi) - GGk = Gk.sel(beam= group).sel(x = xi) - - ### define data - # normalize data - key = 'y_data' - amp_Z = (GGx[key] - GGx[key].mean(['eta'])) /GGx[key].std(['eta']) - N = amp_Z.shape[0] - - # define x,y positions - eta_2d = GGx.eta + GGx.x_coord - GGx.x_coord.mean() - nu_2d = GGx.eta * 0 + GGx.y_coord - GGx.y_coord.mean() - - # repack as np arrays - x_concat = eta_2d.data.T.flatten() - y_concat = nu_2d.data.T.flatten() - z_concat = amp_Z.data.flatten() - - x_concat= x_concat[~np.isnan(z_concat)] - y_concat= y_concat[~np.isnan(z_concat)] - z_concat= z_concat[~np.isnan(z_concat)] - N_data = x_concat.size - - if np.isnan(z_concat).sum() != 0: - raise ValueError('There are still nans') - - mean_dist = (nu_2d.isel(beam= 0) - nu_2d.isel(beam= 1)).mean().data - k_upper_lim = 2 *np.pi / ( mean_dist *1 ) - - print('k_upper_lim ', k_upper_lim) - # threshold method - #mask, k, weights, positions = define_wavenumber_weights_threshold( Gi.mean('dist_y')['gFT_PSD_data'], 3 , verbose= True) - #plt.plot(k[mask], weights[mask], 'g*', markersize=20) - # plt.show() - - #variance method - amp_data = np.sqrt(GGk.gFT_cos_coeff**2 + GGk.gFT_sin_coeff**2) - mask, k, weights, positions = define_wavenumber_weights_tot_var(amp_data, m= 1, k_upper_lim= k_upper_lim, variance_frac = 0.20 , verbose= False) - #plt.xlim( k[mask].min()*0.8 ,max(k_upper_lim, k[mask].max()*1.2) ) - #plt.xlim( k[mask].min()*0.8 ,k[mask].max()*1.4 ) - #plt.show() - - if (len(k[mask]) ==0): - print('no good k found, fill with dummy') - ikey = str(xi) +'_' + '_'.join(group) - Marginals[ikey] = make_fake_data(xi, group) - continue - - - k_list, weight_list = k[mask], weights[mask] - print('# of wavenumber: ' , len(k_list)) - - #### prepare loop - #imp.reload(angle_optimizer) - - SM = angle_optimizer.sample_with_mcmc(params_dict) - SM.set_objective_func(angle_optimizer.objective_func) - - SM.fitting_args = fitting_args = (x_concat, y_concat, z_concat) - - - # test: - k_prime_max= 0.02 #[mask][0] # chose a test wavenumber - amp_Z= 1 - prior_sel= {'alpha': ( Prior_smth.sel(k =k_prime_max, method='nearest').Prior_direction.data, - Prior_smth.sel(k =k_prime_max, method='nearest').Prior_spread.data) } - SM.fitting_kargs = fitting_kargs = {'prior': prior_sel , 'prior_weight' : 3 } - # test if it works - SM.params.add('K_prime', k_prime_max , vary=False , min=k_prime_max*0.5, max=k_prime_max*1.5) - SM.params.add('K_amp', amp_Z , vary=False , min=amp_Z*.0 , max=amp_Z*5) - try: - SM.test_objective_func() - except: - raise ValueError('Objective function test fails') - - #k_prime_max, Z_max = k_list[-1], weight_list[-1] - pk= 0 - for k_prime_max, Z_max in zip(k_list, weight_list): - - - brute_flag = True - prior_sel= {'alpha': ( Prior_smth.sel(k =k_prime_max, method='nearest').Prior_direction.data, - Prior_smth.sel(k =k_prime_max, method='nearest').Prior_spread.data) } - - SM = angle_optimizer.sample_with_mcmc(params_dict) - SM.set_objective_func(angle_optimizer.objective_func) - - SM.fitting_args = fitting_args = (x_concat, y_concat, z_concat) - #print(prior_sel) - SM.fitting_kargs = fitting_kargs = {'prior': prior_sel , 'prior_weight' : 3 } - 
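# Editorial sketch (an assumption, since angle_optimizer's internals are not
# part of this diff): a typical way a Gaussian directional prior with a
# 'prior_weight' like the one set above enters the sampled cost is as a
# quadratic penalty on the angle parameter alpha:
import numpy as np

def penalized_cost(residuals, alpha, prior=None, prior_weight=3.0):
    # data misfit (sum of squared residuals), optionally regularized toward the
    # prior direction mu with spread sigma
    cost = np.sum(np.asarray(residuals) ** 2)
    if prior is not None:
        mu, sigma = prior['alpha']
        cost += prior_weight * ((alpha - mu) / sigma) ** 2
    return cost

print(penalized_cost([0.1, -0.2], alpha=0.4, prior={'alpha': (0.2, 0.3)}))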
#SM.fitting_kargs = fitting_kargs = {'prior': None , 'prior_weight' : 1 } - - amp_Z = 1##z_concat.var()#Z_max#0.5 #amp_enhancement * abs(Z_max)**2 /N - - SM.params.add('K_prime', k_prime_max , vary=False , min=k_prime_max*0.5, max=k_prime_max*1.5) - SM.params.add('K_amp' , amp_Z , vary=False , min=amp_Z*.0 , max=amp_Z*5) - #print(SM.params.pretty_print()) - - with suppress_stdout(): - SM.sample(verbose= False, steps=N_sample_chain,progress= False, workers= None) - SM.optimize(verbose= False) - SM.brute(verbose= False) - - y_hist, bins, bins_pos = SM.get_marginal_dist('alpha', alpha_dx, burn = N_sample_chain_burn, plot_flag= False) - fitter = SM.fitter # MCMC results - z_model = SM.objective_func(fitter.params, *fitting_args , test_flag= True) - #cost = (fitter.residual**2).sum()/(z_concat**2).sum() - #cost_list.append( (fitter.residual**2).sum()/(z_concat**2).sum() ) - marginal_stack_i = xr.DataArray( y_hist, dims= ('angle'), coords = {'angle':bins_pos } ) - marginal_stack_i.coords['k'] = np.array(k_prime_max) #( ('k'), np.array(k_prime_max) ) - marginal_stack[k_prime_max] = marginal_stack_i - #marginal_stack_i.coords['weight'] = Z_max - - # no prior: - SM_nop = angle_optimizer.sample_with_mcmc(params_dict) - SM_nop.set_objective_func(angle_optimizer.objective_func) - - SM_nop.fitting_args = fitting_args = (x_concat, y_concat, z_concat) - - brute_flag = True - #print(prior_sel) - SM_nop.fitting_kargs = {'prior': None , 'prior_weight' : 0 } - #SM.fitting_kargs = fitting_kargs = {'prior': None , 'prior_weight' : 1 } - amp_Z = 1##z_concat.var()#Z_max#0.5 #amp_enhancement * abs(Z_max)**2 /N - - SM_nop.params.add('K_prime', k_prime_max , vary=False , min=k_prime_max*0.5, max=k_prime_max*1.5) - SM_nop.params.add('K_amp' , amp_Z , vary=False , min=amp_Z*.0 , max=amp_Z*5) - #print(SM.params.pretty_print()) - try: - SM_nop.test_objective_func() - except: - raise ValueError('Objective function test fails') - - with suppress_stdout(): - SM_nop.sample(verbose= False, steps=N_sample_chain,progress= False, workers= None) - SM_nop.optimize(verbose= False) - SM_nop.brute(verbose= False) - - y_hist_nop, bins_nop, bins_pos_nop = SM_nop.get_marginal_dist('alpha', alpha_dx, burn = N_sample_chain_burn, plot_flag= False) - fitter_nop = SM_nop.fitter # MCMC results - z_model_nop = SM.objective_func(fitter_nop.params, *fitting_args , test_flag= True) - - - z_model, fargs , key = z_model, fitting_args, 'y_data_normed' - brute = brute_flag - optimze= optimize_flag - sample= sample_flag - - view_scale = 0.6 - - # def plot_instance(z_model, fargs , key, SM, non_dim=False, title_str= None , brute=False, optimze= False, sample= False, view_scale = 0.3): - lstrings =iter([i+') ' for i in list(string.ascii_lowercase)]) - import copy - - brute_clevel = np.linspace(-3.05, 3, 30) - font_for_print() - fn = copy.copy(lstrings) - x_concat, y_concat, z_concat = fargs - F = M.figure_axis_xy(fig_sizes['23rd_width_high'][0],fig_sizes['23rd_width_high'][1], view_scale = view_scale, container = True) - title_str = 'Incident Angle Sampling\nmodel wavelength='+ str(np.round(2 * np.pi/k_prime_max, 1)) + 'm' - - plt.suptitle(title_str, y = 0.95, ha='left', x=0.13) - gs = GridSpec(12, 10, wspace=0.3, hspace=10)#figure=fig, - F.gs = gs - # y_offset= 0.5 - # plt.plot(Gm.eta, Gm.y_model_normed+y_offset * Gm.dist_y/np.diff(Gm.dist_y), **model_plot_karg) - # plt.plot(Gm.eta, Gm.y_model_normed+y_offset * Gm.dist_y/np.diff(Gm.dist_y), **data_plot_karg) - # plt.xlim(-1000, 1000) - - beam_list = list(set(y_concat)) - - F.ax1 = 
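# Editorial sketch (an assumption about get_marginal_dist(), whose source is
# not in this diff): a marginal over one parameter is commonly built by
# discarding a burn-in segment of every walker's chain and histogramming the
# remaining samples, matching the (y_hist, bins, bins_pos) triple used above:
import numpy as np

def marginal_from_chain(chain, dx, burn=30, lims=(-np.pi, np.pi)):
    # chain: (steps, walkers) samples of one parameter, e.g. alpha
    samples = np.asarray(chain)[burn:].ravel()
    bins = np.arange(lims[0], lims[1] + dx, dx)
    y_hist, _ = np.histogram(samples, bins=bins, density=True)
    bins_pos = bins[:-1] + dx / 2                  # bin centers
    return y_hist, bins, bins_pos

rng = np.random.default_rng(0)
demo_chain = rng.normal(0.3, 0.2, size=(300, 8))   # fake MCMC chain
y_hist, bins, bins_pos = marginal_from_chain(demo_chain, dx=0.04)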
F.fig.add_subplot(gs[0:2, :]) - y_pos, bcol = beam_list[0], col.rels['gt2l'] - #plt.title( str(y_pos) ) - plt.plot(x_concat[y_concat == y_pos]/1e3, z_concat[y_concat == y_pos] , c=bcol, linewidth = 1) - plt.plot(x_concat[y_concat == y_pos]/1e3, z_model[y_concat == y_pos] , '-', c='black', linewidth= 0.6) - #plt.xlim(x_concat[y_concat == y_pos][0]/1e3, x_concat[y_concat == y_pos][-1]) - #plt.xlim(-1000/1e3, 1000/1e3) - plt.ylabel('Slope (m/m)') - plt.title(next(fn) +'Beam Pair', loc='left') - - F.ax2 = F.fig.add_subplot(gs[1:3, :]) - y_pos, bcol = beam_list[1], col.rels['gt2r'] - #plt.title( str(y_pos) ) - plt.plot(x_concat[y_concat == y_pos]/1e3, z_concat[y_concat == y_pos] , c=bcol, linewidth = 1) - plt.plot(x_concat[y_concat == y_pos]/1e3, z_model[y_concat == y_pos] , '-', c='black', linewidth= 0.6) - #plt.xlim(x_concat[y_concat == y_pos][0]/1e3, x_concat[y_concat == y_pos][-1]) - - - #F.ax1.spines['left'].set_position(('outward', 5)) - #F.ax2.spines['right'].set_position(('outward', 5)) - #F.ax2.set_ylabel('Slope (m/m)') - - y_ticks = MT.tick_formatter( np.arange(-10, 12, 1), interval=2, rounder=1, expt_flag=False, shift=0 ) - F.ax1.set_yticks(y_ticks[1]) - F.ax1.set_yticklabels(y_ticks[0]) - F.ax2.set_yticks(y_ticks[1]) - F.ax2.set_yticklabels(y_ticks[0]) - F.ax1.set_ylim(-3, 3) - F.ax2.set_ylim(-3, 3) - - F.ax1.tick_params(bottom=False, labelbottom= False) - F.ax1.spines['bottom'].set_visible(False) - #F.ax2.xaxis.set_ticks_position('top') - #F.ax2.xaxis.set_label_position('top') - F.ax1.spines['bottom'].set_linewidth(0.5) - F.ax1.spines['left'].set_linewidth(0.5) - #F.ax1.xaxis.set_ticks_position('bottom') - #F.ax1.xaxis.set_label_position('bottom') - - - F.ax2.tick_params(left= False, labelleft=False, right= True, labelright= True) - F.ax2.spines['right'].set_visible(True) - F.ax2.spines['left'].set_visible(False) - F.ax2.spines['bottom'].set_visible(True) - F.ax2.set_facecolor((1.0, 1.00, 1.00, 0)) - - F.ax1.axhline(0, color='gray', linewidth = 0.5, alpha = 1) - F.ax2.axhline(0, color='gray', linewidth = 0.5, alpha = 1) - - x_ticks = MT.tick_formatter( np.arange(-10, 10, 0.5), interval=2, rounder=1, expt_flag=False, shift=0 ) - F.ax1.set_xticks(x_ticks[1]) - F.ax1.set_xticklabels(x_ticks[0]) - F.ax1.set_xlim(-2, 2) - - F.ax2.set_xticks(x_ticks[1]) - F.ax2.set_xticklabels(x_ticks[0]) - F.ax2.set_xlim(-1.5, 1.5) - plt.xlabel('km') - - - # with prior - F.ax5 = F.fig.add_subplot(gs[3:8, 0:-2]) - F.ax5.tick_params(bottom= False, labelbottom=False) - F.ax5.spines['bottom'].set_visible(False) - - if brute is True: - plt.title(next(fn) +'Sample Visualization', loc='left') - #plt.title('with Prior', loc='left') - SM.plot_brute(clevel = brute_clevel , marker= '.', color ='blue', markersize=15, label= 'Brute', zorder=10) - plt.plot(SM.fitter_brute.brute_x0[1], SM.fitter_brute.brute_x0[0], '.', color ='red', markersize=5, zorder=10, label='best fit') - - # plt.colorbar(orientation='horizontal') - if optimze is True: - SM.plot_optimze(color= 'r', markersize=10, zorder=12, label= 'Dual Annealing') - - if sample is True: - SM.plot_sample(markersize= 2, linewidth= 0.8, alpha= 0.2, color= 'black', zorder=8) - - - if (fitting_kargs['prior'] is not None): - F.ax5.axhline(prior_sel['alpha'][0], color='orange', linewidth = 2, label ='Prior') - F.ax5.axhspan(prior_sel['alpha'][0]- prior_sel['alpha'][1], prior_sel['alpha'][0]+ prior_sel['alpha'][1], color='orange', alpha=0.2) - F.ax5.axhline(prior_sel['alpha'][0]- prior_sel['alpha'][1], color='orange', linewidth = 0.7) - 
F.ax5.axhline(prior_sel['alpha'][0]+ prior_sel['alpha'][1], color='orange', linewidth = 0.7) - - F.ax5.axhline(fitter.params['alpha'].min, color='black', linewidth = 0.6, alpha = 1) - F.ax5.axhline(fitter.params['alpha'].max, color='black', linewidth = 0.6, alpha = 1) - - F.ax5.set_yticks([-np.pi/2, 0, np.pi/2]) - F.ax5.set_yticklabels(['$-\pi$' ,'0', '$\pi$']) - - # F.ax5.set_yticks(deg_ticks) - # F.ax5.set_yticklabels(deg_tick_label) - - plt.sca(F.ax5) - plt.legend(loc= 1) - plt.xlabel('') - plt.ylabel('Angle of Incidence') - plt.xlim(0, np.pi*2) - - F.ax51 = F.fig.add_subplot(gs[3:8, -2:]) - F.ax51.tick_params(left= False, labelleft=False, labelbottom= False, bottom= False)#, right= True, labelright= True) - F.ax51.spines['bottom'].set_visible(False) - - plt.title(next(fn) +'Marginal', loc= 'left') - #plt.xlabel('Density') - plt.stairs(y_hist, bins, orientation='horizontal', color=col.rels['group'+str(gi[0]+1)]) - - F.ax51.axhline(fitter.params['alpha'].min, color='black', linewidth = 0.6, alpha = 1) - F.ax51.axhline(fitter.params['alpha'].max, color='black', linewidth = 0.6, alpha = 1) - - F.ax5.set_ylim(-np.pi/1.5, np.pi/1.5) - F.ax51.set_ylim(-np.pi/1.5, np.pi/1.5) - - # F.ax5.set_ylim(min( -np.pi /2, prior_sel['alpha'][0]- 0.2 ) , max(np.pi /2, prior_sel['alpha'][0] + 0.2 ) ) - # F.ax51.set_ylim(min( -np.pi /2, prior_sel['alpha'][0]- 0.2 ) , max(np.pi /2, prior_sel['alpha'][0]+ 0.2 ) ) - plt.xlim(0, 20) - #marginal_stack_xr = xr.concat(marginal_stack.values(), dim='k' ).sortby('k') - - # no prior - F.ax3 = F.fig.add_subplot(gs[8:, 0:-2]) - #F.ax3.tick_params(bottom= False, labelbottom=False) - # F.ax3.spines['bottom'].set_visible(False) - if brute is True: - #plt.title(next(fn) +'Beam Pair', loc='left') - plt.title(next(fn) +'Sample Visualization without prior', loc='left') - #plt.title('no Prior', loc='left') - SM_nop.plot_brute(clevel = brute_clevel) - # plt.colorbar(orientation='horizontal') - plt.plot(SM_nop.fitter_brute.brute_x0[1], SM_nop.fitter_brute.brute_x0[0], '.', color ='red', markersize=5, label= 'best fit', zorder=10) - if optimze is True: - SM_nop.plot_optimze(color= 'r', markersize=10, zorder=12, label= 'Dual Annealing') - - if sample is True: - SM_nop.plot_sample(markersize= 2, linewidth= 0.8, alpha= 0.2, color= 'black', zorder=8) - - F.ax4 = F.fig.add_subplot(gs[8:, -2:]) - F.ax4.tick_params(left= False, labelleft=False)#,labelbottom= False) - #return F - - # if (fitting_kargs['prior'] is not None): - # F.ax3.axhline(prior_sel['alpha'][0], color='green', linewidth = 2, label ='Prior') - # #F.ax3.axhspan(prior_sel['alpha'][0]- prior_sel['alpha'][1], prior_sel['alpha'][0]+ prior_sel['alpha'][1], color='gray', alpha=0.3) - # F.ax3.axhline(prior_sel['alpha'][0]- prior_sel['alpha'][1], color='green', linewidth = 0.7) - # F.ax3.axhline(prior_sel['alpha'][0]+ prior_sel['alpha'][1], color='green', linewidth = 0.7) - - F.ax3.axhline(fitter.params['alpha'].min, color='black', linewidth = 0.6, alpha = 1) - F.ax3.axhline(fitter.params['alpha'].max, color='black', linewidth = 0.6, alpha = 1) - - F.ax3.set_yticks([-np.pi/2, 0, np.pi/2]) - F.ax3.set_yticklabels(['$-\pi$' ,'0', '$\pi$']) - F.ax3.set_xticks([0, np.pi/2, np.pi, np.pi* 1.5, 2 *np.pi]) - F.ax3.set_xticklabels(['0', '', '$\pi$', '', '$2\pi$']) - - - plt.sca(F.ax3) - #plt.legend(loc= 1) - plt.xlabel('') - plt.ylabel('Angle of Incidence') - plt.xlim(0, np.pi*2) - plt.xlabel('Wave Phase') - - plt.sca(F.ax4) - plt.title(next(fn) +'Marginal', loc= 'left') - #plt.xlabel('Density') - plt.stairs(y_hist_nop, bins_nop, 
orientation='horizontal', color='k') - - # deg_ticks=np.arange(-180, 360+60, 60) - # deg_tick_label=[str(l)+'$^\circ$' for l in deg_ticks[:]] - # deg_ticks=deg_ticks * np.pi/180 - - #y_ticks = MT.tick_formatter( np.arange(, 12, 1), interval=2, rounder=1, expt_flag=False, shift=0 ) - F.ax3.set_yticks([-np.pi/2, 0, np.pi/2]) - F.ax3.set_yticklabels(['$-\pi$' ,'0', '$\pi$']) - - F.ax3.set_xticks([0, np.pi/2, np.pi, np.pi* 1.5, 2 *np.pi]) - F.ax3.set_xticklabels(['0', '', '$\pi$', '', '$2\pi$']) - - F.ax4.axhline(fitter.params['alpha'].min, color='black', linewidth = 0.6, alpha = 1) - F.ax4.axhline(fitter.params['alpha'].max, color='black', linewidth = 0.6, alpha = 1) - - # F.ax3.set_ylim(min( -np.pi /2, prior_sel['alpha'][0]- 0.2 ) , max(np.pi /2, prior_sel['alpha'][0] + 0.2 ) ) - # F.ax4.set_ylim(min( -np.pi /2, prior_sel['alpha'][0]- 0.2 ) , max(np.pi /2, prior_sel['alpha'][0]+ 0.2 ) ) - - plt.xlim(0, 20) - F.ax3.set_ylim(-np.pi/2, np.pi/2) - F.ax3.set_xlim(0, 2 *np.pi) - F.ax4.set_ylim(-np.pi/2, np.pi/2) - F.ax4.set_xlabel('Density') - #plt.colorbar() - - #marginal_stack.mean('k').plot() - # F.ax52 = F.fig.add_subplot(gs[7:, -1]) - # F.ax52.tick_params(left= False, labelleft=False)#, right= True, labelright= True) - # - # plt.title('Sum', loc= 'left') - # plt.xlabel('Density') - # - # marginal_mean = (marginal_stack_xr * marginal_stack_xr.weight).sum('k') / marginal_stack_xr.weight.sum() - # - # plt.stairs(marginal_mean,bins, orientation='horizontal', color=col.rels['group'+str(gi[0]+1)], linewidth= 1.5) - # - # F.ax52.axhline(fitter.params['alpha'].min, color='black', linewidth = 0.6, alpha = 1) - # F.ax52.axhline(fitter.params['alpha'].max, color='black', linewidth = 0.6, alpha = 1) - # - # #F.ax52.set_ylim(min( -np.pi /2, prior_sel['alpha'][0]- 0.2 ) , max(np.pi /2, prior_sel['alpha'][0]+ 0.2 ) ) - # - # F.ax52.set_ylim(-np.pi/1.2, np.pi/1.2) - # plt.xlim(0, 5) - - cbaxes = F.fig.add_subplot(gs[6:8, 5:-2]) - cbaxes.axis('off') - cbpos = cbaxes.get_position() - #cbaxes2 = F.fig.add_axes([cbpos.x0,cbpos.y0,cbpos.width/5,cbpos.height]) - cbaxes2 = F.fig.add_axes([cbpos.x0,cbpos.y0+ 2*cbpos.height/6,cbpos.width,cbpos.height/6]) - cb = plt.colorbar(cax =cbaxes2, orientation='horizontal') - cb.set_ticks([-3,0, 3]) - cb.set_label('Anomalous Cost') - - plt.show() - F.save_light(path= plot_path, name = 'MCMC_fit_' + group[0]+'_'+group[1]+'_'+str(int(xi)) +'_'+ str(pk).zfill(3) ) - F.save_pup(path= plot_path, name = 'MCMC_fit_' + group[0]+'_'+group[1]+'_'+str(int(xi)) +'_'+ str(pk).zfill(3) ) - - pk+=1 -# -# -# # %% -# -# # %% -# # # A= dict() -# # # for k_pair in zip(k_list, weight_list): -# # # kk, I = get_instance(k_pair) -# # # A[kk] = I -# # -# # with futures.ProcessPoolExecutor(max_workers=Nworkers) as executor: -# # A = dict( executor.map(get_instance, zip(k_list, weight_list) )) -# # -# # cost_stack = dict() -# # marginal_stack =dict() -# # #fitting_kargs = {'size' :1} -# # L_sample = pd.DataFrame(index=['alpha', 'group_phase', 'K_prime', 'K_amp'] ) -# # L_optimize = pd.DataFrame(index=['alpha', 'group_phase', 'K_prime', 'K_amp'] ) -# # L_brute = pd.DataFrame(index=['alpha', 'group_phase', 'K_prime', 'K_amp'] ) -# # -# # for kk,I in A.items(): -# # L_sample[kk] = I['L_sample_i'] -# # L_optimize[kk] = I['L_optimize_i'] -# # L_brute[kk] = I['L_brute_i'] -# # -# # marginal_stack[kk] = I['marginal_stack_i'] -# # cost_stack[kk] = I['cost'] -# # -# # # ## add beam_group dimension -# # marginal_stack = xr.concat(marginal_stack.values(), dim='k' ).sortby('k') -# # L_sample = 
L_sample.T.sort_values('K_prime') -# # L_optimize = L_optimize.T.sort_values('K_prime') -# # L_brute = L_brute.T.sort_values('K_prime') -# # -# # #print(marginal_stack.angle.data[::20]) -# # -# # print('done with ', group, xi/1e3) -# # -# # # % collect -# # ikey = str(xi) +'_' + '_'.join(group) -# # -# # #marginal_stack.coords['cost'] = (('k'), np.expand_dims(np.expand_dims(list(cost_stack.values()), 1), 2) ) -# # marginal_stack.name = 'marginals' -# # marginal_stack = marginal_stack.to_dataset() -# # marginal_stack['cost'] = (('k'), list(cost_stack.values()) ) -# # marginal_stack['weight'] = (('k'), weight_list ) -# # -# # group_name = str('group' + group[0].split('gt')[1].split('l')[0]) -# # marginal_stack.coords['beam_group'] = group_name -# # marginal_stack.coords['x'] = xi -# # -# # Marginals[ikey] = marginal_stack.expand_dims(dim = 'x', axis = 0).expand_dims(dim = 'beam_group', axis = 1) -# # Marginals[ikey].coords['N_data'] = ( ('x', 'beam_group'), np.expand_dims(np.expand_dims(N_data, 0), 1) ) -# # # L_brute -# # # L_optimize -# # -# # L_sample['cost'] = cost_stack -# # L_sample['weight'] = weight_list -# # L_collect[group_name, str(int(xi))] = L_sample#pd.concat(L_collect_per_xi) -# # -# -# # %% -# #list(Marginals.values())[0] -# MM = xr.merge( Marginals.values()) -# MM =xr.merge([ MM, Prior_smth]) -# #MM.to_netcdf(save_path + save_name + '_marginals.nc') -# -# LL = pd.concat(L_collect) -# #MT.save_pandas_table({'L_sample':LL} ,save_name+ '_res_table', save_path) -# -# # %% plot -# font_for_print() -# F = M.figure_axis_xy(6, 5.5, view_scale= 0.7, container = True) -# -# gs = GridSpec(4,6, wspace=0.2, hspace=.8)#figure=fig, -# -# ax0 = F.fig.add_subplot(gs[0:2, -1]) -# ax0.tick_params(labelleft=False) -# -# #klims = k_list.min()*0.2 , k_list.max()*1.2 -# -# klims = 0, LL['K_prime'].max()*1.2 -# -# -# for g in MM.beam_group: -# MMi = MM.sel(beam_group= g) -# plt.plot( MMi.weight.T,MMi.k, '.', color= col_dict[str(g.data)], markersize= 3, linewidth = 0.8) -# -# plt.xlabel('Power') -# plt.ylim(klims) -# -# ax1 = F.fig.add_subplot(gs[0:2 , 0:-1]) -# -# for g in MM.beam_group: -# Li = LL.loc[str(g.data)] -# -# angle_list = np.array(Li['alpha']) * 180 /np.pi -# kk_list = np.array(Li['K_prime']) -# weight_list_i = np.array(Li['weight']) -# -# plt.scatter( angle_list, kk_list, s= (weight_list_i*8e1)**2 , c=col_dict[str(g.data)], label ='mode ' + str(g.data) ) -# -# -# # lflag= 'paritions ww3' -# # for i in np.arange(6): -# # i_dir, i_period = Prior.loc['pdp'+ str(i)]['mean'], Prior.loc['ptp'+ str(i)]['mean'] -# # i_k = (2 * np.pi/ i_period)**2 / 9.81 -# # i_dir = [i_dir -360 if i_dir > 180 else i_dir][0] -# # i_dir = [i_dir +360 if i_dir < -180 else i_dir][0] -# # -# # plt.plot(i_dir, i_k, '.', markersize = 6, color= col.red, label= lflag) -# # plt.plot(i_dir, i_k, '-', linewidth = 0.8, color= col.red) -# # -# # lflag = None -# -# dir_best[dir_best> 180] = dir_best[dir_best> 180] -360 -# plt.plot(dir_best, Pwavenumber , '.r', markersize = 6) -# -# dir_interp[dir_interp> 180] = dir_interp[dir_interp> 180] -360 -# plt.plot(dir_interp, Gk.k, '-', color= 'red', linewidth = 0.3, zorder=11) -# -# -# #ax1.axvline( best_guess * 180/ np.pi , color=col.blue, linewidth = 1.5, label ='best guess fitting') -# -# # ax1.axvline( (prior_sel['alpha'][0]) * 180 /np.pi, color='k', linewidth = 1.5, label ='prior') -# # ax1.axvline( (prior_sel['alpha'][0]- prior_sel['alpha'][1]) * 180 /np.pi, color='k', linewidth = 0.7, label ='prior uncertrainty') -# # ax1.axvline( (prior_sel['alpha'][0]+ 
prior_sel['alpha'][1]) * 180 /np.pi , color='k', linewidth = 0.7) -# -# plt.fill_betweenx(Gk.k, (dir_interp_smth -spread_smth)* 180 /np.pi, (dir_interp_smth +spread_smth)* 180 /np.pi, zorder= 1, color=col.green1, alpha = 0.2 ) -# plt.plot(dir_interp_smth * 180 /np.pi, Gk.k , '.', markersize = 1 , color=col.green1) -# -# ax1.axvline(85, color='gray', linewidth= 2) -# ax1.axvline(-85, color='gray', linewidth= 2) -# -# -# plt.legend() -# plt.ylabel('wavenumber (deg)') -# plt.xlabel('Angle (deg)') -# -# #plt.xlim(- 170, 170) -# #plt.xlim(- 90, 90) -# plt.ylim(klims) -# -# prior_angle_str =str(np.round( (prior_sel['alpha'][0]) * 180 /np.pi)) -# plt.title(track_name + '\nprior=' + prior_angle_str + 'deg', loc= 'left' ) -# -# plt.xlim( min( [ -90, np.nanmin(dir_best)] ), max( [np.nanmax(dir_best), 90]) ) -# -# -# ax3 = F.fig.add_subplot(gs[2 , 0:-1]) -# -# for g in MM.beam_group: -# MMi = MM.sel(beam_group= g) -# wegihted_margins = ( (MMi.marginals * MMi.weight).sum(['x','k'] )/MMi.weight.sum(['x', 'k']) ) -# plt.plot( MMi.angle * 180/ np.pi, wegihted_margins , '.', color= col_dict[str(g.data)], markersize= 2, linewidth = 0.8) -# -# plt.ylabel('Density') -# plt.title('weight margins', loc='left') -# -# #plt.plot(marginal_stack.angle * 180 /np.pi, marginal_stack.T , c=col.gray, label ='weighted mean BF') -# -# #plt.plot(cost_wmean.angle * 180 /np.pi, cost_wmean , c=col.rascade3, label ='weighted mean BF') -# plt.xlim(- 90, 90) -# #plt.xlim(- 125, 125) -# -# ax3 = F.fig.add_subplot(gs[-1 , 0:-1]) -# -# for g in MM.beam_group: -# MMi = MM.sel(beam_group= g) -# wegihted_margins = MMi.marginals.mean(['x','k'] )# ( (MMi.marginals * MMi.weight).sum(['x','k'] )/MMi.weight.sum(['x', 'k']) ) -# plt.plot( MMi.angle * 180/ np.pi, wegihted_margins , '.', color= col_dict[str(g.data)], markersize= 2, linewidth = 0.8) -# -# plt.ylabel('Density') -# plt.xlabel('Angle (deg)') -# plt.title('unweighted margins', loc='left') -# -# #plt.plot(marginal_stack.angle * 180 /np.pi, marginal_stack.T , c=col.gray, label ='weighted mean BF') -# -# #plt.plot(cost_wmean.angle * 180 /np.pi, cost_wmean , c=col.rascade3, label ='weighted mean BF') -# plt.xlim(- 90, 90) -# #plt.xlim(- 125, 125) -# -# #F.save_pup(path= plot_path, name = 'B04_marginal_distributions') -# -# #MT.json_save('B04_success', plot_path, {'time':'time.asctime( time.localtime(time.time()) )'}) diff --git a/analyis_publish/PB04_angle_priors.py b/analyis_publish/PB04_angle_priors.py deleted file mode 100644 index ce31051e..00000000 --- a/analyis_publish/PB04_angle_priors.py +++ /dev/null @@ -1,560 +0,0 @@ -# %% -import os, sys -#execfile(os.environ['PYTHONSTARTUP']) - -""" -This file open a ICEsat2 track applied filters and corections and returns smoothed photon heights on a regular grid in an .nc file. 
-This is python 3 -""" - -exec(open(os.environ['PYTHONSTARTUP']).read()) -exec(open(STARTUP_2021_IceSAT2).read()) - -#%matplotlib inline - -import ICEsat2_SI_tools.convert_GPS_time as cGPS -import h5py -import ICEsat2_SI_tools.io as io -import ICEsat2_SI_tools.spectral_estimates as spec - - -import copy -import spicke_remover -import datetime -import concurrent.futures as futures - -#from numba import jit -%matplotlib widget -#from ICEsat2_SI_tools import angle_optimizer -import ICEsat2_SI_tools.wave_tools as waves -import concurrent.futures as futures - -import time -import ICEsat2_SI_tools.lanczos as lanczos - -xr.set_options(display_style='text') -from contextlib import contextmanager -col.colormaps2(21) - -@contextmanager -def suppress_stdout(): - with open(os.devnull, "w") as devnull: - old_stdout = sys.stdout - sys.stdout = devnull - try: - yield - finally: - sys.stdout = old_stdout - -col_dict = col.rels -#import s3fs -# %% -track_name, batch_key, test_flag = io.init_from_input(sys.argv) # loads standard experiment -#track_name, batch_key, test_flag = '20190605061807_10380310_004_01', 'SH_batch01', False -#track_name, batch_key, test_flag = '20190601094826_09790312_004_01', 'SH_batch01', False -#track_name, batch_key, test_flag = '20190207111114_06260210_004_01', 'SH_batch02', False -#track_name, batch_key, test_flag = '20190219073735_08070210_004_01', 'SH_batch02', False -#track_name, batch_key, test_flag = '20190215184558_07530210_004_01', 'SH_batch02', False - -# good track -#track_name, batch_key, test_flag = '20190502021224_05160312_004_01', 'SH_batch02', False -#track_name, batch_key, test_flag = '20190502050734_05180310_004_01', 'SH_batch02', False -#track_name, batch_key, test_flag = '20190210143705_06740210_004_01', 'SH_batch02', False -#track_name, batch_key, test_flag = 'NH_20190301_09580203', 'NH_batch05', True - -#track_name, batch_key, test_flag = 'SH_20190213_07190212', 'SH_publish', True -#track_name, batch_key, test_flag = 'SH_20190502_05180312', 'SH_publish', True -track_name, batch_key, test_flag = 'SH_20190502_05160312', 'SH_publish', True - - - - -#print(track_name, batch_key, test_flag) -hemis, batch = batch_key.split('_') -#track_name= '20190605061807_10380310_004_01' -ATlevel= 'ATL03' - -save_path = mconfig['paths']['work'] +batch_key+'/B04_angle/' -save_name = 'B04_'+track_name - -plot_path = mconfig['paths']['plot'] + '/'+hemis+'/'+batch_key+'/publish/' + track_name + '/' -#plot_path = mconfig['paths']['plot'] + '/'+hemis+'/'+batch_key+'/' + track_name + '/' -MT.mkdirs_r(plot_path) -MT.mkdirs_r(save_path) -bad_track_path =mconfig['paths']['work'] +'bad_tracks/'+ batch_key+'/' -# %% - -all_beams = mconfig['beams']['all_beams'] -high_beams = mconfig['beams']['high_beams'] -low_beams = mconfig['beams']['low_beams'] -beam_groups = mconfig['beams']['groups'] - - -#Gfilt = io.load_pandas_table_dict(track_name + '_B01_regridded', load_path) # rhis is the rar photon data - -load_path = mconfig['paths']['work'] +batch_key+'/B01_regrid/' -#G_binned = io.load_pandas_table_dict(track_name + '_B01_binned' , load_path) # -G_binned_store = h5py.File(load_path +'/'+track_name + '_B01_binned.h5', 'r') -G_binned = dict() -for b in all_beams: - - G_binned[b] = io.get_beam_hdf_store(G_binned_store[b]) -G_binned_store.close() - -load_path = mconfig['paths']['work'] +batch_key+'/B02_spectra/' -Gx = xr.load_dataset(load_path+ '/B02_'+track_name + '_gFT_x.nc' ) # -Gk = xr.load_dataset(load_path+ '/B02_'+track_name + '_gFT_k.nc' ) # - - -# %% load prior information -load_path = 
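# Editorial sketch: the open/loop/close pattern above leaks the HDF5 handle if
# one beam read raises; a context manager closes the store either way.
# (io.get_beam_hdf_store is this repo's reader; the argument names below are
# illustrative.)
import h5py

def load_all_beams(path, beams, reader):
    out = {}
    with h5py.File(path, 'r') as store:            # closed even on error
        for b in beams:
            out[b] = reader(store[b])
    return out

# usage sketch:
# G_binned = load_all_beams(load_path + '/' + track_name + '_B01_binned.h5',
#                           all_beams, io.get_beam_hdf_store)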
mconfig['paths']['work'] +batch_key+'/A02_prior/' -#track_name = '20190208104534_06410210_004_01' -try: - Prior = MT.load_pandas_table_dict('/A02_'+track_name, load_path)['priors_hindcast'] -except: - print('Prior not founds exit') - MT.json_save('B04_fail', plot_path, {'time':time.asctime( time.localtime(time.time()) ) , 'reason': 'Prior not found'}) - print('exit()') - exit() - - -#### Define Prior -# Use partitions -# Prior2 = Prior.loc[['ptp0','ptp1','ptp2','ptp3','ptp4','ptp5']]['mean'] -# dominat_period = Prior2[Prior2.max() ==Prior2] -# aa = Prior.loc[['pdp0','pdp1','pdp2','pdp3','pdp4','pdp5']]['mean'].astype('float') -# dominant_dir = waves.get_ave_amp_angle(aa *0+1,aa )[1] -# dominant_dir_spread = Prior.loc[['pspr0','pspr1','pspr2','pspr3','pspr4','pspr5']]['mean'].median() -# -# prior_sel= {'alpha': ( dominant_dir *np.pi/180 , dominant_dir_spread *np.pi/180) } # to radiens -#prior_sel= {'alpha': ( -60 *np.pi/180 , dominant_dir_spread *np.pi/180) } # to radiens - -Pperiod = Prior.loc[['ptp0','ptp1','ptp2','ptp3','ptp4','ptp5']]['mean'] -Pdir = Prior.loc[['pdp0','pdp1','pdp2','pdp3','pdp4','pdp5']]['mean'].astype('float') -Pspread = Prior.loc[['pspr0','pspr1','pspr2','pspr3','pspr4','pspr5']]['mean'] - -Pperiod = Pperiod[ ~np.isnan(list(Pspread))] -Pdir = Pdir[ ~np.isnan(list(Pspread))] -Pspread = Pspread[ ~np.isnan(list(Pspread))] - - -# reset dirs: -Pdir[Pdir > 180] = Pdir[Pdir > 180] - 360 -Pdir[Pdir < -180] = Pdir[Pdir < -180] + 360 - -# reorder dirs -dir_best = [0] -for dir in Pdir: - ip = np.argmin([ abs(dir_best[-1] - dir), abs(dir_best[-1] - (dir - 360 )), abs(dir_best[-1] - (dir + 360 )) ] ) - new_dir = np.array([ dir, (dir - 360 ) , (dir + 360 ) ])[ip] - dir_best.append(new_dir) -dir_best = np.array(dir_best[1:]) - -# %% - -if len(Pperiod) == 0: - print('constant peak wave number') - kk = Gk.k - Pwavenumber = kk*0 + (2 * np.pi / (1/ Prior.loc['fp']['mean']) )**2 / 9.81 - dir_best = kk*0 + Prior.loc['dp']['mean'] - #dir_interp = np.interp(kk, Pwavenumber[Pwavenumber.argsort()] , dir_best[Pwavenumber.argsort()] ) - dir_interp_smth = dir_interp = kk*0 + Prior.loc['dp']['mean'] - spread_smth = spread_interp = kk*0 + Prior.loc['spr']['mean'] - #spread_interp = np.interp(kk, Pwavenumber[Pwavenumber.argsort()] , Pspread[Pwavenumber.argsort()].astype('float') ) - #spread_smth = M.runningmean(spread_interp, 30, tailcopy= True) - #spread_smth[-1] = spread_smth[-2] - -else: - Pwavenumber = (2 * np.pi / Pperiod )**2 / 9.81 - kk = Gk.k - dir_interp = np.interp(kk, Pwavenumber[Pwavenumber.argsort()] , dir_best[Pwavenumber.argsort()] ) - dir_interp_smth = M.runningmean(dir_interp, 30, tailcopy= True) - dir_interp_smth[-1] = dir_interp_smth[-2] - - spread_interp = np.interp(kk, Pwavenumber[Pwavenumber.argsort()] , Pspread[Pwavenumber.argsort()].astype('float') ) - spread_smth = M.runningmean(spread_interp, 30, tailcopy= True) - spread_smth[-1] = spread_smth[-2] - - -font_for_pres() - -F = M.figure_axis_xy(5, 4.5, view_scale= 0.5) -plt.subplot(2, 1, 1) -plt.title('Prior angle smoothed\n'+ track_name, loc ='left') - - -plt.plot( Pwavenumber , dir_best, '.r', markersize = 8) -plt.plot( kk , dir_interp, '-', color= 'red', linewidth = 0.8, zorder=11) -plt.plot( kk , dir_interp_smth , color=col.green1) - -plt.fill_between(kk, dir_interp_smth -spread_smth, dir_interp_smth +spread_smth, zorder= 1, color=col.green1, alpha = 0.2 ) -plt.ylabel('Angle (deg)') -#plt.xlabel('wavenumber ($2 \pi/\lambda$)') - -ax_final = plt.subplot(2, 1, 2) -plt.title('Prior angle adjusted ', loc ='left') - -# adjust 
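# Editorial sketch of the fallback branch above: when the hindcast provides no
# valid partitions (len(Pperiod) == 0), the prior collapses to constants built
# from the bulk parameters fp (peak frequency, Hz), dp (peak direction, deg)
# and spr (spread, deg). Note (2*pi / (1/fp))**2 / g is just (2*pi*fp)**2 / g.
# Toy numbers below, not repo data.
import numpy as np

g = 9.81
fp, dp, spr = 0.08, -35.0, 25.0                    # illustrative bulk values
kk_demo = np.linspace(0.005, 0.08, 50)             # wavenumber axis (rad/m)
k_peak = (2 * np.pi * fp) ** 2 / g                 # constant peak wavenumber
dir_prior = np.full_like(kk_demo, dp)              # constant direction prior
spread_prior = np.full_like(kk_demo, spr)          # constant spread prior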
angle def: -dir_interp_smth[dir_interp_smth> 180] = dir_interp_smth[dir_interp_smth> 180]- 360 -dir_interp_smth[dir_interp_smth< -180] = dir_interp_smth[dir_interp_smth< -180]+ 360 - -plt.fill_between(kk, dir_interp_smth -spread_smth, dir_interp_smth +spread_smth, zorder= 1, color=col.green1, alpha = 0.2 ) -plt.plot( kk , dir_interp_smth , '.', markersize = 1 , color=col.green1) - -ax_final.axhline(85, color='gray', linewidth= 2) -ax_final.axhline(-85, color='gray', linewidth= 2) - -plt.ylabel('Angle (deg)') -plt.xlabel('wavenumber ($2 \pi/\lambda$)') - -#F.save_light(path= plot_path, name = 'B04_prior_angle') - - -# save -dir_interp_smth = xr.DataArray(data=dir_interp_smth * np.pi/180 , dims='k', coords ={'k':kk}, name='Prior_direction') -spread_smth = xr.DataArray(data=spread_smth* np.pi/180 , dims='k', coords ={'k':kk}, name='Prior_spread') -Prior_smth = xr.merge([dir_interp_smth, spread_smth]) - - -# Use fake -#prior_sel= {'alpha': ( 0.6 , dominant_dir_spread *np.pi/180) } # to radiens - -# Use mean direction -#prior_sel= {'alpha': ( Prior.loc['dp']['mean'] *np.pi/180 , Prior.loc['spr']['mean'] *np.pi/180) } - - -# define paramater range -# params_dict = {'alpha': [ -0.85 * np.pi /2, 0.85 * np.pi /2, 5], -# 'phase':[ 0 , 2*np.pi , 10]} -# -# alpha_dx = 0.02 -# max_wavenumbers = 25 -# -# sample_flag = True -# optimize_flag = False -# brute_flag = False -# -# plot_flag = False -# -# Nworkers = 6 -# N_sample_chain = 300 -# N_sample_chain_burn = 30 -# -max_x_pos = 8 -x_pos_jump = 1 - - -# isolate x positions with data -data_mask = Gk.gFT_PSD_data.mean('k') -data_mask.coords['beam_group'] = ('beam', ['beam_group'+g[2] for g in data_mask.beam.data]) -data_mask_group = data_mask.groupby('beam_group').mean(skipna=False) -# these stancils are actually used -data_sel_mask = data_mask_group.sum('beam_group') !=0 - -x_list = data_sel_mask.x[data_sel_mask] # iterate over these x posistions -x_list_flag = ~np.isnan(data_mask_group.sel(x = x_list) )# flag that is False if there is no data - -#### limit number of x coordinates - -x_list = x_list#[::x_pos_jump] -#if len(x_list) > max_x_pos: -x_list = x_list[0:max_x_pos] -x_list_flag= x_list_flag.sel(x =x_list) - -# plot -font_for_print() -F = M.figure_axis_xy(5.5, 3, view_scale= 0.8) -plt.suptitle(track_name) -ax1 = plt.subplot(2, 1, 1) -plt.title('Data in Beam', loc= 'left') -plt.pcolormesh(data_mask.x/1e3, data_mask.beam, data_mask, cmap= plt.cm.OrRd) -for i in np.arange(1.5, 6, 2): - ax1.axhline(i, color= 'black', linewidth =0.5) -plt.xlabel('Distance from Ice Edge') - -ax_final = plt.subplot(2, 1, 2) -plt.title('Data in Group', loc= 'left') -plt.pcolormesh(data_mask.x/1e3, data_mask_group.beam_group, data_mask_group, cmap= plt.cm.OrRd) - -for i in np.arange(0.5, 3, 1): - ax_final.axhline(i, color= 'black', linewidth =0.5) - -plt.plot( x_list/1e3, x_list*0 +0, '.', markersize= 2, color= col.cascade1 ) -plt.plot( x_list/1e3, x_list*0 +1, '.', markersize= 2, color= col.cascade1 ) -plt.plot( x_list/1e3, x_list*0 +2, '.', markersize= 2, color= col.cascade1 ) - -plt.xlabel('Distance from Ice Edge') - -F.save_pup(path= plot_path, name = 'B04_data_avail') - - - -# %% Load marginal distributions -MM = xr.open_dataset(save_path + save_name + '_marginals.nc') -LL = MT.load_pandas_table_dict(save_name+ '_res_table', save_path)['L_sample'] - -# plot - -fn = copy.copy(lstrings) - -font_for_print() -F = M.figure_axis_xy(fig_sizes['one_column_high'][0], fig_sizes['one_column_high'][1]*0.6, view_scale= 0.7, container = True) -gs = GridSpec(3,6, wspace=0.5, 
hspace=.8)#figure=fig, - -ax1 = F.fig.add_subplot(gs[0:2 , 0:-2]) - -dd = MM.marginals -dd = dd.mean('beam_group').mean('x').rolling(k=20, min_periods= 1, center=True).mean()#.plot() - -try: - plt.pcolor(dd.angle * 180 /np.pi , dd.k , dd, cmap=col.cascade_r , zorder=0, vmin=0, vmax=5 )#np.arange(10, 37, 1) ) -except: - plt.pcolor(dd.angle * 180 /np.pi , dd.k , dd.T, cmap=col.cascade_r , zorder=0, vmin=0, vmax=5 )#np.arange(10, 37, 1) ) - -#klims = k_list.min()*0.2 , k_list.max()*1.2 -klims = MM.k[~np.isnan(dd.mean('angle'))].min().data, MM.k[~np.isnan(dd.mean('angle'))].max().data# 0, LL['K_prime'].max()*1.2 - - -dir_best[dir_best> 180] = dir_best[dir_best> 180] -360 -plt.plot(dir_best, Pwavenumber , '.k', markersize = 6) - -dir_interp[dir_interp> 180] = dir_interp[dir_interp> 180] -360 -plt.plot(dir_interp, Gk.k, '-', color= 'k', linewidth = 1.5, zorder=1, alpha= 0.4) - -# ax1.axvline( best_guess * 180/ np.pi , color=col.blue, linewidth = 1.5, label ='best guess fitting') -# ax1.axvline( (prior_sel['alpha'][0]) * 180 /np.pi, color='k', linewidth = 1.5, label ='prior') -# ax1.axvline( (prior_sel['alpha'][0]- prior_sel['alpha'][1]) * 180 /np.pi, color='k', linewidth = 0.7, label ='prior uncertrainty') -# ax1.axvline( (prior_sel['alpha'][0]+ prior_sel['alpha'][1]) * 180 /np.pi , color='k', linewidth = 0.7) - -plt.fill_betweenx(Gk.k, (dir_interp_smth -spread_smth)* 180 /np.pi, (dir_interp_smth +spread_smth)* 180 /np.pi, zorder= 1, color=col.orange, alpha = 0.2 ) -plt.plot(dir_interp_smth * 180 /np.pi, Gk.k , '.', markersize = 1 , color=col.orange) - -ax1.axvline(85, color='gray', linewidth= 1) -ax1.axvline(-85, color='gray', linewidth= 1) - -#plt.legend() -plt.ylabel('wavenumber') -plt.xlabel('Angle (deg)') - - -plt.title(io.ID_to_str(track_name)+ '\n'+next(fn) +'marginal PDFs', loc='left') - -#plt.xlim(- 170, 170) -#plt.xlim(- 90, 90) - - -#prior_angle_str =str(np.round( (prior_sel['alpha'][0]) * 180 /np.pi)) -#plt.title(track_name + '\nprior=' + 'deg', loc= 'left' ) -plt.xlim( min( [ -90, np.nanmin(dir_best)] ) - 5, max( [np.nanmax(dir_best), 90]) +5 ) - - -ax0 = F.fig.add_subplot(gs[0:2 ,-2:]) -ax0.tick_params(labelleft=False) - - - -for g in MM.beam_group: - MMi = MM.sel(beam_group= g) - plt.plot( MMi.weight.T,MMi.k, '.', color= col_dict[str(g.data)], markersize= 2, alpha=0.5, linewidth = 0.8) - -plt.title(next(fn) + 'weights', loc='left') -plt.xlabel('Power') -plt.ylim(klims) - -F.save_pup(path= plot_path, name = 'B04_'+track_name+'_prior') -#MT.json_save('B04_success', plot_path, {'time':'time.asctime( time.localtime(time.time()) )'}) - -# %% plot - - -def derive_weights(weights): - weights = (weights-weights.mean())/weights.std() - weights = weights - weights.min() - return weights - - -def weighted_means(data, weights, x_angle, color='k'): - """ - weights should have nans when there is no data - data should have zeros where there is no data - """ - from scipy.ndimage.measurements import label - # make wavenumber groups - groups, Ngroups = label(weights.where(~np.isnan(weights), 0) ) - - for ng in np.arange(1, Ngroups+1): - wi = weights[groups == ng] - weight_norm = weights.sum('k') - k = wi.k.data - data_k = data.sel(k=k).squeeze() - data_weight = (data_k * wi) - #plt.stairs(data_weight.sum('k')/ weight_norm , x_angle, linewidth=1 , color ='k') - # if data_k.k.size > 1: - # for k in data_k.k.data: - # plt.stairs(data_weight.sel(k=k) / weight_norm, x_angle, color ='gray', alpha =0.5) - # else: - # plt.stairs(data_weight.squeeze() / weight_norm, x_angle, color ='gray', alpha =0.5) - - - 
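    # (editorial comment) final step of weighted_means(): zeros mark "no data"
    # in the marginals, so they are masked to NaN before multiplying by the
    # wavenumber weights; the k-sum is then normalized by the total weight,
    # yielding one weighted-average marginal per angle.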
data_weighted_mean = (data.where( (~np.isnan(data)) & (data != 0), np.nan) * weights ).sum('k')/weight_norm - return data_weighted_mean - - -xticks_pi = np.arange(-np.pi/2, np.pi/2+np.pi/4, np.pi/4) -xtick_labels_pi = ['-$\pi/2$','-$\pi/4$','0','$\pi/4$','$\pi/2$',] - -fn = copy.copy(lstrings) - -font_for_print() -F = M.figure_axis_xy(fig_sizes['one_column_high'][0], fig_sizes['one_column_high'][1]*0.7, view_scale= 0.7, container = True) -gs = GridSpec(3,6, wspace=0.5, hspace=.6)#figure=fig, - -ax1 = F.fig.add_subplot(gs[0:2 , :]) - -angle_mask = MM.angle *0 ==0 -angle_mask[0], angle_mask[-1] = False, False -corrected_marginals = MM.marginals.isel(angle=angle_mask ) + MM.marginals.isel(angle=~angle_mask ).sum('angle')/sum(angle_mask).data - - -#xi = MM.x[3] -xi = MM.x[3] - -dd = corrected_marginals.mean('beam_group').sel(x=xi).rolling(k=20, min_periods= 1, center=True).mean()#.plot() - -try: - plt.pcolor(dd.angle , dd.k , dd, cmap=col.cascade_r , zorder=0, vmin=0, vmax=5 )#np.arange(10, 37, 1) ) -except: - plt.pcolor(dd.angle , dd.k , dd.T, cmap=col.cascade_r , zorder=0, vmin=0, vmax=5 )#np.arange(10, 37, 1) ) - -#klims = k_list.min()*0.2 , k_list.max()*1.2 -klims = MM.k[~np.isnan(dd.mean('angle'))].min().data * 0.8, MM.k[~np.isnan(dd.mean('angle'))].max().data * 1.1# 0, LL['K_prime'].max()*1.2 - - -dir_best[dir_best> 180] = dir_best[dir_best> 180] -360 -plt.plot(dir_best * np.pi/180, Pwavenumber , '.k', markersize = 6) - -dir_interp[dir_interp> 180] = dir_interp[dir_interp> 180] -360 -plt.plot(dir_interp * np.pi/180, Gk.k, '-', color= 'k', linewidth = 1.5, zorder=1, alpha= 0.4) - -# ax1.axvline( best_guess * 180/ np.pi , color=col.blue, linewidth = 1.5, label ='best guess fitting') -# ax1.axvline( (prior_sel['alpha'][0]) * 180 /np.pi, color='k', linewidth = 1.5, label ='prior') -# ax1.axvline( (prior_sel['alpha'][0]- prior_sel['alpha'][1]) * 180 /np.pi, color='k', linewidth = 0.7, label ='prior uncertrainty') -# ax1.axvline( (prior_sel['alpha'][0]+ prior_sel['alpha'][1]) * 180 /np.pi , color='k', linewidth = 0.7) - -plt.fill_betweenx(Gk.k, (dir_interp_smth -spread_smth) , (dir_interp_smth +spread_smth) , zorder= 1, color=col.orange, alpha = 0.2 ) -plt.plot(dir_interp_smth , Gk.k , '.', markersize = 1 , color=col.orange) - - - -x_str= str(int(xi.data/1e3)) -plt.title( 'Example Marginal PDF' + '\n' + io.ID_to_str(track_name)+ '\nat $X_i$='+ x_str +' km '+ '\n'+next(fn) +'mean marginal PDFs per wavenumber', loc='left') - -#plt.xlim(- 170, 170) -#plt.xlim(- 90, 90) -plt.ylim(klims) - -#prior_angle_str =str(np.round( (prior_sel['alpha'][0]) * 180 /np.pi)) -#plt.title(track_name + '\nprior=' + 'deg', loc= 'left' ) -plt.xlim( min( [ -np.pi/2, np.nanmin(dir_best) * np.pi/180 ] ) - 5* np.pi/180, max( [np.nanmax(dir_best) * np.pi/180 , np.pi/2]) +5* np.pi/180 ) - - -ax_final = F.fig.add_subplot(gs[2: ,:]) -#ax_final.tick_params(labelleft=False) - - -ax_list= dict() - -Gweights = corrected_marginals.N_data -Gweights = Gweights/Gweights.max() -group_weight = Gweights.sel(x =xi) - -data_collect = dict() -for group, gpos in zip(MM.beam_group.data, [ gs[0, 0], gs[1, 0], gs[2, 0]] ): - - data = corrected_marginals.sel(x=xi).sel(beam_group= group) - weights = derive_weights( MM.weight.sel(x=xi).sel(beam_group= group) ) - weights = weights**2 - - # derive angle axis - x_angle = data.angle.data #* 180 /np.pi - d_angle = np.diff(x_angle)[0] - x_angle = np.insert(x_angle, x_angle.size , x_angle[-1].data + d_angle) - - if ( (~np.isnan(data)).sum().data == 0) | (( ~np.isnan(weights)).sum().data == 0): - 
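        # (editorial comment) guard: if this beam group has no usable marginals
        # or no usable weights at this x position, fall back to the plain
        # unweighted mean over k; otherwise use the weighted mean in the else
        # branch.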
data_wmean = data.mean('k') - else: - data_wmean = weighted_means(data, weights, x_angle, color= col_dict[group] ) - #plt.stairs(data_wmean , x_angle, color =col_dict[group], alpha =1) - - data_collect[group] = data_wmean - -data_collect = xr.concat(data_collect.values(), dim='beam_group') -final_data = (group_weight * data_collect).sum('beam_group') / group_weight.sum('beam_group').data -final_data =final_data / final_data.sum() / np.diff(x_angle)[0] - - -#ax_final = F.fig.add_subplot(gs[-1, :]) -plt.title(next(fn) + 'Final weighted marginal PDF', loc='left') - -k_mask = corrected_marginals.mean('beam_group').mean('angle') - -#priors_k = MM.Prior_direction[ ~np.isnan(k_mask.sel(x= xi))] -priors_k = dir_interp_smth[ ~np.isnan(k_mask.sel(x= xi))] -for pk in priors_k: - ax_final.axvline(pk, color =col.orange, linewidth= 1, alpha = 0.5) - -plt.stairs( final_data , x_angle, color = 'k', alpha =0.5, linewidth =0.8, zorder= 12) - -final_data_smth = lanczos.lanczos_filter_1d(x_angle,final_data, 0.1) -# -# for group in MM.beam_group.data: -# plt.stairs( data_collect.sel(beam_group= group) * group_weight.sel(beam_group= group) /group_weight.sum('beam_group').data, x_angle, color =col_dict[group], alpha =1) - -plt.plot(x_angle[0:-1], final_data_smth, color = 'black', linewidth= 0.8) - -ax_final.axvline( x_angle[0:-1][final_data_smth.argmax()] , color =col.cascade3, linewidth= 1.5, alpha = 1, zorder= 1) -ax_final.axvline( x_angle[0:-1][final_data_smth.argmax()] , color =col.black, linewidth= 3, alpha = 1, zorder= 0) - -ax_final.set_xlabel('Angle') -ax_final.set_ylabel('Density') - -ax_final.set_xlim( - 1.1 * np.pi/2 , 1.3 * np.pi/2) -ax1.set_xlim( - 1.1 * np.pi/2 , 1.3 * np.pi/2) -ax1.set_xticks(xticks_pi) - - -k_l = M.cut_nparray(dd.k.data, klims[0], klims[1] ) - -y_tickslabel, y_ticks = MT.tick_formatter(dd.k.data[k_l][::15]*10, interval= 3, rounder= 2, expt_flag= False) - -y_ticks = y_ticks/10 -ax1.set_yticks( y_ticks ) -ax1.set_yticklabels(y_tickslabel) -#plt.ylim(klims) - -ax1.set_xticklabels(xtick_labels_pi) -ax1.set_xticklabels([]) - -ax_final.set_xticks(xticks_pi) -ax_final.set_xticklabels(xtick_labels_pi) - -ax1.axvline(85 * np.pi/180, color='gray', linewidth= 1) -ax1.axvline(-85 * np.pi/180, color='gray', linewidth= 1) - - -ax_final.axvline(85 * np.pi/180, color='gray', linewidth= 1) -ax_final.axvline(-85 * np.pi/180, color='gray', linewidth= 1) - -#plt.legend() -ax1.set_ylabel('Wavenumber') -#ax1.set_xlabel('Angle (deg)') - - -dlim= 0, final_data_smth.max()* 1.2 -density_labels, density_ticks = MT.tick_formatter( np.arange(dlim[0], dlim[1], 0.5), interval= 2, rounder= 2, expt_flag= False) -ax_final.set_yticks(density_ticks) -ax_final.set_yticklabels(density_labels) -plt.ylim(dlim) -F.save_pup(path= plot_path, name = 'B04_'+track_name+'_prior_merged') - -# %% diff --git a/analyis_publish/PB05_define_angle.py b/analyis_publish/PB05_define_angle.py deleted file mode 100644 index fc75cebb..00000000 --- a/analyis_publish/PB05_define_angle.py +++ /dev/null @@ -1,503 +0,0 @@ -""" -This file open a ICEsat2 track applied filters and corections and returns smoothed photon heights on a regular grid in an .nc file. 
-This is Python 3.
-"""
-
-import os, sys
-
-exec(open(os.environ['PYTHONSTARTUP']).read())
-exec(open(STARTUP_2021_IceSAT2).read())
-
-import ICEsat2_SI_tools.convert_GPS_time as cGPS
-import h5py
-import ICEsat2_SI_tools.io as io
-import ICEsat2_SI_tools.spectral_estimates as spec
-
-import imp
-import copy
-import spicke_remover
-import datetime
-import concurrent.futures as futures
-
-from numba import jit
-
-from ICEsat2_SI_tools import angle_optimizer
-import ICEsat2_SI_tools.wave_tools as waves
-import concurrent.futures as futures
-
-import time
-import ICEsat2_SI_tools.lanczos as lanczos
-
-
-col.colormaps2(21)
-
-col_dict = col.rels
-track_name, batch_key, test_flag = io.init_from_input(sys.argv) # loads standard experiment
-
-
-# main text figure
-track_name, batch_key, test_flag = 'SH_20190502_05160312', 'SH_publish', False
-
-#suppl. figures:
-hemis, batch = batch_key.split('_')
-ATlevel= 'ATL03'
-
-
-
-plot_path = mconfig['paths']['plot'] + '/'+hemis+'/'+batch_key+'/publish/' + track_name + '/'
-MT.mkdirs_r(plot_path)
-bad_track_path =mconfig['paths']['work'] +'bad_tracks/'+ batch_key+'/'
-
-all_beams = mconfig['beams']['all_beams']
-high_beams = mconfig['beams']['high_beams']
-low_beams = mconfig['beams']['low_beams']
-beam_groups = mconfig['beams']['groups']
-group_names = mconfig['beams']['group_names']
-load_path = mconfig['paths']['work'] +batch_key +'/B02_spectra/'
-Gk = xr.load_dataset(load_path+ '/B02_'+track_name + '_gFT_k.nc' ) #
-
-load_path = mconfig['paths']['work'] +batch_key +'/B04_angle/'
-Marginals = xr.load_dataset(load_path+ '/B04_'+track_name + '_marginals.nc' ) #
-
-# %% load prior information
-load_path = mconfig['paths']['work']+batch_key +'/A02_prior/'
-Prior = MT.load_pandas_table_dict('/A02_'+track_name, load_path)['priors_hindcast']
-
-
-def derive_weights(weights):
-    weights = (weights-weights.mean())/weights.std()
-    weights = weights - weights.min()
-    return weights
-
-def weighted_means(data, weights, x_angle, color='k'):
-    """
-    weights should have nans when there is no data
-    data should have zeros where there is no data
-    """
-    from scipy.ndimage.measurements import label
-    # make wavenumber groups
-    groups, Ngroups = label(weights.where(~np.isnan(weights), 0) )
-
-    for ng in np.arange(1, Ngroups+1):
-        wi = weights[groups == ng]
-        weight_norm = weights.sum('k')
-        k = wi.k.data
-        data_k = data.sel(k=k).squeeze()
-        data_weight = (data_k * wi)
-        if data_k.k.size > 1:
-            for k in data_k.k.data:
-                plt.stairs(data_weight.sel(k=k) / weight_norm, x_angle, color ='gray', alpha =0.5)
-        else:
-            plt.stairs(data_weight.squeeze() / weight_norm, x_angle, color ='gray', alpha =0.5)
-
-
-    data_weighted_mean = (data.where( (~np.isnan(data)) & (data != 0), np.nan) * weights ).sum('k')/weight_norm
-    return data_weighted_mean
-
-
-
-
-# cut out data at the boundary and redistribute variance
-angle_mask = Marginals.angle *0 ==0
-angle_mask[0], angle_mask[-1] = False, False
-corrected_marginals = Marginals.marginals.isel(angle=angle_mask ) + Marginals.marginals.isel(angle=~angle_mask ).sum('angle')/sum(angle_mask).data
-
-# get group weights
-# ----------------- this does not work yet; check with the data on the server how to get the number of data points per stencil
-#Gx['x'] = Gx.x - Gx.x[0]
-
-# make dummy variables
-M_final = xr.full_like(corrected_marginals.isel(k=0, beam_group =0).drop('beam_group').drop('k'), np.nan)
-M_final_smth = xr.full_like(corrected_marginals.isel(k=0, beam_group =0).drop('beam_group').drop('k'), np.nan)
-if M_final.shape[0] > M_final.shape[1]:
M_final= M_final.T - M_final_smth= M_final_smth.T - corrected_marginals=corrected_marginals.T - -Gweights = corrected_marginals.N_data -Gweights = Gweights/Gweights.max() - -k_mask = corrected_marginals.mean('beam_group').mean('angle') - -xticks_2pi = np.arange(-np.pi, np.pi+np.pi/4, np.pi/4) -xtick_labels_2pi = ['-$\pi$', '-$3\pi/4$', '-$\pi/2$','-$\pi/4$','0','$\pi/4$','$\pi/2$','$3\pi/4$','$\pi$'] - -xticks_pi = np.arange(-np.pi/2, np.pi/2+np.pi/4, np.pi/4) -xtick_labels_pi = ['-$\pi/2$','-$\pi/4$','0','$\pi/4$','$\pi/2$',] - -group_names=dict() -for n,g in zip(mconfig['beams']['group_names'], mconfig['beams']['groups']): - group_names[n] = ('-'.join(g))[0:3] - - - -font_for_print() -x_list = corrected_marginals.x -for xi in range(x_list.size): - - fn = copy.copy(lstrings) - F = M.figure_axis_xy(fig_sizes['one_column_high'][0],fig_sizes['one_column_high'][1]*0.85, view_scale= 0.8, container = True) - gs = GridSpec(4,1, wspace=0.1, hspace=.8)#figure=fig, - x_str= str(int(x_list[xi]/1e3)) - tname = track_name.split('_')[1]+'\non '+ track_name.split('_')[0][0:8] - plt.suptitle('Weighted marginal PDFs for \n$X_i$='+ x_str +' km for track '+tname, y= 1.03, x = 0.125, horizontalalignment= 'left') - - #plt.suptitle('Weighted marginal PDFs\nx='+ x_str +'\n'+track_name, y= 1.05, x = 0.125, horizontalalignment= 'left') - group_weight = Gweights.isel(x =xi) - - ax_list= dict() - - data_collect = dict() - for group, gpos in zip(Marginals.beam_group.data, [ gs[0, 0], gs[1, 0], gs[2, 0]] ): - ax0 = F.fig.add_subplot(gpos) - ax0.tick_params(labelbottom=False) - ax_list[group] = ax0 - - data = corrected_marginals.isel(x=xi).sel(beam_group= group) - weights = derive_weights( Marginals.weight.isel(x=xi).sel(beam_group= group) ) - weights = weights**2 - - # derive angle axis - x_angle = data.angle.data - d_angle = np.diff(x_angle)[0] - x_angle = np.insert(x_angle, x_angle.size , x_angle[-1].data + d_angle) - - if ( (~np.isnan(data)).sum().data == 0) | (( ~np.isnan(weights)).sum().data == 0): - data_wmean = data.mean('k') - else: - data_wmean = weighted_means(data, weights, x_angle, color= col_dict[group] ) - plt.stairs(data_wmean , x_angle, color =col_dict[group], alpha =1) - - - if group == 'group1': - t_string = group_names[group] +' pair' #group.replace('group', - else: - t_string = group_names[group]+' pair' #group.replace('group', +' ') - - plt.title(next(fn) + t_string, loc ='left') - data_collect[group] = data_wmean - - - data_collect = xr.concat(data_collect.values(), dim='beam_group') - final_data = (group_weight * data_collect).sum('beam_group')/group_weight.sum('beam_group').data - - # get relevant priors - for axx in ax_list.values(): - axx.set_ylim(0, final_data.max() * 1.5) - axx.set_xticks(xticks_pi) - axx.set_xticklabels(xtick_labels_pi) - axx.set_xlim(-np.pi/2, np.pi/2) - - - try: - ax_list['group1'].set_ylabel('PDF') - ax_list['group2'].set_ylabel('PDF') - ax_list['group3'].set_ylabel('PDF') - ax_list['group1'].tick_params(labelbottom=True) - except: - pass - - ax_final = F.fig.add_subplot(gs[-1, :]) - plt.title(next(fn) + 'Final best guess', loc='left') - - priors_k = Marginals.Prior_direction[ ~np.isnan(k_mask.isel(x= xi))] - for pk in priors_k: - ax_final.axvline(pk, color =col.orange, linewidth= 1, alpha = 0.7) - - plt.stairs( final_data , x_angle, color = 'k', alpha =0.5, linewidth =0.8, zorder= 12) - - final_data_smth = lanczos.lanczos_filter_1d(x_angle,final_data, 0.1) - - plt.plot(x_angle[0:-1], final_data_smth, color = 'black', linewidth= 0.8) - - ax_final.axvline( 
x_angle[0:-1][final_data_smth.argmax()], color =col.cascade3, linewidth= 1.5, alpha = 1, zorder= 1) - ax_final.axvline( x_angle[0:-1][final_data_smth.argmax()], color =col.black, linewidth= 4, alpha = 1, zorder= 0) - - - plt.xlabel('Angle of Incidence (rad)') - ax_final.set_xlim(-np.pi/2, np.pi/2) - ax_final.set_ylabel('PDF') - ax_final.set_xticks(xticks_pi) - ax_final.set_xticklabels(xtick_labels_pi) - - M_final[xi,:] = final_data - M_final_smth[xi, :] = final_data_smth - - F.save_pup(path = plot_path, name = 'B05_weigthed_margnials_x' + x_str) - - - -M_final.name='weighted_angle_PDF' -M_final_smth.name='weighted_angle_PDF_smth' -Gpdf = xr.merge([M_final,M_final_smth]) - -Gpdf.weighted_angle_PDF_smth.plot() -Gpdf.mean('x').weighted_angle_PDF_smth.plot() -best_guess_angle = Gpdf.angle[Gpdf.mean('x').weighted_angle_PDF_smth.argmax()].data - -best_guess_angle * 180/np.pi - -best_guess_angle/np.pi -Gpdf.mean('x').weighted_angle_PDF_smth.plot() - -class plot_polarspectra: - def __init__(self,k, thetas, data, data_type='fraction' ,lims=None, verbose=False): - - """ - data_type either 'fraction' or 'energy', default (fraction) - lims (None) limts of k. if None set by the limits of the vector k - """ - self.k =k - self.data =data - self.thetas =thetas - - #self.sample_unit=sample_unit if sample_unit is not None else 'df' - # decided on freq limit - self.lims= lims = [self.k.min(),self.k.max()] if lims is None else lims - freq_sel_bool=M.cut_nparray(self.k, lims[0], lims[1] ) - - self.min=np.round(np.nanmin(data[freq_sel_bool,:]), 2) - self.max=np.round(np.nanmax(data[freq_sel_bool,:]), 2) - if verbose: - print(str(self.min), str(self.max) ) - - self.klabels=np.linspace(self.min, self.max, 5) - - self.data_type=data_type - if data_type == 'fraction': - self.clevs=np.linspace(np.nanpercentile(dir_data.data, 1), np.ceil(self.max* 0.9), 21) - elif data_type == 'energy': - self.ctrs_min=self.min+self.min*.05 - self.clevs=np.linspace(self.min+self.min*.05, self.max*.60, 21) - - - def linear(self, radial_axis='period', ax=None, cbar_flag=True): - - """ - """ - if ax is None: - ax = plt.subplot(111, polar=True) - else: - ax=ax - ax.set_theta_direction(-1) - ax.set_theta_zero_location("W") - - grid=ax.grid(color='k', alpha=.5, linestyle='-', linewidth=.5) - - if self.data_type == 'fraction': - cm=plt.cm.RdYlBu_r - colorax = ax.contourf(self.thetas,self.k, self.data, self.clevs, cmap=cm, zorder=1)# ,cmap=cm)#, vmin=self.ctrs_min) - elif self.data_type == 'energy': - cm=plt.cm.Paired - cm.set_under='w' - cm.set_bad='w' - colorax = ax.contourf(self.thetas,self.k, self.data, self.clevs, cmap=cm, zorder=1) - self.colorax = colorax - - if cbar_flag: - cbar = plt.colorbar(colorax, fraction=0.046, pad=0.1, orientation="horizontal") - cbar.ax.get_yaxis().labelpad = 30 - cbar.outline.set_visible(False) - clev_tick_names, clev_ticks =MT.tick_formatter(FP.clevs, expt_flag= False, shift= 0, rounder=4, interval=1) - cbar.set_ticks(clev_ticks[::5]) - cbar.set_ticklabels(clev_tick_names[::5]) - self.cbar = cbar - - if (self.lims[-1]- self.lims[0]) > 6000: - radial_ticks = np.arange(100, 1600, 300) - else: - radial_ticks = np.arange(60, 1000, 20) - print(radial_ticks) - xx_tick_names, xx_ticks = MT.tick_formatter( radial_ticks , expt_flag= False, shift= 1, rounder=0, interval=1) - xx_tick_names = [' '+str(d)+'m' for d in xx_tick_names] - - ax.set_yticks(xx_ticks[::1]) - ax.set_yticklabels(xx_tick_names[::1]) - - degrange = np.arange(0,360,30) - degrange = degrange[(degrange<=80)| (degrange>=280)] - degrange_label = 
np.copy(degrange) - degrange_label[degrange_label > 180] = degrange_label[degrange_label > 180] - 360 - - degrange_label = [str(d)+'$^{\circ}$' for d in degrange_label] - - lines, labels = plt.thetagrids(degrange, labels=degrange_label)#, frac = 1.07) - - for line in lines: - line.set_linewidth(5) - - ax.set_ylim(self.lims) - ax.spines['polar'].set_color("none") - ax.set_rlabel_position(87) - self.ax=ax - -font_for_print() -fn = copy.copy(lstrings) - - -F = M.figure_axis_xy(fig_sizes['two_column_square'][0], fig_sizes['two_column_square'][1], view_scale= 0.7, container = True) -gs = GridSpec(8,6, wspace=0.1, hspace=2.1) -col.colormaps2(21) - -cmap_spec= col.white_base_blgror -clev_spec = np.linspace(-8, -1, 21) *10 - -cmap_angle= col.cascade_r -clev_angle = np.linspace(0, 1.5, 21) - - -ax1 = F.fig.add_subplot(gs[0:3, :]) -ax1.tick_params(labelbottom=True) - -weighted_spec = (Gk.gFT_PSD_data * Gk.N_per_stancil).sum('beam') /Gk.N_per_stancil.sum('beam') -x_spec = weighted_spec.x/1e3 - -k_low_limits =weighted_spec.k[::10] -weighted_spec_sub = weighted_spec.groupby_bins('k' , k_low_limits).mean() -k_low = (k_low_limits + k_low_limits.diff('k')[0]/2).data -weighted_spec_sub['k_bins'] = k_low[0:-1] -weighted_spec_sub = weighted_spec_sub.rename({'k_bins': 'k'}) - -lam_p = 2 *np.pi/k_low_limits -lam = lam_p * np.cos(best_guess_angle) -k = 2 * np.pi/lam -xlims = x_spec[0], x_spec[-5] - -clev_spec = np.linspace(-80, (10* np.log(weighted_spec)).max() * 0.9, 21) - - - -dd = 10* np.log(weighted_spec_sub) -clev_log = M.clevels( [dd.quantile(0.01).data * 0.3, dd.quantile(0.98).data * 2.5], 31)* 1 -plt.pcolormesh(x_spec, lam, dd, cmap=cmap_spec , vmin = clev_log[0], vmax = clev_log[-1]) - - -plt.plot(x_spec[0:5], lam[dd.argmax('k')][0:5], linestyle= '-', color='black') -plt.text(x_spec[0:5].max()+2, lam[dd.argmax('k')][0:5].mean()+0, 'corrected peak', ha='left', color='black', fontsize = 8) - -plt.plot(x_spec[0:5], lam_p[dd.argmax('k')][0:5], linestyle= '--', color='black') -plt.text(x_spec[0:5].max()+2, lam_p[dd.argmax('k')][0:5].mean()+0, 'observed peak', ha='left', color='black', fontsize = 8) - -plt.title(next(fn) + 'Slope Power Spectra (m/m)$^2$ k$^{-1}$\nfor ' + io.ID_to_str(track_name) , loc='left') - -cbar = plt.colorbar( fraction=0.018, pad=0.01, orientation="vertical", label ='Power') -cbar.outline.set_visible(False) -clev_ticks = np.round(clev_spec[::3], 0) -cbar.set_ticks(clev_ticks) -cbar.set_ticklabels(clev_ticks) - -plt.ylabel('corrected wavelength $(m)$') -ax2 = F.fig.add_subplot(gs[3:5, :]) -ax2.tick_params(labelleft=True) - -dir_data = Gpdf.interp(x= weighted_spec.x).weighted_angle_PDF_smth.T - -x = Gpdf.x/1e3 -angle = Gpdf.angle[::10] - -dir_data_sub = dir_data.groupby_bins('angle' , angle).mean() -angle_low = (angle + angle.diff('angle')[0]/2).data -dir_data_sub['angle_bins'] = angle_low[0:-1] -dir_data_sub = dir_data_sub.rename({'angle_bins': 'angle'}) -plt.pcolormesh(dir_data_sub.x/1e3, dir_data_sub.angle, dir_data_sub , vmin= clev_angle[0], vmax= clev_angle[-1], cmap = cmap_spec) -cbar = plt.colorbar( fraction=0.02, pad=0.01, orientation="vertical", label ='Density') -cbar.outline.set_visible(False) -plt.title(next(fn) + 'Direction PDFs', loc='left') -plt.ylabel('Angle') -plt.xlabel('X (km)') - -ax2.set_yticks(xticks_pi) -ax2.set_yticklabels(xtick_labels_pi) -ax2.set_ylim(angle[0], angle[-1]) - -x_ticks = np.arange(0, xlims[-1].data, 25) -x_tick_labels, x_ticks = MT.tick_formatter(x_ticks, expt_flag= False, shift= 0, rounder=1, interval=2) - -ax1.set_xticks(x_ticks) 
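# Aside on the coarsening pattern used above: groupby_bins averages the weighted
# spectrum into wavenumber bins whose edges are every 10th k, and the bin centers
# are then re-attached as a plain 'k' coordinate. A minimal sketch of the same
# pattern, assuming only numpy and xarray (values are illustrative, not from the data):
#
#     import numpy as np
#     import xarray as xr
#
#     spec = xr.DataArray(np.arange(40.0), dims='k', coords={'k': np.linspace(0.01, 0.4, 40)})
#     edges = spec.k[::10]                                           # coarse bin edges
#     coarse = spec.groupby_bins('k', edges).mean()                  # mean PSD per bin
#     coarse['k_bins'] = (edges + edges.diff('k')[0]/2).data[:-1]    # bin centers
#     coarse = coarse.rename({'k_bins': 'k'})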
-ax2.set_xticks(x_ticks) -ax1.set_xticklabels(x_tick_labels) -ax2.set_xticklabels(x_tick_labels) - -lam_lim= lam[-1].data, 550 -ax1.set_ylim(lam_lim) - -ax1.set_xlim(xlims) -ax2.set_xlim(xlims) -ax2.axhline(best_guess_angle, color=col.orange, linewidth=0.8) - - -x_pos_list = [0, 1, 2] -lsrtrings = iter(['c)', 'd)', 'e)']) - -dir_ax_list =list() -for x_pos, gs in zip( x_pos_list , [ gs[-3:, 0:2], gs[-3:, 2:4], gs[-3:, 4:]] ): - x_range = weighted_spec.x.data[x_pos] + 12.5e3/2 #, x_pos[-1]]] - print(x_range) - ax1.axvline(x_range/1e3, linestyle= '-', color= col.green, linewidth=0.9, alpha = 0.8) - ax2.axvline(x_range/1e3, linestyle= '-', color= col.green, linewidth=0.9, alpha = 0.8) - - i_lstring = next(lsrtrings) - ax1.text(x_range/1e3, np.array(lam_lim).mean()* 1.2 * 3/2, ' '+ i_lstring, fontsize= 8, color =col.green) - - - i_spec = weighted_spec.isel(x= x_pos ) - i_dir = corrected_marginals.interp(x= weighted_spec.x).isel(x= x_pos ) - print(i_spec.x.data, i_spec.x.data) - dir_data = (i_dir * i_dir.N_data).sum([ 'beam_group'])/ i_dir.N_data.sum([ 'beam_group']) - lims = dir_data.k[ (dir_data.sum('angle')!=0) ][0].data, dir_data.k[ (dir_data.sum('angle')!=0) ][-1].data - - N_angle = i_dir.angle.size - dir_data2 = dir_data - plot_data = dir_data2 * i_spec - - plot_data = dir_data2.rolling(angle =2, k =15, min_periods= 1, center=True ).median() * i_spec#.mean('x') - plot_data = plot_data.sel(k=slice(lims[0],lims[-1] ) ) - - lam_p = 2 *np.pi/plot_data.k.data - lam = lam_p * np.cos(best_guess_angle) - - if np.nanmax(plot_data.data) != np.nanmin(plot_data.data): - - ax3 = F.fig.add_subplot(gs, polar=True) - FP= plot_polarspectra(lam, plot_data.angle, plot_data, lims=[lam[-1], 138 ] , verbose= False, data_type= 'fraction') - FP.clevs=np.linspace(np.nanpercentile(plot_data.data, 1), np.round(plot_data.max(), 4), 21) - FP.linear(ax = ax3, cbar_flag=False) - plt.title('\n\n'+i_lstring,y=1.0, pad=-6, color=col.green) - - dir_ax_list.append(ax3) - - -cbar = plt.colorbar(FP.colorax , fraction=0.046, pad=0.01, orientation="vertical", ax = dir_ax_list) -cbar.ax.get_yaxis().labelpad = 5 -cbar.outline.set_visible(False) - -clev_tick_names, clev_ticks =MT.tick_formatter(FP.clevs, expt_flag= False, shift= 0, rounder=6, interval=10) -cbar.set_ticks(clev_ticks[::10]) -cbar.set_ticklabels( np.round(clev_ticks[::10]*1e3, 2) ) -cbar.set_label('Energy Density \n(10$^3$ (m/m)$^2$ k$^{-1}$ deg$^{-1}$ )', rotation=90)#, fontsize=10) - - -# shift simple - -font_for_print() -fn = copy.copy(lstrings) - - -F = M.figure_axis_xy(fig_sizes['one_column'][0], fig_sizes['one_column'][1]*1.5, view_scale= 0.7, container = True) - -plt.title('Observed and Corrected Wave Spectrum in the MIZ\nestimated incident wave $\\theta=66^\circ$', loc ='left') -shifted_spec = 10* np.log(weighted_spec.rolling(k=10, min_periods= 1, center=True).mean()) -shifted_spec = weighted_spec.rolling(k=10, min_periods= 1, center=True).mean() - -plt.plot(lam_p, shifted_spec.isel(x=0), c = col.cascade1, label ='observed along-track \nwave spectrum') -plt.plot(lam, shifted_spec.isel(x=0), c = col.cascade2, linestyle = '--', label = 'corrected wave spectrum') - -plt.legend() - -plt.xlim(0, 900) -plt.ylim(0, 0.16) - -plt.xlabel('Wavelength ($\lambda$)') -plt.ylabel('$m^2/\lambda$') -F.save_pup(path = plot_path, name = 'B05_dir_ov_'+track_name+'_1d') - diff --git a/analyis_publish/PB05_define_angle_supl.py b/analyis_publish/PB05_define_angle_supl.py deleted file mode 100644 index 7f0880ae..00000000 --- a/analyis_publish/PB05_define_angle_supl.py +++ /dev/null 
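Note on the wavelength correction used in PB05_define_angle.py above and repeated in
the supplemental script below: the along-track spectrum samples an apparent wavelength
that is stretched by the wave's incidence angle, so the scripts recover the corrected
wavelength as lam = lam_p * np.cos(best_guess_angle). A minimal standalone sketch of
that projection, assuming only numpy (the 66 degree angle matches the figure title
above; the 300 m wavelength is illustrative):

    import numpy as np

    theta = 66 * np.pi / 180            # estimated incidence angle (rad)
    lam_obs = 300.0                     # apparent along-track wavelength (m)
    lam_true = lam_obs * np.cos(theta)  # corrected wavelength, ~122 m here
    k_true = 2 * np.pi / lam_true       # corrected wavenumber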
@@ -1,453 +0,0 @@
-
-import os, sys
-#execfile(os.environ['PYTHONSTARTUP'])
-
-"""
-This file opens an ICESat-2 track, applies filters and corrections, and returns smoothed photon heights on a regular grid in an .nc file.
-This is Python 3.
-"""
-
-exec(open(os.environ['PYTHONSTARTUP']).read())
-exec(open(STARTUP_2021_IceSAT2).read())
-
-#%matplotlib inline
-
-import ICEsat2_SI_tools.convert_GPS_time as cGPS
-import h5py
-import ICEsat2_SI_tools.io as io
-import ICEsat2_SI_tools.spectral_estimates as spec
-
-import imp
-import copy
-import spicke_remover
-import datetime
-import concurrent.futures as futures
-
-from numba import jit
-
-from ICEsat2_SI_tools import angle_optimizer
-import ICEsat2_SI_tools.wave_tools as waves
-import concurrent.futures as futures
-
-import time
-import ICEsat2_SI_tools.lanczos as lanczos
-
-
-col.colormaps2(21)
-
-col_dict = col.rels
-track_name, batch_key, test_flag = io.init_from_input(sys.argv) # loads standard experiment
-track_name, batch_key, test_flag = 'SH_20190224_08800210', 'SH_publish', False
-
-hemis, batch = batch_key.split('_')
-ATlevel= 'ATL03'
-
-
-
-plot_path = mconfig['paths']['plot'] + '/'+hemis+'/'+batch_key+'/publish/' + track_name + '/'
-MT.mkdirs_r(plot_path)
-bad_track_path =mconfig['paths']['work'] +'bad_tracks/'+ batch_key+'/'
-
-all_beams = mconfig['beams']['all_beams']
-high_beams = mconfig['beams']['high_beams']
-low_beams = mconfig['beams']['low_beams']
-beam_groups = mconfig['beams']['groups']
-group_names = mconfig['beams']['group_names']
-load_path = mconfig['paths']['work'] +batch_key +'/B02_spectra/'
-Gk = xr.load_dataset(load_path+ '/B02_'+track_name + '_gFT_k.nc' ) #
-
-load_path = mconfig['paths']['work'] +batch_key +'/B04_angle/'
-Marginals = xr.load_dataset(load_path+ '/B04_'+track_name + '_marginals.nc' ) #
-
-# %% load prior information
-load_path = mconfig['paths']['work']+batch_key +'/A02_prior/'
-Prior = MT.load_pandas_table_dict('/A02_'+track_name, load_path)['priors_hindcast']
-
-
-def derive_weights(weights):
-    weights = (weights-weights.mean())/weights.std()
-    weights = weights - weights.min()
-    return weights
-
-def weighted_means(data, weights, x_angle, color='k'):
-    """
-    weights should have nans when there is no data
-    data should have zeros where there is no data
-    """
-    from scipy.ndimage.measurements import label
-    # make wavenumber groups
-    groups, Ngroups = label(weights.where(~np.isnan(weights), 0) )
-
-    for ng in np.arange(1, Ngroups+1):
-        wi = weights[groups == ng]
-        weight_norm = weights.sum('k')
-        k = wi.k.data
-        data_k = data.sel(k=k).squeeze()
-        data_weight = (data_k * wi)
-        if data_k.k.size > 1:
-            for k in data_k.k.data:
-                plt.stairs(data_weight.sel(k=k) / weight_norm, x_angle, color ='gray', alpha =0.5)
-        else:
-            plt.stairs(data_weight.squeeze() / weight_norm, x_angle, color ='gray', alpha =0.5)
-
-
-    data_weighted_mean = (data.where( (~np.isnan(data)) & (data != 0), np.nan) * weights ).sum('k')/weight_norm
-    return data_weighted_mean
-
-
-
-
-# cut out data at the boundary and redistribute variance
-angle_mask = Marginals.angle *0 ==0
-angle_mask[0], angle_mask[-1] = False, False
-corrected_marginals = Marginals.marginals.isel(angle=angle_mask ) + Marginals.marginals.isel(angle=~angle_mask ).sum('angle')/sum(angle_mask).data
-
-# get group weights
-# ----------------- this does not work yet; check with the data on the server how to get the number of data points per stencil
-#Gx['x'] = Gx.x - Gx.x[0]
-
-# make dummy variables
-M_final = xr.full_like(corrected_marginals.isel(k=0, beam_group
=0).drop('beam_group').drop('k'), np.nan) -M_final_smth = xr.full_like(corrected_marginals.isel(k=0, beam_group =0).drop('beam_group').drop('k'), np.nan) -if M_final.shape[0] > M_final.shape[1]: - M_final= M_final.T - M_final_smth= M_final_smth.T - corrected_marginals=corrected_marginals.T - -Gweights = corrected_marginals.N_data -Gweights = Gweights/Gweights.max() - -k_mask = corrected_marginals.mean('beam_group').mean('angle') - -xticks_2pi = np.arange(-np.pi, np.pi+np.pi/4, np.pi/4) -xtick_labels_2pi = ['-$\pi$', '-$3\pi/4$', '-$\pi/2$','-$\pi/4$','0','$\pi/4$','$\pi/2$','$3\pi/4$','$\pi$'] - -xticks_pi = np.arange(-np.pi/2, np.pi/2+np.pi/4, np.pi/4) -xtick_labels_pi = ['-$\pi/2$','-$\pi/4$','0','$\pi/4$','$\pi/2$',] - -group_names=dict() -for n,g in zip(mconfig['beams']['group_names'], mconfig['beams']['groups']): - group_names[n] = ('-'.join(g))[0:3] - - - -font_for_print() -x_list = corrected_marginals.x -for xi in range(x_list.size): - - fn = copy.copy(lstrings) - F = M.figure_axis_xy(fig_sizes['one_column_high'][0],fig_sizes['one_column_high'][1]*0.85, view_scale= 0.8, container = True) - gs = GridSpec(4,1, wspace=0.1, hspace=.8)#figure=fig, - x_str= str(int(x_list[xi]/1e3)) - tname = track_name.split('_')[1]+'\non '+ track_name.split('_')[0][0:8] - plt.suptitle('Weighted marginal PDFs for \n$X_i$='+ x_str +' km for track '+tname, y= 1.03, x = 0.125, horizontalalignment= 'left') - - group_weight = Gweights.isel(x =xi) - - ax_list= dict() - - data_collect = dict() - for group, gpos in zip(Marginals.beam_group.data, [ gs[0, 0], gs[1, 0], gs[2, 0]] ): - ax0 = F.fig.add_subplot(gpos) - ax0.tick_params(labelbottom=False) - ax_list[group] = ax0 - - data = corrected_marginals.isel(x=xi).sel(beam_group= group) - weights = derive_weights( Marginals.weight.isel(x=xi).sel(beam_group= group) ) - weights = weights**2 - - # derive angle axis - x_angle = data.angle.data - d_angle = np.diff(x_angle)[0] - x_angle = np.insert(x_angle, x_angle.size , x_angle[-1].data + d_angle) - - if ( (~np.isnan(data)).sum().data == 0) | (( ~np.isnan(weights)).sum().data == 0): - data_wmean = data.mean('k') - else: - data_wmean = weighted_means(data, weights, x_angle, color= col_dict[group] ) - plt.stairs(data_wmean , x_angle, color =col_dict[group], alpha =1) - - if group == 'group1': - t_string = group_names[group] +' pair' - else: - t_string = group_names[group]+' pair' - - plt.title(next(fn) + t_string, loc ='left') - data_collect[group] = data_wmean - - data_collect = xr.concat(data_collect.values(), dim='beam_group') - final_data = (group_weight * data_collect).sum('beam_group')/group_weight.sum('beam_group').data - - # get relevant priors - for axx in ax_list.values(): - axx.set_ylim(0, final_data.max() * 1.5) - axx.set_xticks(xticks_pi) - axx.set_xticklabels(xtick_labels_pi) - axx.set_xlim(-np.pi/2, np.pi/2) - - - try: - ax_list['group1'].set_ylabel('PDF') - ax_list['group2'].set_ylabel('PDF') - ax_list['group3'].set_ylabel('PDF') - ax_list['group1'].tick_params(labelbottom=True) - except: - pass - - ax_final = F.fig.add_subplot(gs[-1, :]) - plt.title(next(fn) + 'Final best guess', loc='left') - - priors_k = Marginals.Prior_direction[ ~np.isnan(k_mask.isel(x= xi))] - for pk in priors_k: - ax_final.axvline(pk, color =col.orange, linewidth= 1, alpha = 0.7) - - plt.stairs( final_data , x_angle, color = 'k', alpha =0.5, linewidth =0.8, zorder= 12) - - final_data_smth = lanczos.lanczos_filter_1d(x_angle,final_data, 0.1) - - plt.plot(x_angle[0:-1], final_data_smth, color = 'black', linewidth= 0.8) - - 
ax_final.axvline( x_angle[0:-1][final_data_smth.argmax()], color =col.cascade3, linewidth= 1.5, alpha = 1, zorder= 1) - ax_final.axvline( x_angle[0:-1][final_data_smth.argmax()], color =col.black, linewidth= 4, alpha = 1, zorder= 0) - - - plt.xlabel('Angle of Incidence (rad)') - ax_final.set_xlim(-np.pi/2, np.pi/2) - ax_final.set_ylabel('PDF') - ax_final.set_xticks(xticks_pi) - ax_final.set_xticklabels(xtick_labels_pi) - - M_final[xi,:] = final_data - M_final_smth[xi, :] = final_data_smth - - F.save_pup(path = plot_path, name = 'B05_weigthed_margnials_x' + x_str) - - - -M_final.name='weighted_angle_PDF' -M_final_smth.name='weighted_angle_PDF_smth' -Gpdf = xr.merge([M_final,M_final_smth]) - -Gpdf.weighted_angle_PDF_smth.plot() - -Gpdf.mean('x').weighted_angle_PDF_smth.plot() -best_guess_angle = Gpdf.angle[Gpdf.mean('x').weighted_angle_PDF_smth.argmax()].data - -best_guess_angle * 180/np.pi - -best_guess_angle/np.pi -Gpdf.mean('x').weighted_angle_PDF_smth.plot() - -class plot_polarspectra: - def __init__(self,k, thetas, data, data_type='fraction' ,lims=None, verbose=False): - - """ - data_type either 'fraction' or 'energy', default (fraction) - lims (None) limts of k. if None set by the limits of the vector k - """ - self.k =k - self.data =data - self.thetas =thetas - self.lims= lims = [self.k.min(),self.k.max()] if lims is None else lims #1.0 /lims[1], 1.0/ lims[0] - freq_sel_bool=M.cut_nparray(self.k, lims[0], lims[1] ) - - self.min=np.round(np.nanmin(data[freq_sel_bool,:]), 2)#*0.5e-17 - self.max=np.round(np.nanmax(data[freq_sel_bool,:]), 2) - if verbose: - print(str(self.min), str(self.max) ) - - self.klabels=np.linspace(self.min, self.max, 5) #np.arange(10, 100, 20) - - self.data_type=data_type - if data_type == 'fraction': - self.clevs=np.linspace(np.nanpercentile(dir_data.data, 1), np.ceil(self.max* 0.9), 21) - elif data_type == 'energy': - self.ctrs_min=self.min+self.min*.05 - self.clevs=np.linspace(self.min+self.min*.05, self.max*.60, 21) - - - def linear(self, radial_axis='period', ax=None, cbar_flag=True): - - """ - """ - if ax is None: - ax = plt.subplot(111, polar=True) - else: - ax=ax - ax.set_theta_direction(-1) #right turned postive - ax.set_theta_zero_location("W") - - grid=ax.grid(color='k', alpha=.5, linestyle='-', linewidth=.5) - - if self.data_type == 'fraction': - cm=plt.cm.RdYlBu_r - colorax = ax.contourf(self.thetas,self.k, self.data, self.clevs, cmap=cm, zorder=1) - elif self.data_type == 'energy': - cm=plt.cm.Paired - cm.set_under='w' - cm.set_bad='w' - colorax = ax.contourf(self.thetas,self.k, self.data, self.clevs, cmap=cm, zorder=1) - - if cbar_flag: - cbar = plt.colorbar(colorax, fraction=0.046, pad=0.1, orientation="horizontal") - cbar.ax.get_yaxis().labelpad = 30 - cbar.outline.set_visible(False) - clev_tick_names, clev_ticks =MT.tick_formatter(FP.clevs, expt_flag= False, shift= 0, rounder=4, interval=1) - cbar.set_ticks(clev_ticks[::5]) - cbar.set_ticklabels(clev_tick_names[::5]) - self.cbar = cbar - - if (self.lims[-1]- self.lims[0]) > 6000: - radial_ticks = np.arange(100, 1600, 300) - else: - radial_ticks = np.arange(100, 1000, 50) - print(radial_ticks) - xx_tick_names, xx_ticks = MT.tick_formatter( radial_ticks , expt_flag= False, shift= 1, rounder=0, interval=1) - xx_tick_names = [' '+str(d)+'m' for d in xx_tick_names] - - ax.set_yticks(xx_ticks[::1]) - ax.set_yticklabels(xx_tick_names[::1]) - - degrange = np.arange(0,360,30) - degrange = degrange[(degrange<=80)| (degrange>=280)] - degrange_label = np.copy(degrange) - degrange_label[degrange_label > 180] 
= degrange_label[degrange_label > 180] - 360 - - degrange_label = [str(d)+'$^{\circ}$' for d in degrange_label] - - lines, labels = plt.thetagrids(degrange, labels=degrange_label)#, frac = 1.07) - - for line in lines: - line.set_linewidth(5) - - ax.set_ylim(self.lims) - ax.spines['polar'].set_color("none") - ax.set_rlabel_position(87) - self.ax=ax - -font_for_print() -fn = copy.copy(lstrings) - - -F = M.figure_axis_xy(fig_sizes['two_column_square'][0], fig_sizes['two_column_square'][1], view_scale= 0.7, container = True) -gs = GridSpec(8,6, wspace=0.1, hspace=2.1) -col.colormaps2(21) - -cmap_spec= col.white_base_blgror -clev_spec = np.linspace(-8, -1, 21) *10 - -cmap_angle= col.cascade_r -clev_angle = np.linspace(0, 1.5, 21) - - -ax1 = F.fig.add_subplot(gs[0:3, :]) -ax1.tick_params(labelbottom=True) - -weighted_spec = (Gk.gFT_PSD_data * Gk.N_per_stancil).sum('beam') /Gk.N_per_stancil.sum('beam') -weighted_spec = weighted_spec.isel(x=slice(0,-2)) -x_spec = weighted_spec.x/1e3 -lam_p = 2 *np.pi/weighted_spec.k -lam = lam_p * np.cos(best_guess_angle) -k = 2 * np.pi/lam - -xlims = x_spec[0]-12.5/2, x_spec[-5] -clev_spec = np.linspace(-80, (10* np.log(weighted_spec)).max() * 0.9, 21) - -dd = 10* np.log(weighted_spec.rolling(k=10, min_periods= 1, center=True).mean()) -clev_log = M.clevels( [dd.quantile(0.01).data * 0.3, dd.quantile(0.98).data * 1.5], 31)* 1 -plt.pcolormesh(x_spec, lam, dd, cmap=cmap_spec , vmin = clev_log[0], vmax = clev_log[-1]) - - - -plt.plot(x_spec[0:5], lam[dd.argmax('k')][0:5], linestyle= '-', color='black') -plt.text(x_spec[0:5].max()+2, lam[dd.argmax('k')][0:5].mean()+0, 'corrected peak', ha='left', color='black', fontsize = 8) - -plt.plot(x_spec[0:5], lam_p[dd.argmax('k')][0:5], linestyle= '--', color='black') -plt.text(x_spec[0:5].max()+2, lam_p[dd.argmax('k')][0:5].mean()+0, 'observed peak', ha='left', color='black', fontsize = 8) - -plt.title(next(fn) + 'Slope Power Spectra (m/m)$^2$ k$^{-1}$\nfor ' + io.ID_to_str(track_name) , loc='left') - -cbar = plt.colorbar( fraction=0.018, pad=0.01, orientation="vertical", label ='Power') -cbar.outline.set_visible(False) -clev_ticks = np.round(clev_spec[::3], 0) -cbar.set_ticks(clev_ticks) -cbar.set_ticklabels(clev_ticks) - -plt.ylabel('corrected wavelength $(m)$') -ax2 = F.fig.add_subplot(gs[3:5, :]) -ax2.tick_params(labelleft=True) - -dir_data = Gpdf.interp(x= weighted_spec.x).weighted_angle_PDF_smth.T - -x = Gpdf.x/1e3 -angle = Gpdf.angle -plt.pcolormesh(x_spec, angle, dir_data.rolling(angle =10).median() , vmin= clev_angle[0], vmax= clev_angle[-1], cmap = cmap_spec) - -cbar = plt.colorbar( fraction=0.02, pad=0.01, orientation="vertical", label ='Density') -cbar.outline.set_visible(False) -plt.title(next(fn) + 'Direction PDFs', loc='left') - - -plt.ylabel('Angle') -plt.xlabel('X (km)') - - -ax2.set_yticks(xticks_pi) -ax2.set_yticklabels(xtick_labels_pi) -ax2.set_ylim(angle[0], angle[-1]) - - -x_ticks = np.arange(0, xlims[-1].data, 25) -x_tick_labels, x_ticks = MT.tick_formatter(x_ticks, expt_flag= False, shift= 0, rounder=1, interval=2) - -ax1.set_xticks(x_ticks) -ax2.set_xticks(x_ticks) -ax1.set_xticklabels(x_tick_labels) -ax2.set_xticklabels(x_tick_labels) - -lam_lim= lam[-1].data, lam[10].data -ax1.set_ylim(lam_lim) - -ax1.set_xlim(xlims) -ax2.set_xlim(xlims) -ax2.axhline(best_guess_angle, color=col.orange, linewidth=0.8) - - -x_pos_list = [0, 1, 2] -#x_pos_list -lsrtrings = iter(['c)', 'd)', 'e)']) - -for x_pos, gs in zip( x_pos_list , [ gs[-3:, 0:2], gs[-3:, 2:4], gs[-3:, 4:]] ): - x_range = weighted_spec.x.data[x_pos] 
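# Aside on the weighting later in this loop: the directional estimate is an
# N_data-weighted mean over beam groups, dir_data = sum_g(P_g * N_g) / sum_g(N_g).
# A small numeric sketch of that average, assuming xarray (values illustrative):
#
#     import numpy as np
#     import xarray as xr
#
#     P = xr.DataArray([[0.2, 0.8], [0.6, 0.4]], dims=('beam_group', 'angle'))
#     N = xr.DataArray([90.0, 10.0], dims='beam_group')
#     dir_mean = (P * N).sum('beam_group') / N.sum('beam_group')   # -> [0.24, 0.76]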
-    print(x_range)
-    ax1.axvline(x_range/1e3, linestyle= '-', color= col.green, linewidth=0.9, alpha = 0.8)
-    ax2.axvline(x_range/1e3, linestyle= '-', color= col.green, linewidth=0.9, alpha = 0.8)
-
-    i_lstring = next(lsrtrings)
-    ax1.text(x_range/1e3, np.array(lam_lim).mean()*3/2, ' '+ i_lstring, fontsize= 8, color =col.green)
-
-    i_spec = weighted_spec.isel(x= x_pos )
-    i_dir = corrected_marginals.interp(x= weighted_spec.x).isel(x= x_pos )
-    print(i_spec.x.data, i_spec.x.data)
-    dir_data = (i_dir * i_dir.N_data).sum([ 'beam_group'])/ i_dir.N_data.sum([ 'beam_group'])
-    lims = dir_data.k[ (dir_data.sum('angle')!=0) ][0].data, dir_data.k[ (dir_data.sum('angle')!=0) ][-1].data
-
-    N_angle = i_dir.angle.size
-    dir_data2 = dir_data
-
-    plot_data = dir_data2 * i_spec
-    plot_data = dir_data2.rolling(angle =2, k =15, min_periods= 1, center=True ).mean() * i_spec
-
-    plot_data = plot_data.sel(k=slice(lims[0],lims[-1] ) )
-    xx = 2 * np.pi/plot_data.k
-
-    if np.nanmax(plot_data.data) != np.nanmin(plot_data.data):
-
-        ax3 = F.fig.add_subplot(gs, polar=True)
-        FP= plot_polarspectra(xx, plot_data.angle, plot_data, lims=[xx[-1], 340 ] , verbose= False, data_type= 'fraction')
-        FP.clevs=np.linspace(np.nanpercentile(plot_data.data, 1), np.round(plot_data.max(), 4), 21)
-        FP.linear(ax = ax3, cbar_flag=False)
-        plt.title('\n\n'+i_lstring,y=1.0, pad=-6, color=col.green)
-
-F.save_pup(path = plot_path, name = 'B05_dir_ov_'+track_name)
-F.save_light(path = plot_path, name = 'B05_dir_ov_'+track_name)
diff --git a/analyis_publish/PB06_plot_reconstruction.py b/analyis_publish/PB06_plot_reconstruction.py
deleted file mode 100644
index 7ffe116e..00000000
--- a/analyis_publish/PB06_plot_reconstruction.py
+++ /dev/null
@@ -1,495 +0,0 @@
-
-
-import os, sys
-#execfile(os.environ['PYTHONSTARTUP'])
-
-"""
-This file opens an ICESat-2 track, applies filters and corrections, and returns smoothed photon heights on a regular grid in an .nc file.
-This is python 3 -""" - -exec(open(os.environ['PYTHONSTARTUP']).read()) -exec(open(STARTUP_2021_IceSAT2).read()) - -#%matplotlib inline - -import ICEsat2_SI_tools.convert_GPS_time as cGPS -import h5py -import ICEsat2_SI_tools.io as io -import ICEsat2_SI_tools.spectral_estimates as spec - -import time -import imp -import copy -import spicke_remover -import datetime -import generalized_FT as gFT -from scipy.ndimage.measurements import label - -#xr.set_options(display_style='text')() -#import s3fs -# %% -ID_name, batch_key, ID_flag = io.init_from_input(sys.argv) # loads standard experiment -#ID_name, batch_key, ID_flag = '20190605061807_10380310_004_01', 'SH_batch01', False -#ID_name, batch_key, ID_flag = '20190601094826_09790312_004_01', 'SH_batch01', False -#ID_name, batch_key, ID_flag = '20190207111114_06260210_004_01', 'SH_batch02', False -#ID_name, batch_key, ID_flag = '20190208152826_06440210_004_01', 'SH_batch01', False -#ID_name, batch_key, ID_flag = '20190213133330_07190212_004_01', 'SH_batch02', False -#ID_name, batch_key, ID_flag = '20190207002436_06190212_004_01', 'SH_batch02', False -#ID_name, batch_key, ID_flag = '20190206022433_06050212_004_01', 0 - -#ID_name, batch_key, ID_flag = '20190215184558_07530210_004_01', 'SH_batch02', False -#ID_name, batch_key, ID_flag = 'SH_20190219_08070210', 'SH_publish', True -#ID_name, batch_key, ID_flag = 'SH_20190502_05160312', 'SH_publish', True -#ID_name, batch_key, ID_flag = 'SH_20190502_05180312', 'SH_publish', True - -ID_name, batch_key, ID_flag = 'SH_20190224_08800210', 'SH_publish', True -#ID_name, batch_key, ID_flag = 'SH_20190224_08800210', 'SH_publish', True -#ID_name, batch_key, ID_flag = 'SH_20190502_05160312', 'SH_publish', True - - -ID, _, hemis, batch = io.init_data(ID_name, batch_key, ID_flag, mconfig['paths']['work'], ) -#print(ID_name, batch_key, ID_flag) -hemis, batch = batch_key.split('_') - - -## -------------- use lower level data ------------------ -# ATlevel= 'ATL03' -# -# load_path_scratch = mconfig['paths']['scratch'] +'/'+ batch_key +'/' -# load_path_work = mconfig['paths']['work'] +'/'+ batch_key +'/' -# -# #B0_hdf5 = h5py.File(load_path_scratch +'/A01c_ATL03_'+ID_name+ '_corrected.h5', 'r') -# B2_hdf5 = h5py.File(load_path_work +'B01_regrid'+'/'+ID_name + '_B01_regridded.h5', 'r') -# B3_hdf5 = h5py.File(load_path_work +'B01_regrid'+'/'+ID_name + '_B01_binned.h5', 'r') -# -# B0, B2, B3 = dict(), dict(), dict() -# for b in all_beams: -# #B0[b] = io.get_beam_hdf_store(B0_hdf5[b]) -# B2[b] = io.get_beam_hdf_store(B2_hdf5[b]) -# B3[b] = io.get_beam_hdf_store(B3_hdf5[b]) -# -# B2_hdf5.close(), B2_hdf5.close() -# -# load_path = mconfig['paths']['work']+ batch_key +'/B02_spectra/' -# load_file = load_path + 'B02_' + ID_name #+ '.nc' -# #MT.mkdirs_r(plot_path) -# -# Gk = xr.open_dataset(load_file+'_gFT_k.nc') -# Gx = xr.open_dataset(load_file+'_gFT_x.nc') - -## -------------------- use final prodiucts -all_beams = mconfig['beams']['all_beams'] -high_beams = mconfig['beams']['high_beams'] -low_beams = mconfig['beams']['low_beams'] - -load_path_work = mconfig['paths']['work'] +'/'+ batch_key +'/' -load_path = load_path_work +'/B06_corrected_separated/' - -B2 = io.load_pandas_table_dict('B06_' + ID_name+ '_B06_corrected_resid', load_path) -B3 = io.load_pandas_table_dict('B06_' + ID_name+ '_binned_resid', load_path) - -load_file = load_path + 'B06_' + ID_name #+ '.nc' -Gk = xr.open_dataset(load_file+'_gFT_k_corrected.nc') -Gx = xr.open_dataset(load_file+'_gFT_x_corrected.nc') - -ATL07_path = mconfig['paths']['scratch']+'/'+ 
batch_key +'/' -os.listdir(ATL07_path) -B07= dict() -for b in all_beams: - B07[b]= io.getATL07_beam(ATL07_path +ID['tracks']['ATL07']+'.h5', beam=b) - -# print(Gk) -# print(Gx) - - - -# %% check paths (again) -col.colormaps2(21) -col_dict= col.rels - -# define simple routines -def add_info(D, Dk, ylims): - eta = D.eta + D.x - N_per_stancil, ksize = Dk.N_per_stancil.data , Dk.k.size - plt.text(eta[0].data, ylims[-1], ' N='+numtostr(N_per_stancil) + ' N/2M= '+ fltostr(N_per_stancil/2/ksize, 1)) - -# Single views -def plot_data_eta(D, offset = 0 , **kargs ): - eta_1 = D.eta + D.x - y_data = D.y_model +offset - plt.plot(eta_1,y_data , **kargs) - return eta_1 - -def plot_model_eta(D, ax, offset = 0, **kargs ): - eta = D.eta + D.x - y_data = D.y_model+offset - plt.plot(eta ,y_data , **kargs) - - ax.axvline(eta[0].data, linewidth=0.1, color=kargs['color'], alpha=0.5) - ax.axvline(eta[-1].data, linewidth=0.1, color=kargs['color'], alpha=0.5) - -# %% -fltostr = MT.float_to_str -numtostr = MT.num_to_str -font_for_print() - -#for i in x_pos_sel[::2]: -#i =x_pos_sel[20] -#MT.mkdirs_r(plot_path+'B03_spectra/') - - -#~np.isnan(Gk.mean('beam').mean('k').gFT_PSD_data.data) -x_pos_sel = np.arange(Gk.x.size)[(Gk.mean('beam').mean('k').gFT_PSD_data.data >0 )] -x_pos_max = Gk.mean('beam').mean('k').gFT_PSD_data[~np.isnan(Gk.mean('beam').mean('k').gFT_PSD_data)].argmax().data -xpp = x_pos_sel[ [int(i) for i in np.round(np.linspace(0, x_pos_sel.size-1, 4))]] -#xpp = np.insert(xpp, 0, x_pos_max) -xpp =x_pos_sel - -# %% -i = 5 -#i=6 - -k = all_beams[0] -#k = 'gt2l' - -plot_path = mconfig['paths']['plot'] + '/'+hemis+'/'+batch_key+'/publish/' + ID_name + '/B06/' -MT.mkdirs_r(plot_path) -font_for_print() - - -for i in xpp: - - fn = copy.copy(lstrings) - - F = M.figure_axis_xy(5.5, 7, container =True, view_scale= 0.8) - - plt.suptitle('ALT03 Decomposition\n'+ io.ID_to_str(ID_name), y = 0.93, x = 0.13, horizontalalignment ='left') - #Photon height reconstruction | x='+str(Gk.x[i].data)+' \n' + ID_name, y = 0.95) - gs = GridSpec(30,6, wspace=0, hspace=0.8)#figure=fig, - - ax0 = F.fig.add_subplot(gs[0:6, :]) - col_d = col.__dict__['rels'] - plt.title(' '+next(fn)+ 'Slope data and model', loc='left', y= 0.83) - dx = Gx.eta.diff('eta').mean().data - neven = True - offs = 0 - Gx_1 = Gx.isel(x= i).sel(beam = k) - Gk_1 = Gk.isel(x= i).sel(beam = k) - - k_thresh = Gk_1.k_lim.data - dist_stencil = Gx_1.eta + Gx_1.x - dist_stencil_lims = dist_stencil[0].data, dist_stencil[-1].data - - # cutting Table data - # photon data - # gridded data - mask_x_bin = ( (B3[k]['dist'] >= dist_stencil_lims[0]) & (B3[k]['dist'] <= dist_stencil_lims[1]) ) - T3_sel = B3[k].loc[mask_x_bin] - - T2 = B2[k]#.sort_index(ascending= False) - mask_x_true = (T2['x_true'] >= T3_sel['x_true'].min()) & (T2['x_true'] <= T3_sel['x_true'].max()) - T2_sel = B2[k].loc[mask_x_true] - - B07_sel=B07[k][(T3_sel['delta_time'].min() < B07[k]['time']['delta_time']) & (B07[k]['time']['delta_time'] < T3_sel['delta_time'].max()) ] - - ### slope data - T3 = B3[k]#.loc[mask_x_bin] - dd = np.copy(T3['heights_c_weighted_mean']) - dd = np.gradient(dd) - dd, _ = spicke_remover.spicke_remover(dd, spreed=10, verbose=False) - dd_nans = (np.isnan(dd) ) + (T3['N_photos'] <= 5) - # dd_no_nans = dd[~dd_nans] # windowing is applied here - # x_no_nans = T3['dist'][~dd_nans] - dd[dd_nans] = np.nan# windowing is applied here - xx = T3['dist'] - xx[dd_nans] = np.nan - - #plt.plot( xx , dd, color=col.green,alpha=0.8, linewidth =0.3) - #B3[k]['dist'] - - #plot_model_eta(Gx_1, ax0, offset= 
offs, linestyle='-', color=col_d[k], linewidth=0.4, alpha=1, zorder=12 , label = 'GFT model') - # ylims= -np.nanstd(Gx_1.y_data)*3, np.nanstd(Gx_1.y_data)*3 - # #add_info(Gx_1, Gk_1 , ylims ) - - lead_color = col.cascade1#col_d[k] - - # oringial data - eta_1= plot_data_eta(Gx_1 , offset= offs , linestyle= '-', c=col.gray,linewidth=2, alpha =1, zorder=11, label = 'mean photon\nheight slope') - - # reconstruct slope model - # introduce frequency filter: - gFT_cos_coeff_sel = np.copy(Gk_1.gFT_cos_coeff) - gFT_sin_coeff_sel = np.copy(Gk_1.gFT_sin_coeff) - gFT_cos_coeff_sel[Gk_1.k > k_thresh] = 0 - gFT_sin_coeff_sel[Gk_1.k > k_thresh] = 0 - - - FT = gFT.generalized_Fourier(Gx_1.eta + Gx_1.x, None,Gk_1.k ) - _ = FT.get_H() - FT.b_hat=np.concatenate([ gFT_cos_coeff_sel, gFT_sin_coeff_sel ]) - plt.plot(Gx_1.eta + Gx_1.x, FT.model()+offs ,'-', c=lead_color, linewidth=0.5, alpha=1,zorder= 12, label = 'GFT slope model') - - - plt.legend(loc=1, ncol=2) - - ax1 = F.fig.add_subplot(gs[6:13, :]) - plt.title(' '+next(fn)+ 'Height data and model', loc='left', y= 0.83) - - ### height decomposition - # plotting observed datazx - # T3_sel['heights_c_weighted_mean'] - plt.plot( T3_sel['dist'] , T3_sel['heights_c_weighted_mean'], '-' , color =col_d[k], linewidth = 1, label = 'observed $h_c$ mean') - - if T2_sel['x_true'].iloc[0] > T2_sel['x_true'].iloc[-1]: - T2_sel['dist'] = np.interp(T2_sel['x_true'][::-1], T3_sel['x_true'][::-1], T3_sel['dist'][::-1] )[::-1] - else: - T2_sel['dist'] = np.interp(T2_sel['x_true'][::1], T3_sel['x_true'][::1], T3_sel['dist'] ) - plt.scatter( T2_sel['dist'] , T2_sel['heights_c'], s= 1, marker='o', color='black', alpha =0.02, edgecolors= 'none' ) - - try: - if (T3_sel['delta_time'].iloc[0] > T3_sel['delta_time'].iloc[-1]) is (B07_sel['time']['delta_time'].iloc[0] > B07_sel['time']['delta_time'].iloc[-1]): - B07_sel['dist'] = np.interp(B07_sel['time']['delta_time'], T3_sel['delta_time'], T3_sel['dist'] ) - else: - B07_sel['dist'] = np.interp(np.array(B07_sel['time']['delta_time']), np.array(T3_sel['delta_time'][::-1]), np.array(T3_sel['dist'][::-1]) ) - B07_flag = True - except: - B07_flag = False - - # B07_sel['dist'] = scipy.interpolate.griddata(np.array(T3_sel['delta_time'][::-1]), np.array(T3_sel['dist'][::-1]) ,np.array(B07_sel['time']['delta_time']), method= 'nearest' ) - # plt.plot(np.array(B07_sel['time']['delta_time'])) - # plt.plot(np.array(T3_sel['delta_time'][::-1])) - # plt.grid() - - # plt.plot( B07_sel['dist']- B07_sel['dist_np']) - #plt.plot( B07_sel['dist'] , B07_sel['heights']['height_segment_height'], color='black', alpha =1, linewidth = 0.6 ) - #plt.scatter(B07_sel['dist'], B07_sel['heights']['height_segment_height'], s= 1, marker='o', color='black', alpha =0.7, edgecolors= 'none' ) - - dist_nanmask = np.isnan(Gx_1.y_data) - # height_data = np.interp(dist_stencil, T3_sel['dist'], T3_sel['heights_c_weighted_mean']) #[~np.isnan(Gx_1.y_data)] - - # def fit_offset(x, data, model, nan_mask, deg): - # - # #x, data, model, nan_mask, deg = dist_stencil, height_data, height_model2, dist_nanmask, 1 - # p_offset = np.polyfit(x[~nan_mask], data[~nan_mask] - model[~nan_mask], deg) - # p_offset[-1] = 0 - # poly_offset = np.polyval(p_offset,x ) - # return poly_offset - # - # poly_offset = fit_offset(dist_stencil, height_data, height_model2, dist_nanmask, 1) - - #plt.plot(dist_stencil, height_model2 ,'-', c='orange', linewidth=0.6, alpha=1,zorder= 12, label = 'spectral int model') - #plt.plot(dist_stencil, poly_offset ,'-', c=col.gridcolor, linewidth=0.6, alpha=1,zorder= 12, 
label = 'offset') - # plt.plot(dist_stencil, height_model2 ,'-', c=lead_color, linewidth=0.8, alpha=1,zorder= 12, label = 'GFT height model + correction') - - plt.plot( T3_sel['dist'] , T3_sel['heights_c_model'] ,'-', c=lead_color, linewidth=0.5, alpha=1,zorder= 12, label = '$h_c$ model') - - plt.legend(loc = 4, ncol =2) - - ax1b = F.fig.add_subplot(gs[13:19, :]) - plt.title(' '+next(fn)+ 'ATL07', loc='left', y= 0.83) - x_key= 'dist' - AT07_cat_offset= 0 - AT07_bool_offset =0 - htype_cmap = [col.orange, col.cascade3, col.cascade2, col.cascade1] - htype_list= ['cloud_covered','other', 'specular_lead', 'dark_lead' , 'other'] - for htype, hcolor, htype_str in zip( [0, 1, (2, 5), (6, 9)] , htype_cmap , htype_list ): - if type(htype) is tuple: - imask = (B07_sel['heights']['height_segment_type'] >= htype[0]) & (B07_sel['heights']['height_segment_type'] <= htype[1]) - else: - imask = B07_sel['heights']['height_segment_type'] == htype - pdata = B07_sel[imask] - if pdata.size != 0: - plt.plot( pdata[x_key], pdata['heights']['height_segment_height'] + AT07_cat_offset, '.', color =hcolor, markersize=0.8,alpha=1, label=htype_str) - - - for htype, hcolor, htype_str, hsize in zip( [0, 1] , [col.gridcolor, col.red] , [None, 'ssh'] , [0.8, 5]): - - pdata = B07_sel[B07_sel['heights']['height_segment_ssh_flag'] == htype] - if pdata.size != 0: - plt.plot( pdata[x_key], pdata['heights']['height_segment_height']*0+AT07_bool_offset, '.', color =hcolor, markersize=hsize,alpha=0.9, label=htype_str) - - plt.legend(ncol=3, loc=1) - - - #plt.plot( B07_sel['dist_np'] , B07_sel['heights']['height_segment_height'], color='red', alpha =1, linewidth = 0.6 ) - - # plt.plot( B07_sel['ref']['latitude'][0:100] , B07_sel['heights']['height_segment_height'][0:100])#, s= 1, marker='o', color='black', alpha =1, edgecolors= 'none' ) - # plt.plot( T3_sel['lats'][0:100] , T3_sel['heights_c_model'][0:100])#, s= 1, marker='o', color='black', alpha =1, edgecolors= 'none' ) - # - # plt.plot( B07_sel['dist'][0:200] , B07_sel['heights']['height_segment_height'][0:200], 'r')#, s= 1, marker='o', color='black', alpha =1, edgecolors= 'none' ) - # plt.plot( T3_sel['dist'][:] , T3_sel['heights_c_model'][:])#, s= 1, marker='o', color='black', alpha =1, edgecolors= 'none' ) - # - # T2_sel - # B07_sel.T - # B07_sel['ref']['height_segment_id'] - # B07_sel['heights']['height_segment_ssh_flag'].plot() - # B07_sel['heights']['height_segment_type'].plot() - - - # T3_sel['heights_c_weighted_mean'].plot() - # T2_sel['heights_c'].plot() - # - # T2_sel['dist'].iloc[0], T3_sel['dist'].iloc[0] - # T2_sel['dist'].iloc[-1], T3_sel['dist'].iloc[-1] - - # reconstructued data by integration - # height_model = np.cumsum(FT.model()) + T3_sel['heights_c_weighted_mean'].iloc[0] - # plt.plot( Gx_1.eta + Gx_1.x, height_model, linewidth = 0.6 , color = 'red', label = 'real space integral') - - # FT_int = gFT.generalized_Fourier(Gx_1.eta + Gx_1.x, None,Gk_1.k ) - # _ = FT_int.get_H() - # FT_int.b_hat = np.concatenate([ -gFT_sin_coeff_sel /Gk_1.k, gFT_cos_coeff_sel/Gk_1.k ]) - # - # height_model2 = FT_int.model() /dx# + T3_sel['heights_c_weighted_mean'].iloc[0] - - - - ax2 = F.fig.add_subplot(gs[19:24, :]) - plt.title(' '+next(fn)+ 'Residual heights of ATL03', loc='left', y= 0.83) - height_residual = T2_sel['heights_c_residual'] #T2_sel['heights_c'] - np.interp(T2_sel['dist'], dist_stencil, height_model2 + poly_offset) - plt.scatter(T2_sel['dist'], height_residual, s= 1, marker='o', color='black', alpha =0.02, edgecolors= 'none' ) - - # heights_c_weighted_mean_stancil = 
np.interp(dist_stencil, T3_sel['dist'], T3_sel['heights_c_weighted_mean'] ) - # height_residual_mean = (heights_c_weighted_mean_stancil - height_model2) - poly_offset - # height_residual_mean[dist_nanmask] = np.nan - - - - height_residual_mean = T3_sel['heights_c_residual'] - height_residual_mean_x = T3_sel['dist'] - plt.plot( height_residual_mean_x , height_residual_mean , color =col.rascade1, linewidth = 0.5, label = 'residual $h_c$') - plt.fill_between(height_residual_mean_x , height_residual_mean, color= col.cascade2, edgecolor = None, alpha = 0.4, zorder= 0) - plt.legend(loc = 1) - - #ax2.set_ylim(0,np.nanmax(height_residual) * 1.5) - - # for pos, kgroup, lflag in zip([ gs[2, 0:2], gs[2, 2:4], gs[2, 4:]], [, ['gt2l', 'gt2r'], ['gt3l', 'gt3r']], [True, False, False] ): - - ax41 = F.fig.add_subplot(gs[3:6, 4:]) - - #ax41.tick_params(labelleft=lflag) - - dd = Gk_1.gFT_PSD_data#.rolling(k=10, min_periods= 1, center=True).mean() - plt.plot(Gk_1.k, dd, color='gray', linewidth=.5 ,alpha= 0.5 ) - - dd = Gk_1.gFT_PSD_data.rolling(k=10, min_periods= 1, center=True).mean() - plt.plot(Gk_1.k, dd, color=lead_color, linewidth=.8 ) - - klim= Gk_1.k[0], Gk_1.k[-1] - plt.xlim(klim) - - plt.ylabel('$(m/m)^2/k$') - #plt.title('Spectra', loc ='left')s - #plt.xlabel('k (2$\pi$ m$^{-1}$)') - plt.ylim(dd.min(),np.nanmax(dd.data) * 1.3) - - ax41.axvline(k_thresh, linewidth=1, color='black', alpha=1) - ax41.axvspan(k_thresh , klim[-1], color='black', alpha=0.5, zorder=12) - ax41.set_facecolor((1.0, 1.00, 1.00, 0.8)) - - #plt.show() - - #F.save_light(path=plot_path+'B03_spectra/', name = 'B03_freq_reconst_x'+str(i)) - #MT.json_save('B03_success', plot_path, {'time':'time.asctime( time.localtime(time.time()) )'}) - - stencil_pos = spec.create_chunk_boundaries_unit_lengths(1000, (dist_stencil[0], dist_stencil[-1]), ov=0, iter_flag=False).T - - V0_list, V1_list, V2_list, V3_list = list(), list(), list(), list() - no_nan_sum= list() - for s in stencil_pos: - V0_list.append( T2_sel['heights_c'].loc[M.cut_nparray( np.array(T2_sel['dist']), s[0], s[-1]) ].var() ) - V1_list.append( T3_sel['heights_c_weighted_mean'].loc[M.cut_nparray( np.array(T3_sel['dist']), s[0], s[-1]) ].var() ) - V2_list.append( np.nanvar( T3_sel['heights_c_model'].loc[M.cut_nparray( np.array(T3_sel['dist']), s[0], s[-1])]) ) - V3_list.append( np.nanvar(height_residual_mean[ M.cut_nparray( np.array(height_residual_mean_x), s[0], s[-1])]) ) - - no_nan_sum.append( (~dist_nanmask[M.cut_nparray( dist_stencil, s[0], s[-1])].data).sum()) - - - ax3 = F.fig.add_subplot(gs[27:29, :]) - - plt.title(next(fn) + 'Variance Decomposition', loc='left') - V0_list, V1_list, V2_list = np.array(V0_list),np.array(V1_list),np.array(V2_list), - no_nan_sum = np.array(no_nan_sum) - no_nan_sum = no_nan_sum/no_nan_sum.max() - - edge_pos = np.insert(stencil_pos[:,0], stencil_pos[:,0].size, stencil_pos[:,-1][-1]) - plt.stairs(no_nan_sum * V0_list/V0_list, edge_pos, baseline=0, fill=True, color= col.black, alpha=0.6, label = 'photon variance') - plt.stairs(no_nan_sum * V1_list/V0_list, edge_pos, baseline=0, fill=True, color= col_d[k] , label = 'mean photon variance') - plt.stairs(no_nan_sum * V2_list/V0_list, edge_pos, baseline=0, fill=True, color= lead_color, label = 'wave variance') - plt.stairs(no_nan_sum * (V3_list/V0_list+ V2_list/V0_list) , edge_pos, baseline=no_nan_sum * V2_list/V0_list, fill=True, color= col.green, label = 'residual variance') - - plt.legend(ncol= 4, bbox_to_anchor=(-0.02, 0), loc= 2) - - # residual - #ax0.set_xticks(eta_ticks) - 
#ax0.set_xticklabels(eta_ticks/1e3)
-#ax0.set_ylabel('Slope (m/m)')
-#ax1.spines['top'].set_visible(True)
-#ax1.spines['top'].set_linewidth(0.2)
-
-#ax1.xaxis.set_ticks_position('top')
-#ax1.xaxis.set_label_position('top')
-    ax0.set_ylabel('Slope (m/m)')
-    ax1.set_ylabel('Photon Height (m)')
-    ax2.set_ylabel('Photon Height (m)')
-
-#ax2.spines['bottom'].set_visible(True)
-    ax2.set_xlabel('Distance from the ice Edge (km)')
-
-    eta_ticks = np.arange(dist_stencil[0], dist_stencil[-1]+ 500, 500)
-    eta_tick_labels, eta_ticks = MT.tick_formatter(eta_ticks[1::4]/1e3, interval= 3, expt_flag= False, shift=0)
-
-
-    y_tick_labels, y_ticks = MT.tick_formatter(np.arange(-0.1, 0.1+ 0.05, 0.05), interval= 2, expt_flag= False, shift=0)
-    ax0.set_yticks(y_ticks)
-    ax0.set_yticklabels(y_tick_labels)
-    ylim_slope= np.round(Gx_1.y_data.std().data*3 * 10)/10
-    ax0.set_ylim(-1.3* ylim_slope ,ylim_slope*1.5)
-
-    y_tick_labels, y_ticks = MT.tick_formatter(np.arange(-0.5, 3, 0.5), interval= 2, expt_flag= False, shift=1)
-    ax1.set_yticks(y_ticks)
-    ax1.set_yticklabels(y_tick_labels)
-    ax1.set_ylim(-0.8, 1.5)
-
-    ax1b.set_yticks(y_ticks)
-    ax1b.set_yticklabels(y_tick_labels)
-    ax1b.set_ylim(-0.8, 0.8)
-
-    ax2.set_xticks(eta_ticks*1e3)
-    ax2.set_xticklabels(eta_tick_labels)
-
-    ax2.set_yticks(y_ticks)
-    ax2.set_yticklabels(y_tick_labels)
-    ax2.set_ylim(-0.5 , 1.5)
-
-    ax3.set_yticks(y_ticks)
-    ax3.set_yticklabels(y_tick_labels)
-    ax3.set_ylim(0, 1)
-
-    xlims= eta_1[0].data+ 0 * dx, eta_1[-1].data- 1000 * dx
-    #xlims= eta_1[0].data+ 0 * dx, eta_1[-1].data- 0 * dx
-
-    for axx in [ax0, ax1, ax1b, ax3]:
-        axx.set_xlim(xlims )
-        axx.axhline(0, linewidth =0.5, color=col.black)
-        axx.spines['bottom'].set_visible(False)
-        axx.tick_params(labelbottom=False, bottom=False)
-    ax2.set_xlim(xlims )
-
-
-    #F.save_light(path= plot_path, name='B03_decomposition_'+str(num_count).zfill(4))
-    #F.save(path= plot_path, name='B06_decomposition_2_'+k+'_x'+str(i)+'_'+ID_name)
-
-    #F.save_light(path= plot_path, name='B06_decomposition_'+k+'_x'+str(i)+'_'+ID_name)
-    #F.save_pup(path= plot_path , name='B06_decomposition_'+k+'_x'+str(i)+'_'+ID_name)
-
-# %%
-
-V0_photon_var = T2_sel['heights_c'].var()
-V1_mean_photon_var = T3_sel['heights_c_weighted_mean'].var()
-V2_wave_model_var = np.nanvar(height_residual_mean)
-
-V0_photon_var/ V0_photon_var
-V1_mean_photon_var/V0_photon_var
-V2_wave_model_var/V0_photon_var
diff --git a/analyis_publish/PB06_plot_reconstruction_simple.py b/analyis_publish/PB06_plot_reconstruction_simple.py
deleted file mode 100644
index 357467f2..00000000
--- a/analyis_publish/PB06_plot_reconstruction_simple.py
+++ /dev/null
@@ -1,496 +0,0 @@
-
-
-import os, sys
-#execfile(os.environ['PYTHONSTARTUP'])
-
-"""
-This file opens an ICESat-2 track, applies filters and corrections, and returns smoothed photon heights on a regular grid in an .nc file.
-This is python 3 -""" - -exec(open(os.environ['PYTHONSTARTUP']).read()) -exec(open(STARTUP_2021_IceSAT2).read()) - -#%matplotlib inline - -import ICEsat2_SI_tools.convert_GPS_time as cGPS -import h5py -import ICEsat2_SI_tools.io as io -import ICEsat2_SI_tools.spectral_estimates as spec - -import time -import imp -import copy -import spicke_remover -import datetime -import generalized_FT as gFT -from scipy.ndimage.measurements import label - -#xr.set_options(display_style='text')() -#import s3fs -# %% -ID_name, batch_key, ID_flag = io.init_from_input(sys.argv) # loads standard experiment -#ID_name, batch_key, ID_flag = '20190605061807_10380310_004_01', 'SH_batch01', False -#ID_name, batch_key, ID_flag = '20190601094826_09790312_004_01', 'SH_batch01', False -#ID_name, batch_key, ID_flag = '20190207111114_06260210_004_01', 'SH_batch02', False -#ID_name, batch_key, ID_flag = '20190208152826_06440210_004_01', 'SH_batch01', False -#ID_name, batch_key, ID_flag = '20190213133330_07190212_004_01', 'SH_batch02', False -#ID_name, batch_key, ID_flag = '20190207002436_06190212_004_01', 'SH_batch02', False -#ID_name, batch_key, ID_flag = '20190206022433_06050212_004_01', 'SH_batch02', False - - -#ID_name, batch_key, ID_flag = '20190215184558_07530210_004_01', 'SH_batch02', False -#ID_name, batch_key, ID_flag = 'SH_20190219_08070210', 'SH_publish', True -#ID_name, batch_key, ID_flag = 'SH_20190502_05160312', 'SH_publish', True -#ID_name, batch_key, ID_flag = 'SH_20190502_05180312', 'SH_publish', True - -ID_name, batch_key, ID_flag = 'SH_20190224_08800210', 'SH_publish', True -#ID_name, batch_key, ID_flag = 'SH_20190219_08070210', 'SH_publish', True - - - - -ID, _, hemis, batch = io.init_data(ID_name, batch_key, ID_flag, mconfig['paths']['work'], ) -#print(ID_name, batch_key, ID_flag) -hemis, batch = batch_key.split('_') - - -## -------------- use lower level data ------------------ -# ATlevel= 'ATL03' -# -# load_path_scratch = mconfig['paths']['scratch'] +'/'+ batch_key +'/' -# load_path_work = mconfig['paths']['work'] +'/'+ batch_key +'/' -# -# #B0_hdf5 = h5py.File(load_path_scratch +'/A01c_ATL03_'+ID_name+ '_corrected.h5', 'r') -# B2_hdf5 = h5py.File(load_path_work +'B01_regrid'+'/'+ID_name + '_B01_regridded.h5', 'r') -# B3_hdf5 = h5py.File(load_path_work +'B01_regrid'+'/'+ID_name + '_B01_binned.h5', 'r') -# -# B0, B2, B3 = dict(), dict(), dict() -# for b in all_beams: -# #B0[b] = io.get_beam_hdf_store(B0_hdf5[b]) -# B2[b] = io.get_beam_hdf_store(B2_hdf5[b]) -# B3[b] = io.get_beam_hdf_store(B3_hdf5[b]) -# -# B2_hdf5.close(), B2_hdf5.close() -# -# load_path = mconfig['paths']['work']+ batch_key +'/B02_spectra/' -# load_file = load_path + 'B02_' + ID_name #+ '.nc' -# #MT.mkdirs_r(plot_path) -# -# Gk = xr.open_dataset(load_file+'_gFT_k.nc') -# Gx = xr.open_dataset(load_file+'_gFT_x.nc') - -## -------------------- use final prodiucts -all_beams = mconfig['beams']['all_beams'] -high_beams = mconfig['beams']['high_beams'] -low_beams = mconfig['beams']['low_beams'] - -load_path_work = mconfig['paths']['work'] +'/'+ batch_key +'/' -load_path = load_path_work +'/B06_corrected_separated/' - -B2 = io.load_pandas_table_dict('B06_' + ID_name+ '_B06_corrected_resid', load_path) -B3 = io.load_pandas_table_dict('B06_' + ID_name+ '_binned_resid', load_path) - -load_file = load_path + 'B06_' + ID_name #+ '.nc' -Gk = xr.open_dataset(load_file+'_gFT_k_corrected.nc') -Gx = xr.open_dataset(load_file+'_gFT_x_corrected.nc') - -ATL07_path = mconfig['paths']['scratch']+'/'+ batch_key +'/' -os.listdir(ATL07_path) -B07= dict() 
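# The loop below fills B07 with one ATL07 table per beam, keyed by the beam name
# (e.g. 'gt1l'); the io.getATL07_beam signature is taken from the call itself, not
# from its docs. B07 is later windowed by delta_time to match each selected stencil.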
-for b in all_beams:
-    B07[b] = io.getATL07_beam(ATL07_path +ID['tracks']['ATL07']+'.h5', beam=b)
-
-# print(Gk)
-# print(Gx)
-
-# %% check paths (again)
-col.colormaps2(21)
-col_dict = col.rels
-
-# define simple routines
-def add_info(D, Dk, ylims):
-    eta = D.eta + D.x
-    N_per_stancil, ksize = Dk.N_per_stancil.data, Dk.k.size
-    plt.text(eta[0].data, ylims[-1], ' N='+numtostr(N_per_stancil) + ' N/2M= '+ fltostr(N_per_stancil/2/ksize, 1))
-
-# Single views
-def plot_data_eta(D, offset=0, **kargs):
-    eta_1 = D.eta + D.x
-    y_data = D.y_model + offset
-    plt.plot(eta_1, y_data, **kargs)
-    return eta_1
-
-def plot_model_eta(D, ax, offset=0, **kargs):
-    eta = D.eta + D.x
-    y_data = D.y_model + offset
-    plt.plot(eta, y_data, **kargs)
-
-    ax.axvline(eta[0].data, linewidth=0.1, color=kargs['color'], alpha=0.5)
-    ax.axvline(eta[-1].data, linewidth=0.1, color=kargs['color'], alpha=0.5)
-
-# %%
-fltostr = MT.float_to_str
-numtostr = MT.num_to_str
-font_for_print()
-
-#for i in x_pos_sel[::2]:
-#i = x_pos_sel[20]
-#MT.mkdirs_r(plot_path+'B03_spectra/')
-
-#~np.isnan(Gk.mean('beam').mean('k').gFT_PSD_data.data)
-x_pos_sel = np.arange(Gk.x.size)[(Gk.mean('beam').mean('k').gFT_PSD_data.data > 0)]
-x_pos_max = Gk.mean('beam').mean('k').gFT_PSD_data[~np.isnan(Gk.mean('beam').mean('k').gFT_PSD_data)].argmax().data
-xpp = x_pos_sel[ [int(i) for i in np.round(np.linspace(0, x_pos_sel.size-1, 4))]]
-#xpp = np.insert(xpp, 0, x_pos_max)
-xpp = x_pos_sel
-
-# %%
-i = 5
-#i = 6
-
-k = all_beams[0]
-#k = 'gt2l'
-
-plot_path = mconfig['paths']['plot'] + '/'+hemis+'/'+batch_key+'/publish/' + ID_name + '/B06/'
-MT.mkdirs_r(plot_path)
-font_for_print()
-
-for i in xpp:
-
-    fn = copy.copy(lstrings)
-
-    F = M.figure_axis_xy(5.5, 6.5, container=True, view_scale=0.8)
-
-    plt.suptitle('ATL03 Decomposition\n'+ io.ID_to_str(ID_name), y=0.93, x=0.13, horizontalalignment='left')
-    #Photon height reconstruction | x='+str(Gk.x[i].data)+' \n' + ID_name, y = 0.95)
-    gs = GridSpec(30, 6, wspace=0, hspace=0.8) #figure=fig,
-
-    ax0 = F.fig.add_subplot(gs[0:6, :])
-    col_d = col.__dict__['rels']
-    plt.title(' '+next(fn)+ 'Slope data and model', loc='left', y=0.83)
-    dx = Gx.eta.diff('eta').mean().data
-    neven = True
-    offs = 0
-    Gx_1 = Gx.isel(x=i).sel(beam=k)
-    Gk_1 = Gk.isel(x=i).sel(beam=k)
-
-    k_thresh = Gk_1.k_lim.data
-    dist_stencil = Gx_1.eta + Gx_1.x
-    dist_stencil_lims = dist_stencil[0].data, dist_stencil[-1].data
-
-    # cutting Table data
-    # photon data
-    # gridded data
-    mask_x_bin = ( (B3[k]['dist'] >= dist_stencil_lims[0]) & (B3[k]['dist'] <= dist_stencil_lims[1]) )
-    T3_sel = B3[k].loc[mask_x_bin]
-
-    T2 = B2[k] #.sort_index(ascending= False)
-    mask_x_true = (T2['x_true'] >= T3_sel['x_true'].min()) & (T2['x_true'] <= T3_sel['x_true'].max())
-    T2_sel = B2[k].loc[mask_x_true]
-
-    B07_sel = B07[k][(T3_sel['delta_time'].min() < B07[k]['time']['delta_time']) & (B07[k]['time']['delta_time'] < T3_sel['delta_time'].max())]
-
-    ### slope data
-    T3 = B3[k] #.loc[mask_x_bin]
-    dd = np.copy(T3['heights_c_weighted_mean'])
-    dd = np.gradient(dd)
-    dd, _ = spicke_remover.spicke_remover(dd, spreed=10, verbose=False)
-    dd_nans = (np.isnan(dd)) + (T3['N_photos'] <= 5)
-    # dd_no_nans = dd[~dd_nans] # windowing is applied here
-    # x_no_nans = T3['dist'][~dd_nans]
-    dd[dd_nans] = np.nan # windowing is applied here
-    xx = T3['dist']
-    xx[dd_nans] = np.nan
-
-    #plt.plot( xx , dd, color=col.green, alpha=0.8, linewidth=0.3)
-    #B3[k]['dist']
-
-    #plot_model_eta(Gx_1, ax0, offset= offs, linestyle='-', color=col_d[k], linewidth=0.4, alpha=1, zorder=12, label='GFT model')
-    # ylims = -np.nanstd(Gx_1.y_data)*3, np.nanstd(Gx_1.y_data)*3
-    # #add_info(Gx_1, Gk_1, ylims)
-
-    lead_color = col.cascade1 #col_d[k]
-
-    # original data
-    eta_1 = plot_data_eta(Gx_1, offset=offs, linestyle='-', c=col.gray, linewidth=2, alpha=1, zorder=11, label='mean photon\nheight slope')
-
-    # reconstruct slope model
-    # introduce frequency filter:
-    gFT_cos_coeff_sel = np.copy(Gk_1.gFT_cos_coeff)
-    gFT_sin_coeff_sel = np.copy(Gk_1.gFT_sin_coeff)
-    gFT_cos_coeff_sel[Gk_1.k > k_thresh] = 0
-    gFT_sin_coeff_sel[Gk_1.k > k_thresh] = 0
-
-    FT = gFT.generalized_Fourier(Gx_1.eta + Gx_1.x, None, Gk_1.k)
-    _ = FT.get_H()
-    FT.b_hat = np.concatenate([ gFT_cos_coeff_sel, gFT_sin_coeff_sel ])
-    plt.plot(Gx_1.eta + Gx_1.x, FT.model()+offs, '-', c=lead_color, linewidth=0.5, alpha=1, zorder=12, label='GFT slope model')
-
-    plt.legend(loc=1, ncol=2)
-
-    ax1 = F.fig.add_subplot(gs[6:13, :])
-    plt.title(' '+next(fn)+ 'Height data and model', loc='left', y=0.83)
-
-    ### height decomposition
-    # plotting observed data
-    # T3_sel['heights_c_weighted_mean']
-    plt.plot( T3_sel['dist'], T3_sel['heights_c_weighted_mean'], '-', color=col_d[k], linewidth=1, label='observed $h_c$ mean')
-
-    if T2_sel['x_true'].iloc[0] > T2_sel['x_true'].iloc[-1]:
-        T2_sel['dist'] = np.interp(T2_sel['x_true'][::-1], T3_sel['x_true'][::-1], T3_sel['dist'][::-1] )[::-1]
-    else:
-        T2_sel['dist'] = np.interp(T2_sel['x_true'][::1], T3_sel['x_true'][::1], T3_sel['dist'] )
-    plt.scatter( T2_sel['dist'], T2_sel['heights_c'], s=1, marker='o', color='black', alpha=0.02, edgecolors='none' )
-
-    try:
-        if (T3_sel['delta_time'].iloc[0] > T3_sel['delta_time'].iloc[-1]) == (B07_sel['time']['delta_time'].iloc[0] > B07_sel['time']['delta_time'].iloc[-1]):
-            B07_sel['dist'] = np.interp(B07_sel['time']['delta_time'], T3_sel['delta_time'], T3_sel['dist'] )
-        else:
-            B07_sel['dist'] = np.interp(np.array(B07_sel['time']['delta_time']), np.array(T3_sel['delta_time'][::-1]), np.array(T3_sel['dist'][::-1]) )
-        B07_flag = True
-    except:
-        B07_flag = False
-
-    # B07_sel['dist'] = scipy.interpolate.griddata(np.array(T3_sel['delta_time'][::-1]), np.array(T3_sel['dist'][::-1]), np.array(B07_sel['time']['delta_time']), method='nearest' )
-    # plt.plot(np.array(B07_sel['time']['delta_time']))
-    # plt.plot(np.array(T3_sel['delta_time'][::-1]))
-    # plt.grid()
-
-    # plt.plot( B07_sel['dist'] - B07_sel['dist_np'])
-    #plt.plot( B07_sel['dist'], B07_sel['heights']['height_segment_height'], color='black', alpha=1, linewidth=0.6 )
-    #plt.scatter(B07_sel['dist'], B07_sel['heights']['height_segment_height'], s=1, marker='o', color='black', alpha=0.7, edgecolors='none' )
-
-    dist_nanmask = np.isnan(Gx_1.y_data)
-    # height_data = np.interp(dist_stencil, T3_sel['dist'], T3_sel['heights_c_weighted_mean']) #[~np.isnan(Gx_1.y_data)]
-
-    # def fit_offset(x, data, model, nan_mask, deg):
-    #
-    #     #x, data, model, nan_mask, deg = dist_stencil, height_data, height_model2, dist_nanmask, 1
-    #     p_offset = np.polyfit(x[~nan_mask], data[~nan_mask] - model[~nan_mask], deg)
-    #     p_offset[-1] = 0
-    #     poly_offset = np.polyval(p_offset, x )
-    #     return poly_offset
-    #
-    # poly_offset = fit_offset(dist_stencil, height_data, height_model2, dist_nanmask, 1)
-
-    #plt.plot(dist_stencil, height_model2 ,'-', c='orange', linewidth=0.6, alpha=1, zorder=12, label='spectral int model')
-    #plt.plot(dist_stencil, poly_offset ,'-', c=col.gridcolor, linewidth=0.6, alpha=1, zorder=12, label='offset')
-    # plt.plot(dist_stencil, 
height_model2 ,'-', c=lead_color, linewidth=0.8, alpha=1,zorder= 12, label = 'GFT height model + correction') - - plt.plot( T3_sel['dist'] , T3_sel['heights_c_model'] ,'-', c=lead_color, linewidth=0.5, alpha=1,zorder= 12, label = '$h_c$ model') - - plt.legend(loc = 4, ncol =2) - - # ax1b = F.fig.add_subplot(gs[13:19, :]) - # plt.title(' '+next(fn)+ 'ATL07', loc='left', y= 0.83) - # x_key= 'dist' - # AT07_cat_offset= 0 - # AT07_bool_offset =0 - # htype_cmap = [col.orange, col.cascade3, col.cascade2, col.cascade1] - # htype_list= ['cloud_covered','other', 'specular_lead', 'dark_lead' , 'other'] - # for htype, hcolor, htype_str in zip( [0, 1, (2, 5), (6, 9)] , htype_cmap , htype_list ): - # if type(htype) is tuple: - # imask = (B07_sel['heights']['height_segment_type'] >= htype[0]) & (B07_sel['heights']['height_segment_type'] <= htype[1]) - # else: - # imask = B07_sel['heights']['height_segment_type'] == htype - # pdata = B07_sel[imask] - # plt.plot( pdata[x_key], pdata['heights']['height_segment_height'] + AT07_cat_offset, '.', color =hcolor, markersize=0.8,alpha=1, label=htype_str) - # - # - # for htype, hcolor, htype_str, hsize in zip( [0, 1] , [col.gridcolor, col.red] , [None, 'ssh'] , [0.8, 5]): - # - # pdata = B07_sel[B07_sel['heights']['height_segment_ssh_flag'] == htype] - # plt.plot( pdata[x_key], pdata['heights']['height_segment_height']*0+AT07_bool_offset, '.', color =hcolor, markersize=hsize,alpha=0.9, label=htype_str) - # - # plt.legend(ncol=3, loc=1) - - - #plt.plot( B07_sel['dist_np'] , B07_sel['heights']['height_segment_height'], color='red', alpha =1, linewidth = 0.6 ) - - # plt.plot( B07_sel['ref']['latitude'][0:100] , B07_sel['heights']['height_segment_height'][0:100])#, s= 1, marker='o', color='black', alpha =1, edgecolors= 'none' ) - # plt.plot( T3_sel['lats'][0:100] , T3_sel['heights_c_model'][0:100])#, s= 1, marker='o', color='black', alpha =1, edgecolors= 'none' ) - # - # plt.plot( B07_sel['dist'][0:200] , B07_sel['heights']['height_segment_height'][0:200], 'r')#, s= 1, marker='o', color='black', alpha =1, edgecolors= 'none' ) - # plt.plot( T3_sel['dist'][:] , T3_sel['heights_c_model'][:])#, s= 1, marker='o', color='black', alpha =1, edgecolors= 'none' ) - # - # T2_sel - # B07_sel.T - # B07_sel['ref']['height_segment_id'] - # B07_sel['heights']['height_segment_ssh_flag'].plot() - # B07_sel['heights']['height_segment_type'].plot() - - - # T3_sel['heights_c_weighted_mean'].plot() - # T2_sel['heights_c'].plot() - # - # T2_sel['dist'].iloc[0], T3_sel['dist'].iloc[0] - # T2_sel['dist'].iloc[-1], T3_sel['dist'].iloc[-1] - - # reconstructued data by integration - # height_model = np.cumsum(FT.model()) + T3_sel['heights_c_weighted_mean'].iloc[0] - # plt.plot( Gx_1.eta + Gx_1.x, height_model, linewidth = 0.6 , color = 'red', label = 'real space integral') - - # FT_int = gFT.generalized_Fourier(Gx_1.eta + Gx_1.x, None,Gk_1.k ) - # _ = FT_int.get_H() - # FT_int.b_hat = np.concatenate([ -gFT_sin_coeff_sel /Gk_1.k, gFT_cos_coeff_sel/Gk_1.k ]) - # - # height_model2 = FT_int.model() /dx# + T3_sel['heights_c_weighted_mean'].iloc[0] - - - - ax2 = F.fig.add_subplot(gs[13:18, :]) - plt.title(' '+next(fn)+ 'Residual heights of ATL03', loc='left', y= 0.83) - height_residual = T2_sel['heights_c_residual'] #T2_sel['heights_c'] - np.interp(T2_sel['dist'], dist_stencil, height_model2 + poly_offset) - plt.scatter(T2_sel['dist'], height_residual, s= 1, marker='o', color='black', alpha =0.02, edgecolors= 'none' ) - - # heights_c_weighted_mean_stancil = np.interp(dist_stencil, T3_sel['dist'], 
T3_sel['heights_c_weighted_mean'] ) - # height_residual_mean = (heights_c_weighted_mean_stancil - height_model2) - poly_offset - # height_residual_mean[dist_nanmask] = np.nan - - - - height_residual_mean = T3_sel['heights_c_residual'] - height_residual_mean_x = T3_sel['dist'] - plt.plot( height_residual_mean_x , height_residual_mean , color =col.rascade1, linewidth = 0.5, label = 'residual $h_c$') - plt.fill_between(height_residual_mean_x , height_residual_mean, color= col.cascade2, edgecolor = None, alpha = 0.4, zorder= 0) - plt.legend(loc = 1) - - - # for pos, kgroup, lflag in zip([ gs[2, 0:2], gs[2, 2:4], gs[2, 4:]], [, ['gt2l', 'gt2r'], ['gt3l', 'gt3r']], [True, False, False] ): - - ax41 = F.fig.add_subplot(gs[3:6, 4:]) - - #ax41.tick_params(labelleft=lflag) - - dd = Gk_1.gFT_PSD_data#.rolling(k=10, min_periods= 1, center=True).mean() - plt.plot(Gk_1.k, dd, color='gray', linewidth=.5 ,alpha= 0.5 ) - - dd = Gk_1.gFT_PSD_data.rolling(k=10, min_periods= 1, center=True).mean() - plt.plot(Gk_1.k, dd, color=lead_color, linewidth=.8 ) - - klim= Gk_1.k[0], Gk_1.k[-1] - plt.xlim(klim) - - plt.ylabel('$(m/m)^2/k$') - #plt.title('Spectra', loc ='left')s - #plt.xlabel('k (2$\pi$ m$^{-1}$)') - plt.ylim(dd.min(),np.nanmax(dd.data) * 1.5) - - ax41.axvline(k_thresh, linewidth=1, color='black', alpha=1) - ax41.axvspan(k_thresh , klim[-1], color='black', alpha=0.5, zorder=12) - ax41.set_facecolor((1.0, 1.00, 1.00, 0.8)) - - #plt.show() - - #F.save_light(path=plot_path+'B03_spectra/', name = 'B03_freq_reconst_x'+str(i)) - #MT.json_save('B03_success', plot_path, {'time':'time.asctime( time.localtime(time.time()) )'}) - - stencil_pos = spec.create_chunk_boundaries_unit_lengths(1000, (dist_stencil[0], dist_stencil[-1]), ov=0, iter_flag=False).T - - V0_list, V1_list, V2_list, V3_list = list(), list(), list(), list() - no_nan_sum= list() - for s in stencil_pos: - V0_list.append( T2_sel['heights_c'].loc[M.cut_nparray( np.array(T2_sel['dist']), s[0], s[-1]) ].var() ) - V1_list.append( T3_sel['heights_c_weighted_mean'].loc[M.cut_nparray( np.array(T3_sel['dist']), s[0], s[-1]) ].var() ) - V2_list.append( np.nanvar( T3_sel['heights_c_model'].loc[M.cut_nparray( np.array(T3_sel['dist']), s[0], s[-1])]) ) - V3_list.append( np.nanvar(height_residual_mean[ M.cut_nparray( np.array(height_residual_mean_x), s[0], s[-1])]) ) - - no_nan_sum.append( (~dist_nanmask[M.cut_nparray( dist_stencil, s[0], s[-1])].data).sum()) - - - ax3 = F.fig.add_subplot(gs[21:23, :]) - - plt.title(next(fn) + 'Variance Decomposition', loc='left') - V0_list, V1_list, V2_list = np.array(V0_list),np.array(V1_list),np.array(V2_list), - no_nan_sum = np.array(no_nan_sum) - no_nan_sum = no_nan_sum/no_nan_sum.max() - - edge_pos = np.insert(stencil_pos[:,0], stencil_pos[:,0].size, stencil_pos[:,-1][-1]) - plt.stairs(no_nan_sum * V0_list/V0_list, edge_pos, baseline=0, fill=True, color= col.black, alpha=0.6, label = 'photon variance') - plt.stairs(no_nan_sum * V1_list/V0_list, edge_pos, baseline=0, fill=True, color= col_d[k] , label = 'mean photon variance') - plt.stairs(no_nan_sum * V2_list/V0_list, edge_pos, baseline=0, fill=True, color= lead_color, label = 'wave variance') - plt.stairs(no_nan_sum * (V3_list/V0_list+ V2_list/V0_list) , edge_pos, baseline=no_nan_sum * V2_list/V0_list, fill=True, color= col.green, label = 'residual variance') - - plt.legend(ncol= 4, bbox_to_anchor=(-0.02, 0), loc= 2) - - # residual - #ax0.set_xticks(eta_ticks) - #ax0.set_xticklabels(eta_ticks/1e3) - #ax0.set_ylabel('Slope (m/m)') - #ax1.spines['top'].set_visible(True) - 
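# A minimal, self-contained sketch of the per-stencil variance decomposition computed
# above, with synthetic numpy arrays standing in for the photon heights and the GFT
# wave model; all names and values here are illustrative, not from the processing chain:
#
#     import numpy as np
#
#     rng = np.random.default_rng(0)
#     x = np.linspace(0, 5000, 2000)                  # along-track distance (m)
#     model = 0.3 * np.sin(2 * np.pi * x / 200)       # stand-in wave model
#     heights = model + rng.normal(0, 0.2, x.size)    # stand-in photon heights
#
#     edges = np.arange(0, 5001, 1000)                # 1 km stencils, as in stencil_pos
#     for s0, s1 in zip(edges[:-1], edges[1:]):
#         m = (x >= s0) & (x < s1)
#         V0 = heights[m].var()                       # total variance (photon level)
#         V2 = model[m].var()                         # variance carried by the wave model
#         V3 = (heights[m] - model[m]).var()          # residual variance
#         print(f"{s0/1e3:.0f}-{s1/1e3:.0f} km: wave {V2/V0:.2f}, residual {V3/V0:.2f}")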
#ax1.spines['top'].set_linewidth(0.2)
-
-    #ax1.xaxis.set_ticks_position('top')
-    #ax1.xaxis.set_label_position('top')
-    ax0.set_ylabel('Slope (m/m)')
-    ax1.set_ylabel('Photon Height (m)')
-    ax2.set_ylabel('Photon Height (m)')
-
-    #ax2.spines['bottom'].set_visible(True)
-    ax2.set_xlabel('Distance from the ice edge (km)')
-
-    eta_ticks = np.arange(dist_stencil[0], dist_stencil[-1]+ 500, 500)
-    eta_tick_labels, eta_ticks = MT.tick_formatter(eta_ticks[1::4]/1e3, interval= 3, expt_flag= False, shift=0)
-
-    y_tick_labels, y_ticks = MT.tick_formatter(np.arange(-0.1, 0.1+ 0.05, 0.05), interval= 2, expt_flag= False, shift=0)
-    ax0.set_yticks(y_ticks)
-    ax0.set_yticklabels(y_tick_labels)
-    ylim_slope = np.round(Gx_1.y_data.std().data*3 * 10)/10
-    ax0.set_ylim(-1.3* ylim_slope, ylim_slope*1.5)
-
-    y_tick_labels, y_ticks = MT.tick_formatter(np.arange(-0.5, 3, 0.5), interval= 2, expt_flag= False, shift=1)
-    ax1.set_yticks(y_ticks)
-    ax1.set_yticklabels(y_tick_labels)
-    ax1.set_ylim(-0.8, 1.5)
-
-    # ax1b.set_yticks(y_ticks)
-    # ax1b.set_yticklabels(y_tick_labels)
-    # ax1b.set_ylim(-0.8, 0.8)
-
-    ax2.set_xticks(eta_ticks*1e3)
-    ax2.set_xticklabels(eta_tick_labels)
-
-    ax2.set_yticks(y_ticks)
-    ax2.set_yticklabels(y_tick_labels)
-    ax2.set_ylim(0, 1.5)
-
-    ax3.set_yticks(y_ticks)
-    ax3.set_yticklabels(y_tick_labels)
-    ax3.set_ylim(0, 1)
-
-    xlims = eta_1[0].data + 0 * dx, eta_1[-1].data - 1000 * dx
-    #xlims = eta_1[0].data + 0 * dx, eta_1[-1].data - 0 * dx
-
-    for axx in [ax0, ax1, ax3]:
-        axx.set_xlim(xlims)
-        axx.axhline(0, linewidth=0.5, color=col.black)
-        axx.spines['bottom'].set_visible(False)
-        axx.tick_params(labelbottom=False, bottom=False)
-    ax2.set_xlim(xlims)
-
-    #F.save_light(path= plot_path, name='B03_decomposition_'+str(num_count).zfill(4))
-    #F.save(path= plot_path, name='B06_decomposition_2_'+k+'_x'+str(i)+'_'+ID_name)
-
-    F.save_light(path= plot_path, name='B06_decomposition_simple_'+k+'_x'+str(i)+'_'+ID_name)
-    #F.save_pup(path= plot_path, name='B06_decomposition_simple_'+k+'_x'+str(i)+'_'+ID_name)
-
-    #num_count += 1
-# %%
-
-# fractions of the total photon-height variance explained by the binned means and the wave model
-V0_photon_var = T2_sel['heights_c'].var()
-V1_mean_photon_var = T3_sel['heights_c_weighted_mean'].var()
-V2_wave_model_var = np.nanvar(height_residual_mean)
-
-V0_photon_var/ V0_photon_var
-V1_mean_photon_var/ V0_photon_var
-V2_wave_model_var/ V0_photon_var
diff --git a/analyis_publish/PB07_plot_N_and_data.py b/analyis_publish/PB07_plot_N_and_data.py
deleted file mode 100644
index b0d6098a..00000000
--- a/analyis_publish/PB07_plot_N_and_data.py
+++ /dev/null
@@ -1,331 +0,0 @@
-
-# %%
-import os, sys
-#execfile(os.environ['PYTHONSTARTUP'])
-
-"""
-This file opens an ICESat-2 track, applies filters and corrections, and returns smoothed photon heights on a regular grid in an .nc file.
-This is Python 3.
-"""
-
-exec(open(os.environ['PYTHONSTARTUP']).read())
-exec(open(STARTUP_2021_IceSAT2).read())
-
-#%matplotlib inline
-
-import ICEsat2_SI_tools.convert_GPS_time as cGPS
-import h5py
-import ICEsat2_SI_tools.io as io
-import ICEsat2_SI_tools.spectral_estimates as spec
-
-import time
-import imp
-import copy
-import spicke_remover
-import datetime
-import generalized_FT as gFT
-from scipy.ndimage.measurements import label
-
-#import xarray as xr
-xr.set_options(display_style='text')
-#import s3fs
-# %%
-ID_name, batch_key, ID_flag = io.init_from_input(sys.argv) # loads standard experiment
-#ID_name, batch_key, ID_flag = '20190605061807_10380310_004_01', 'SH_batch01', False
-#ID_name, batch_key, ID_flag = '20190601094826_09790312_004_01', 'SH_batch01', False
-#ID_name, batch_key, ID_flag = '20190207111114_06260210_004_01', 'SH_batch02', False
-#ID_name, batch_key, ID_flag = '20190208152826_06440210_004_01', 'SH_batch01', False
-#ID_name, batch_key, ID_flag = '20190213133330_07190212_004_01', 'SH_batch02', False
-#ID_name, batch_key, ID_flag = '20190207002436_06190212_004_01', 'SH_batch02', False
-#ID_name, batch_key, ID_flag = '20190206022433_06050212_004_01', 'SH_batch02', False
-
-#ID_name, batch_key, ID_flag = '20190215184558_07530210_004_01', 'SH_batch02', False
-#ID_name, batch_key, ID_flag = 'SH_20190219_08070210', 'SH_publish', True
-#ID_name, batch_key, ID_flag = 'SH_20190502_05160312', 'SH_publish', True
-#ID_name, batch_key, ID_flag = 'SH_20190502_05180312', 'SH_publish', True
-
-#ID_name, batch_key, ID_flag = 'SH_20190213_07190212', 'SH_publish', True
-
-# used in paper:
-#ID_name, batch_key, ID_flag = 'SH_20190219_08070210', 'SH_publish', True
-#ID_name, batch_key, ID_flag = 'SH_20190224_08800210', 'SH_publish', True
-#ID_name, batch_key, ID_flag = 'SH_20190502_05160312', 'SH_publish', True # no ATL07 data
-
-ID_name, batch_key, ID_flag = 'SH_20190502_05180312', 'SH_publish', True
-
-TND = mconfig['track_name_dict']
-
-# % 1 X
-# % Track 1
-# % SH_20190224_08800210
-#
-# % 3 X
-# % Track 2
-# % SH_20190219_08070210
-#
-# % 4 X
-# % Track 3
-# % SH_20190502_05160312
-#
-# % 1 X
-# % Track 4
-# % SH_20190502_05180312
-
-ID, _, hemis, batch = io.init_data(ID_name, batch_key, ID_flag, mconfig['paths']['work'], )
-#print(ID_name, batch_key, ID_flag)
-hemis, batch = batch_key.split('_')
-
-plot_path = mconfig['paths']['plot'] + '/'+hemis+'/'+batch_key+'/publish/B07/'
-MT.mkdirs_r(plot_path)
-
-## -------------- use lower level data ------------------
-# ATlevel= 'ATL03'
-#
-# load_path_scratch = mconfig['paths']['scratch'] +'/'+ batch_key +'/'
-# load_path_work = mconfig['paths']['work'] +'/'+ batch_key +'/'
-#
-# #B0_hdf5 = h5py.File(load_path_scratch +'/A01c_ATL03_'+ID_name+ '_corrected.h5', 'r')
-# B2_hdf5 = h5py.File(load_path_work +'B01_regrid'+'/'+ID_name + '_B01_regridded.h5', 'r')
-# B3_hdf5 = h5py.File(load_path_work +'B01_regrid'+'/'+ID_name + '_B01_binned.h5', 'r')
-#
-# B0, B2, B3 = dict(), dict(), dict()
-# for b in all_beams:
-#     #B0[b] = io.get_beam_hdf_store(B0_hdf5[b])
-#     B2[b] = io.get_beam_hdf_store(B2_hdf5[b])
-#     B3[b] = io.get_beam_hdf_store(B3_hdf5[b])
-#
-# B2_hdf5.close(), B3_hdf5.close()
-#
-# load_path = mconfig['paths']['work']+ batch_key +'/B02_spectra/'
-# load_file = load_path + 'B02_' + ID_name #+ '.nc'
-# #MT.mkdirs_r(plot_path)
-#
-# Gk = xr.open_dataset(load_file+'_gFT_k.nc')
-# Gx = xr.open_dataset(load_file+'_gFT_x.nc')
-
-## -------------------- use final products
-all_beams = mconfig['beams']['all_beams']
-high_beams = mconfig['beams']['high_beams']
-low_beams = mconfig['beams']['low_beams']
-
-load_path_work = mconfig['paths']['work'] +'/'+ batch_key +'/'
-load_path = load_path_work +'/B06_corrected_separated/'
-
-B2 = io.load_pandas_table_dict('B06_' + ID_name+ '_B06_corrected_resid', load_path)
-B3 = io.load_pandas_table_dict('B06_' + ID_name+ '_binned_resid', load_path)
-
-load_file = load_path + 'B06_' + ID_name #+ '.nc'
-Gk = xr.open_dataset(load_file+'_gFT_k_corrected.nc')
-Gx = xr.open_dataset(load_file+'_gFT_x_corrected.nc')
-
-ATL07_path = mconfig['paths']['scratch']+'/'+ batch_key +'/'
-os.listdir(ATL07_path)
-B07 = dict()
-try:
-    for b in all_beams:
-        B07[b] = io.getATL07_beam(ATL07_path +ID['tracks']['ATL07']+'.h5', beam=b)
-    B07_flag = True
-
-except:
-    B07_flag = False
-
-#B07_flag = False
-# print(Gk)
-# print(Gx)
-
-# %% check paths (again)
-col.colormaps2(21)
-col_dict = col.rels
-col_d = col.__dict__['rels']
-
-#~np.isnan(Gk.mean('beam').mean('k').gFT_PSD_data.data)
-x_pos_sel = np.arange(Gk.x.size) #[(Gk.mean('beam').mean('k').gFT_PSD_data.data >0 )]
-# x_pos_max = Gk.mean('beam').mean('k').gFT_PSD_data[~np.isnan(Gk.mean('beam').mean('k').gFT_PSD_data)].argmax().data
-# xpp = x_pos_sel[ [int(i) for i in np.round(np.linspace(0, x_pos_sel.size-1, 4))]]
-#xpp = np.insert(xpp, 0, x_pos_max)
-xpp = x_pos_sel
-
-x_sel = Gk.x/1e3 #[(Gk.mean('beam').mean('k').gFT_PSD_data.data >0 )]/1e3
-
-dx = np.diff(x_sel)[0]/2
-edge_pos = np.insert(x_sel.data, 0, 0) #+dx
-
-VAR_stats_sum = None
-N_sample_stats_sum = None
-
-N_stack = 0
-for k in all_beams:
-
-    N_sample_stats = pd.DataFrame(index=['ATL03', 'ATL03_used', 'ATL07'] )
-    VAR_stats = pd.DataFrame(index=['ATL03_photon', 'ATL03_wave_model', 'ATL03_smth_data', 'ATL03_smth_wave_model', 'ATL07_heights'] )
-
-    print(k)
-
-    for i in xpp:
-        Gx_1 = Gx.isel(x=i).sel(beam=k)
-        Gk_1 = Gk.isel(x=i).sel(beam=k)
-
-        #k_thresh = Gk_1.k_lim.data
-        dist_stencil = Gx_1.eta + Gx_1.x
-        dist_stencil_lims = dist_stencil[0].data, dist_stencil[-1].data
-
-        # cutting Table data
-        # photon data
-        # gridded data
-        mask_x_bin = ( (B3[k]['dist'] >= dist_stencil_lims[0]) & (B3[k]['dist'] <= dist_stencil_lims[1]) )
-        T3_sel = B3[k].loc[mask_x_bin]
-
-        T2 = B2[k] #.sort_index(ascending= False)
-        mask_x_true = (T2['x_true'] >= T3_sel['x_true'].min()) & (T2['x_true'] <= T3_sel['x_true'].max())
-        T2_sel = B2[k].loc[mask_x_true]
-        #sum(mask_x_true)
-
-        if B07_flag:
-            B07_sel = B07[k][(T3_sel['delta_time'].min() < B07[k]['time']['delta_time']) & (B07[k]['time']['delta_time'] < T3_sel['delta_time'].max())]
-
-        ## photon counts
-        N_sample_stats[i] = [T2_sel.shape[0], Gk_1.N_photons.data, B07_sel['env']['n_photons_actual'].sum() ]
-
-        # variance estimates
-        # ATL03
-        T2_photon_var = T2_sel['heights_c'].var()
-        T2_wave_model_var = T2_sel['heights_c_model'].var()
-        T3_data_var = T3_sel['heights_c_weighted_mean'].var()
-        T3_wave_model_var = T3_sel['heights_c_model'].var()
-
-        # T2_resid_var = T2_sel['heights_c_residual'].var()
-        # T2_rebin_var = T2_photon_var - (T2_wave_model_var + T2_resid_var)
-
-        # ATL07
-        if B07_flag:
-            B07_total_var = B07_sel['heights']['height_segment_height'].var()
-        else:
-            B07_total_var = T3_wave_model_var * np.nan
-
-        VAR_stats[i] = [T2_photon_var, T2_wave_model_var, T3_data_var, T3_wave_model_var, B07_total_var ]
-
-    if VAR_stats_sum is None:
-        VAR_stats_sum = VAR_stats
-        N_sample_stats_sum = N_sample_stats
-    else:
-        print('add')
-        VAR_stats_sum += VAR_stats
-        N_sample_stats_sum += N_sample_stats
-
-    N_stack += 1
-
-VAR_stats_sum = 
VAR_stats_sum.T/N_stack -VAR_stats_sum.index = x_sel - -N_sample_stats_sum = N_sample_stats_sum.T/N_stack -N_sample_stats_sum.index = x_sel - -VAR_stats_sum[VAR_stats_sum == 0] = np.nan -N_sample_stats_sum[N_sample_stats_sum == 0] = np.nan - -# %% - -# VAR_stats_sum['ATL03_photon'].plot() -# VAR_stats_sum['ATL03_wave_model'].plot() -# -# VAR_stats_sum['ATL03_smth_data'].plot() -# VAR_stats_sum['ATL03_smth_wave_model'].plot() -# -# VAR_stats_sum['ATL07_heights'].plot() -# plt.ylim(0, 0.21 *6) - -font_for_print() -fn = copy.copy(lstrings) - - - -#F = M.figure_axis_xy(5.5, 6.5, container =True, view_scale= 0.8) -#F = M.figure_axis_xy( fig_sizes['23rd_width'][0] , fig_sizes['23rd_width'][1]*2.3 , container =True, view_scale= 0.8) -F = M.figure_axis_xy( fig_sizes['one_column_high'][0] , fig_sizes['one_column_high'][1] * 1.5 , container =True, view_scale= 0.8) - -#plt.suptitle('ALT03 Decomposition\n'+ io.ID_to_str(ID_name), y = 0.93, x = 0.13, horizontalalignment ='left') -#plt.suptitle('Explained Variance Decomposition\n'+ io.ID_to_str(ID_name), y = 0.93, x = 0.13, horizontalalignment ='left') -plt.suptitle('Explained Variance \nDecomposition\n'+ TND[ID_name] , y = 0.955, x = 0.13, horizontalalignment ='left') - - -#Photon height reconstruction | x='+str(Gk.x[i].data)+' \n' + ID_name, y = 0.95) -gs = GridSpec(10, 4, wspace=0, hspace=1)#figure=fig, - -ax0 = F.fig.add_subplot(gs[0:3, :]) -plt.title(' '+next(fn)+ 'ATL03 Expl. Variance', loc='left', y= 0.96) - -#edge_pos = np.insert(VAR_stats_sum.index, VAR_stats_sum.index.size, VAR_stats_sum.index[-1]) -plt.stairs(VAR_stats_sum['ATL03_photon'], edge_pos, baseline=0, fill=True, color= col.gridcolor, alpha=1, label = 'Photon variance (<20 meter)') -plt.stairs(VAR_stats_sum['ATL03_smth_data'], edge_pos, baseline=0, fill=True, color= col.cascade2, alpha=0.6, label = '$h_c$ variance (> 20 meters)') -plt.stairs(VAR_stats_sum['ATL03_wave_model'], edge_pos, baseline=0, fill=True, edgecolor=col.black, color= col.cascade1, alpha=1, label = 'wave variance (model)', linewidth=0.8) -#plt.stairs(VAR_stats_sum['ATL03_smth_wave_model'], edge_pos, baseline=0, fill=False, color= col.green, alpha=1, label = 'photon variance') - - -# plt.stairs(no_nan_sum * V1_list/V0_list, edge_pos, baseline=0, fill=True, color= col_d[k] , label = 'mean photon variance') -# plt.stairs(no_nan_sum * V2_list/V0_list, edge_pos, baseline=0, fill=True, color= lead_color, label = 'wave variance') -# plt.stairs(no_nan_sum * (V3_list/V0_list+ V2_list/V0_list) , edge_pos, baseline=no_nan_sum * V2_list/V0_list, fill=True, color= col.green, label = 'residual variance') - -#plt.legend(ncol= 4, bbox_to_anchor=(-0.02, 0), loc= 2) -#plt.legend(ncol= 2, bbox_to_anchor=(+0.52, 1.30), loc=2) -plt.legend(ncol= 1, bbox_to_anchor=(+0.55, 1.45), loc=2) - -y_max = np.median(VAR_stats_sum['ATL03_photon']) -#y_max = np.quantile(VAR_stats_sum['ATL03_photon'], 0.9) *1.2 -y_max = np.nanquantile(VAR_stats_sum['ATL03_photon'], 0.8) *1.5 - -# residual -#ax0.set_xticks(eta_ticks) -#ax0.set_xticklabels(eta_ticks/1e3) -#ax0.set_ylabel('Slope (m/m)') -#ax1.spines['top'].set_visible(True) -#ax1.spines['top'].set_linewidth(0.2) - -ax1 = F.fig.add_subplot(gs[1+2:3+2, :]) - -#\com{change label: "photon variance" -> "Photon variance (<20 meter)"; "observed hc mean" --> "$h_c$ variance > 20 meters"; "wave variance (model)"} - - -plt.title(' '+next(fn)+ 'ATL07 Observed Variance', loc='left', y= 0.95) - -plt.stairs(VAR_stats_sum['ATL07_heights'], edge_pos, baseline=0, fill=True, color=col.orange, edgecolor=col.gray, alpha=1, 
label = 'ATL07 variance')
-plt.stairs(VAR_stats_sum['ATL03_wave_model'], edge_pos, baseline=0, fill=False, edgecolor=col.black, alpha=1, linewidth=1, label = 'wave variance as in (a)')
-
-dmask = np.isnan(VAR_stats_sum['ATL07_heights'])
-hatch_data = np.ones(VAR_stats_sum['ATL07_heights'].size) + y_max
-hatch_data[~dmask] = np.nan
-plt.stairs( hatch_data, edge_pos, baseline=0, fill=True, color= col.gridcolor, alpha=0.3)
-plt.stairs( hatch_data, edge_pos, baseline=0, fill=False, color= col.black, alpha=0.3, label = 'no data', hatch='//')
-
-plt.legend(ncol= 3, bbox_to_anchor=(0, -0.4), loc=2)
-
-#ax1.xaxis.set_ticks_position('top')
-#ax1.xaxis.set_label_position('top')
-ax0.set_ylabel('Variance (m$^2$)')
-ax0.set_ylim(0, y_max)
-ax0.tick_params( bottom=True, labelbottom=False)
-
-ax1.set_ylabel('Variance (m$^2$)')
-ax1.set_ylim(0, y_max *1.8/3)
-
-ax1.set_xlim(edge_pos[0], edge_pos[-4]-2)
-ax0.set_xlim(edge_pos[0], edge_pos[-4]-2)
-
-ax1.set_xlabel('Distance from Ice Edge (km)')
-
-F.save_light(path= plot_path, name='B07_explvar_'+ID_name)
-F.save_pup(path= plot_path , name='B07_explvar_'+ID_name)
-
-# %%
diff --git a/analyis_publish/SB02_directional_distortion.py b/analyis_publish/SB02_directional_distortion.py
deleted file mode 100644
index 27a7c8a3..00000000
--- a/analyis_publish/SB02_directional_distortion.py
+++ /dev/null
@@ -1,140 +0,0 @@
-
-import os, sys
-#execfile(os.environ['PYTHONSTARTUP'])
-
-"""
-This file opens an ICESat-2 track, applies filters and corrections, and returns smoothed photon heights on a regular grid in an .nc file.
-This is Python 3.
-"""
-
-exec(open(os.environ['PYTHONSTARTUP']).read())
-exec(open(STARTUP_2021_IceSAT2).read())
-#%matplotlib inline
-
-import ICEsat2_SI_tools.convert_GPS_time as cGPS
-import h5py
-import ICEsat2_SI_tools.io as io
-import ICEsat2_SI_tools.spectral_estimates as spec
-
-import imp
-import copy
-import spicke_remover
-import datetime
-
-# %%
-
-f = np.arange(1/1000, 1/5, 1/500)
-import JONSWAP_gamma as spectral_models
-
-plot_path = mconfig['paths']['plot'] + '/explanetory_figures/'
-
-col.colormaps2(21)
-font_for_pres()
-F = M.figure_axis_xy(5, 3, view_scale= 0.7)
-
-U = 20 # results are insensitive to U
-f_max = 1/20
-gamma = 1
-Jswap = spectral_models.JONSWAP_default_alt(f, f_max, 20, gamma=gamma)
-
-import itertools
-
-clist = itertools.cycle([col.cascade1, col.rascade1, col.cascade2, col.rascade2])
-
-#plt.plot(1/f, Jswap, 'k', label='true', zorder=12, linewidth = 0.5)
-for f_max in np.arange(1/25, 1/10, 1/100):
-#for U in np.arange(1, 30, 10):
-
-    fold = f_max
-    cc = next(clist)
-    for alpha in np.arange(-82.5, 85, 7.5):
-        #alpha = 80
-        # projection onto the track rescales the wavenumber, k' = k cos(alpha); with the
-        # deep-water dispersion relation (2 pi f)**2 = g k this gives f' = f sqrt(cos(alpha))
-        f_prime = f * np.sqrt( np.cos(np.pi *alpha/ 180) )
-        Jswap = spectral_models.JONSWAP_default_alt(f, f_max, U, gamma=gamma)
-
-        # f_max_prime = f_max * np.sqrt( np.cos(np.pi *alpha/ 180) )
-        #
-        # Jswap_prime = spectral_models.JONSWAP_default_alt(f_prime, f_max, 20, gamma=gamma)
-
-        if alpha == 0:
-            lstring = '$T_p$=' + str(np.round(1/f_max, 1)) +'s'
-            plt.plot(1/f_prime, Jswap, c=cc, label=lstring, linewidth = 1)
-        else:
-            plt.plot(1/f_prime, Jswap, c=cc, linewidth = 0.5)
-
-#F.ax.set_yscale('log')
-plt.title('Spectral distortion with observation angle ($\\alpha \pm85 ^\circ$)')
-F.ax.set_xscale('log')
-plt.xlabel("Observed period (T')")
-plt.xlim(0, 250)
-plt.legend()
-
-# %%
-
-f = np.arange(1/1000, 1/5, 1/800)
-import JONSWAP_gamma as spectral_models
-
-col.colormaps2(21)
-font_for_pres()
-F = M.figure_axis_xy(5, 3, view_scale= 0.7)
-
-U = 20 # results are insensitive to U
-f_max = 1/20
-gamma = 1
-Jswap = spectral_models.JONSWAP_default_alt(f, f_max, 20, gamma=gamma)
-
-import itertools
-col.colormaps2(21)
-
-clist = itertools.cycle([col.cascade1, col.rascade1, col.cascade2, col.rascade2])
-clist = itertools.cycle(col.greyredorange(np.linspace(0, 10)))
-
-#plt.plot(1/f, Jswap, 'k', label='true', zorder=12, linewidth = 0.5)
-#for U in np.arange(1, 30, 10):
-#for f_max in np.arange(1/25, 1/10, 1/80):
-for T_max in np.arange(8, 18, 2):
-
-    f_max = 1/T_max
-    fold = f_max
-    cc = next(clist)
-
-    for alpha in np.insert(np.arange(0, 85, 10), 9, 85 ):
-        #alpha = 80
-
-        f_prime = f * np.sqrt( np.cos(np.pi *alpha/ 180) )
-        Jswap = spectral_models.JONSWAP_default_alt(f, f_max, U, gamma=gamma)
-
-        # deep-water conversions for the observed wavenumber and wavelength
-        k_prime = (2 * np.pi * f_prime)**2 / 9.81
-        lambda_prime = 9.81 / (2 * np.pi * f_prime**2 )
-
-        # f_max_prime = f_max * np.sqrt( np.cos(np.pi *alpha/ 180) )
-        #
-        # Jswap_prime = spectral_models.JONSWAP_default_alt(f_prime, f_max, 20, gamma=gamma)
-
-        if alpha == 0:
-            lstring = '$T_p$=' + str(T_max) +'s'
-            plt.plot(lambda_prime, Jswap, c=cc, label=lstring, linewidth = 2)
-        else:
-            plt.plot(lambda_prime, Jswap, c=cc, linewidth = 0.6)
-
-        if (T_max == 16) & (alpha == 0 or alpha == 85 or alpha == 60 or alpha == 80):
-            plt.text(lambda_prime[Jswap.argmax()], Jswap.max(), ' ' +str(alpha)+'$^\circ$', ha= 'center', va= 'bottom')
-
-#F.ax.set_yscale('log')
-plt.title('Spectral distortion of the observed wave spectra\n($\\alpha=0$ to $\pm85^\circ$)', loc='left')
-F.ax.set_xscale('log')
-plt.xlabel("Observed wavelength ($\lambda'$)")
-plt.ylabel("Amplitude ($m^2/k$)")
-
-plt.xlim(5e1, 4e4)
-plt.legend()
-
-F.save_pup(path = plot_path, name= 'wavespectra_distortion')
diff --git a/analyis_publish/SB05_2d_wavefield_emulator_puplish.py b/analyis_publish/SB05_2d_wavefield_emulator_puplish.py
deleted file mode 100644
index fdaa66d3..00000000
--- a/analyis_publish/SB05_2d_wavefield_emulator_puplish.py
+++ /dev/null
@@ -1,375 +0,0 @@
-
-import os, sys
-#execfile(os.environ['PYTHONSTARTUP'])
-
-"""
-This file opens an ICESat-2 track, applies filters and corrections, and returns smoothed photon heights on a regular grid in an .nc file.
-This is python 3 -""" -if __name__ == '__main__': - exec(open(os.environ['PYTHONSTARTUP']).read()) - exec(open(STARTUP_2021_IceSAT2).read()) - #%matplotlib inline - - import ICEsat2_SI_tools.convert_GPS_time as cGPS - import h5py - import ICEsat2_SI_tools.io as io - import ICEsat2_SI_tools.spectral_estimates as spec - - import imp - import copy - import spicke_remover - import datetime - import concurrent.futures as futures - #import s3fs - # %% - track_name, batch_key, test_flag = io.init_from_input(sys.argv) # loads standard experiment - #track_name, batch_key, test_flag = '20190605061807_10380310_004_01', 'SH_batch01', False - #track_name, batch_key, test_flag = '20190601094826_09790312_004_01', 'SH_batch01', False - #track_name, batch_key, test_flag = '20190207111114_06260210_004_01', 'SH_batch02', False - track_name, batch_key, test_flag = '20190219073735_08070210_004_01', 'SH_batch02', False - - - - - #print(track_name, batch_key, test_flag) - hemis, batch = batch_key.split('_') - #track_name= '20190605061807_10380310_004_01' - ATlevel= 'ATL03' - - save_path = mconfig['paths']['work'] + '/B03_spectra_'+hemis+'/' - save_name = 'B03_'+track_name - - #plot_path = mconfig['paths']['plot'] + '/'+hemis+'/'+batch_key+'/' + track_name + '/B_spectra/' - plot_path = mconfig['paths']['plot'] + '/phase_fitting_fake/2D_fake/' - MT.mkdirs_r(plot_path) - MT.mkdirs_r(save_path) - bad_track_path =mconfig['paths']['work'] +'bad_tracks/'+ batch_key+'/' -# %% -# if __name__ == '__main__': -# all_beams = mconfig['beams']['all_beams'] -# high_beams = mconfig['beams']['high_beams'] -# low_beams = mconfig['beams']['low_beams'] -# #Gfilt = io.load_pandas_table_dict(track_name + '_B01_regridded', load_path) # rhis is the rar photon data -# -# load_path = mconfig['paths']['work'] +'/B01_regrid_'+hemis+'/' -# Gd = io.load_pandas_table_dict(track_name + '_B01_binned' , load_path) # -# load_path = mconfig['paths']['work'] + '/B02_spectra_'+hemis+'/' -# Gpars = io.load_pandas_table_dict('B02_'+ track_name + '_params' , load_path) # -# Gspec = xr.open_dataset(load_path + 'B02_'+ track_name + '_LS.nc' ) # -# Gspec['Y_model_hat'] = Gspec.Y_model_hat_real + Gspec.Y_model_hat_imag *1j -# Gspec = Gspec.drop('Y_model_hat_real').drop('Y_model_hat_imag') -# -# dk = Gspec.k.diff('k').mean().data -# Lpoints = Gspec.Lpoints -# -# Gspec = Gspec.sel(k = slice(0.000125, 0.025)).isel(x =slice(4, 30)) -# -# Gspec.coords['f'] = (('k'), np.sqrt(Gspec.k.data * 9.81)/ 2/np.pi ) -# Gspec.coords['T'] = 1/Gspec.coords['f'] -# Gspec=Gspec.swap_dims({'k': 'f'}) -# -# #Gspec.spectral_power_optm.sel(beam='weighted_mean').plot() -# # %% -# #k_lim= 0.02 -# A, B = Gspec.sel(beam= 'gt2r').Y_model_hat , Gspec.sel(beam= 'gt2l').Y_model_hat -# -# r_ave_kargs={'x':2, 'f':10, 'center':True, 'min_periods':2} -# r_ave_kargs2={'f':10, 'center':True, 'min_periods':2} -# #(abs(B) - abs(A)).plot() -# S_aa = (A*A.conj()).real -# S_bb = (B*B.conj()).real -# # co_spec = (A.conj() *B) /S_aa/S_bb -# # abs(co_spec).plot() -# co_spec = (A.conj() *B).rolling(**r_ave_kargs).mean() -# np.log(abs(co_spec)).plot(levels=np.arange(-2, 3, 0.1)) -# -# (abs(co_spec)).plot(levels=np.exp(np.arange(-3, 2, 0.1))) -# -# abs(co_spec).mean('x').plot() - -# %% - - -x = np.linspace(-np.pi, np.pi, 200) - -plt.plot(x, np.cos(x /2)**1*2 ) -plt.plot(x, np.cos(x /2)**1.5*2 ) -plt.plot(x, np.cos(x /2)**2*2 ) -plt.grid() - -# %% -fp= 1/12.0 -f = np.arange(1/50, 1/5, 0.001) -beta = 2.4*(f / (0.95 *fp ) ) -bb, xx= np.meshgrid(beta, x) - -# %% -plt.contourf(x, f, (1 /np.cosh( (bb *xx) 
)**2).T ) - -plt.grid() - - -#(abs(co_spec)/(S_aa *S_bb).rolling(**r_ave_kargs).mean()).plot() -# -# (abs(A.conj() *B)/(S_aa *S_bb)).rolling(**r_ave_kargs).mean()[:,:].plot() - -# # %% -# if __name__ == '__main__': -# L1 = 50 -# k1 = 2* np.pi /L1 -# l1 = 2* np.pi /L1 -# -# L2 = 65 -# k2 = 2* np.pi /L2 -# -# x=np.arange(-250, 250, 0.5) -# y=np.arange(-200, 200, 0.5) -# Nx, Ny= x.size, y.size -# XX, YY = np.meshgrid(x, y) -# XX, YY = XX.reshape(XX.size), YY.reshape(YY.size) -# -# alpha = 35 -# kk, ll = np.cos(alpha * np.pi/180) * np.array([0.9*k1, k1, 1.1* k1]), np.sin(alpha * np.pi/180) * np.array([0.9* k1, 1*k1, 1.1* k1]) -# M_k, M_l = kk.size, ll.size -# #y =np.sin(k1* x) + np.sin(k2* x) -# kk_mesh, ll_mesh = np.meshgrid(kk, ll) -# kk_mesh, ll_mesh = kk_mesh.reshape(kk_mesh.size), ll_mesh.reshape(ll_mesh.size) -# G = np.cos(np.outer(XX, kk_mesh) + np.outer(YY, ll_mesh)).T# + np.sin(np.outer(XX, kk_mesh) + np.outer(YY, ll_mesh)).T -# #G = np.vstack([ np.cos(np.outer(x, k) + np.outer(y, l)).T , np.sin(np.outer(x, k) + np.outer(y, l) ).T ] ).T -# G.shape -# -# plt.contourf(x, y, G.sum(0).reshape(Ny, Nx) ) -# plt.axis('equal') - - # %% radial coordincates - -def gaus_2d(x, y, pos_tuple, sigma_g ): - #grid = ( (XX - pos_tuple[0]) * (YY - pos_tuple[1]) ) - gx = np.exp(-0.5 * (x - pos_tuple[0])**2 /sigma_g**2 ) - gy = np.exp(-0.5 * (y - pos_tuple[1])**2 /sigma_g**2 ) - return np.outer(gx , gy).T - -if __name__ == '__main__': - - k_range = np.linspace(0, 0.1, 30) - l_range = np.linspace(-0.1, .1 , 60) - kk, ll = np.meshgrid(k_range, l_range) - gaus_lk = gaus_2d( k_range, l_range, [0.02, 0.0] , 0.01) - # - # M.figure_axis_xy(4, 4, view_scale= 0.5) - # plt.contourf(k_range, l_range, gaus_lk ) - # plt.axis('equal') - -# %% - -k_0 = 0.03 -l_0 = 0 -dk = 0.01 -stancil_size =0 -def get_stancils_kl(k_0, l_0, size =1 , dk= 0.01, mesh = True): - import numpy as np - """ - size is the stancil half width. if 0 the stancil is 1, if one the stancil is 3 and so on. - - """ - if size ==0: - stancil_k = np.array(k_0) - stancil_l = np.array(l_0) - else: - stancil_k = (np.arange(-size, size +1 , 1) *dk + k_0 ) - stancil_l = (np.arange(-size, size +1 , 1) *dk + l_0 ) - - if mesh: - stancil_k_mesh, stancil_l_mesh = np.meshgrid(stancil_k, stancil_l) - else: - stancil_k_mesh, stancil_l_mesh = stancil_k, stancil_l - - return stancil_k_mesh, stancil_l_mesh - - -def get_rand_stancils(a_mean, size =1 , dk= 0.01): - import numpy as np - """ - size is here the total stancil size. 
- dk is the 2d std of the gaussian - - """ - if size == 1: - stancil_k = np.array(a_mean) - else: - - stancil_k = np.random.normal(a_mean, dk,size-1) - stancil_k = np.insert(stancil_k ,0, a_mean ) - return stancil_k - - -def gaus_2d_mesh(XX,YY, pos_tuple, sigma_g ): - #grid = ( (XX - pos_tuple[0]) * (YY - pos_tuple[1]) ) - import numpy as np - gx = np.exp(-0.5 * (XX - pos_tuple[0])**2 /sigma_g**2 ) - gy = np.exp(-0.5 * (YY - pos_tuple[1])**2 /sigma_g**2 ) - return (gx * gy).T - -if __name__ == '__main__': - - k_mesh, l_mesh = get_stancils_kl(k_0, l_0, size= stancil_size, dk= dk) - amp_mesh =gaus_2d_mesh( k_mesh, l_mesh , [k_0, l_0] , dk) - - - stancil_k_mesh, stancil_l_mesh = k_mesh.reshape(k_mesh.size), l_mesh.reshape(l_mesh.size) - stancil_amp_mesh = amp_mesh.reshape(amp_mesh.size) - - # plt.contourf(k_mesh, l_mesh, amp_mesh) - # plt.axis('equal') - - -# %% radial coodinates -def get_stancils_polar( amp, angle_rad, size=1, dk = 0.01, mesh = True, plot_flag = True, amp_std= None, random=True): - """ - inputs: - - amp length of peak k vector in radial coordinates - angle_rad angle of peak k vector in radians between - pi/2 to + pi/2 - size determines number of wave numbers used M = size*2+1. if 0, it returns single wave number, if 1 it returns 3 wavenumbers - dk spread between the wavenumber - mesh (True) the returns are MxM stancils (M = size*2+1), if False only the terms along the cross are used. - plot_flag plots the wavegroup in k-l space - - returns: - list of k wave numbers, list of l wave numbers, list of relative amplitudes, shape of the stancil - """ - import numpy as np - k0 = amp * np.cos(angle_rad) - l0 = amp * np.sin(angle_rad) - - if amp_std is None: - amp_std = dk - else: - amp_std = amp_std - - if random: - - k_mesh = get_rand_stancils(k0, size= size, dk= dk) - l_mesh = get_rand_stancils(l0, size= size, dk= dk) - - amp_mesh = get_rand_stancils(0, size= size, dk= 0.2) - amp_mesh= np.ones(amp_mesh.size) - abs(amp_mesh) - stancil_k_mesh, stancil_l_mesh, stancil_amp_mesh = k_mesh, l_mesh, amp_mesh - else: - - k_mesh, l_mesh = get_stancils_kl(k0, l0, size= size, dk= dk, mesh= mesh) - amp_mesh = gaus_2d_mesh( k_mesh, l_mesh , [k0, l0] , amp_std) - - stancil_k_mesh, stancil_l_mesh = k_mesh.reshape(k_mesh.size), l_mesh.reshape(l_mesh.size) - stancil_amp_mesh = amp_mesh.reshape(amp_mesh.size) - - amp_mesh = amp_mesh/amp_mesh.sum() - - - #print(k_mesh, l_mesh, amp_mesh) - if plot_flag: - import matplotlib.pyplot as plt - if size == 1: - plt.plot(k_mesh, l_mesh, '.', markersize= amp_mesh*3, color= 'black') - else: - if random: - plt.scatter(k_mesh, l_mesh, amp_mesh*3, color= 'black') - else: - plt.contour(k_mesh, l_mesh, amp_mesh, colors= 'black', linewidths= 1) - - - return stancil_k_mesh, stancil_l_mesh, stancil_amp_mesh, k_mesh.shape - - -# %% -font_for_pres() -#k_mesh.shape -#for angle in np.arange(-80, 80+20, 40): -#for phase in np.arange(0, 2*np.pi, np.pi/3): -#for k_abs in np.arange(0.01, 0.09, 0.01): - -angle =30 -amp =1 -phase = 0 -#k_abs = 0.1 -k_abs = (2* np.pi/10)**2 / 9.81 -k_abs_noise = (2* np.pi/4)**2 / 9.81 - - - -x=np.arange(-200, 200, 0.5) * 2 -y=np.arange(-200, 200, 0.5) * 2 -Nx, Ny= x.size, y.size -XX, YY = np.meshgrid(x, y) -XX, YY = XX.reshape(XX.size), YY.reshape(YY.size) - -#for dk in np.arange(0.005+0.005, 0.02, 0.002): -#for size in [4, 5, 6]: -# %% - - dk = 0.016 - size = 4 - - #for angle in np.arange(-80, 80+20, 40): - #for k_abs in np.arange(0.01, 0.09, 0.01): - #for phase in np.arange(0, 2*np.pi, np.pi/3): - - - F = M.figure_axis_xy( 
fig_sizes['one_column_high'][0], fig_sizes['one_column_high'][1]* 1.5, view_scale = 0.8, container =True) - #plt.suptitle('k_abs=' + str(k_abs) +' \nangle=' + str(angle) + ' \nsize=' + str(size) +' dk=' + str(dk) ) - gs = GridSpec(8,1, wspace=0.1, hspace=30.8) - ax1 = F.fig.add_subplot(gs[0:4, 0]) - plt.title('Narrow-banded waves in \nspectral space', loc= 'left') - - #ax = plt.subplot(1, 2, 1) - #k_list, l_list, amp_weights, stancil_shape = get_stancils_polar(k_abs, angle * np.pi/180, size=size, dk = dk, mesh = True , plot_flag= True, random = True) - k_list, l_list, amp_weights, stancil_shape = [0.03485149, 0.03299441, 0.03838695, 0.02980313], [0.02012152, 0.0399972 , 0.02650097, 0.01930944], [1. , 0.77879636, 0.91494225, 0.95463948],20 - plt.scatter(k_list, l_list, s = 15*np.array(amp_weights)**4, color= col.rascade1) - plt.plot(k_list[0], l_list[0], '.', markersize= 9*amp_weights[0]**4, color= col.rascade2) - #np.array(amp_weights)**2 - circle1 = plt.Circle((k_list[0], l_list[0]), dk, color=col.black,linewidth= 0.5, fill=False) - ax1.add_patch(circle1) - - kk= np.arange(0, 0.1, 0.001) - plt.contourf(kk, kk, gaus_2d(kk, kk, (k_list[0], l_list[0]), dk), np.linspace(0,1.7, 21) ,cmap= plt.cm.Greys , zorder=0) - - k_noise, l_noise, amp_noise, stancil_shape = get_stancils_polar(0.25, 0 * np.pi/180, size=20, dk = 0.6, mesh = True , plot_flag= False, random = True) - amp_noise = (amp_noise *0+1) * 0 - - plt.xlim(0, 0.1) - #plt.axis('equal') - plt.ylim(0, 0.1) - plt.xlabel('along track\nwavenumber k') - plt.ylabel('across track\nwavenumber l') - #plt.axis('equal') - - # % Derive real space model - #plt.subplot(1, 2, 2) - ax2 = F.fig.add_subplot(gs[4:, 0]) - plt.title('Real space', loc= 'left') - k_all = np.concatenate([k_list, k_noise]) - l_all = np.concatenate([l_list, l_noise]) - amp_all = np.concatenate([amp_weights, amp_noise]) - amp_all.shape - - - G = np.vstack([ np.cos(np.outer(XX, k_all) + np.outer(YY, l_all)).T , np.sin(np.outer(XX, k_all) + np.outer(YY, l_all)).T ] ).T - #G = np.vstack([ np.cos(np.outer(XX, k_noise) + np.outer(YY, l_noise)).T , np.sin(np.outer(XX, k_noise) + np.outer(YY, l_noise) ).T ] ).T - - #phase1 = np.random.rand(1, amp_list.size) * np.pi*2 - phase = np.arange(0, amp_all.size) * np.pi/1 - - #amp_all.shape - b = np.hstack([ np.cos(phase)*amp_all, np.sin(phase) *amp_all]).squeeze() * amp - z_model = (G @ b).reshape(Ny, Nx) - - ax2.axhline(-45, color= col.rels['gt2l']) - ax2.axhline(45, color= col.rels['gt2r']) - plt.pcolor(x, y, z_model, cmap =plt.cm.coolwarm ) - - plt.axis('equal') - plt.xlabel('x (m)') - plt.ylabel('y (m)') - F.save_light(path = plot_path, name = 'fake_2d_publish_dk' +str(dk) +'_s' + str(int(size))) - #plt.show()
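# A minimal, self-contained sketch of the wave-field synthesis used in the emulator
# above: cosine/sine carriers for a handful of (k, l) wavenumber components are
# weighted, summed, and reshaped into a 2D field. The wavenumbers, amplitudes, and
# phases below are illustrative stand-ins for the get_stancils_polar output.
import numpy as np

x = np.arange(-200, 200, 2.0)
y = np.arange(-200, 200, 2.0)
XX, YY = [a.reshape(-1) for a in np.meshgrid(x, y)]

k = np.array([0.0349, 0.0330, 0.0384])     # along-track wavenumbers
l = np.array([0.0201, 0.0400, 0.0265])     # across-track wavenumbers
amp = np.array([1.00, 0.78, 0.91])         # relative component amplitudes
phase = np.arange(amp.size) * np.pi        # fixed phases, as in the script

# design matrix: one cosine and one sine column per (k, l) component
G = np.hstack([np.cos(np.outer(XX, k) + np.outer(YY, l)),
               np.sin(np.outer(XX, k) + np.outer(YY, l))])
b = np.concatenate([np.cos(phase) * amp, np.sin(phase) * amp])
z_model = (G @ b).reshape(y.size, x.size)  # synthetic 2D wave field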