diff --git a/conda_env.yml b/conda_env.yml
index 032c792..46082c3 100644
--- a/conda_env.yml
+++ b/conda_env.yml
@@ -9,13 +9,14 @@ channels:
   - defaults
 dependencies:
   - c-compiler
-  - python>=3.6,<3.9
+  - python>=3.9
   - cartopy
   - cvxopt
+  - rasterio
   - dask>=1.0
   - dask-jobqueue>=0.3
   - defusedxml
-  - h5py<3
+  - h5py
   - lxml
   - matplotlib
   - numpy
@@ -24,6 +25,7 @@ dependencies:
   - scikit-image
   - scipy
   - gdal
+  - mintpy
   - isce2
   - cython
   - setuptools
diff --git a/docs/install b/docs/install
deleted file mode 100755
index 8202477..0000000
--- a/docs/install
+++ /dev/null
@@ -1,11 +0,0 @@
-#! /bin/bash
-cd $MINOPY_HOME/miaplpy/lib;
-python setup.py
-echo 'Installing snaphu...';
-cd $MIAPLPY_HOME;
-wget --no-check-certificate https://web.stanford.edu/group/radar/softwareandlinks/sw/snaphu/snaphu-v2.0.4.tar.gz
-tar -xvf snaphu-v2.0.4.tar.gz
-mv snaphu-v2.0.4 snaphu;
-rm snaphu-v2.0.4.tar.gz;
-sed -i 's/\/usr\/local/$(MIAPLPY_HOME)\/snaphu/g' snaphu/src/Makefile
-cd snaphu/src; make
diff --git a/docs/installation.md b/docs/installation.md
index 5c32a27..c16452f 100644
--- a/docs/installation.md
+++ b/docs/installation.md
@@ -1,78 +1,39 @@
 ## Install MiaplPy
-#### 1. Set the following environment variables in your source file. It could be ~/.bash_profile file for bash user or ~/.cshrc file for csh/tcsh user.
-
-```
-if [ -z ${PYTHONPATH+x} ]; then export PYTHONPATH=""; fi
-
-##--------- MintPy ------------------##
-export MINTPY_HOME=~/tools/MintPy
-export PYTHONPATH=${PYTHONPATH}:${MINTPY_HOME}
-export PATH=${PATH}:${MINTPY_HOME}/mintpy/cli
-
-#---------- MiaplPy ------------------##
-export MIAPLPY_HOME=~/tools/MiaplPy
-export PYTHONPATH=${PYTHONPATH}:${MIAPLPY_HOME}
-export PATH=${PATH}:${MIAPLPY_HOME}/miaplpy
-export PATH=${PATH}:${MIAPLPY_HOME}/snaphu/bin
-
-```
-#### 2. Download
-
+#### 1. Download source code
 ```
 cd ~/tools
 git clone https://github.com/insarlab/MiaplPy.git
-git clone https://github.com/insarlab/MintPy.git
+cd ~/tools/MiaplPy
 ```
-#### 3. install dependencies
-
-Install miniconda if you have not already done so. You may need to close and restart the shell for changes to take effect.
+#### 2. Install dependencies
 ```
-# download and install miniconda
-# use wget or curl to download in command line or click from the web brower
-# MacOS users: curl https://repo.anaconda.com/miniconda/Miniconda3-latest-MacOSX-x86_64.sh -o Miniconda3-latest-MacOSX-x86_64.sh
-# Linux users: curl https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh -o Miniconda3-latest-Linux-x86_64.sh
-
-# Mac users:
-miniconda_version=Miniconda3-latest-MacOSX-x86_64.sh
-# Linux users:
-miniconda_version=Miniconda3-latest-Linux-x86_64.sh
-
-wget http://repo.continuum.io/miniconda/$miniconda_version --no-check-certificate -O $miniconda_version
-chmod +x $miniconda_version
-./$miniconda_version -b -p ~/tools/miniconda3
-~/tools/miniconda3/bin/conda init bash
+mamba env create --file conda_env.yml
 ```
-
-Run the following in your terminal to install the dependencies to a new environment miaplpy (recommended):
+Or, to install the dependencies into an existing environment:
 ```
-conda env create -f $MIAPLPY_HOME/docs/conda_env.yml
-conda activate miaplpy
+mamba env update --name my-existing-env --file conda_env.yml
 ```
-Or run the following in your terminal to install the dependencies to your custom environment, the default is base:
+#### 3. Install MiaplPy via pip
 ```
-conda install --yes -c conda-forge --file ~/tools/MiaplPy/docs/requirements.txt
+conda activate miaplpy
+python -m pip install .
 ```
-#### 4. Setup MiaplPy
-
-I. Compile
-```
-cd $MIAPLPY_HOME/miaplpy/lib;
-python setup.py
-```
-II. Install [SNAPHU](https://web.stanford.edu/group/radar/softwareandlinks/sw/snaphu/)
+#### 4. Install [SNAPHU](https://web.stanford.edu/group/radar/softwareandlinks/sw/snaphu/)
 ```
-cd $MIAPLPY_HOME;
+export TOOLS_DIR=~/tools
+cd ~/tools;
 wget --no-check-certificate https://web.stanford.edu/group/radar/softwareandlinks/sw/snaphu/snaphu-v2.0.5.tar.gz
 tar -xvf snaphu-v2.0.5.tar.gz
 mv snaphu-v2.0.5 snaphu;
 rm snaphu-v2.0.5.tar.gz;
-sed -i 's/\/usr\/local/$(MIAPLPY_HOME)\/snaphu/g' snaphu/src/Makefile
+sed -i 's/\/usr\/local/$(TOOLS_DIR)\/snaphu/g' snaphu/src/Makefile
 cd snaphu/src; make
+export PATH=${TOOLS_DIR}/snaphu/bin:${PATH}
 ```
 
 ### Notes
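Since the environment file now unpins `h5py` and adds `rasterio` and `mintpy`, a quick import check inside the freshly created environment is a useful smoke test before running `python -m pip install .`. A minimal sketch (not part of the patch; package names as listed in `conda_env.yml` above):

```python
# Smoke test for the updated conda environment: verifies the packages this
# change touches can be imported, and that h5py is no longer pinned below 3.
import h5py
import rasterio
import mintpy
from osgeo import gdal

print("h5py    :", h5py.__version__)   # expected >= 3 now that 'h5py<3' is gone
print("rasterio:", rasterio.__version__)
print("GDAL    :", gdal.__version__)
```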
diff --git a/pyproject.toml b/pyproject.toml
index 1ba69c7..2c333f0 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -15,15 +15,39 @@ license = { file = "LICENSE" }
 dynamic = ["version"]  # , "dependencies"]
 
+# Entry points for the command line interface
+[project.scripts]
+miaplpyApp = "miaplpy.miaplpyApp:main"
+'check_ifgs.py' = "miaplpy.check_ifgs:main"
+'correct_geolocation.py' = "miaplpy.correct_geolocation:main"
+'cpxview.py' = "miaplpy.cpxview:main"
+'find_short_baselines.py' = "miaplpy.find_short_baselines:find_baselines"
+'generate_temporal_coherence.py' = "miaplpy.generate_temporal_coherence:main"
+'generate_unwrap_mask.py' = "miaplpy.generate_unwrap_mask:main"
+'load_ifgram.py' = "miaplpy.load_ifgram:main"
+'load_slc_geometry.py' = "miaplpy.load_slc_geometry:main"
+'network_inversion.py' = "miaplpy.network_inversion:main"
+'phase_linking.py' = "miaplpy.phase_linking:main"
+'prep_slc_isce.py' = "miaplpy.prep_slc_isce:main"
+'scatterview.py' = "miaplpy.scatterview:main"
+'simulation.py' = "miaplpy.simulation:simulate_and_calculate_different_method_rms"
+'tcoh_view.py' = "miaplpy.tcoh_view:main"
+'generate_ifgram.py' = "miaplpy.generate_ifgram:main"
+'unwrap_ifgram.py' = "miaplpy.unwrap_ifgram:main"
+
 [tool.setuptools.packages.find]
 where = ["src"]
+#include = ["defaults/*.cfg", "defaults/*.full"]
+
+[tool.setuptools.package-data]
+miaplpy = ["defaults/*.cfg", "defaults/*.full"]
 
 [tool.setuptools.dynamic]
-# dependencies = { file = ["requirements.txt"] }
+#dependencies = { file = ["requirements.txt"] }
 version = {attr = "miaplpy.__version__"}
 
 [project.urls]
 Homepage = "https://github.com/insarlab/MiaplPy/"
-"Bug Tracker" = "https://github.com/insarlab/MiaplPy/issues"
\ No newline at end of file
+"Bug Tracker" = "https://github.com/insarlab/MiaplPy/issues"
diff --git a/src/miaplpy/__main__.py b/src/miaplpy/__main__.py
new file mode 100644
index 0000000..c0f3c85
--- /dev/null
+++ b/src/miaplpy/__main__.py
@@ -0,0 +1,8 @@
+"""Main module to provide command line interface to the workflows."""
+import sys
+
+from .miaplpyApp import main
+
+# https://docs.python.org/3/library/__main__.html#packaging-considerations
+# allows `python -m miaplpy` to work
+sys.exit(main())
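The `[project.scripts]` table above is what puts commands like `miaplpyApp` on `PATH` after `python -m pip install .`: for each `name = "module:function"` pair, the installer generates a small wrapper executable. A sketch of what such a generated wrapper effectively does (illustrative only; the real file is auto-generated by pip):

```python
#!/usr/bin/env python
# Sketch of the console-script wrapper pip generates for the
# `miaplpyApp = "miaplpy.miaplpyApp:main"` entry above; every other
# [project.scripts] entry gets an analogous wrapper on PATH.
import sys

from miaplpy.miaplpyApp import main

if __name__ == "__main__":
    # The exit status propagates from main(), matching sys.exit(main())
    # in src/miaplpy/__main__.py, so `miaplpyApp ...` and
    # `python -m miaplpy ...` behave the same.
    sys.exit(main())
```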
diff --git a/src/miaplpy/generate_ifgram.py b/src/miaplpy/generate_ifgram.py
index 35db28b..bab5716 100755
--- a/src/miaplpy/generate_ifgram.py
+++ b/src/miaplpy/generate_ifgram.py
@@ -16,14 +16,21 @@ def enablePrint():
 blockPrint()
 
 import datetime
-import isceobj
 import numpy as np
 from miaplpy.objects.arg_parser import MiaplPyParser
 import h5py
 from math import sqrt, exp
+from osgeo import gdal
+import dask.array as da
+from pyproj import CRS
 
 enablePrint()
 
+DEFAULT_ENVI_OPTIONS = (
+    "INTERLEAVE=BIL",
+    "SUFFIX=ADD"
+    )
+
 
 def main(iargs=None):
     """
@@ -47,228 +54,127 @@ def main(iargs=None):
     print(inps.out_dir)
     os.makedirs(inps.out_dir, exist_ok=True)
 
-    resampName = inps.out_dir + '/fine'
-    resampInt = resampName + '.int'
-    filtInt = os.path.dirname(resampInt) + '/filt_fine.int'
-    cor_file = os.path.dirname(resampInt) + '/filt_fine.cor'
-
-    if os.path.exists(cor_file + '.xml'):
-        return
+    ifg_file = inps.out_dir + "/filt_fine.int"
+    cor_file = inps.out_dir + "/filt_fine.cor"
 
-    length, width = run_interferogram(inps, resampName)
+    run_interferogram(inps, ifg_file)
 
-    filter_strength = inps.filter_strength
-    runFilter(resampInt, filtInt, filter_strength)
-
-    estCoherence(filtInt, cor_file)
-    #run_interpolation(filtInt, inps.stack_file, length, width)
+    window_size = (6, 12)
+    estimate_correlation(ifg_file, cor_file, window_size)
 
     return
 
 
-def run_interferogram(inps, resampName):
-    if inps.azlooks * inps.rglooks > 1:
-        extention = '.ml.slc'
-    else:
-        extention = '.slc'
+def run_interferogram(inps, ifg_file):
+
+    if os.path.exists(ifg_file):
+        return
 
     with h5py.File(inps.stack_file, 'r') as ds:
         date_list = np.array([x.decode('UTF-8') for x in ds['date'][:]])
-        ref_ind = np.where(date_list==inps.reference)[0]
-        sec_ind = np.where(date_list==inps.secondary)[0]
+        ref_ind = np.where(date_list == inps.reference)[0]
+        sec_ind = np.where(date_list == inps.secondary)[0]
         phase_series = ds['phase']
-        amplitudes = ds['amplitude']
 
         length = phase_series.shape[1]
         width = phase_series.shape[2]
 
-        resampInt = resampName + '.int'
+        box_size = 3000
 
-        intImage = isceobj.createIntImage()
-        intImage.setFilename(resampInt)
-        intImage.setAccessMode('write')
-        intImage.setWidth(width)
-        intImage.setLength(length)
-        intImage.createImage()
+        dtype = gdal.GDT_CFloat32
+        driver = gdal.GetDriverByName('ENVI')
+        out_raster = driver.Create(ifg_file, width, length, 1, dtype, DEFAULT_ENVI_OPTIONS)
+        band = out_raster.GetRasterBand(1)
 
-        out_ifg = intImage.asMemMap(resampInt)
-        box_size = 3000
-        num_row = int(np.ceil(length / box_size))
-        num_col = int(np.ceil(width / box_size))
-        for i in range(num_row):
-            for k in range(num_col):
-                row_1 = i * box_size
-                row_2 = i * box_size + box_size
-                col_1 = k * box_size
-                col_2 = k * box_size + box_size
-                if row_2 > length:
-                    row_2 = length
-                if col_2 > width:
-                    col_2 = width
-
-                ref_phase = phase_series[ref_ind, row_1:row_2, col_1:col_2].reshape(row_2 - row_1, col_2 - col_1)
-                sec_phase = phase_series[sec_ind, row_1:row_2, col_1:col_2].reshape(row_2 - row_1, col_2 - col_1)
-                ref_amplitude = amplitudes[ref_ind, row_1:row_2, col_1:col_2].reshape(row_2 - row_1, col_2 - col_1)
-                sec_amplitude = amplitudes[sec_ind, row_1:row_2, col_1:col_2].reshape(row_2 - row_1, col_2 - col_1)
-
-                ifg = (ref_amplitude * sec_amplitude) * np.exp(1j * (ref_phase - sec_phase))
-
-                out_ifg[row_1:row_2, col_1:col_2, 0] = ifg[:, :]
-
-        intImage.renderHdr()
-        intImage.finalizeImage()
-
-    return length, width
-
-
-
-def runFilter(infile, outfile, filterStrength):
-    from mroipac.filter.Filter import Filter
-
-    # Initialize the flattened interferogram
-    intImage = isceobj.createIntImage()
-    intImage.load( infile + '.xml')
-    intImage.setAccessMode('read')
-    intImage.createImage()
-
-    # Create the filtered interferogram
-    filtImage = isceobj.createIntImage()
-    filtImage.setFilename(outfile)
-    filtImage.setWidth(intImage.getWidth())
-    filtImage.setAccessMode('write')
-    filtImage.createImage()
-
-    objFilter = Filter()
-    objFilter.wireInputPort(name='interferogram',object=intImage)
-    objFilter.wireOutputPort(name='filtered interferogram', object=filtImage)
-    objFilter.goldsteinWerner(alpha=filterStrength)
-
-    intImage.finalizeImage()
-    filtImage.finalizeImage()
-
-def runFilterG(infile, outfile, filterStrength):
-
-    # Initialize the flattened interferogram
-    intImage = isceobj.createIntImage()
-    intImage.load(infile + '.xml')
-    intImage.setAccessMode('read')
-    intImage.createImage()
-
-    # Create the filtered interferogram
-    filtImage = isceobj.createIntImage()
-    filtImage.setFilename(outfile)
-    filtImage.setWidth(intImage.getWidth())
-    filtImage.setLength(intImage.getLength())
-    filtImage.setAccessMode('write')
-    filtImage.createImage()
-
-    img = intImage.memMap(mode='r', band=0)
-    original = np.fft.fft2(img[:, :])
-    center = np.fft.fftshift(original)
-    LowPassCenter = center * gaussianLP(100, img.shape)
-    LowPass = np.fft.ifftshift(LowPassCenter)
-    inverse_LowPass = np.fft.ifft2(LowPass)
-
-    out_filtered = filtImage.asMemMap(outfile)
-    out_filtered[:, :, 0] = inverse_LowPass[:, :]
-
-    filtImage.renderHdr()
-
-    intImage.finalizeImage()
-    filtImage.finalizeImage()
-
-
-def estCoherence(outfile, corfile):
-    from mroipac.icu.Icu import Icu
-
-    # Create phase sigma correlation file here
-    filtImage = isceobj.createIntImage()
-    filtImage.load(outfile + '.xml')
-    filtImage.setAccessMode('read')
-    filtImage.createImage()
-
-    phsigImage = isceobj.createImage()
-    phsigImage.dataType = 'FLOAT'
-    phsigImage.bands = 1
-    phsigImage.setWidth(filtImage.getWidth())
-    phsigImage.setFilename(corfile)
-    phsigImage.setAccessMode('write')
-    phsigImage.createImage()
-
-    icuObj = Icu(name='sentinel_filter_icu')
-    icuObj.configure()
-    icuObj.unwrappingFlag = False
-    icuObj.useAmplitudeFlag = False
-    # icuObj.correlationType = 'NOSLOPE'
+        for i in range(0, length, box_size):
+            for j in range(0, width, box_size):
+                ref_phase = phase_series[ref_ind, i:i+box_size, j:j+box_size].squeeze()
+                sec_phase = phase_series[sec_ind, i:i+box_size, j:j+box_size].squeeze()
 
-    icuObj.icu(intImage=filtImage, phsigImage=phsigImage)
-    phsigImage.renderHdr()
+                ifg = np.exp(1j * np.angle(np.exp(1j * ref_phase) * np.exp(-1j * sec_phase)))
+                band.WriteArray(ifg, j, i)
 
-    filtImage.finalizeImage()
-    phsigImage.finalizeImage()
+    band.SetNoDataValue(np.nan)
+    out_raster.FlushCache()
+    out_raster = None
 
-def run_interpolation(filtifg, tcoh_file, length, width):
-    from scipy.spatial import Delaunay
-    from scipy.interpolate import LinearNDInterpolator
+    write_projection(inps.stack_file, ifg_file)
 
-    ifg = np.memmap(filtifg, dtype=np.complex64, mode='r+', shape=(length, width))
-    with h5py.File(tcoh_file, 'r') as ds:
-        tcoh_ds = ds['temporalCoherence']
+    return
 
-        box_size = 3000
-        num_row = int(np.ceil(length / box_size))
-        num_col = int(np.ceil(width / box_size))
-        for i in range(num_row):
-            for k in range(num_col):
-                row_1 = i * box_size
-                row_2 = i * box_size + box_size
-                col_1 = k * box_size
-                col_2 = k * box_size + box_size
-                if row_2 > length:
-                    row_2 = length
-                if col_2 > width:
-                    col_2 = width
-
-                ifg_sub = ifg[row_1:row_2, col_1:col_2]
-                tcoh = tcoh_ds[0, row_1:row_2, col_1:col_2]
-                mask = np.zeros((tcoh.shape[0], tcoh.shape[1]))
-                mask[tcoh >= 0.5] = 1
-
-                y, x = np.where(mask == 1)
-                yi, xi = np.where(mask == 0)
-                zifg = np.angle(ifg_sub[y, x])
-                points = np.array([[r, c] for r, c in zip(y, x)])
-                tri = Delaunay(points)
-                func = LinearNDInterpolator(tri, zifg, fill_value=0)
-                interp_points = np.array([[r, c] for r, c in zip(yi.flatten(), xi.flatten())])
-                res = np.exp(1j * func(interp_points)) * (np.abs(ifg_sub[yi, xi]).flatten())
-                ifg[y + row_1, x + col_1] = ifg_sub[y, x]
-                ifg[yi + row_1, xi + col_1] = res
-
-    del ifg
+
+def write_projection(src_file, dst_file) -> None:
+    if src_file.endswith('.h5'):
+        with h5py.File(src_file, 'r') as ds:
+            attrs = dict(ds.attrs)
+            if 'spatial_ref' in attrs.keys():
+                projection = attrs['spatial_ref'][3:-1]
+                geotransform = [attrs['X_FIRST'], attrs['X_STEP'], 0, attrs['Y_FIRST'], 0, attrs['Y_STEP']]
+                geotransform = [float(x) for x in geotransform]
+                nodata = np.nan
+            else:
+                geotransform = [0, 1, 0, 0, 0, -1]
+                projection = CRS.from_epsg(4326).to_wkt()
+                nodata = np.nan
+    else:
+        ds_src = gdal.Open(src_file, gdal.GA_Update)
+        projection = ds_src.GetProjection()
+        geotransform = ds_src.GetGeoTransform()
+        nodata = ds_src.GetRasterBand(1).GetNoDataValue()
+
+    ds_dst = gdal.Open(dst_file, gdal.GA_Update)
+    ds_dst.SetGeoTransform(geotransform)
+    ds_dst.SetProjection(projection)
+    ds_dst.GetRasterBand(1).SetNoDataValue(nodata)
+    ds_src = ds_dst = None
 
     return
 
-def distance(point1,point2):
-    return sqrt((point1[0]-point2[0])**2 + (point1[1]-point2[1])**2)
-
-def gaussianLP(D0,imgShape):
-    base = np.zeros(imgShape[:2])
-    rows, cols = imgShape[:2]
-    center = (rows/2,cols/2)
-    for x in range(cols):
-        for y in range(rows):
-            base[y,x] = exp(((-distance((y,x),center)**2)/(2*(D0**2))))
-    return base
-
-def gaussianHP(D0,imgShape):
-    base = np.zeros(imgShape[:2])
-    rows, cols = imgShape[:2]
-    center = (rows/2,cols/2)
-    for x in range(cols):
-        for y in range(rows):
-            base[y,x] = 1 - exp(((-distance((y,x),center)**2)/(2*(D0**2))))
-    return base
+
+def estimate_correlation(ifg_file, cor_file, window_size):
+    if os.path.exists(cor_file):
+        return
+    ds = gdal.Open(ifg_file)
+    phase = ds.GetRasterBand(1).ReadAsArray()
+    length, width = ds.RasterYSize, ds.RasterXSize
+    nan_mask = np.isnan(phase)
+    zero_mask = np.angle(phase) == 0
+    image = np.exp(1j * np.nan_to_num(np.angle(phase)))
+
+    col_size, row_size = window_size
+    row_pad = row_size // 2
+    col_pad = col_size // 2
+
+    image_padded = np.pad(
+        image, ((row_pad, row_pad), (col_pad, col_pad)), mode="constant"
+    )
+
+    integral_img = np.cumsum(np.cumsum(image_padded, axis=0), axis=1)
+
+    window_mean = (
+        integral_img[row_size:, col_size:]
+        - integral_img[:-row_size, col_size:]
+        - integral_img[row_size:, :-col_size]
+        + integral_img[:-row_size, :-col_size]
+    )
+    window_mean /= row_size * col_size
+
+    cor = np.clip(np.abs(window_mean), 0, 1)
+    cor[nan_mask] = np.nan
+    cor[zero_mask] = 0
+
+    dtype = gdal.GDT_Float32
+    driver = gdal.GetDriverByName('ENVI')
+    out_raster = driver.Create(cor_file, width, length, 1, dtype, DEFAULT_ENVI_OPTIONS)
+    band = out_raster.GetRasterBand(1)
+    band.WriteArray(cor, 0, 0)
+    band.SetNoDataValue(np.nan)
+    out_raster.FlushCache()
+    out_raster = None
+
+    write_projection(ifg_file, cor_file)
+
+    return
+
 
 if __name__ == '__main__':
     main()
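The new `estimate_correlation()` above computes the boxcar average of the unit-phasor interferogram with a summed-area table (two cumulative sums) instead of an explicit sliding window, so the cost is independent of the window size. A self-contained sketch of the same trick, checked against a direct windowed mean on one pixel (toy sizes; names here are mine, not the module's):

```python
import numpy as np

def windowed_mean_integral(image, row_size, col_size):
    """Boxcar mean via a summed-area table, as in estimate_correlation()."""
    row_pad, col_pad = row_size // 2, col_size // 2
    padded = np.pad(image, ((row_pad, row_pad), (col_pad, col_pad)), mode="constant")
    # cumulative sums along both axes give the summed-area table
    integral = np.cumsum(np.cumsum(padded, axis=0), axis=1)
    # four-corner difference = sum over each row_size x col_size window
    total = (integral[row_size:, col_size:]
             - integral[:-row_size, col_size:]
             - integral[row_size:, :-col_size]
             + integral[:-row_size, :-col_size])
    return total / (row_size * col_size)

if __name__ == "__main__":
    rng = np.random.default_rng(0)
    img = np.exp(1j * rng.uniform(-np.pi, np.pi, size=(40, 50)))
    fast = windowed_mean_integral(img, 6, 12)
    # direct check on one interior pixel over the same 6x12 neighborhood
    i, j = 20, 25
    padded = np.pad(img, ((3, 3), (6, 6)), mode="constant")
    direct = padded[i + 1:i + 7, j + 1:j + 13].mean()
    assert np.allclose(fast[i, j], direct)
    print("windowed mean matches direct computation:", abs(fast[i, j]))
```

The magnitude of this windowed mean of unit phasors is exactly the correlation estimate the function clips to [0, 1].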
diff --git a/src/miaplpy/lib/invert.pxd b/src/miaplpy/lib/invert.pxd
index e27dff2..294205f 100755
--- a/src/miaplpy/lib/invert.pxd
+++ b/src/miaplpy/lib/invert.pxd
@@ -2,10 +2,9 @@
 cimport numpy as cnp
 cimport cython
 
-cdef void write_wrapped(list, bytes, int, int, bytes, bytes)
 cdef void write_hdf5_block_3D(object, float[:, :, ::1], bytes, list)
 cdef void write_hdf5_block_2D_int(object, int[:, ::1], bytes, list)
-
+cdef void write_hdf5_block_2D_float(object, float[:, ::1], bytes, list)
 
 cdef class CPhaseLink:
     cdef object inps, slcStackObj
diff --git a/src/miaplpy/lib/invert.pyx b/src/miaplpy/lib/invert.pyx
index 25fee84..f296492 100755
--- a/src/miaplpy/lib/invert.pyx
+++ b/src/miaplpy/lib/invert.pyx
@@ -8,37 +8,25 @@ from libc.stdio cimport printf
 from miaplpy.objects.slcStack import slcStack
 import h5py
 import time
-import isce2
-from isceobj.Util.ImageUtil import ImageLib as IML
-
+from datetime import datetime
+from osgeo import gdal, gdal_array
+from pyproj import CRS
+from miaplpy.objects.crop_geo import create_grid_mapping, create_tyx_dsets
 from . cimport utils
-from .utils import process_patch_c
-
-
-cdef void write_wrapped(list date_list, bytes out_dir, int width, int length, bytes RSLCfile, bytes date):
-
-    cdef int d = date_list.index(date.decode('UTF-8'))
-    cdef bytes out_name, wrap_date
-    cdef object fhandle
-    cdef float complex[:, :, ::1] out_rslc
-
-    printf('write wrapped_phase {}'.format(date.decode('UTF-8')))
-    wrap_date = os.path.join(out_dir, b'wrapped_phase', date)
-    os.makedirs(wrap_date.decode('UTF-8'), exist_ok=True)
-    out_name = os.path.join(wrap_date, date + b'.slc')
-
-    if not os.path.exists(out_name.decode('UTF-8')):
-        fhandle = h5py.File(RSLCfile.decode('UTF-8'), 'r')
-        out_rslc = np.memmap(out_name, dtype='complex64', mode='w+', shape=(length, width))
-        out_rslc[:, :] = fhandle['slc'][d, :, :]
-        fhandle.close()
-        IML.renderISCEXML(out_name.decode('UTF-8'), bands=1, nyy=length, nxx=width, datatype='complex64',
-                          scheme='BSQ')
-    else:
-        IML.renderISCEXML(out_name.decode('UTF-8'), bands=1, nyy=length, nxx=width, datatype='complex64',
-                          scheme='BSQ')
-    return
-
+from .utils import process_patch_c
+
+DEFAULT_TILE_SIZE = [128, 128]
+DEFAULT_TIFF_OPTIONS = (
+    "COMPRESS=DEFLATE",
+    "ZLEVEL=4",
+    "TILED=YES",
+    f"BLOCKXSIZE={DEFAULT_TILE_SIZE[1]}",
+    f"BLOCKYSIZE={DEFAULT_TILE_SIZE[0]}",
+)
+DEFAULT_ENVI_OPTIONS = (
+    "INTERLEAVE=BIL",
+    "SUFFIX=ADD"
+)
 
 cdef void write_hdf5_block_3D(object fhandle, float[:, :, ::1] data, bytes datasetName, list block):
@@ -50,6 +38,11 @@ cdef void write_hdf5_block_2D_int(object fhandle, int[:, ::1] data, bytes datase
     fhandle[datasetName.decode('UTF-8')][block[0]:block[1], block[2]:block[3]] = data
     return
 
+cdef void write_hdf5_block_2D_float(object fhandle, float[:, ::1] data, bytes datasetName, list block):
+
+    fhandle[datasetName.decode('UTF-8')][block[0]:block[1], block[2]:block[3]] = data
+    return
+
 
 cdef class CPhaseLink:
@@ -66,11 +59,12 @@ cdef class CPhaseLink:
         self.patch_size = np.int32(inps.patch_size)
         self.ps_shp = np.int32(inps.ps_shp)
         self.out_dir = self.work_dir + b'/inverted'
+        #self.num_archived = np.int32(inps.num_archived)
         os.makedirs(self.out_dir.decode('UTF-8'), exist_ok='True')
-
         self.slcStackObj = slcStack(inps.slc_stack)
         self.metadata = self.slcStackObj.get_metadata()
-        self.all_date_list = self.slcStackObj.get_date_list()
+        #self.all_date_list = self.slcStackObj.get_date_list()
+        self.all_date_list = self.get_dates(inps.slc_stack)
         with h5py.File(inps.slc_stack, 'r') as f:
             self.prep_baselines = f['bperp'][:]
         self.n_image, self.length, self.width = self.slcStackObj.get_size()
@@ -100,15 +94,50 @@ cdef class CPhaseLink:
 
         self.window_for_shp()
 
+        #self.RSLCfile = self.out_dir + b'/phase_series.h5'
         self.RSLCfile = self.out_dir + b'/phase_series.h5'
-
         if b'sequential' == self.phase_linking_method[0:10]:
             self.sequential = True
         else:
            self.sequential = False
         return
 
+    def get_projection(self, slc_stack):
+        cdef object ds, gt
+        cdef str projection
+        cdef dict attrs
+        cdef list geotransform #, extent
+        with h5py.File(slc_stack, 'r') as ds:
+            attrs = dict(ds.attrs)
+            if 'spatial_ref' in attrs.keys():
+                projection = attrs['spatial_ref'][3:-1]
+                geotransform = [attrs['X_FIRST'], attrs['X_STEP'], 0, attrs['Y_FIRST'], 0, attrs['Y_STEP']]
+                geotransform = [float(x) for x in geotransform]
+            else:
+                geotransform = [0, 1, 0, 0, 0, -1]
+                projection = CRS.from_epsg(4326).to_wkt()
+
+        return projection, geotransform
+
+    def get_dates(self, slc_stack):
+        cdef object ds, ff, ft
+        cdef list dates
+        cdef double[::1] tt
+        cdef double st
+        with h5py.File(slc_stack) as ds:
+            if 'date' in ds.keys():
+                dates = list(ds['date'][()])
+                return dates
+            else:
+                tt = ds['time'][()]
+                ff = datetime.strptime(ds['time'].attrs['units'].split('seconds since ')[1], '%Y-%m-%d %H:%M:%S.%f')
+                ft = datetime.strptime('19691231-16', '%Y%m%d-%H')
+                st = (ff - ft).total_seconds()
+                dates = [datetime.fromtimestamp(t+st) for t in tt]
+                return [t.strftime('%Y%m%d') for t in dates]
+
+
     def patch_slice(self):
         """
         Slice the image into patches of size patch_size
@@ -158,7 +187,6 @@ cdef class CPhaseLink:
 
         return
 
-
     def initiate_output(self):
         cdef object RSLC, psf
 
@@ -188,7 +216,20 @@ cdef class CPhaseLink:
                                 maxshape=(None, self.length, self.width),
                                 chunks=True,
                                 dtype=np.float32)
-
+            '''
+            if b'real_time' == self.phase_linking_method[0:9]:
+                RSLC.create_dataset('phase_seq',
+                                    shape=(self.n_image, self.length, self.width),
+                                    maxshape=(None, self.length, self.width),
+                                    chunks=True,
+                                    dtype=np.float32)
+
+                RSLC.create_dataset('amplitude_seq',
+                                    shape=(self.n_image, self.length, self.width),
+                                    maxshape=(None, self.length, self.width),
+                                    chunks=True,
+                                    dtype=np.float32)
+            '''
             RSLC.create_dataset('shp',
                                 shape=(self.length, self.width),
                                 maxshape=(self.length, self.width),
@@ -238,27 +279,17 @@ cdef class CPhaseLink:
 
     def get_datakwargs(self):
 
-        cdef dict data_kwargs = {
-            "range_window" : self.range_window,
-            "azimuth_window" : self.azimuth_window,
-            "width" : self.width,
-            "length" : self.length,
-            "n_image" : self.n_image,
-            "slcStackObj" : self.slcStackObj,
-            "distance_threshold" : self.distance_thresh,
-            "def_sample_rows" : np.array(self.sample_rows),
-            "def_sample_cols" : np.array(self.sample_cols),
-            "reference_row" : self.reference_row,
-            "reference_col" : self.reference_col,
-            "phase_linking_method" : self.phase_linking_method,
-            "total_num_mini_stacks" : self.total_num_mini_stacks,
-            "default_mini_stack_size" : self.mini_stack_default_size,
-            'ps_shp': self.ps_shp,
-            "shp_test": self.shp_test,
-            "out_dir": self.out_dir,
-            "time_lag": self.time_lag,
-            "mask_file": self.mask_file,
-        }
+        cdef dict data_kwargs = dict(range_window=self.range_window, azimuth_window=self.azimuth_window,
+                                     width=self.width, length=self.length, n_image=self.n_image,
+                                     slcStackObj=self.slcStackObj, distance_threshold=self.distance_thresh,
+                                     def_sample_rows=np.array(self.sample_rows),
+                                     def_sample_cols=np.array(self.sample_cols), reference_row=self.reference_row,
+                                     reference_col=self.reference_col, phase_linking_method=self.phase_linking_method,
+                                     total_num_mini_stacks=self.total_num_mini_stacks,
+                                     default_mini_stack_size=self.mini_stack_default_size, ps_shp=self.ps_shp,
+                                     shp_test=self.shp_test, out_dir=self.out_dir, time_lag=self.time_lag,
+                                     mask_file=self.mask_file)
+        #"num_archived": self.num_archived,
 
         return data_kwargs
@@ -278,15 +309,94 @@ cdef class CPhaseLink:
             print('time used: {:02.0f} mins {:02.1f} secs.\n'.format(m, s))
         return
 
+    def set_projection_hdf(self, object fhandle, object projection, list geotransform):
+        # cdef object grp
+
+        create_grid_mapping(group=fhandle, crs=projection, gt=list(geotransform))
+
+        #if "georeference" in fhandle:
+        #    grp = fhandle["georeference"]
+        #else:
+        #    grp = fhandle.create_group("georeference")
+
+        #if not "transform" in grp.keys():
+        #    grp.create_dataset("transform", data=geotransform, dtype=np.float32)
+        #grp.attrs["crs"] = projection
+        #grp.attrs["extent"] = crop_extent
+        #grp.attrs["pixel_size_x"] = self.pixel_width
+        #grp.attrs["pixel_size_y"] = self.pixel_height
+        return
+
+    def set_projection_gdal1_int(self, cnp.ndarray[int, ndim=2] data, int bands, bytes output, str description,
+                                 str projection, list geotransform):
+        cdef object driver, dataset, band1, target_crs
+        driver = gdal.GetDriverByName('ENVI')
+        dataset = driver.Create(output, self.width, self.length, 1, gdal.GDT_Int16, DEFAULT_ENVI_OPTIONS)
+        dataset.SetGeoTransform(list(geotransform))
+        dataset.SetProjection(projection)  #.to_wkt()) #target_crs.ExportToWkt())
+        band1 = dataset.GetRasterBand(1)
+        band1.SetDescription(description)
+        gdal_array.BandWriteArray(band1, data, xoff=0, yoff=0)
+        # band1.WriteArray(data, xoff=0, yoff=0)
+        band1.SetNoDataValue(np.nan)
+
+        dataset.FlushCache()
+        dataset = None
+
+        return
+
+    def set_projection_gdal1(self, cnp.ndarray[float, ndim=2] data, int bands, bytes output, str description,
+                             str projection, list geotransform):
+        cdef object driver, dataset, band1, target_crs
+        driver = gdal.GetDriverByName('ENVI')
+        dataset = driver.Create(output, self.width, self.length, 1, gdal.GDT_Float32, DEFAULT_ENVI_OPTIONS)
+        dataset.SetGeoTransform(list(geotransform))
+        dataset.SetProjection(projection)  #.to_wkt()) #target_crs.ExportToWkt())
+        band1 = dataset.GetRasterBand(1)
+        band1.SetDescription(description)
+        gdal_array.BandWriteArray(band1, data, xoff=0, yoff=0)
+        # band1.WriteArray(data, xoff=0, yoff=0)
+        band1.SetNoDataValue(np.nan)
+
+        dataset.FlushCache()
+        dataset = None
+
+        return
+
+    def set_projection_gdalm(self, cnp.ndarray[float, ndim=3] data, int bands, bytes output, list description,
+                             str projection, list geotransform):
+        cdef int i
+        cdef object driver, dataset, band
+        driver = gdal.GetDriverByName('ENVI')
+        dataset = driver.Create(output, self.width, self.length, bands, gdal.GDT_Float32, DEFAULT_ENVI_OPTIONS)
+
+        for i in range(bands):
+            band = dataset.GetRasterBand(i+1)
+            band.SetDescription(description[i])
+            gdal_array.BandWriteArray(band, data[i, :, :], xoff=0, yoff=0)
+            #band.WriteArray(data[i, :, :], xoff=0, yoff=0)
+            band.SetNoDataValue(np.nan)
+            del band
+
+        dataset.SetGeoTransform(list(geotransform))
+        dataset.SetProjection(projection)  #.to_wkt())
+        dataset.FlushCache()
+        dataset = None
+
+        return
+
     def unpatch(self):
         cdef list block
         cdef object fhandle, psf
         cdef int index, box_length, box_width
         cdef cnp.ndarray[int, ndim=1] box
         cdef bytes patch_dir
-        cdef float complex[:, :, ::1] rslc_ref
+        cdef str projection
+        cdef float complex[:, :, ::1] rslc_ref, rslc_ref_seq
         cdef cnp.ndarray[float, ndim=3] temp_coh, ps_prod, eig_values = np.zeros((3, self.length, self.width), dtype=np.float32)
         cdef cnp.ndarray[float, ndim=2] amp_disp = np.zeros((self.length, self.width), dtype=np.float32)
+        #cdef cnp.ndarray[int, ndim=2] reference_index_map = np.zeros((self.length, self.width), dtype=np.int32)
+        cdef list geotransform
 
         if os.path.exists(self.RSLCfile.decode('UTF-8')):
             print('Deleting old phase_series.h5 ...')
@@ -300,8 +410,13 @@ cdef class CPhaseLink:
         print('Concatenate and write wrapped phase time series to HDF5 file phase_series.h5 ')
         print('open HDF5 file phase_series.h5 in a mode')
 
+        dask_chunks = (1, 128 * 10, 128 * 10)
+        projection, geotransform = self.get_projection(self.inps.slc_stack)
+
         with h5py.File(self.RSLCfile.decode('UTF-8'), 'a') as fhandle:
-
+            #create_grid_mapping(group=fhandle, crs=projection, gt=list(geotransform))
+            #create_tyx_dsets(group=fhandle, gt=list(geotransform), times=self.all_date_list, shape=(self.length, self.width))
+
             for index, box in enumerate(self.box_list):
                 box_width = box[2] - box[0]
                 box_length = box[3] - box[1]
@@ -312,6 +427,9 @@ cdef class CPhaseLink:
                 shp = np.load(patch_dir.decode('UTF-8') + '/shp.npy', allow_pickle=True)
                 mask_ps = np.load(patch_dir.decode('UTF-8') + '/mask_ps.npy', allow_pickle=True)
                 ps_prod = np.load(patch_dir.decode('UTF-8') + '/ps_products.npy', allow_pickle=True)
+                #reference_index = np.load(patch_dir.decode('UTF-8') + '/reference_index.npy', allow_pickle=True)
+                #if b'real_time' == self.phase_linking_method[0:9]:
+                #    rslc_ref_seq = np.load(patch_dir.decode('UTF-8') + '/phase_ref_seq.npy', allow_pickle=True)
 
                 temp_coh[temp_coh<0] = 0
 
@@ -320,82 +438,48 @@ cdef class CPhaseLink:
 
                 # wrapped interferograms 3D
                 block = [0, self.n_image, box[1], box[3], box[0], box[2]]
-                #write_hdf5_block_3D(fhandle, rslc_ref, b'slc', block)
+                ## write_hdf5_block_3D(fhandle, rslc_ref, b'slc', block)
                 write_hdf5_block_3D(fhandle, np.angle(rslc_ref), b'phase', block)
                 write_hdf5_block_3D(fhandle, np.abs(rslc_ref), b'amplitude', block)
+                #if b'real_time' == self.phase_linking_method[0:9]:
+                #    write_hdf5_block_3D(fhandle, np.angle(rslc_ref_seq), b'phase_seq', block)
+                #    write_hdf5_block_3D(fhandle, np.abs(rslc_ref_seq), b'amplitude_seq', block)
 
                 # SHP - 2D
                 block = [box[1], box[3], box[0], box[2]]
                 write_hdf5_block_2D_int(fhandle, shp, b'shp', block)
 
                 amp_disp[block[0]:block[1], block[2]:block[3]] = ps_prod[0, :, :]
+                #reference_index_map[block[0]:block[1], block[2]:block[3]] = reference_index[:, :]
 
                 # temporal coherence - 3D
                 block = [0, 2, box[1], box[3], box[0], box[2]]
                 write_hdf5_block_3D(fhandle, temp_coh, b'temporalCoherence', block)
 
                 eig_values[0:3, block[2]:block[3], block[4]:block[5]] = ps_prod[1:4, :, :]
 
-            print('write amplitude dispersion and top eigen values')
-            amp_disp_file = self.out_dir + b'/amp_dipersion_index'
-            if not os.path.exists(amp_disp_file.decode('UTF-8')):
-                amp_disp_memmap = np.memmap(amp_disp_file.decode('UTF-8'), mode='write', dtype='float32',
-                                            shape=(self.length, self.width))
-                IML.renderISCEXML(amp_disp_file.decode('UTF-8'), bands=1, nyy=self.length, nxx=self.width,
-                                  datatype='float32', scheme='BIL')
-            else:
-                amp_disp_memmap = np.memmap(amp_disp_file.decode('UTF-8'), mode='r+', dtype='float32',
-                                            shape=(self.length, self.width))
-            amp_disp_memmap[:, :] = amp_disp[:, :]
-            amp_disp_memmap = None
+        ###
+        print('close HDF5 file phase_series.h5.')
 
+        print('write amplitude dispersion and top eigen values')
+        amp_disp_file = self.out_dir + b'/amp_dipersion_index'
+        self.set_projection_gdal1(amp_disp, 1,
+                                  amp_disp_file, 'Amplitude dispersion index', projection, geotransform)
+        #self.set_projection_gdal1(amp_disp, 1, amp_disp_file, 'Amplitude dispersion index', projection, geotransform)
 
-            top_eig_files = self.out_dir + b'/top_eigenvalues'
-            if not os.path.exists(top_eig_files.decode('UTF-8')):
-                top_eig_memmap = np.memmap(top_eig_files.decode('UTF-8'), mode='write', dtype='float32',
-                                           shape=(3, self.length, self.width))
-                IML.renderISCEXML(top_eig_files.decode('UTF-8'), bands=3, nyy=self.length, nxx=self.width,
-                                  datatype='float32', scheme='BSQ')
-            else:
-                top_eig_memmap = np.memmap(top_eig_files.decode('UTF-8'), mode='r+', dtype='float32',
-                                           shape=(3, self.length, self.width))
-
-            print(top_eig_memmap.shape, np.array(eig_values).shape)
-            top_eig_memmap[0:3, :, :] = eig_values[:, :, :]
-            #top_eig_memmap[2, :, :] = eig_values[1, :, :]/eig_values[0, :, :]
-            rr, cc, kk = np.where(np.isnan(top_eig_memmap))
-            top_eig_memmap[:, rr, cc] = np.nan
-            top_eig_memmap = None
-
+        top_eig_files = self.out_dir + b'/top_eigenvalues'
+        self.set_projection_gdalm(np.array(eig_values), 3, top_eig_files,
+                                  ['Top eigenvalue 1', 'Top eigenvalue 2', 'Top eigenvalue 3'], projection, geotransform)
 
-            print('write averaged temporal coherence file from mini stacks')
-            temp_coh_file = self.out_dir + b'/tempCoh_average'
-
-            if not os.path.exists(temp_coh_file.decode('UTF-8')):
-                temp_coh_memmap = np.memmap(temp_coh_file.decode('UTF-8'), mode='write', dtype='float32',
-                                            shape=(self.length, self.width))
-                IML.renderISCEXML(temp_coh_file.decode('UTF-8'), bands=1, nyy=self.length, nxx=self.width,
-                                  datatype='float32', scheme='BIL')
-            else:
-                temp_coh_memmap = np.memmap(temp_coh_file.decode('UTF-8'), mode='r+', dtype='float32',
-                                            shape=(self.length, self.width))
-
-            temp_coh_memmap[:, :] = fhandle['temporalCoherence'][0, :, :]
-            temp_coh_memmap = None
-
-            print('write temporal coherence file from full stack')
-            temp_coh_file = self.out_dir + b'/tempCoh_full'
+        print('write averaged temporal coherence file from mini stacks')
+        temp_coh_file = self.out_dir + b'/tempCoh_average'
+        self.set_projection_gdal1(fhandle['temporalCoherence'][0, :, :], 1,
+                                  temp_coh_file, 'Temporal coherence average', projection, geotransform)
 
+        temp_coh_file = self.out_dir + b'/tempCoh_full'
+        self.set_projection_gdal1(fhandle['temporalCoherence'][1, :, :], 1,
+                                  temp_coh_file, 'Temporal coherence full', projection, geotransform)
+        #ref_index_file = self.out_dir + b'/reference_index'
+        #self.set_projection_gdal1_int(reference_index_map, 1,
+        #                              ref_index_file, 'Reference index map', projection, geotransform)
 
-            if not os.path.exists(temp_coh_file.decode('UTF-8')):
-                temp_coh_memmap = np.memmap(temp_coh_file.decode('UTF-8'), mode='write', dtype='float32',
-                                            shape=(self.length, self.width))
-                IML.renderISCEXML(temp_coh_file.decode('UTF-8'), bands=1, nyy=self.length, nxx=self.width,
-                                  datatype='float32', scheme='BIL')
-            else:
-                temp_coh_memmap = np.memmap(temp_coh_file.decode('UTF-8'), mode='r+', dtype='float32',
-                                            shape=(self.length, self.width))
-
-            temp_coh_memmap[:, :] = fhandle['temporalCoherence'][1, :, :]
-            temp_coh_memmap = None
-
-        print('close HDF5 file phase_series.h5.')
 
         print('write PS mask file')
@@ -406,7 +490,4 @@ cdef class CPhaseLink:
                 block = [box[1], box[3], box[0], box[2]]
                 write_hdf5_block_2D_int(psf, mask_ps, b'mask', block)
 
-        return
-
-
-
+        return
\ No newline at end of file
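The new `get_dates()` above recovers acquisition dates from a CF-style `'seconds since ...'` time axis, routing through `datetime.fromtimestamp` with a hard-coded `'19691231-16'` local-epoch offset. The same conversion can be written without that timezone-dependent constant by adding the offsets to the parsed reference time directly; a minimal sketch under the same `units`-format assumption as the code above:

```python
from datetime import datetime, timedelta

def dates_from_cf_time(seconds, units):
    """Convert a CF-style time axis to YYYYMMDD strings.

    `units` is assumed to look like
    'seconds since 2017-02-03 00:00:00.000000', matching the attribute
    parsed in CPhaseLink.get_dates().
    """
    ref = datetime.strptime(units.split("seconds since ")[1],
                            "%Y-%m-%d %H:%M:%S.%f")
    # offsets are relative to the reference time, so no epoch/timezone
    # arithmetic is needed
    return [(ref + timedelta(seconds=float(s))).strftime("%Y%m%d")
            for s in seconds]

if __name__ == "__main__":
    units = "seconds since 2017-02-03 00:00:00.000000"
    print(dates_from_cf_time([0.0, 86400.0 * 12], units))
    # -> ['20170203', '20170215']
```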
compression="gzip", + compression_opts=4, + shuffle=True, + dtype=np.complex64 +) + + +def create_grid_mapping(group, crs: CRS, gt: list): + """Set up the grid mapping variable.""" + # https://github.com/corteva/rioxarray/blob/21284f67db536d9c104aa872ab0bbc261259e59e/rioxarray/rioxarray.py#L34 + if not 'spatial_ref' in group.keys(): + dset = group.create_dataset('spatial_ref', (), dtype=int) + dset.attrs.update(crs.to_cf()) + # Also add the GeoTransform + gt_string = " ".join([str(x) for x in gt]) + dset.attrs.update( + dict( + GeoTransform=gt_string, + units="unitless", + long_name=( + "Dummy variable containing geo-referencing metadata in attributes" + ), + ) + ) + else: + dset = group['spatial_ref'] + + return dset + + +def create_tyx_dsets( + group, + gt: list, + times: list, + shape: tuple): + """Create the time, y, and x coordinate datasets.""" + y, x = create_yx_arrays(gt, shape) + times, calendar, units = create_time_array(times) + + #if not group.dimensions: + # group.dimensions = dict(time=times.size, y=y.size, x=x.size) + # Create the datasets + if not 'time' in group.keys(): + t_ds = group.create_dataset("time", (len(times),), data=times, dtype=float) + y_ds = group.create_dataset("y", (len(y),), data=y, dtype=float) + x_ds = group.create_dataset("x", (len(x),), data=x, dtype=float) + + t_ds.attrs["standard_name"] = "time" + t_ds.attrs["long_name"] = "time" + t_ds.attrs["calendar"] = calendar + t_ds.attrs["units"] = units + else: + t_ds = group['time'] + y_ds = group['y'] + x_ds = group['x'] + + for name, ds in zip(["y", "x"], [y_ds, x_ds]): + ds.attrs["standard_name"] = f"projection_{name}_coordinate" + ds.attrs["long_name"] = f"{name.replace('_', ' ')} coordinate of projection" + ds.attrs["units"] = "m" + + return t_ds, y_ds, x_ds + + +def create_yx_arrays( + gt: list, shape: tuple +) -> tuple: + """Create the x and y coordinate datasets.""" + ysize, xsize = shape + # Parse the geotransform + x_origin, x_res, _, y_origin, _, y_res = gt + # Make the x/y arrays + # Note that these are the center of the pixels, whereas the GeoTransform + # is the upper left corner of the top left pixel. + y = np.arange(y_origin + y_res / 2, y_origin + y_res * ysize, y_res) + x = np.arange(x_origin + x_res / 2, x_origin + x_res * xsize, x_res) + return y, x + + +def create_time_array(dates: datetime.datetime): + # 'calendar': 'standard', + # 'units': 'seconds since 2017-02-03 00:00:00.000000' + # Create the time array + times = [datetime.datetime.strptime(dd, '%Y%m%d') for dd in dates] + since_time = times[0] + time = np.array([(t - since_time).total_seconds() for t in times]) + calendar = "standard" + units = f"seconds since {since_time.strftime('%Y-%m-%d %H:%M:%S.%f')}" + return time, calendar, units + + +def add_complex_ctype(h5file: h5py.File): + """Add the complex64 type to the root of the HDF5 file. + This is required for GDAL to recognize the complex data type. 
+ """ + with h5py.File(h5file, "a") as hf: + if "complex64" in hf["/"]: + return + ctype = h5py.h5t.py_create(np.complex64) + ctype.commit(hf["/"].id, np.string_("complex64")) + + +def create_geo_dataset_3d( + *, + group, + name: str, + description: str, + fillvalue: float, + attrs: dict, + timelength: int, + dtype,): + + dimensions = ["time", "y", "x"] + if attrs is None: + attrs = {} + attrs.update(long_name=description) + + options = HDF5_OPTS + options["chunks"] = (timelength, *options["chunks"]) + options['dtype'] = dtype + + dset = group.create_dataset( + name, + ndim=3, + fillvalue=fillvalue, + **options, + ) + dset.attrs.update(attrs) + dset.attrs["grid_mapping"] = 'spatial_ref' + return dset + +def get_raster_bounds(xcoord, ycoord, utm_bbox=None): + """Get common bounds among all data""" + x_bounds = [] + y_bounds = [] + + west = min(xcoord) + east = max(xcoord) + north = max(ycoord) + south = min(ycoord) + + x_bounds.append([west, east]) + y_bounds.append([south, north]) + if not utm_bbox is None: + x_bounds.append([utm_bbox[0], utm_bbox[2]]) + y_bounds.append([utm_bbox[1], utm_bbox[3]]) + + bounds = max(x_bounds)[0], max(y_bounds)[0], min(x_bounds)[1], min(y_bounds)[1] + return bounds + + +def bbox_to_utm_sco(bounds, src_epsg, dst_epsg): + t = Transformer.from_crs(src_epsg, dst_epsg, always_xy=True) + left, bottom, right, top = bounds + bbox = (*t.transform(left, bottom), *t.transform(right, top)) # type: ignore + return bbox + + +def bbox_to_utm(bbox, epsg_dst, epsg_src=4326): + """Convert a list of points to a specified UTM coordinate system. + If epsg_src is 4326 (lat/lon), assumes points_xy are in degrees. + """ + xmin, ymin, xmax, ymax = bbox + t = Transformer.from_crs(epsg_src, epsg_dst, always_xy=True) + xs = [xmin, xmax] + ys = [ymin, ymax] + xt, yt = t.transform(xs, ys) + xys = list(zip(xt, yt)) + return *xys[0], *xys[1] + + +class cropSLC: + def __init__(self, pairs_dict: Optional[List[Path]] = None, + geo_bbox: Optional[Tuple[float, float, float, float]] = None): + self.pairsDict = pairs_dict + self.name = 'slc' + self.dates = sorted([date for date in self.pairsDict.keys()]) + self.dsNames = list(self.pairsDict[self.dates[0]].datasetDict.keys()) + self.dsNames = [i for i in slcDatasetNames if i in self.dsNames] + self.numSlc = len(self.pairsDict) + self.bperp = np.zeros(self.numSlc) + dsname0 = self.pairsDict[self.dates[0]].datasetDict['slc'] + self.geo_bbox = geo_bbox + self.bb_utm = None + self.rdr_bbox = None + self.crs, self.geotransform, self.shape = self.get_transform(dsname0) + self.length, self.width = self.shape + + self.lengthc, self.widthc = self.get_size() + + def get_transform(self, src_file): + import pdb; pdb.set_trace() + with h5py.File(src_file, 'r') as ds: + dsg = ds['data']['projection'].attrs + xcoord = ds['data']['x_coordinates'][()] + ycoord = ds['data']['y_coordinates'][()] + shape = (ds['data']['y_coordinates'].shape[0], ds['data']['x_coordinates'].shape[0]) + #crs = CRS.from_wkt(dsg['spatial_ref'].decode("utf-8")) + crs = dsg['spatial_ref'].decode("utf-8") + x_step = float(ds['data']['x_spacing'][()]) + y_step = float(ds['data']['y_spacing'][()]) + x_first = min(ds['data']['x_coordinates'][()]) + y_first = max(ds['data']['y_coordinates'][()]) + geotransform = (x_first, x_step, 0, y_first, 0, y_step) + if self.geo_bbox is None: + x_last = max(ds['data']['x_coordinates'][()]) + y_last = min(ds['data']['y_coordinates'][()]) + self.bb_utm = (x_first, y_first, x_last, y_last) + else: + self.bb_utm = bbox_to_utm(self.geo_bbox, epsg_src=4326, 
epsg_dst=crs.to_epsg()) + + bounds = get_raster_bounds(xcoord, ycoord, self.bb_utm) + + xindex = np.where(np.logical_and(xcoord >= bounds[0], xcoord <= bounds[2]))[0] + yindex = np.where(np.logical_and(ycoord >= bounds[1], ycoord <= bounds[3]))[0] + row1, row2 = min(yindex), max(yindex) + col1, col2 = min(xindex), max(xindex) + + self.rdr_bbox = (col1, row1, col2, row2) + + return crs, geotransform, shape + + def get_subset_transform(self): + # Define the cropping extent + width = self.rdr_bbox[2] - self.rdr_bbox[0] + length = self.rdr_bbox[3] - self.rdr_bbox[1] + crop_extent = (self.bb_utm[0], self.bb_utm[1], self.bb_utm[2], self.bb_utm[3]) + crop_transform = rasterio.transform.from_bounds(self.bb_utm[0], + self.bb_utm[1], + self.bb_utm[2], + self.bb_utm[3], width, length) + return crop_transform, crop_extent + + def obs_get_rdr_bbox(self): + # calculate the image coordinates + col1 = int((self.bb_utm[0] - self.geotransform[0]) / self.geotransform[1]) + col2 = int((self.bb_utm[2] - self.geotransform[0]) / self.geotransform[1]) + row1 = int((self.bb_utm[3] - self.geotransform[3]) / self.geotransform[5]) + row2 = int((self.bb_utm[1] - self.geotransform[3]) / self.geotransform[5]) + + if col2 > self.width: + col2 = self.width + bb_utm2 = col2 * self.geotransform[1] + self.geotransform[0] + self.bb_utm = (self.bb_utm[0], self.bb_utm[1], bb_utm2, self.bb_utm[3]) + + if row2 > self.length: + row2 = self.length + bb_utm1 = row2 * self.geotransform[5] + self.geotransform[3] + self.bb_utm = (self.bb_utm[0], bb_utm1, self.bb_utm[2], self.bb_utm[3]) + + if col1 < 0: + col1 = 0 + bb_utm0 = self.geotransform[0] + self.bb_utm = (bb_utm0, self.bb_utm[1], self.bb_utm[2], self.bb_utm[3]) + if row1 < 0: + row1 = 0 + bb_utm3 = self.geotransform[3] + self.bb_utm = (self.bb_utm[0], self.bb_utm[1], self.bb_utm[2], bb_utm3) + + print('crop_geo: ', [col1, row1, col2, row2]) + return col1, row1, col2, row2 + + def get_size(self): + length = self.rdr_bbox[3] - self.rdr_bbox[1] + width = self.rdr_bbox[2] - self.rdr_bbox[0] + return length, width + + + def get_date_list(self): + self.dateList = sorted([date for date in self.pairsDict.keys()]) + return self.dateList + + + def read_subset(self, slc_file): + with h5py.File(slc_file, 'r') as f: + subset_slc = f['data']['VV'][self.rdr_bbox[1]:self.rdr_bbox[3], + self.rdr_bbox[0]:self.rdr_bbox[2]] + + return subset_slc + + def get_metadata(self): + slcObj = [v for v in self.pairsDict.values()][0] + self.metadata = slcObj.get_metadata() + if 'UNIT' in self.metadata.keys(): + self.metadata.pop('UNIT') + return self.metadata + + def write2hdf5(self, outputFile='slcStack.h5', access_mode='a', compression=None, extra_metadata=None): + + dsNames = [i for i in slcDatasetNames if i in self.dsNames] + maxDigit = max([len(i) for i in dsNames]) + self.outputFile = outputFile + print('create HDF5 file {} with {} mode'.format(self.outputFile, access_mode)) + dsName = 'slc' + + f = h5py.File(self.outputFile, access_mode) + print('create HDF5 file {} with {} mode'.format(self.outputFile, access_mode)) + + #create_grid_mapping(group=f, crs=self.crs, gt=list(self.geotransform)) + #create_tyx_dsets(group=f, gt=list(self.geotransform), times=self.dates, shape=(self.lengthc, self.widthc)) + + dsShape = (self.numSlc, self.lengthc, self.widthc) + dsDataType = dataType + dsCompression = compression + + self.bperp = np.zeros(self.numSlc) + + print(('create dataset /{d:<{w}} of {t:<25} in size of {s}' + ' with compression = {c}').format(d=dsName, + w=maxDigit, + t=str(dsDataType), + s=dsShape, + 
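`bbox_to_utm()` above drives the geographic-to-UTM subsetting in `cropSLC`; the essential pyproj pattern is a single `Transformer` built with `always_xy=True`, so the call order is always (lon, lat) regardless of the axis order the CRS declares. A standalone usage sketch (EPSG:32611 is an assumed example zone, not something the patch pins down):

```python
from pyproj import Transformer

# lon/lat bounding box (W, S, E, N) over an assumed area of interest
bbox = (-117.5, 33.5, -117.0, 34.0)

# same pattern as bbox_to_utm(): one transformer, always_xy=True so the
# signature is transform(x=lon, y=lat)
t = Transformer.from_crs(4326, 32611, always_xy=True)
xmin, ymin = t.transform(bbox[0], bbox[1])
xmax, ymax = t.transform(bbox[2], bbox[3])

print("UTM 11N bounds:", (round(xmin), round(ymin), round(xmax), round(ymax)))
```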
diff --git a/src/miaplpy/prep_slc_isce.py b/src/miaplpy/prep_slc_isce.py
index e4a0d71..1414da5 100755
--- a/src/miaplpy/prep_slc_isce.py
+++ b/src/miaplpy/prep_slc_isce.py
@@ -151,7 +151,6 @@ def extract_isce_metadata(meta_file, geom_dir=None, rsc_file=None, update_mode=T
         metadata['LAT_REF2'] = metadata_multi_looked['LAT_REF2']
         metadata['LAT_REF3'] = metadata_multi_looked['LAT_REF3']
         metadata['LAT_REF4'] = metadata_multi_looked['LAT_REF4']
-        print(metadata_multi_looked)
 
     # NCORRLOOKS for coherence calibration
     rgfact = float(metadata['rangeResolution']) / float(metadata['rangePixelSize'])
diff --git a/src/miaplpy/unwrap_ifgram.py b/src/miaplpy/unwrap_ifgram.py
index fcac4c1..45a4cb3 100755
--- a/src/miaplpy/unwrap_ifgram.py
+++ b/src/miaplpy/unwrap_ifgram.py
@@ -19,7 +19,7 @@ def enablePrint():
     sys.stdout = sys.__stdout__
 
 blockPrint()
-#import isce
+import isce
 import isceobj
 from isceobj.Util.ImageUtil import ImageLib as IML
 from contrib.UnwrapComp.unwrapComponents import UnwrapComponents
@@ -191,6 +191,8 @@ def unwrap(self):
             print(error)
             if 'ERROR' in error.decode('UTF-8') or 'Error' in error.decode('UTF-8'):  # or len(error.decode('UTF-8'))>0:
                 raise RuntimeError(error)
+
+
         if os.path.exists(self.out_unwrapped):
diff --git a/src/miaplpy/version.py b/src/miaplpy/version.py
index d6d82bd..5609e6f 100755
--- a/src/miaplpy/version.py
+++ b/src/miaplpy/version.py
@@ -8,6 +8,7 @@
 ###########################################################################
 Tag = collections.namedtuple('Tag', 'version date')
 release_history = (
+    Tag('0.2.1', '2023-08-22'),
     Tag('0.2.0', '2021-09-14'),
     Tag('0.1.0', '2021-04-23'),
 )