From 4cab0af0f59107a0f119174a735e27aa04626737 Mon Sep 17 00:00:00 2001 From: Yu Morishita Date: Tue, 24 Nov 2020 13:50:41 +0900 Subject: [PATCH] Compress hdf5 files --- bin/LiCSBAS13_sb_inv.py | 18 ++++++++++-------- bin/LiCSBAS16_filt_ts.py | 15 +++++++++------ 2 files changed, 19 insertions(+), 14 deletions(-) diff --git a/bin/LiCSBAS13_sb_inv.py b/bin/LiCSBAS13_sb_inv.py index e9ba662..049b186 100755 --- a/bin/LiCSBAS13_sb_inv.py +++ b/bin/LiCSBAS13_sb_inv.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 """ -v1.4.6 20201119 Yu Morishita, GSI +v1.4.7 20201124 Yu Morishita, GSI This script inverts the SB network of unw to obtain the time series and velocity using NSBAS (López-Quiroz et al., 2009; Doin et al., 2011) approach. @@ -69,6 +69,8 @@ """ #%% Change log ''' +v1.4.7 20201124 Yu Morishita, GSI + - Compress hdf5 file v1.4.6 20201119 Yu Morishita, GSI - Change default cmap for wrapped phase from insar to SCM.romaO v1.4.5 20201118 Yu Morishita, GSI @@ -131,7 +133,7 @@ def main(argv=None): argv = sys.argv start = time.time() - ver="1.4.6"; date=20201119; author="Y. Morishita" + ver="1.4.7"; date=20201124; author="Y. 
Morishita" print("\n{} ver{} {} {}".format(os.path.basename(argv[0]), ver, date, author), flush=True) print("{} {}".format(os.path.basename(argv[0]), ' '.join(argv[1:])), flush=True) @@ -162,6 +164,7 @@ def main(argv=None): cmap_noise_r = 'viridis_r' cmap_wrap = SCM.romaO q = multi.get_context('fork') + compress = 'gzip' #%% Read options @@ -434,10 +437,10 @@ def main(argv=None): cumh5.create_dataset('imdates', data=[np.int32(imd) for imd in imdates]) if not np.all(np.abs(np.array(bperp))<=1):# if not dummy cumh5.create_dataset('bperp', data=bperp) - cum = cumh5.require_dataset('cum', (n_im, length, width), dtype=np.float32) - vel = cumh5.require_dataset('vel', (length, width), dtype=np.float32) - vconst = cumh5.require_dataset('vintercept', (length, width), dtype=np.float32) - gap = cumh5.require_dataset('gap', (n_im-1, length, width), dtype=np.int8) + cum = cumh5.require_dataset('cum', (n_im, length, width), dtype=np.float32, compression=compress) + vel = cumh5.require_dataset('vel', (length, width), dtype=np.float32, compression=compress) + vconst = cumh5.require_dataset('vintercept', (length, width), dtype=np.float32, compression=compress) + gap = cumh5.require_dataset('gap', (n_im-1, length, width), dtype=np.int8, compression=compress) if width == width_geo and length == length_geo: ## if geocoded cumh5.create_dataset('corner_lat', data=lat1) @@ -534,7 +537,7 @@ def main(argv=None): p = q.Pool(n_para_gap) _result = np.array(p.map(count_gaps_wrapper, range(n_para_gap)), dtype=object) p.close() - + ns_gap_patch[ix_unnan_pt] = np.hstack(_result[:, 0]) #n_pt gap_patch[:, ix_unnan_pt] = np.hstack(_result[:, 1]) #n_im-1, n_pt ns_ifg_noloop_patch[ix_unnan_pt] = np.hstack(_result[:, 2]) @@ -751,7 +754,6 @@ def main(argv=None): print('Output directory: {}\n'.format(os.path.relpath(tsadir))) - #%% def count_gaps_wrapper(i): print(" Running {:2}/{:2}th patch...".format(i+1, n_para_gap), flush=True) diff --git a/bin/LiCSBAS16_filt_ts.py b/bin/LiCSBAS16_filt_ts.py index 
6506076..189616f 100755 --- a/bin/LiCSBAS16_filt_ts.py +++ b/bin/LiCSBAS16_filt_ts.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 """ -v1.4.4 20201119 Yu Morishita, GSI +v1.4.5 20201124 Yu Morishita, GSI This script applies spatio-temporal filter (HP in time and LP in space with gaussian kernel, same as StaMPS) to the time series of displacement. Deramping (1D, bilinear, or 2D polynomial) can also be applied if -r option is used. Topography-correlated components (linear with elevation) can also be subtracted with --hgt_linear option simultaneously with deramping before spatio-temporal filtering. The impact of filtering (deramp and linear elevation as well) can be visually checked by showing 16filt*/*png. A stable reference point is determined after the filtering as well as Step 1-3. @@ -64,6 +64,8 @@ """ #%% Change log ''' +v1.4.5 20201124 Yu Morishita, GSI + - Compress hdf5 file v1.4.4 20201119 Yu Morishita, GSI - Change default cmap for wrapped phase from insar to SCM.romaO v1.4.3 20201118 Yu Morishita, GSI @@ -122,7 +124,7 @@ def main(argv=None): argv = sys.argv start = time.time() - ver="1.4.4"; date=20201119; author="Y. Morishita" + ver="1.4.5"; date=20201124; author="Y. 
Morishita" print("\n{} ver{} {} {}".format(os.path.basename(argv[0]), ver, date, author), flush=True) print("{} {}".format(os.path.basename(argv[0]), ' '.join(argv[1:])), flush=True) @@ -158,6 +160,7 @@ def main(argv=None): cmap_noise_r = 'viridis_r' cmap_wrap = SCM.romaO q = multi.get_context('fork') + compress = 'gzip' #%% Read options @@ -270,7 +273,7 @@ def main(argv=None): ### Save dates and other info into cumf cumfh5.create_dataset('imdates', data=cumh5['imdates']) - cumfh5.create_dataset('gap', data=cumh5['gap']) + cumfh5.create_dataset('gap', data=cumh5['gap'], compression=compress) if 'bperp' in list(cumh5.keys()): ## if dummy, no bperp field cumfh5.create_dataset('bperp', data=cumh5['bperp']) else: @@ -422,7 +425,7 @@ def main(argv=None): #%% Filter each image - cum_filt = cumfh5.require_dataset('cum', (n_im, length, width), dtype=np.float32) + cum_filt = cumfh5.require_dataset('cum', (n_im, length, width), dtype=np.float32, compression=compress) print('\nHP filter in time, LP filter in space,', flush=True) print('with {} parallel processing...'.format(n_para), flush=True) @@ -523,8 +526,8 @@ def main(argv=None): vconst_mskd.tofile(vconstfile+'.mskd') vel_mskd.tofile(velfile+'.mskd') - cumfh5.create_dataset('vel', data=vel.reshape(length, width)) - cumfh5.create_dataset('vintercept', data=vconst.reshape(length, width)) + cumfh5.create_dataset('vel', data=vel.reshape(length, width), compression=compress) + cumfh5.create_dataset('vintercept', data=vconst.reshape(length, width), compression=compress) #%% Add info and close