From 8ed98c16cc8289412c40fb9028e343e19d0e9a49 Mon Sep 17 00:00:00 2001
From: Zhang Yunjun
Date: Fri, 17 Jun 2022 13:29:06 -0700
Subject: [PATCH] integrate advanced time func with UQ into the routine workflow (#795)

* ts2vel: add uncertaintyQuantification & timeSeriesCovFile options
   + add the following two new template options to smallbaselineApp.cfg:
      - mintpy.velocity.uncertaintyQuantification = residue / covariance / bootstrap, to replace the previous mintpy.velocity.bootstrap option
      - mintpy.velocity.timeSeriesCovFile with the default value of no
   + timeseries2velocity: re-use arg_group.add_reference_argument(plot=False) in timeseries2velocity.py
   + load_data: write the S1A/B_date.txt file from the SAFE_files.txt file if the latter exists in its default location (at the same level as the reference folder)

* ts2vel: add timeFunc.* options to smallbaselineApp.cfg
   + integrate the advanced time func fitting into smallbaselineApp.py, via the following template options:
      - mintpy.timeFunc.polynomial
      - mintpy.timeFunc.periodic
      - mintpy.timeFunc.step
      - mintpy.timeFunc.exp
      - mintpy.timeFunc.log
   + rename the following options for consistency **[BREAKING CHANGES]**:
      - mintpy.velocity.excludeDate               --> mintpy.timeFunc.excludeDate
      - mintpy.velocity.startDate                 --> mintpy.timeFunc.startDate
      - mintpy.velocity.endDate                   --> mintpy.timeFunc.endDate
      - mintpy.velocity.uncertaintyQuantification --> mintpy.timeFunc.uncertaintyQuantification
      - mintpy.velocity.timeSeriesCovFile         --> mintpy.timeFunc.timeSeriesCovFile
      - mintpy.velocity.bootstrapCount            --> mintpy.timeFunc.bootstrapCount
   + utils.time_func.py:
      - add MODEL_EXAMPLE as the single-sourced example for the time function model/configuration setup
      - add inps2model() to parse/convert the namespace "inps" object into the time function dict "model" object
      - add get_num_param() to estimate the number of unknown parameters from a given model dict object
   + utils.readfile.read_template(): add a "skip_chars" argument to skip certain characters in the template values, e.g. the common user typos "[" and "]", to simplify the read_template2inps() functions.
      - leverage the new read_template() in the following scripts:
         - dem_error.py
         - geocode.py
         - modify_network.py
         - reference_point.py
         - timeseries2velocity.py
      - remove the insarmaps-related code from read_template(); it is messy and can/should be implemented separately in the insarmaps repo if needed.
   + utils.ptime.py: add get_exclude_date_list(), moved from the previous ts2vel.py
   + utils.arg_group.py:
      - add_timefunc_argument(): fix typos and improve comment alignment for readability
      - adjust indentation throughout the script
   + timeseries2velocity: support time function inputs from template files
      - add the time function inputs to the config keys to be saved in the h5 file
      - remove the comments on contributing authors, as they are visible in the git commit history
      - merge init_exp_log_dicts() and read_inps2model() into utils.time_func.inps2model()
      - refactor and move read_exclude_date() into utils.ptime.get_exclude_date_list()
      - read_template2inps(): support the new time function template options
      - model2hdf5_dataset(): modify the periodic/exp/log dataset names, to be clearer about the time unit
   + move ifgram_reconstruction.py to /legacy since it is rarely used
   + move tropo_pyaps.py to /legacy since it is no longer usable (due to the takedown of EarthDef and Python 2) and not actively used.
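Illustrative usage of the two new utilities (a minimal sketch; the cfg file name and the dates below are hypothetical, while read_template() and get_exclude_date_list() are the APIs added in this patch):

    from mintpy.utils import ptime, readfile

    # tolerate user typos such as "[20080520, 20090817]" in template values
    template = readfile.read_template('smallbaselineApp.cfg', skip_chars=['[', ']'])

    # merge the start/end/exclude date options into a single exclude date list
    date_list = ['20080101', '20080520', '20090817', '20101230']
    ex_dates = ptime.get_exclude_date_list(date_list,
                                           start_date='20080201',
                                           end_date=None,
                                           exclude_date=['20090817'])
    # --> ['20080101', '20090817']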
---
 mintpy/defaults/smallbaselineApp.cfg         |  39 +-
 mintpy/defaults/smallbaselineApp_auto.cfg    |  22 +-
 mintpy/dem_error.py                          |   5 +-
 mintpy/geocode.py                            |   5 +-
 mintpy/{ => legacy}/ifgram_reconstruction.py |   0
 mintpy/{ => legacy}/tropo_pyaps.py           |   0
 mintpy/load_data.py                          |  17 +-
 mintpy/modify_network.py                     |  12 +-
 mintpy/reference_point.py                    |   4 +-
 mintpy/solid_earth_tides.py                  |   2 +-
 mintpy/timeseries2velocity.py                | 457 +++++++------------
 mintpy/tropo_pyaps3.py                       |   5 +-
 mintpy/utils/arg_group.py                    | 143 +++---
 mintpy/utils/ptime.py                        |  44 +-
 mintpy/utils/readfile.py                     |  91 +---
 mintpy/utils/time_func.py                    | 167 +++++--
 setup.py                                     |   2 -
 17 files changed, 531 insertions(+), 484 deletions(-)
 rename mintpy/{ => legacy}/ifgram_reconstruction.py (100%)
 rename mintpy/{ => legacy}/tropo_pyaps.py (100%)

diff --git a/mintpy/defaults/smallbaselineApp.cfg b/mintpy/defaults/smallbaselineApp.cfg
index 65bc0c58e..ceffbf50e 100644
--- a/mintpy/defaults/smallbaselineApp.cfg
+++ b/mintpy/defaults/smallbaselineApp.cfg
@@ -276,17 +276,34 @@ mintpy.reference.date = auto #[reference_date.txt / 20090214 / no], auto for r
 ########## 10. velocity
-## Estimate linear velocity and its standard deviation from time-series
-## and from tropospheric delay file if exists.
-## reference: Fattahi and Amelung (2015, JGR)
-mintpy.velocity.excludeDate = auto #[exclude_date.txt / 20080520,20090817 / no], auto for exclude_date.txt
-mintpy.velocity.startDate = auto #[20070101 / no], auto for no
-mintpy.velocity.endDate = auto #[20101230 / no], auto for no
-
-## Bootstrapping
-## reference: Efron and Tibshirani (1986, Stat. Sci.)
-mintpy.velocity.bootstrap = auto #[yes / no], auto for no, use bootstrap
-mintpy.velocity.bootstrapCount = auto #[int>1], auto for 400, number of iterations for bootstrapping
+## Estimate a suite of time functions [linear velocity by default]
+## from the final displacement file (and from the tropospheric delay file if it exists)
+mintpy.timeFunc.startDate = auto #[20070101 / no], auto for no
+mintpy.timeFunc.endDate = auto #[20101230 / no], auto for no
+mintpy.timeFunc.excludeDate = auto #[exclude_date.txt / 20080520,20090817 / no], auto for exclude_date.txt
+
+## Fit a suite of time functions
+## reference: Hetland et al. (2012, JGR) equation (2-9)
+## polynomial function is defined by its degree in integer. 1 for linear, 2 for quadratic, etc.
+## periodic function(s) are defined by a list of periods in decimal years. 1 for annual, 0.5 for semi-annual, etc.
+## step function(s) are defined by a list of onset times in YYYYMMDD(THHMM) format
+## exp & log function(s) are defined by an onset time followed by a characteristic time in integer days.
+## Multiple exp and log functions can be overlaid on top of each other, e.g.:
+## 20110311,60,120          - two functions sharing the same onset time, OR
+## 20110311,60;20170908,120 - two functions separated by ";"
+mintpy.timeFunc.polynomial = auto #[int >= 0], auto for 1, degree of the polynomial function
+mintpy.timeFunc.periodic = auto #[1,0.5 / list_of_float / no], auto for no, periods in decimal years
+mintpy.timeFunc.step = auto #[20110311,20170908 / 20120928T1733 / no], auto for no, step function(s)
+mintpy.timeFunc.exp = auto #[20110311,60 / 20110311,60,120 / 20110311,60;20170908,120 / no], auto for no
+mintpy.timeFunc.log = auto #[20110311,60 / 20110311,60,120 / 20110311,60;20170908,120 / no], auto for no
+
+## Uncertainty quantification methods:
+## a. residue    - propagate from fitting residue assuming normal dist. in time (Fattahi & Amelung, 2015, JGR)
+## b.
covariance - propagate from time series (co)variance matrix +## c. bootstrap - bootstrapping (independently resampling with replacement; Efron & Tibshirani, 1986, Stat. Sci.) +mintpy.timeFunc.uncertaintyQuantification = auto #[residue, covariance, bootstrap], auto for residue +mintpy.timeFunc.timeSeriesCovFile = auto #[filename / no], auto for no, time series covariance file +mintpy.timeFunc.bootstrapCount = auto #[int>1], auto for 400, number of iterations for bootstrapping ########## 11.1 geocode (post-processing) diff --git a/mintpy/defaults/smallbaselineApp_auto.cfg b/mintpy/defaults/smallbaselineApp_auto.cfg index ce3c66c89..a7819d6ac 100644 --- a/mintpy/defaults/smallbaselineApp_auto.cfg +++ b/mintpy/defaults/smallbaselineApp_auto.cfg @@ -125,13 +125,21 @@ mintpy.reference.date = reference_date.txt ########## velocity -mintpy.velocity.excludeDate = exclude_date.txt -mintpy.velocity.startDate = no -mintpy.velocity.endDate = no - -## bootstrap -mintpy.velocity.bootstrap = no -mintpy.velocity.bootstrapCount = 400 +mintpy.timeFunc.startDate = no +mintpy.timeFunc.endDate = no +mintpy.timeFunc.excludeDate = exclude_date.txt + +## time functions +mintpy.timeFunc.polynomial = 1 +mintpy.timeFunc.periodic = no +mintpy.timeFunc.step = no +mintpy.timeFunc.exp = no +mintpy.timeFunc.log = no + +## uncertainty quantification +mintpy.timeFunc.uncertaintyQuantification = residue +mintpy.timeFunc.timeSeriesCovFile = no +mintpy.timeFunc.bootstrapCount = 400 ########## geocode diff --git a/mintpy/dem_error.py b/mintpy/dem_error.py index d9a0ee5b0..403aef951 100755 --- a/mintpy/dem_error.py +++ b/mintpy/dem_error.py @@ -176,7 +176,7 @@ def read_template2inps(template_file, inps=None): inps = cmd_line_parse() iDict = vars(inps) print('read options from template file: '+os.path.basename(template_file)) - template = readfile.read_template(template_file) + template = readfile.read_template(template_file, skip_chars=['[', ']']) template = ut.check_template_auto_value(template) # Read template option @@ -189,8 +189,7 @@ def read_template2inps(template_file, inps=None): if key in ['polyOrder']: iDict[key] = int(value) elif key in ['excludeDate','stepFuncDate']: - value = value.replace('[','').replace(']','').replace(',', ' ') - iDict[key] = ptime.yyyymmdd(value.split()) + iDict[key] = ptime.yyyymmdd(value.split(',')) # computing configurations dask_key_prefix = 'mintpy.compute.' diff --git a/mintpy/geocode.py b/mintpy/geocode.py index 0812bb2cd..a2a842fd3 100755 --- a/mintpy/geocode.py +++ b/mintpy/geocode.py @@ -177,7 +177,7 @@ def read_template2inps(template_file, inps): if not inps: inps = cmd_line_parse() inps_dict = vars(inps) - template = readfile.read_template(template_file) + template = readfile.read_template(template_file, skip_chars=['[', ']']) template = ut.check_template_auto_value(template) prefix = 'mintpy.geocode.' 
@@ -186,8 +186,7 @@ def read_template2inps(template_file, inps): value = template[prefix + key] if value: if key in ['SNWE', 'laloStep']: - value = value.replace('[','').replace(']','').replace(',',' ') - inps_dict[key] = [float(i) for i in value.split()] + inps_dict[key] = [float(i) for i in value.split(',')] elif key in ['interpMethod']: inps_dict[key] = value elif key == 'fillValue': diff --git a/mintpy/ifgram_reconstruction.py b/mintpy/legacy/ifgram_reconstruction.py similarity index 100% rename from mintpy/ifgram_reconstruction.py rename to mintpy/legacy/ifgram_reconstruction.py diff --git a/mintpy/tropo_pyaps.py b/mintpy/legacy/tropo_pyaps.py similarity index 100% rename from mintpy/tropo_pyaps.py rename to mintpy/legacy/tropo_pyaps.py diff --git a/mintpy/load_data.py b/mintpy/load_data.py index 735a10d09..128a838ba 100755 --- a/mintpy/load_data.py +++ b/mintpy/load_data.py @@ -737,7 +737,7 @@ def prepare_metadata(iDict): elif processor == 'isce': from mintpy import prep_isce - from mintpy.utils.isce_utils import get_processor + from mintpy.utils import s1_utils, isce_utils # --meta-file meta_files = sorted(glob.glob(iDict['mintpy.load.metaFile'])) @@ -756,11 +756,9 @@ def prepare_metadata(iDict): 'mintpy.load.rgOffFile', 'mintpy.load.azOffFile'] obs_keys = [i for i in obs_keys if i in iDict['dset_name2template_key'].values()] obs_paths = [iDict[key] for key in obs_keys if iDict[key].lower() != 'auto'] + stack_processor = isce_utils.get_processor(meta_file) if os.path.isfile(meta_file) else 'topsStack' if len(obs_paths) > 0: - - # ifgramStack - processor = get_processor(meta_file) if os.path.isfile(meta_file) else 'topsStack' - if processor == 'alosStack': + if stack_processor == 'alosStack': obs_dir = os.path.dirname(obs_paths[0]) else: obs_dir = os.path.dirname(os.path.dirname(obs_paths[0])) @@ -798,6 +796,15 @@ def prepare_metadata(iDict): except: warnings.warn('prep_isce.py failed. 
Assuming its result exists and continue...') + # [optional] for topsStack: SAFE_files.txt --> S1A/B_date.txt + if stack_processor == 'topsStack': + safe_list_file = os.path.join(os.path.dirname(os.path.dirname(meta_file)), 'SAFE_files.txt') + if os.path.isfile(safe_list_file): + mintpy_dir = os.path.dirname(os.path.dirname(iDict['outfile'][0])) + s1_utils.get_s1ab_date_list_file(mintpy_dir=mintpy_dir, + safe_list_file=safe_list_file, + print_msg=True) + elif processor == 'aria': from mintpy import prep_aria diff --git a/mintpy/modify_network.py b/mintpy/modify_network.py index 4b862ed12..19fc1143f 100755 --- a/mintpy/modify_network.py +++ b/mintpy/modify_network.py @@ -169,7 +169,7 @@ def read_template2inps(template_file, inps=None): inps = cmd_line_parse() inpsDict = vars(inps) print('read options from template file: '+os.path.basename(template_file)) - template = readfile.read_template(inps.template_file) + template = readfile.read_template(inps.template_file, skip_chars=['[', ']']) template = ut.check_template_auto_value(template) # Update inps if key existed in template file @@ -187,12 +187,12 @@ def read_template2inps(template_file, inps=None): elif key in ['maskFile', 'referenceFile']: inpsDict[key] = value elif key == 'aoiYX': - tmp = [i.replace('[','').replace(']','').strip() for i in value.split(',')] + tmp = [i.strip() for i in value.split(',')] sub_y = sorted([int(i.strip()) for i in tmp[0].split(':')]) sub_x = sorted([int(i.strip()) for i in tmp[1].split(':')]) inps.aoi_pix_box = (sub_x[0], sub_y[0], sub_x[1], sub_y[1]) elif key == 'aoiLALO': - tmp = [i.replace('[','').replace(']','').strip() for i in value.split(',')] + tmp = [i.strip() for i in value.split(',')] sub_lat = sorted([float(i.strip()) for i in tmp[0].split(':')]) sub_lon = sorted([float(i.strip()) for i in tmp[1].split(':')]) inps.aoi_geo_box = (sub_lon[0], sub_lat[1], sub_lon[1], sub_lat[0]) @@ -204,11 +204,9 @@ def read_template2inps(template_file, inps=None): elif key in ['startDate', 'endDate']: inpsDict[key] = ptime.yyyymmdd(value) elif key == 'excludeDate': - value = value.replace('[','').replace(']','').replace(',', ' ') - inpsDict[key] = ptime.yyyymmdd(value.split()) + inpsDict[key] = ptime.yyyymmdd(value.split(',')) elif key == 'excludeIfgIndex': - value = value.replace('[','').replace(']','').replace(',', ' ') - inpsDict[key] += value.split() + inpsDict[key] += value.split(',') inpsDict[key] = read_input_index_list(inpsDict[key], stackFile=inps.file) # Turn reset on if 1) no input options found to drop ifgram AND 2) there is template input diff --git a/mintpy/reference_point.py b/mintpy/reference_point.py index 9bc5ea382..a7803fd61 100755 --- a/mintpy/reference_point.py +++ b/mintpy/reference_point.py @@ -129,7 +129,7 @@ def read_template_file2inps(template_file, inps=None): if not inps: inps = cmd_line_parse(['']) inps_dict = vars(inps) - template = readfile.read_template(template_file) + template = readfile.read_template(template_file, skip_chars=['[', ']']) template = ut.check_template_auto_value(template) prefix = 'mintpy.reference.' 
@@ -147,14 +147,12 @@ def read_template_file2inps(template_file, inps=None): if key in template.keys(): value = template[key] if value: - value = value.replace('[','').replace(']','') inps.ref_y, inps.ref_x = [int(i) for i in value.split(',')] key = prefix+'lalo' if key in template.keys(): value = template[key] if value: - value = value.replace('[','').replace(']','') inps.ref_lat, inps.ref_lon = [float(i) for i in value.split(',')] return inps diff --git a/mintpy/solid_earth_tides.py b/mintpy/solid_earth_tides.py index 07be2b2c6..cf493fbc1 100755 --- a/mintpy/solid_earth_tides.py +++ b/mintpy/solid_earth_tides.py @@ -54,7 +54,7 @@ """ def create_parser(): - parser = argparse.ArgumentParser(description='Solid Earth tides (SET) correction', + parser = argparse.ArgumentParser(description='Solid Earth tides (SET) correction via PySolid', formatter_class=argparse.RawTextHelpFormatter, epilog='{}\n{}\n{}'.format(REFERENCE, TEMPLATE, EXAMPLE)) diff --git a/mintpy/timeseries2velocity.py b/mintpy/timeseries2velocity.py index 0676a19ff..c183aecc3 100755 --- a/mintpy/timeseries2velocity.py +++ b/mintpy/timeseries2velocity.py @@ -2,11 +2,8 @@ ############################################################ # Program is part of MintPy # # Copyright (c) 2013, Zhang Yunjun, Heresh Fattahi # -# Author: Zhang Yunjun, Heresh Fattahi, 2013 # +# Author: Zhang Yunjun, Heresh Fattahi, Yuan-Kai Liu, 2013 # ############################################################ -# Add bootstrap method for std. dev. estimation, Emre Havazli, May 2020. -# Add poly, periodic and step func., Yuan-Kai Liu, Aug 2020. -# Add exp and log func., Yuan-Kai Liu, Jun 2021. import os @@ -23,12 +20,21 @@ dataType = np.float32 # key configuration parameter name -key_prefix = 'mintpy.velocity.' +key_prefix = 'mintpy.timeFunc.' configKeys = [ + # date 'startDate', 'endDate', 'excludeDate', - 'bootstrap', + # time functions + 'polynomial', + 'periodic', + 'step', + 'exp', + 'log', + # uncertainty quantification + 'uncertaintyQuantification', + 'timeSeriesCovFile', 'bootstrapCount', ] @@ -38,30 +44,27 @@ REFERENCE = """references: Fattahi, H., and F. Amelung (2015), InSAR bias and uncertainty due to the systematic and stochastic - tropospheric delay, Journal of Geophysical Research: Solid Earth, 120(12), 8758-8773, doi:10.1002/2015JB012419. - + tropospheric delay, J. Geophy. Res. Solid Earth, 120(12), 8758-8773, doi:10.1002/2015JB012419. Efron, B., and R. Tibshirani (1986), Bootstrap methods for standard errors, confidence intervals, - and other measures of statistical accuracy, Statistical science, 54-75, doi:10.1214/ss/1177013815. + and other measures of statistical accuracy, Statistical Science, 54-75, doi:10.1214/ss/1177013815. 
""" EXAMPLE = """example: - timeseries2velocity.py timeseries_ERA5_demErr.h5 - timeseries2velocity.py timeseries_ERA5_demErr_ramp.h5 -t KyushuT73F2980_2990AlosD.template - timeseries2velocity.py timeseries.h5 --start-date 20080201 --end-date 20100508 - timeseries2velocity.py timeseries.h5 --exclude exclude_date.txt - - timeseries2velocity.py LS-PARAMS.h5 - timeseries2velocity.py NSBAS-PARAMS.h5 - timeseries2velocity.py TS-PARAMS.h5 - - # bootstrapping for STD calculation - timeseries2velocity.py timeseries_ERA5_demErr.h5 --bootstrap + timeseries2velocity.py timeseries_ERA5_demErr.h5 + timeseries2velocity.py timeseries_ERA5_demErr_ramp.h5 -t KyushuAlosDT73.txt + timeseries2velocity.py timeseries.h5 --start-date 20080201 --end-date 20100508 + timeseries2velocity.py timeseries.h5 --ex exclude_date.txt # complex time functions - timeseries2velocity.py timeseries_ERA5_ramp_demErr.h5 --poly 3 --period 1 0.5 --step 20170910 - timeseries2velocity.py timeseries_ERA5_demErr.h5 --poly 1 --exp 20170910 90 - timeseries2velocity.py timeseries_ERA5_demErr.h5 --poly 1 --log 20170910 60.4 - timeseries2velocity.py timeseries_ERA5_demErr.h5 --poly 1 --log 20170910 60.4 200 --log 20171026 200.7 + timeseries2velocity.py timeseries_ERA5_demErr.h5 --poly 3 --period 1 0.5 --step 20170910 + timeseries2velocity.py timeseries_ERA5_demErr.h5 --poly 1 --exp 20170910 90 + timeseries2velocity.py timeseries_ERA5_demErr.h5 --poly 1 --log 20170910 60.4 + timeseries2velocity.py timeseries_ERA5_demErr.h5 --poly 1 --log 20170910 60.4 200 --log 20171026 200.7 + + # uncertainty quantification of the estimated time functions + timeseries2velocity.py timeseries_ERA5_demErr.h5 --uq residue + timeseries2velocity.py timeseries_ERA5_demErr.h5 --uq covariance --ts-cov timeseriesCov.h5 + timeseries2velocity.py timeseries_ERA5_demErr.h5 --uq bootstrap """ DROP_DATE_TXT = """exclude_date.txt: @@ -77,46 +80,41 @@ def create_parser(): epilog=TEMPLATE+'\n'+REFERENCE+'\n'+EXAMPLE) # inputs - parser.add_argument('timeseries_file', - help='Time series file for velocity inversion.') + parser.add_argument('timeseries_file', help='Time series file for time function estimation.') parser.add_argument('--template', '-t', dest='template_file', help='template file with options') - parser.add_argument('--ts-cov-file', dest='ts_cov_file', - help='Time-series (co)variance file for velocity STD calculation') # outputs parser.add_argument('-o', '--output', dest='outfile', help='output file name') parser.add_argument('--update', dest='update_mode', action='store_true', help='Enable update mode, and skip estimation if:\n'+ - '1) output velocity file already exists, readable '+ + '1) output file already exists, readable '+ 'and newer than input file\n' + '2) all configuration parameters are the same.') # reference in time and space - # for input file without reference info, e.g. ERA5.h5 - parser.add_argument('--ref-lalo', dest='ref_lalo', metavar=('LAT', 'LON'), type=float, nargs=2, - help='Change reference point LAT LON for estimation.') - parser.add_argument('--ref-yx', dest='ref_yx', metavar=('Y', 'X'), type=int, nargs=2, - help='Change reference point Y X for estimation.') - parser.add_argument('--ref-date', dest='ref_date', metavar='DATE', - help='Change reference date for estimation.') + # useful for input file without reference info, e.g. 
ERA5.h5 + parser = arg_group.add_reference_argument(parser, plot=False) # dates of interest - date = parser.add_argument_group('dates of interest') - date.add_argument('--start-date','-s', dest='startDate', - help='start date for velocity estimation') - date.add_argument('--end-date','-e', dest='endDate', - help='end date for velocity estimation') - date.add_argument('--exclude', '--ex', dest='excludeDate', nargs='+', default=[], - help='date(s) not included in velocity estimation, i.e.:\n' + + date = parser.add_argument_group('Dates of interest') + date.add_argument('-s','--start-date', dest='startDate', + help='start date for time function estimation') + date.add_argument('-e','--end-date', dest='endDate', + help='end date for time function estimation') + date.add_argument('--ex','--ex-date', dest='excludeDate', nargs='+', default=[], + help='date(s) not included in time function estimation, i.e.:\n' + '--exclude 20040502 20060708 20090103\n' + '--exclude exclude_date.txt\n'+DROP_DATE_TXT) - # bootstrap - bootstrap = parser.add_argument_group('bootstrapping', 'estimating the mean / STD of the velocity estimator') - bootstrap.add_argument('--bootstrap', '--bootstrapping', dest='bootstrap', action='store_true', - help='Enable bootstrapping to estimate the mean and STD of the velocity estimator.') - bootstrap.add_argument('--bc', '--bootstrap-count', dest='bootstrapCount', type=int, default=400, - help='number of iterations for bootstrapping (default: %(default)s).') + # Uncertainty quantification + uq = parser.add_argument_group('Uncertainty quantification (UQ)', 'Estimating the time function parameters STD') + uq.add_argument('--uq', '--uncertainty', dest='uncertaintyQuantification', metavar='VAL', + default='residue', choices={'residue', 'covariance', 'bootstrap'}, + help='Uncertainty quantification method (default: %(default)s).') + uq.add_argument('--ts-cov','--ts-cov-file', dest='timeSeriesCovFile', + help='4D time-series (co)variance file for time function STD calculation') + uq.add_argument('--bc', '--bootstrap-count', dest='bootstrapCount', type=int, default=400, + help='number of iterations for bootstrapping (default: %(default)s).') # time functions parser = arg_group.add_timefunc_argument(parser) @@ -138,26 +136,31 @@ def cmd_line_parse(iargs=None): """Command line parser.""" parser = create_parser() inps = parser.parse_args(args=iargs) - inps.key = readfile.read_attribute(inps.timeseries_file)['FILE_TYPE'] - if inps.key not in ['timeseries', 'giantTimeseries', 'HDFEOS']: - raise Exception('input file is {}, NOT timeseries!'.format(inps.key)) - - # check bootstrap count number - if inps.bootstrap and inps.bootstrapCount <= 1: - inps.bootstrap = False - print('bootstrap-count should be larger than 1, otherwise it does not make sense') - print('turn OFF bootstrapping and continue without it.') - - if inps.bootstrap: - print('bootstrapping is turned ON.') - if (inps.polynomial != 1 or inps.periodic or inps.step or inps.exp or inps.log): - raise ValueError('bootstrapping currently support polynomial ONLY and ONLY with the order of 1!') + + # check if input file is time series + inps.file_type = readfile.read_attribute(inps.timeseries_file)['FILE_TYPE'] + if inps.file_type not in ['timeseries', 'giantTimeseries', 'HDFEOS']: + raise Exception('input file is {}, NOT timeseries!'.format(inps.file_type)) if inps.template_file: inps = read_template2inps(inps.template_file, inps) - # Initialize the dictionaries of exp and log funcs - inps = init_exp_log_dicts(inps) + # --uq + if 
inps.uncertaintyQuantification == 'bootstrap':
+        # check 1 - bootstrap count number
+        if inps.bootstrapCount <= 1:
+            inps.uncertaintyQuantification = 'residue'
+            print('WARNING: bootstrapCount should be larger than 1!')
+            print('Change the uncertainty quantification method from bootstrap to residue, and continue.')
+
+        # check 2 - advanced time func
+        if (inps.polynomial != 1 or inps.periodic or inps.step or inps.exp or inps.log):
+            raise ValueError('bootstrapping supports polynomial with the order of 1 ONLY!')
+
+    elif inps.uncertaintyQuantification == 'covariance':
+        if not inps.timeSeriesCovFile or not os.path.isfile(inps.timeSeriesCovFile):
+            inps.uncertaintyQuantification = 'residue'
+            print('WARNING: NO time series covariance file found!')
+            print('Change the uncertainty quantification method from covariance to residue, and continue.')

     # --ref-lalo option
     if inps.ref_lalo:
@@ -169,50 +172,16 @@ def cmd_line_parse(iargs=None):
         print('input reference point in (lat, lon): ({}, {})'.format(inps.ref_lalo[0], inps.ref_lalo[1]))
         print('corresponding point in (y, x): ({}, {})'.format(inps.ref_yx[0], inps.ref_yx[1]))

-    return inps
-
-
-def init_exp_log_dicts(inps):
-    """Initialize the dictionaries of exp and log funcs
-    By trarnslating inps.exp/log into inps.expDict/logDict.
-    """
-    # --exp option: convert cmd inputs into dict format
-    inps.expDict = dict()
-    if inps.exp:
-        for exp_list in inps.exp:
-            onset_time, char_times = exp_list[0], exp_list[1:]
-            if len(onset_time) == 8:
-                if len(char_times) > 0:
-                    inps.expDict[onset_time] = np.array(char_times).astype(float).tolist()
-
-                else:
-                    msg = 'NO characteristic time found: {}\n'.format(char_times)
-                    msg += 'one or more characteristic time(s) are required for each onset date'
-                    msg += ' for the exp function, e.g.:\n'
-                    msg += '--exp 20181026 60 OR\n'
-                    msg += '--exp 20161231 80.5 200 # append as many char_times as you like!'
-                    raise ValueError(msg)
-            else:
-                raise ValueError('input onset time is NOT in YYYYMMDD format: {}'.format(onset_time))
-
-    # --log option: convert cmd inputs into dict format
-    inps.logDict = dict()
-    if inps.log:
-        for log_list in inps.log:
-            onset_time, char_times = log_list[0], log_list[1:]
-            if len(onset_time) == 8:
-                if len(char_times) > 0:
-                    inps.logDict[onset_time] = np.array(char_times).astype(float).tolist()
-
-                else:
-                    msg = 'NO characteristic time found: {}\n'.format(char_times)
-                    msg += 'one or more characteristic time(s) are required for each onset date'
-                    msg += ' for the log function, e.g.:\n'
-                    msg += '--exp 20181026 60 OR\n'
-                    msg += '--exp 20161231 80.5 200 # append as many char_times as you like!'
-                    raise ValueError(msg)
-            else:
-                raise ValueError('input onset time is NOT in YYYYMMDD format: {}'.format(onset_time))
+    # --output
+    if not inps.outfile:
+        # get suffix
+        ts_file_base = os.path.splitext(os.path.basename(inps.timeseries_file))[0]
+        if ts_file_base in ['timeseriesRg', 'timeseriesAz']:
+            suffix = ts_file_base.split('timeseries')[-1]
+        else:
+            suffix = ''
+        # compose default output filename
+        inps.outfile = f'velocity{suffix}.h5'

     return inps

@@ -223,23 +192,35 @@ def read_template2inps(template_file, inps=None):
         inps = cmd_line_parse()
     iDict = vars(inps)
     print('read options from template file: '+os.path.basename(template_file))
-    template = readfile.read_template(inps.template_file)
+    template = readfile.read_template(inps.template_file, skip_chars=['[', ']'])
     template = ut.check_template_auto_value(template)

     # Read template option
-    prefix = 'mintpy.velocity.'
+    prefix = 'mintpy.timeFunc.'
keyList = [i for i in list(iDict.keys()) if prefix+i in template.keys()] for key in keyList: value = template[prefix+key] - if key in ['bootstrap']: - iDict[key] = value if value: if key in ['startDate', 'endDate']: iDict[key] = ptime.yyyymmdd(value) + elif key in ['excludeDate']: - value = value.replace('[','').replace(']','').replace(',', ' ') - iDict[key] = ptime.yyyymmdd(value.split()) - elif key in ['bootstrapCount']: + iDict[key] = ptime.yyyymmdd(value.split(',')) + + elif key in ['periodic']: + iDict[key] = [float(x) for x in value.replace(';',',').split(',')] + + elif key in ['step']: + iDict[key] = value.replace(';',',').split(',') + + elif key in ['exp', 'log']: + value = value.replace('/',';').replace('|',';') + iDict[key] = [x.split(',') for x in value.split(';')] + + elif key in ['uncertaintyQuantification', 'timeSeriesCovFile']: + iDict[key] = value + + elif key in ['polynomial', 'bootstrapCount']: iDict[key] = int(value) key = 'mintpy.compute.maxMemory' @@ -282,181 +263,93 @@ def run_or_skip(inps): ############################################################################ -def read_exclude_date(inps, dateListAll): - # Merge ex_date/startDate/endDate into ex_date - yy_list_all = ptime.yyyymmdd2years(dateListAll) - exDateList = [] - - # ex_date - exDateList += ptime.read_date_list(list(inps.excludeDate), date_list_all=dateListAll) - if exDateList: - print('exclude date:'+str(exDateList)) - - # startDate - if inps.startDate: - print('start date: '+inps.startDate) - yy_min = ptime.yyyymmdd2years(ptime.yyyymmdd(inps.startDate)) - for i in range(len(dateListAll)): - date = dateListAll[i] - if yy_list_all[i] < yy_min and date not in exDateList: - print(' remove date: '+date) - exDateList.append(date) - - # endDate - if inps.endDate: - print('end date: '+inps.endDate) - yy_max = ptime.yyyymmdd2years(ptime.yyyymmdd(inps.endDate)) - for i in range(len(dateListAll)): - date = dateListAll[i] - if yy_list_all[i] > yy_max and date not in exDateList: - print(' remove date: '+date) - exDateList.append(date) - exDateList = sorted(list(set(exDateList))) - return exDateList - - def read_date_info(inps): """Read dates used in the estimation and its related info. 
+
+    Parameters: inps - Namespace
-    Returns:    inps - Namespace
+    Returns:    inps - Namespace, adding the following new fields:
+                       date_list - list of str, dates used for estimation
+                       dropDate  - 1D np.ndarray of bool, in size of all available dates
     """
-    if inps.key == 'timeseries':
-        tsobj = timeseries(inps.timeseries_file)
-    elif inps.key == 'giantTimeseries':
-        tsobj = giantTimeseries(inps.timeseries_file)
-    elif inps.key == 'HDFEOS':
-        tsobj = HDFEOS(inps.timeseries_file)
-    tsobj.open()
-    inps.excludeDate = read_exclude_date(inps, tsobj.dateList)
-
-    # exclude dates without obs data [for offset time-series only for now]
+    if inps.file_type == 'timeseries':
+        ts_obj = timeseries(inps.timeseries_file)
+    elif inps.file_type == 'giantTimeseries':
+        ts_obj = giantTimeseries(inps.timeseries_file)
+    elif inps.file_type == 'HDFEOS':
+        ts_obj = HDFEOS(inps.timeseries_file)
+    else:
+        raise ValueError(f'Un-recognized time series file type: {inps.file_type}')
+    ts_obj.open()
+
+    # exclude dates - user inputs
+    ex_date_list = ptime.get_exclude_date_list(
+        date_list=ts_obj.dateList,
+        start_date=inps.startDate,
+        end_date=inps.endDate,
+        exclude_date=inps.excludeDate)
+
+    # exclude dates - no obs data [for offset time-series only for now]
     if os.path.basename(inps.timeseries_file).startswith('timeseriesRg'):
-        date_list = timeseries(inps.timeseries_file).get_date_list()
         data, atr = readfile.read(inps.timeseries_file)
         flag = np.nansum(data, axis=(1,2)) == 0
-        flag[date_list.index(atr['REF_DATE'])] = 0
+        flag[ts_obj.dateList.index(atr['REF_DATE'])] = 0
         if np.sum(flag) > 0:
             print('number of empty dates to exclude: {}'.format(np.sum(flag)))
-            inps.excludeDate += np.array(date_list)[flag].tolist()
-            inps.excludeDate = sorted(list(set(inps.excludeDate)))
-
-    # Date used for estimation inps.dateList
-    inps.dateList = [i for i in tsobj.dateList if i not in inps.excludeDate]
-    inps.numDate = len(inps.dateList)
-    inps.startDate = inps.dateList[0]
-    inps.endDate = inps.dateList[-1]
+            ex_date_list += np.array(ts_obj.dateList)[flag].tolist()
+            ex_date_list = sorted(list(set(ex_date_list)))
+
+    # dates used for estimation - inps.date_list
+    inps.date_list = [i for i in ts_obj.dateList if i not in ex_date_list]
+
+    # flag array for ts data reading
+    inps.dropDate = np.array([i not in ex_date_list for i in ts_obj.dateList], dtype=np.bool_)
+
+    # print out msg
     print('-'*50)
-    print('dates from input file: {}\n{}'.format(tsobj.numDate, tsobj.dateList))
+    print('dates from input file: {}\n{}'.format(ts_obj.numDate, ts_obj.dateList))
     print('-'*50)
-    if len(inps.dateList) == len(tsobj.dateList):
-        print('using all dates to calculate the velocity')
+    if len(inps.date_list) == len(ts_obj.dateList):
+        print('using all dates to calculate the time function')
     else:
-        print('dates used to estimate the velocity: {}\n{}'.format(inps.numDate, inps.dateList))
+        print(f'dates used to estimate the time function: {len(inps.date_list)}\n{inps.date_list}')
     print('-'*50)

-    # flag array for ts data reading
-    inps.dropDate = np.array([i not in inps.excludeDate for i in tsobj.dateList], dtype=np.bool_)
-
-    # output file name
-    if not inps.outfile:
-        fbase = os.path.splitext(os.path.basename(inps.timeseries_file))[0]
-        outname = 'velocity'
-        if inps.key == 'giantTimeseries':
-            prefix = os.path.basename(inps.timeseries_file).split('PARAMS')[0]
-            outname = prefix + outname
-        elif fbase in ['timeseriesRg', 'timeseriesAz']:
-            suffix = fbase.split('timeseries')[-1]
-            outname = outname + suffix
-        outname += '.h5'
-        inps.outfile = outname

     return inps


-def read_inps2model(inps,
date_list=None, print_msg=True): - """get model info from inps""" - # check model date limits - if not date_list: - date_list = inps.dateList - dmin, dmax = date_list[0], date_list[-1] - ymin = ptime.yyyymmdd2years(dmin) - ymax = ptime.yyyymmdd2years(dmax) - - if inps.step: - for d_step in inps.step: - y_step = ptime.yyyymmdd2years(d_step) - if not (ymin < y_step < ymax): - raise ValueError(f'input step date "{d_step}" exceed date list min/max: {dmin}, {dmax}') - - if inps.expDict: - for d_onset in inps.expDict.keys(): - y_onset = ptime.yyyymmdd2years(d_onset) - if y_onset >= ymax: - raise ValueError(f'input exp onset date "{d_onset}" >= the last date: {dmax}') - - if inps.logDict: - for d_onset in inps.logDict.keys(): - y_onset = ptime.yyyymmdd2years(d_onset) - if y_onset >= ymax: - raise ValueError(f'input log onset date "{d_onset}" >= the last date: {dmax}') - - model = dict() - model['polynomial'] = inps.polynomial - model['periodic'] = inps.periodic - model['step'] = inps.step - model['exp'] = inps.expDict - model['log'] = inps.logDict - - # msg - if print_msg: - print('estimate deformation model with the following assumed time functions:') - for key, value in model.items(): - print(' {:<10} : {}'.format(key, value)) - - if 'polynomial' not in model.keys(): - raise ValueError('linear/polynomial model is NOT included! Are you sure?!') - - # number of parameters - num_param = ( - model['polynomial'] + 1 - + len(model['periodic']) * 2 - + len(model['step']) - + sum([len(val) for key, val in model['exp'].items()]) - + sum([len(val) for key, val in model['log'].items()]) - ) - - return model, num_param - - -############################################################################ def run_timeseries2time_func(inps): - # basic info + # basic file info atr = readfile.read_attribute(inps.timeseries_file) length, width = int(atr['LENGTH']), int(atr['WIDTH']) - num_date = inps.numDate - dates = np.array(inps.dateList) + + # read date info + inps = read_date_info(inps) + num_date = len(inps.date_list) + dates = np.array(inps.date_list) seconds = atr.get('CENTER_LINE_UTC', 0) # use the 1st date as reference if not found, e.g. 
timeseriesResidual.h5 file if "REF_DATE" not in atr.keys() and not inps.ref_date: - inps.ref_date = inps.dateList[0] + inps.ref_date = inps.date_list[0] print('WARNING: No REF_DATE found in time-series file or input in command line.') - print(' Set "--ref-date {}" and continue.'.format(inps.dateList[0])) + print(' Set "--ref-date {}" and continue.'.format(inps.date_list[0])) - # get deformation model from parsers - model, num_param = read_inps2model(inps) + # get deformation model from inputs + model = time_func.inps2model(inps, date_list=inps.date_list) + num_param = time_func.get_num_param(model) ## output preparation # time_func_param: attributes + date0, date1 = inps.date_list[0], inps.date_list[-1] atrV = dict(atr) atrV['FILE_TYPE'] = 'velocity' atrV['UNIT'] = 'm/year' - atrV['START_DATE'] = inps.dateList[0] - atrV['END_DATE'] = inps.dateList[-1] - atrV['DATE12'] = '{}_{}'.format(inps.dateList[0], inps.dateList[-1]) + atrV['START_DATE'] = date0 + atrV['END_DATE'] = date1 + atrV['DATE12'] = f'{date0}_{date1}' if inps.ref_yx: atrV['REF_Y'] = inps.ref_yx[0] atrV['REF_X'] = inps.ref_yx[1] @@ -483,19 +376,19 @@ def run_timeseries2time_func(inps): if key in atrR.keys(): atrR.pop(key) # prepare ds_name_dict manually, instead of using ref_file, to support --ex option - date_len = len(inps.dateList[0]) + date_digit = len(inps.date_list[0]) ds_name_dict = { - "date" : [np.dtype(f'S{date_len}'), (num_date,), np.array(inps.dateList, dtype=np.string_)], - "timeseries" : [np.float32, (num_date, length, width), None] + "date" : [np.dtype(f'S{date_digit}'), (num_date,), np.array(inps.date_list, np.string_)], + "timeseries" : [np.float32, (num_date, length, width), None] } writefile.layout_hdf5(inps.res_file, ds_name_dict=ds_name_dict, metadata=atrR) - + ## estimation # calc number of box based on memory limit memoryAll = (num_date + num_param * 2 + 2) * length * width * 4 - if inps.bootstrap: + if inps.uncertaintyQuantification == 'bootstrap': memoryAll += inps.bootstrapCount * num_param * length * width * 4 num_box = int(np.ceil(memoryAll * 3 / (inps.maxMemory * 1024**3))) box_list = cluster.split_box2sub_boxes(box=(0, 0, width, length), @@ -525,7 +418,7 @@ def run_timeseries2time_func(inps): # for file w/o reference info. e.g. ERA5.h5 if inps.ref_date: print('referecing to date: {}'.format(inps.ref_date)) - ref_ind = inps.dateList.index(inps.ref_date) + ref_ind = inps.date_list.index(inps.ref_date) ts_data -= np.tile(ts_data[ref_ind, :, :], (ts_data.shape[0], 1, 1)) if inps.ref_yx: @@ -535,26 +428,26 @@ def run_timeseries2time_func(inps): ts_data -= np.tile(ref_val.reshape(ts_data.shape[0], 1, 1), (1, ts_data.shape[1], ts_data.shape[2])) - ts_data = ts_data[inps.dropDate, :, :].reshape(inps.numDate, -1) + ts_data = ts_data[inps.dropDate, :, :].reshape(num_date, -1) if atrV['UNIT'] == 'mm': ts_data *= 1./1000. 
ts_cov = None - if inps.ts_cov_file: - print(f'reading time-series covariance matrix from file {inps.ts_cov_file} ...') - ts_cov = readfile.read(inps.ts_cov_file, box=box)[0] + if inps.uncertaintyQuantification == 'covariance': + print(f'reading time-series covariance matrix from file {inps.timeSeriesCovFile} ...') + ts_cov = readfile.read(inps.timeSeriesCovFile, box=box)[0] if len(ts_cov.shape) == 4: # full covariance matrix in 4D --> 3D - if inps.numDate < ts_cov.shape[0]: + if num_date < ts_cov.shape[0]: ts_cov = ts_cov[inps.dropDate, :, :, :] ts_cov = ts_cov[:, inps.dropDate, :, :] - ts_cov = ts_cov.reshape(inps.numDate, inps.numDate, -1) + ts_cov = ts_cov.reshape(num_date, num_date, -1) elif len(ts_cov.shape) == 3: # diaginal variance matrix in 3D --> 2D - if inps.numDate < ts_cov.shape[0]: + if num_date < ts_cov.shape[0]: ts_cov = ts_cov[inps.dropDate, :, :] - ts_cov = ts_cov.reshape(inps.numDate, -1) + ts_cov = ts_cov.reshape(num_date, -1) ## set zero value to a fixed small value to avoid divide by zero #epsilon = 1e-5 @@ -586,7 +479,7 @@ def run_timeseries2time_func(inps): ### estimation / solve Gm = d print('estimating time functions via linalg.lstsq ...') - if inps.bootstrap: + if inps.uncertaintyQuantification == 'bootstrap': ## option 1 - least squares with bootstrapping # Bootstrapping is a resampling method which can be used to estimate properties # of an estimator. The method relies on independently sampling the data set with @@ -600,7 +493,7 @@ def run_timeseries2time_func(inps): prog_bar = ptime.progressBar(maxValue=inps.bootstrapCount) for i in range(inps.bootstrapCount): # bootstrap resampling - boot_ind = rng.choice(inps.numDate, size=inps.numDate, replace=True) + boot_ind = rng.choice(num_date, size=num_date, replace=True) boot_ind.sort() # estimation @@ -620,14 +513,14 @@ def run_timeseries2time_func(inps): del m_boot # get design matrix to calculate the residual time series - G = time_func.get_design_matrix4time_func(inps.dateList, model=model, ref_date=inps.ref_date, seconds=seconds) + G = time_func.get_design_matrix4time_func(inps.date_list, model=model, ref_date=inps.ref_date, seconds=seconds) else: ## option 2 - least squares with uncertainty propagation G, m[:, mask], e2 = time_func.estimate_time_func( model=model, - date_list=inps.dateList, + date_list=inps.date_list, dis_ts=ts_data, seconds=seconds) #del ts_data @@ -655,7 +548,7 @@ def run_timeseries2time_func(inps): # = (e_hat.T * e_hat) / (N - P) # which is the equation (10) from Fattahi and Amelung (2015, JGR) - if ts_cov is not None: + if inps.uncertaintyQuantification == 'covariance': # option 2.1 - linear propagation from time-series (co)variance matrix # TO DO: save the full covariance matrix of the time function parameters # only the STD is saved right now @@ -682,7 +575,7 @@ def run_timeseries2time_func(inps): prog_bar.update(i+1, every=200, suffix='{}/{} pixels'.format(i+1, num_pixel2inv)) prog_bar.close() - else: + elif inps.uncertaintyQuantification == 'residue': # option 2.3 - assume obs errors following normal dist. 
in time print('estimating time function STD from time-series fitting residual ...') G_inv = linalg.inv(np.dot(G.T, G)) @@ -794,7 +687,7 @@ def model2hdf5_dataset(model, m=None, m_std=None, mask=None, ds_shape=None): elif period == 0.5: dsNames = [f'semiAnnual{x}' for x in dsNameSuffixes] else: - dsNames = [f'periodY{period}{x}' for x in dsNameSuffixes] + dsNames = [f'period{period}Y{x}' for x in dsNameSuffixes] # calculate the amplitude and phase of the periodic signal # following equation (9-10) in Minchew et al. (2017, JGR) @@ -840,7 +733,7 @@ def model2hdf5_dataset(model, m=None, m_std=None, mask=None, ds_shape=None): for exp_onset in model['exp'].keys(): for exp_tau in model['exp'][exp_onset]: # dataset name - dsName = 'exp{}Tau{}'.format(exp_onset, exp_tau) + dsName = 'exp{}Tau{}D'.format(exp_onset, exp_tau) # assign ds_dict if m is not None: @@ -862,7 +755,7 @@ def model2hdf5_dataset(model, m=None, m_std=None, mask=None, ds_shape=None): for log_onset in model['log'].keys(): for log_tau in model['log'][log_onset]: # dataset name - dsName = 'log{}Tau{}'.format(log_onset, log_tau) + dsName = 'log{}Tau{}D'.format(log_onset, log_tau) # assign ds_dict if m is not None: @@ -886,19 +779,17 @@ def main(iargs=None): inps = cmd_line_parse(iargs) start_time = time.time() - inps = read_date_info(inps) - # --update option if inps.update_mode and run_or_skip(inps) == 'skip': return inps.outfile + # run run_timeseries2time_func(inps) # time info m, s = divmod(time.time()-start_time, 60) print('time used: {:02.0f} mins {:02.1f} secs.'.format(m, s)) - - return inps.outfile + return ############################################################################ diff --git a/mintpy/tropo_pyaps3.py b/mintpy/tropo_pyaps3.py index 22ee2f629..53d93c232 100755 --- a/mintpy/tropo_pyaps3.py +++ b/mintpy/tropo_pyaps3.py @@ -95,8 +95,7 @@ def create_parser(): - parser = argparse.ArgumentParser(description='Tropospheric correction using weather models\n' + - ' PyAPS is used to download and calculate the delay for each acquisition.', + parser = argparse.ArgumentParser(description='Tropospheric correction using weather models via PyAPS', formatter_class=argparse.RawTextHelpFormatter, epilog=REFERENCE+'\n'+DATA_INFO+'\n'+EXAMPLE) @@ -556,7 +555,7 @@ def check_pyaps_account_config(tropo_model): for opt in SECTION_OPTS[section]: val = cfg.get(section, opt) if not val or val in default_values: - raise ValueError('PYAPS: No account info found for {} in {} section in file: {}'.format(tropo_model, section, cfg_file)) + raise ValueError(f'PYAPS: No account info found for {tropo_model} in {section} section in file: {cfg_file}') return diff --git a/mintpy/utils/arg_group.py b/mintpy/utils/arg_group.py index 68a194965..f8801f84d 100644 --- a/mintpy/utils/arg_group.py +++ b/mintpy/utils/arg_group.py @@ -36,11 +36,14 @@ def add_data_disp_argument(parser): data.add_argument('--noflip', dest='auto_flip', action='store_false', help='turn off auto flip for radar coordinate file') - data.add_argument('--nmli','--num-multilook','--multilook-num', dest='multilook_num', type=int, default=1, metavar='NUM', - help='multilook data in X and Y direction with a factor for display (default: %(default)s).') + data.add_argument('--nmli','--num-multilook','--multilook-num', dest='multilook_num', + type=int, default=1, metavar='NUM', + help='multilook data in X and Y direction with a factor for display ' + '(default: %(default)s).') data.add_argument('--nomultilook', '--no-multilook', dest='multilook', action='store_false', help='do not multilook, 
for high quality display. \n' - 'If multilook is True and multilook_num=1, multilook_num will be estimated automatically.\n' + 'If multilook is True and multilook_num=1, ' + 'multilook_num will be estimated automatically.\n' 'Useful when displaying big datasets.') data.add_argument('--alpha', dest='transparency', type=float, help='Data transparency. \n' @@ -65,13 +68,16 @@ def add_dem_argument(parser): 'Set to 0.0 for no smoothing; (default: %(default)s).') dem.add_argument('--contour-step', dest='dem_contour_step', metavar='NUM', type=float, default=200.0, help='Background topography contour step in meters (default: %(default)s).') - dem.add_argument('--contour-linewidth', dest='dem_contour_linewidth', metavar='NUM', type=float, default=0.5, + dem.add_argument('--contour-lw','--contour-linewidth', dest='dem_contour_linewidth', + metavar='NUM', type=float, default=0.5, help='Background topography contour linewidth (default: %(default)s).') dem.add_argument('--shade-az', dest='shade_azdeg', type=float, default=315., metavar='DEG', - help='The azimuth (0-360, degrees clockwise from North) of the light source (default: %(default)s).') + help='The azimuth (0-360, degrees clockwise from North) of the light source ' + '(default: %(default)s).') dem.add_argument('--shade-alt', dest='shade_altdeg', type=float, default=45., metavar='DEG', - help='The altitude (0-90, degrees up from horizontal) of the light source (default: %(default)s).') + help='The altitude (0-90, degrees up from horizontal) of the light source ' + '(default: %(default)s).') dem.add_argument('--shade-min', dest='shade_min', type=float, default=-4000., metavar='MIN', help='The min height in m of colormap of shaded relief topography (default: %(default)s).') @@ -103,7 +109,7 @@ def add_figure_argument(parser): # colormap fig.add_argument('-c', '--colormap', dest='colormap', - help='colormap used for display, i.e. jet, cmy, RdBu, hsv, jet_r, temperature, viridis, etc.\n' + help='colormap used for display, i.e. 
jet, cmy, RdBu, hsv, jet_r, viridis, etc.\n' 'More at https://mintpy.readthedocs.io/en/latest/api/colormaps/') fig.add_argument('--cm-lut','--cmap-lut', dest='cmap_lut', type=int, default=256, metavar='NUM', help='number of increment of colormap lookup table (default: %(default)s).') @@ -158,7 +164,8 @@ def add_figure_argument(parser): help='disable automatic tight layout for multiple subplots') fig.add_argument('--coord', dest='fig_coord', choices=['radar', 'geo'], default='geo', - help='Display in radar/geo coordination system (for geocoded file only; default: %(default)s).') + help='Display in radar/geo coordination system ' + '(for geocoded file only; default: %(default)s).') fig.add_argument('--animation', action='store_true', help='enable animation mode') @@ -176,12 +183,15 @@ def add_gps_argument(parser): help='Show GPS site name') gps.add_argument('--gps-ms', dest='gps_marker_size', type=float, default=6, help='Plot GPS value as scatter in size of ms**2 (default: %(default)s).') - gps.add_argument('--gps-comp', dest='gps_component', choices={'enu2los', 'hz2los', 'up2los', 'horz', 'vert'}, + gps.add_argument('--gps-comp', dest='gps_component', + choices={'enu2los', 'hz2los', 'up2los', 'horz', 'vert'}, help='Plot GPS in color indicating deformation velocity direction') gps.add_argument('--gps-redo', dest='gps_redo', action='store_true', - help='Re-calculate GPS observations in LOS direction, instead of read from existing CSV file.') + help='Re-calculate GPS observations in LOS direction, ' + 'instead of read from existing CSV file.') gps.add_argument('--ref-gps', dest='ref_gps_site', type=str, help='Reference GPS site') - gps.add_argument('--ex-gps', dest='ex_gps_sites', type=str, nargs='*', help='Exclude GPS sites, require --gps-comp.') + gps.add_argument('--ex-gps', dest='ex_gps_sites', type=str, nargs='*', + help='Exclude GPS sites, require --gps-comp.') gps.add_argument('--gps-start-date', dest='gps_start_date', type=str, metavar='YYYYMMDD', help='start date of GPS data, default is date of the 1st SAR acquisition') @@ -189,9 +199,10 @@ def add_gps_argument(parser): help='start date of GPS data, default is date of the last SAR acquisition') gps.add_argument('--horz-az','--hz-az', dest='horz_az_angle', type=float, default=-90., help='Azimuth angle (anti-clockwise from the north) of the horizontal movement in degrees\n' - 'E.g.: -90. for east direction [default]\n' - ' 0. for north direction\n' - 'Set to the azimuth angle of the strike-slip fault to show the fault-parallel displacement.') + 'E.g.: -90. for east direction [default]\n' + ' 0. 
for north direction\n' + 'Set to the azimuth angle of the strike-slip fault to ' + 'show the fault-parallel displacement.') return parser @@ -257,8 +268,8 @@ def add_map_argument(parser): '--scalebar 0.2 0.8 0.8 #for upper right corner\n') mapg.add_argument('--noscalebar', '--nosbar', dest='disp_scalebar', action='store_false', help='do not display scale bar.') - mapg.add_argument('--scalebar-pad','--sbar-pad', dest='scalebar_pad', type=float, - default=0.05, help='scale bar label pad in ratio of scalebar width (default: %(default)s).') + mapg.add_argument('--scalebar-pad','--sbar-pad', dest='scalebar_pad', type=float, default=0.05, + help='scale bar label pad in ratio of scalebar width (default: %(default)s).') return parser @@ -303,26 +314,29 @@ def add_point_argument(parser): return parser -def add_reference_argument(parser): +def add_reference_argument(parser, plot=True): """Argument group parser for (spatial / temporal) referencing options""" - ref = parser.add_argument_group('Reference', 'Show / Modify reference in time and space for display') - # reference date - ref.add_argument('--ref-date', dest='ref_date', metavar='DATE', - help='Change reference date for display') - # reference pixel + goal = 'display' if plot else 'estimation' + ref = parser.add_argument_group('Reference date / point', + f'Modify reference in time / space for {goal}') + + # reference date / pixel + ref.add_argument('--ref-date', dest='ref_date', metavar='DATE', + help=f'Change reference date for {goal}') ref.add_argument('--ref-lalo', dest='ref_lalo', metavar=('LAT', 'LON'), type=float, nargs=2, - help='Change reference point LAT LON for display') + help=f'Change reference point in LAT/LON for {goal}') ref.add_argument('--ref-yx', dest='ref_yx', metavar=('Y', 'X'), type=int, nargs=2, - help='Change reference point Y X for display') - - # reference pixel style - ref.add_argument('--noreference', dest='disp_ref_pixel', - action='store_false', help='do not show reference point') - ref.add_argument('--ref-marker', dest='ref_marker', default='ks', - help='marker of reference pixel (default: %(default)s).') - ref.add_argument('--ref-size', dest='ref_marker_size', metavar='NUM', type=int, default=6, - help='marker size of reference point (default: %(default)s).') + help=f'Change reference point in Y/X for {goal}') + + # reference pixel - plotting style + if plot: + ref.add_argument('--noreference', dest='disp_ref_pixel', + action='store_false', help='do not show reference point') + ref.add_argument('--ref-marker', dest='ref_marker', default='ks', + help='marker of reference pixel (default: %(default)s).') + ref.add_argument('--ref-size', dest='ref_marker_size', metavar='NUM', type=int, default=6, + help='marker size of reference point (default: %(default)s).') return parser @@ -346,44 +360,53 @@ def add_save_argument(parser): def add_subset_argument(parser, geo=True): """Argument group parser for subset options""" sub = parser.add_argument_group('Subset', 'Display dataset in subset range') - sub.add_argument('--sub-x','--subx','--subset-x', dest='subset_x', type=int, nargs=2, metavar=('XMIN', 'XMAX'), - help='subset display in x/cross-track/range direction') - sub.add_argument('--sub-y','--suby','--subset-y', dest='subset_y', type=int, nargs=2, metavar=('YMIN', 'YMAX'), - help='subset display in y/along-track/azimuth direction') + sub.add_argument('--sub-x','--subx','--subset-x', dest='subset_x', type=int, nargs=2, + metavar=('XMIN', 'XMAX'), help='subset display in x/cross-track/range direction') + 
sub.add_argument('--sub-y','--suby','--subset-y', dest='subset_y', type=int, nargs=2, + metavar=('YMIN', 'YMAX'), help='subset display in y/along-track/azimuth direction') if geo: - sub.add_argument('--sub-lat','--sublat','--subset-lat', dest='subset_lat', type=float, nargs=2, metavar=('LATMIN', 'LATMAX'), - help='subset display in latitude') - sub.add_argument('--sub-lon','--sublon','--subset-lon', dest='subset_lon', type=float, nargs=2, metavar=('LONMIN', 'LONMAX'), - help='subset display in longitude') + sub.add_argument('--sub-lat','--sublat','--subset-lat', dest='subset_lat', type=float, nargs=2, + metavar=('LATMIN', 'LATMAX'), help='subset display in latitude') + sub.add_argument('--sub-lon','--sublon','--subset-lon', dest='subset_lon', type=float, nargs=2, + metavar=('LONMIN', 'LONMAX'), help='subset display in longitude') return parser def add_timefunc_argument(parser): """Argument group parser for time functions""" model = parser.add_argument_group('Deformation Model', 'A suite of time functions') + model.add_argument('--poly', '--polynomial', '--poly-order', dest='polynomial', type=int, default=1, - help='a polynomial function with the input degree (default: %(default)s). E.g.:\n' + - '--poly 1 # linear\n' + - '--poly 2 # quadratic\n' + - '--poly 3 # cubic\n') + help='a polynomial function with the input degree (default: %(default)s). E.g.:\n' + '--poly 1 # linear\n' + '--poly 2 # quadratic\n' + '--poly 3 # cubic\n') + model.add_argument('--periodic', '--period', '--peri', dest='periodic', type=float, nargs='+', default=[], - help='periodic function(s) with period in decimal years (default: %(default)s). E.g.:\n' + - '--periodic 1.0 # an annual cycle\n' + - '--periodic 1.0 0.5 # an annual cycle plus a semi-annual cycle\n') + help='periodic function(s) with period in decimal years (default: %(default)s). E.g.:\n' + '--periodic 1.0 # an annual cycle\n' + '--periodic 1.0 0.5 # an annual cycle plus a semi-annual cycle\n') + model.add_argument('--step', dest='step', type=str, nargs='+', default=[], - help='step function(s) at YYYYMMDD (default: %(default)s). E.g.:\n' + - '--step 20061014 # coseismic step at 2006-10-14T00:00\n' + - '--step 20110311 20120928T1733 # coseismic steps at 2011-03-11T00:00 and 2012-09-28T17:33\n') + help='step function(s) at YYYYMMDD (default: %(default)s). E.g.:\n' + '--step 20061014 # coseismic step at 2006-10-14T00:00\n' + '--step 20110311 20120928T1733 # coseismic steps at 2011-03-11T00:00 and 2012-09-28T17:33\n') + model.add_argument('--exp', '--exponential', dest='exp', type=str, nargs='+', action='append', default=[], - help='exponential function(s) at YYYYMMDD with characteristic time(s) tau in decimal days (default: %(default)s). E.g.:\n' + - '--exp 20181026 60 # exp onset at 2006-10-14T00:00 with tau=60 days\n' + - '--exp 20181026T1355 60 120 # exp onset at 2006-10-14T13:55 with tau=60 days overlayed by a tau=145 days\n' + - '--exp 20161231 80.5 --exp 20190125 100 # 1st exp onset at 2011-03-11 with tau=80.5 days and\n' + - ' # 2nd exp onset at 2012-09-28 with tau=100 days') + help='exponential function(s) defined by onset time(s) and characteristic time(s) tau in days (default: %(default)s). 
E.g.:\n' + '--exp 20181026 60 # one exp w/ onset at 2018-10-26 w/ tau=60 days\n' + '--exp 20181026T1355 60 120 # 1st exp w/ onset at 2018-10-26T13:55 w/ tau=60 days\n' + ' # 2nd exp w/ onset at 2018-10-26T13:55 w/ tau=120 days\n' + '--exp 20161231 80 --exp 20190125 100 # 1st exp w/ onset at 2016-12-31 w/ tau=80 days\n' + ' # 2nd exp w/ onset at 2019-01-25 w/ tau=100 days') + model.add_argument('--log', '--logarithmic', dest='log', type=str, nargs='+', action='append', default=[], - help='logarithmic function(s) at YYYYMMDD with characteristic time(s) tau in decimal days (default: %(default)s). E.g.:\n' + - '--log 20181016 90.4 # log onset at 2006-10-14T00:00 with tau=90.4 days\n' + - '--log 20181016T1733 90.4 240 # log onset at 2006-10-14T17:33 with tau=90.4 days overlayed by a tau=240 days\n' + - '--log 20161231 60 --log 20190125 180.2 # 1st log onset at 2011-03-11 with tau=60 days and\n' + - ' # 2nd log onset at 2012-09-28 with tau=180.2 days\n') + help='logarithmic function(s) defined by onset time(s) and characteristic time(s) tau in days (default: %(default)s). E.g.:\n' + '--log 20181016 90 # one log w/ onset at 2018-10-16 w/ tau=90 days\n' + '--log 20181016T1733 90 240 # 1st log w/ onset at 2018-10-16T17:33 w/ tau=90 days\n' + ' # 2nd log w/ onset at 2018-10-16T17:33 w/ tau=240 days\n' + '--log 20161231 60 --log 20190125 180 # 1st log w/ onset at 2016-12-31 w/ tau=60 days\n' + ' # 2nd log w/ onset at 2019-01-25 w/ tau=180 days\n') + return parser + diff --git a/mintpy/utils/ptime.py b/mintpy/utils/ptime.py index 82c0dba44..c359167f4 100644 --- a/mintpy/utils/ptime.py +++ b/mintpy/utils/ptime.py @@ -374,6 +374,49 @@ def read_date_list(date_list_in, date_list_all=None): return date_list_out +def get_exclude_date_list(date_list, start_date=None, end_date=None, exclude_date=None): + """Get exclude date list from input options (start/end/ex_date). 
diff --git a/mintpy/utils/readfile.py b/mintpy/utils/readfile.py
index 987a2e884..0d39c646b 100644
--- a/mintpy/utils/readfile.py
+++ b/mintpy/utils/readfile.py
@@ -1204,91 +1204,50 @@ def standardize_metadata(metaDictIn, standardKeys=None):
 
 
 #########################################################################
-def read_template(fname, delimiter='=', print_msg=True):
-    """Reads the template file into a python dictionary structure.
-    Parameters: fname : str
-                    full path to the template file
-                delimiter : str
-                    string to separate the key and value
-                print_msg : bool
-                    print message or not
-    Returns:    template_dict : dict
-                    file content
-    Examples:
-        tmpl = read_template(KyushuT424F610_640AlosA.template)
-        tmpl = read_template(R1_54014_ST5_L0_F898.000.pi, ':')
+def read_template(fname, delimiter='=', skip_chars=None):
+    """Read the template file into a dictionary structure.
+
+    Parameters: fname      - str, full path to the template file
+                delimiter  - str, string to separate the key and value
+                skip_chars - list of str, skip certain characters in values
+    Returns:    template   - dict, file content
+    Examples:   template = read_template('KyushuAlosAT424.txt')
+                template = read_template('smallbaselineApp.cfg')
     """
-    template_dict = {}
-
-    # insarmaps: the below logic for plotattributes object can be made much more simple
-    #            if we assume that any plot attribute coming after a > belongs to the
-    #            same object. Must Ask Falk and Yunjun if we can assume this to eliminate
-    #            all these conditionals
-    plotAttributeDict = {}
-    insidePlotObject = False
-    plotAttributes = []
-
-    def is_plot_attribute(attribute):
-        tokens = attribute.split(".")
-        if tokens is None:
-            return False
-        return tokens[0] == "plot" and len(tokens) > 1
-
-    # read input text file or string
-    lines = None
+
+    if skip_chars and isinstance(skip_chars, str):
+        skip_chars = [skip_chars]
+
+    # read input text file / string
     if os.path.isfile(fname):
         with open(fname, 'r') as f:
             lines = f.readlines()
     elif isinstance(fname, str):
         lines = fname.split('\n')
+    lines = [x.strip() for x in lines]
 
-    # loop to parser/read each line
+    # parse line by line
+    template = {}
     for line in lines:
-        line = line.strip()
         # split on the 1st occurrence of delimiter
         c = [i.strip() for i in line.split(delimiter, 1)]
 
         # ignore commented lines or those without variables
-        if len(c) < 2 or line.startswith(('%', '#', '!')):
-            # next
-
-            # insarmaps:
-            if line.startswith(">"):
-                plotAttributeDict = {}
-                insidePlotObject = True
-            # otherwise, if previously inside attributes object, we are now outside
-            # unless the line is a comment
-            elif insidePlotObject and not line.startswith('%') and not line.startswith('#'):
-                # just came from being inside plot object, but now we are outside
-                insidePlotObject = False
-                plotAttributes.append(plotAttributeDict)
-
-        else:
+        if len(c) >= 2 and not line.startswith(('%', '#', '!')):
             key = c[0]
             value = str.replace(c[1], '\n', '').split("#")[0].strip()
             value = os.path.expanduser(value)  # translate ~ symbol
             value = os.path.expandvars(value)  # translate env variables
 
+            # skip certain characters by replacing them with empty str
+            if skip_chars:
+                for skip_char in skip_chars:
+                    value = value.replace(skip_char, '')
+
             if value != '':
-                template_dict[key] = value
+                template[key] = value
 
-            # insarmaps:
-            if insidePlotObject:
-                if is_plot_attribute(key):
-                    plotAttributeDict[key] = value
-                else:
-                    # just came from being inside plot object, but now we are outside
-                    insidePlotObject = False
-                    plotAttributes.append(plotAttributeDict)
-                    template_dict[key] = value
-
-    # insarmaps: what if no \n at end of file? write out last plot attributes dict
-    if insidePlotObject:
-        plotAttributes.append(plotAttributeDict)
-    if len(plotAttributes) > 0:
-        template_dict["plotAttributes"] = json.dumps(plotAttributes)
-
-    return template_dict
+    return template
 
 
 def read_roipac_rsc(fname, delimiter=' '):
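A sketch of the new skip_chars behavior (the file name is taken from the docstring example; the template line is illustrative):

    from mintpy.utils import readfile

    # a value written as '[1.0, 0.5]' in the file is returned as '1.0, 0.5'
    template = readfile.read_template('smallbaselineApp.cfg', skip_chars=['[', ']'])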
diff --git a/mintpy/utils/time_func.py b/mintpy/utils/time_func.py
index befa63df2..6cc06d507 100644
--- a/mintpy/utils/time_func.py
+++ b/mintpy/utils/time_func.py
@@ -12,25 +12,30 @@
 from mintpy.utils import ptime
 
 
+MODEL_EXAMPLE = """time function configuration:
+    model = {
+        'polynomial' : 2,                    # int, polynomial degree with 1 (linear), 2 (quadratic), 3 (cubic), etc.
+        'periodic'   : [1.0, 0.5],           # list of float, period(s) in years. 1.0 (annual), 0.5 (semiannual), etc.
+        'step'       : ['20061014'],         # list of str, date(s) in YYYYMMDD.
+        'exp'        : {'20181026': [60],    # dict, key for onset time in YYYYMMDD(THHMM) and value for char times in integer days.
+                        ...
+                        },
+        'log'        : {'20161231': [80],    # dict, key for onset time in YYYYMMDD(THHMM) and value for char times in integer days.
+                        '20190125': [100, 200],
+                        ...
+                        },
+        ...
+    }
+"""
+
+
 def estimate_time_func(model, date_list, dis_ts, ref_date=None, seconds=0):
     """Deformation model estimator, using a suite of linear, periodic, step, exponential, and logarithmic function(s).
 
     Problem setup:
         Gm = d
 
-    Parameters: model     - dict of time functions, e.g.:
-                            {'polynomial' : 2,            # int, polynomial degree with 1 (linear), 2 (quadratic), 3 (cubic), etc.
-                             'periodic'   : [1.0, 0.5],   # list of float, period(s) in years. 1.0 (annual), 0.5 (semiannual), etc.
-                             'step'       : ['20061014'], # list of str, date(s) in YYYYMMDD.
-                             'exp'        : {'20181026': [60],       # dict, key for onset time in YYYYMMDD and value for char times in days.
-                                             ...
-                                             },
-                             'log'        : {'20161231': [80.5],     # dict, key for onset time in YYYYMMDD and value for char times in days.
-                                             '20190125': [100, 200],
-                                             ...
-                                             },
-                             ...
-                             }
+    Parameters: model     - dict, time functions config, e.g. {cfg}
                 date_list - list of str, dates in YYYYMMDD format
                 dis_ts    - 2D np.ndarray, displacement observation in size of (num_date, num_pixel)
                 ref_date  - reference date from date_list
@@ -38,7 +43,7 @@ def estimate_time_func(model, date_list, dis_ts, ref_date=None, seconds=0):
     Returns:    G         - 2D np.ndarray, design matrix in size of (num_date, num_param)
                 m         - 2D np.ndarray, parameter solution in size of (num_param, num_pixel)
                 e2        - 1D np.ndarray, sum of squared residual in size of (num_pixel,)
-    """
+    """.format(cfg=MODEL_EXAMPLE)
 
     G = get_design_matrix4time_func(date_list, model, ref_date=ref_date, seconds=seconds)
 
@@ -63,30 +68,135 @@ def estimate_time_func(model, date_list, dis_ts, ref_date=None, seconds=0):
     return G, m, e2
 
 
+def inps2model(inps, date_list=None, print_msg=True):
+    """Convert time function inputs from namespace (inps) into dict object.
+
+    Parameters: inps      - namespace, parsed time function inputs. E.g.:
+                            Namespace(
+                                date_list=['20141213', '20141225', ...],  # optional
+                                polynomial=1,
+                                periodic=[1.0, 0.5],
+                                step=['20110311', '20120928T1733'],
+                                exp=[['20170910', '60', '200'], ['20171026', '200']],
+                                log=[['20170910', '60', '200'], ['20171026', '200']],
+                            )
+                date_list - list of str, date in YYYYMMDD(THHMM) format
+    Returns:    model     - dict, time function configuration, e.g. {cfg}
+    """.format(cfg=MODEL_EXAMPLE)
+
+    if not date_list:
+        if hasattr(inps, 'date_list'):
+            date_list = inps.date_list
+        else:
+            raise ValueError('date_list NOT given or found in inps!')
+
+    dmin, dmax = date_list[0], date_list[-1]
+    ymin = ptime.yyyymmdd2years(dmin)
+    ymax = ptime.yyyymmdd2years(dmax)
+
+    ## inps --> model
+    model = dict()
+    model['polynomial'] = inps.polynomial
+    model['periodic']   = inps.periodic
+    model['step']       = inps.step
+
+    if inps.periodic:
+        # check 1 - positive value
+        if any(x <= 0 for x in inps.periodic):
+            raise ValueError(f'Zero or negative input period ({inps.periodic}) found!')
+
+    if inps.step:
+        # check 1 - min/max limit
+        for d_step in inps.step:
+            if not (ymin < ptime.yyyymmdd2years(d_step) < ymax):
+                raise ValueError(f'input step date ({d_step}) exceeds date limit: ({dmin} / {dmax})!')
+
+    for func_name, strs_list in zip(['exp', 'log'], [inps.exp, inps.log]):
+        func_dict = dict()
+
+        if strs_list:
+            for strs in strs_list:
+                onset_time, char_times = strs[0], strs[1:]
+                char_times = [float(x) for x in char_times]
+
+                # check 1 - onset_time - str format
+                date_fmt = ptime.get_date_str_format(onset_time)
+                if date_fmt not in ['%Y%m%d', '%Y%m%dT%H%M']:
+                    raise ValueError(f'input onset time {onset_time} is NOT in YYYYMMDD(THHMM) format!')
+
+                # check 2 - onset_time - min/max limit
+                if ptime.yyyymmdd2years(onset_time) >= ymax:
+                    raise ValueError(f'input {func_name} onset date ({onset_time}) >= the last date: {dmax}')
+
+                # check 3 - char_time - input format
+                if len(char_times) == 0:
+                    msg = 'NO characteristic time inputs found!\n'
+                    msg += '1+ characteristic time(s) are required for each onset date'
+                    msg += f' for the {func_name} function, e.g.:\n'
+                    msg += f'--{func_name} 20181026 60        OR\n'
+                    msg += f'--{func_name} 20161231 80 200    # append as many char_times as you like!'
+                    raise ValueError(msg)
+
+                elif any(x <= 0 for x in char_times):
+                    raise ValueError(f'Zero or negative characteristic time ({char_times}) found!')
+
+                else:
+                    int_char_times = [int(x) for x in char_times]
+                    for int_char_time, char_time in zip(int_char_times, char_times):
+                        if int_char_time != char_time:
+                            msg = 'WARNING: float-format characteristic time detected.'
+                            msg += '\nIgnore the fraction part of a day and continue: '
+                            msg += f'{char_time} --> {int_char_time} days.'
+                            print(msg)
+
+                    # save as dict
+                    func_dict[onset_time] = int_char_times
+
+        # save to model
+        model[func_name] = func_dict
+
+    ## print out model summary
+    if print_msg:
+        print('estimate deformation model with the following assumed time functions:')
+        for key, value in model.items():
+            print('    {:<10} : {}'.format(key, value))
+
+    # error out if no polynomial term is included
+    if 'polynomial' not in model.keys():
+        raise ValueError('linear/polynomial model is NOT included! Are you sure?!')
+
+    return model
+
+
+def get_num_param(model):
+    """Get the number of unknown parameters from the given time function configuration.
+
+    Parameters: model     - dict, time functions config, e.g. {cfg}
+    Returns:    num_param - int, number of unknown parameters
+    """.format(cfg=MODEL_EXAMPLE)
+
+    num_param = (
+        model['polynomial'] + 1
+        + len(model['periodic']) * 2
+        + len(model['step'])
+        + sum([len(val) for key, val in model['exp'].items()])
+        + sum([len(val) for key, val in model['log'].items()])
+    )
+
+    return num_param
+
+
 #################################### Design Matrices ##########################################
 def get_design_matrix4time_func(date_list, model=None, ref_date=None, seconds=0):
     """Design matrix (function model) for time functions parameter estimation.
 
     Parameters: date_list - list of str in YYYYMMDD format, size=num_date
-                model     - dict of time functions, e.g.:
-                            {'polynomial' : 2,            # int, polynomial degree with 1 (linear), 2 (quadratic), 3 (cubic), etc.
-                             'periodic'   : [1.0, 0.5],   # list of float, period(s) in years. 1.0 (annual), 0.5 (semiannual), etc.
-                             'step'       : ['20061014'], # list of str, date(s) in YYYYMMDD.
-                             'exp'        : {'20181026': [60],       # dict, key for onset time in YYYYMMDD and value for char. times in days.
-                                             ...
-                                             },
-                             'log'        : {'20161231': [80.5],     # dict, key for onset time in YYYYMMDD and value for char. times in days.
-                                             '20190125': [100, 200],
-                                             ...
-                                             },
-                             ...
-                             }
+                model     - dict of time functions, e.g. {cfg}
                 ref_date  - reference date from date_list
                 seconds   - float or str, acquisition time of the day info in seconds.
     Returns:    A         - 2D array of design matrix in size of (num_date, num_param)
                             num_param = (poly_deg + 1) + 2*len(periodic) + len(steps) + len(exp_taus) + len(log_taus)
-    """
+    """.format(cfg=MODEL_EXAMPLE)
 
     ## prepare time info
     # convert list of date into array of years in float
@@ -305,4 +415,3 @@ def get_design_matrix4log_func(date_list, log_dict, seconds=0):
             i += 1
 
     return A
-
diff --git a/setup.py b/setup.py
index 7518b694d..2112de44f 100644
--- a/setup.py
+++ b/setup.py
@@ -99,7 +99,6 @@ def do_setup():
             'generate_mask.py = mintpy.generate_mask:main',
             'geocode.py = mintpy.geocode:main',
             'ifgram_inversion.py = mintpy.ifgram_inversion:main',
-            'ifgram_reconstruction.py = mintpy.ifgram_reconstruction:main',
             'image_math.py = mintpy.image_math:main',
             'image_stitch.py = mintpy.image_stitch:main',
             'info.py = mintpy.info:main',
@@ -152,7 +151,6 @@ def do_setup():
             'tropo_gacos.py = mintpy.tropo_gacos:main',
             'tropo_phase_elevation.py = mintpy.tropo_phase_elevation:main',
             'tropo_pyaps3.py = mintpy.tropo_pyaps3:main',
-            'tropo_pyaps.py = mintpy.tropo_pyaps:main',
             'tsview.py = mintpy.tsview:main',
             'unwrap_error_bridging.py = mintpy.unwrap_error_bridging:main',
             'unwrap_error_phase_closure.py = mintpy.unwrap_error_phase_closure:main',
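Finally, a sketch of the inps2model() conversion shown above, using a hand-built namespace in place of parsed command-line arguments (all values illustrative):

    import argparse
    from mintpy.utils import time_func

    inps = argparse.Namespace(
        date_list=['20141213', '20150106', '20150130'],
        polynomial=1,
        periodic=[1.0, 0.5],
        step=[],
        exp=[['20141225', '60']],
        log=[],
    )
    model = time_func.inps2model(inps)
    # model['exp'] == {'20141225': [60]}, with the char time validated and cast to integer days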