From e0abe4cfc7a1e7ad8b2e9ea429df305daa0b65e7 Mon Sep 17 00:00:00 2001 From: Zhang Yunjun Date: Mon, 20 Feb 2023 00:18:10 +0800 Subject: [PATCH] multilook: support for CPU/GPU ampcor + add `--search-win --xcorr-win --margin` to feed options from pycuampcor, for exact multilooking file size + add `--off-file` for both CPU and GPU ampcor, to use existing offset file as the size reference + readfile.read_isce_xml: support isce configuration file, e.g. topsApp.xml --- src/mintpy/cli/multilook.py | 27 ++++++++-- src/mintpy/multilook.py | 98 ++++++++++++++++++++++++------------ src/mintpy/utils/readfile.py | 9 +++- 3 files changed, 97 insertions(+), 37 deletions(-) diff --git a/src/mintpy/cli/multilook.py b/src/mintpy/cli/multilook.py index de5bc8329..1d2af66a8 100755 --- a/src/mintpy/cli/multilook.py +++ b/src/mintpy/cli/multilook.py @@ -16,8 +16,9 @@ multilook.py srtm30m.dem -x 10 -y 10 -o srtm300m.dem multilook.py lat.rdr.full.vrt lon.rdr.full.vrt -x 9 -y 3 - # Ignore / skip marginal pixels - multilook.py ../../geom_reference/hgt.rdr.full -r 300 -a 100 --margin 58 58 58 58 -o hgt.rdr + # generate multilooked lookup table (lat/lon.rdr) for dense offsets + multilook.py lat.rdr.full.vrt -x 128 -y 64 -o lat.rdr.mli --off-file dense_offsets.bil + multilook.py ../../geom_reference/lat.rdr.full -x 300 -y 100 -o lat.rdr --off-file offset.bip """ @@ -28,6 +29,7 @@ def create_parser(subparsers=None): parser = create_argument_parser( name, synopsis=synopsis, description=synopsis, epilog=epilog, subparsers=subparsers) + # basic parser.add_argument('file', nargs='+', help='File(s) to multilook') parser.add_argument('-r','--range','-x', dest='lks_x', type=int, default=1, help='number of multilooking in range /x direction (default: %(default)s).') @@ -35,11 +37,22 @@ def create_parser(subparsers=None): help='number of multilooking in azimuth/y direction (default: %(default)s).') parser.add_argument('-o', '--outfile', help='Output file name. 
Disabled when more than 1 input files') + parser.add_argument('-m','--method', dest='method', type=str, default='mean', choices=['mean', 'median', 'nearest'], help='downsampling method (default: %(default)s) \n' 'e.g. nearest for geometry, average for observations') - parser.add_argument('--margin', dest='margin', type=int, nargs=4, metavar=('TOP','BOTTOM','LEFT','RIGHT'), - default=[0,0,0,0], help='number of pixels on the margin to skip, (default: %(default)s).') + + # offset + ampcor = parser.add_argument_group('Ampcor options', 'Ampcor options for dense offsets to account for the extra margin') + ampcor.add_argument('--search','--search-win', dest='search_win', type=int, nargs=2, metavar=('X','Y'), + help='Ampcor (half) search window in (width, height) in pixel, e.g. 20 x 20.') + ampcor.add_argument('--xcorr','--xcorr-win', dest='xcorr_win', type=int, nargs=2, metavar=('X','Y'), + help='Ampcor cross-correlation window in (width, height) in pixel e.g. 32 x 32.') + ampcor.add_argument('--margin', dest='margin', type=int, default=0, + help='Ampcor margin offset (default: %(default)s).') + ampcor.add_argument('--off-file', dest='off_file', type=str, + help='Ampcor offset file as reference for the size.') + return parser @@ -82,7 +95,11 @@ def main(iargs=None): lks_x=inps.lks_x, outfile=inps.outfile, method=inps.method, - margin=inps.margin) + search_win=inps.search_win, + xcorr_win=inps.xcorr_win, + margin=inps.margin, + off_file=inps.off_file, + ) print('Done.') diff --git a/src/mintpy/multilook.py b/src/mintpy/multilook.py index 47cd9538e..26100d211 100644 --- a/src/mintpy/multilook.py +++ b/src/mintpy/multilook.py @@ -65,7 +65,7 @@ def multilook_data(data, lks_y=1, lks_x=1, method='mean'): elif method == 'median': out_data = np.nanmedian(temp, axis=(1, 3)) - # [obsolete] approach 2: indexing + averaging: first in col/x, then in row/y + # approach 2: indexing + averaging: first in col/x, then in row/y # out_len, out_wid = np.floor(shape / (lks_y, 
lks_x)).astype(int) # out_data_col = np.zeros((shape[0], out_wid), dtype=data.dtype) # out_data = np.zeros((out_len, out_wid), dtype=data.dtype) @@ -116,41 +116,69 @@ def multilook_data(data, lks_y=1, lks_x=1, method='mean'): return out_data -def multilook_file(infile, lks_y, lks_x, outfile=None, method='mean', margin=[0,0,0,0], max_memory=4): - """ Multilook input file - Parameters: infile - str, path of input file to be multilooked. - lks_y - int, number of looks in y / row direction. - lks_x - int, number of looks in x / column direction. - margin - list of 4 int, number of pixels to be skipped during multilooking. - useful for offset product, where the marginal pixels are ignored during - cross correlation matching. - outfile - str, path of output file - Returns: outfile - str, path of output file +def multilook_file(infile, lks_y, lks_x, outfile=None, method='mean', max_memory=4, + search_win=None, xcorr_win=None, margin=0, off_file=None): + """ Multilook input file. + + Parameters: infile - str, path of input file to be multilooked. + lks_y - int, number of looks in y / row direction. + lks_x - int, number of looks in x / column direction. 
+                outfile    - str, path of output file +                max_memory - float, maximum used memory in GB +                search_win - list(int), ampcor (half) search window in (width, length) +                xcorr_win  - list(int), ampcor cross-correlation window in (width, length) +                margin     - int, ampcor margin +                off_file   - str, path to the ampcor dense offsets file     Returns:    outfile    - str, path of output file     """     lks_y = int(lks_y)     lks_x = int(lks_x)  -    # use GDAL if input is VRT file -    if infile.endswith('.vrt'): -        outfile = multilook_gdal(infile, lks_y, lks_x, outfile) -        return outfile -     # input file info -    atr = readfile.read_attribute(infile) +    if infile.endswith('.vrt'): +        atr = readfile.read_gdal_vrt(infile) +    else: +        atr = readfile.read_attribute(infile)     length, width = int(atr['LENGTH']), int(atr['WIDTH'])  -    k = atr['FILE_TYPE'] -    print('multilooking {} {} file: {}'.format(atr['PROCESSOR'], k, infile)) -    print('number of looks in y / azimuth direction: %d' % lks_y) -    print('number of looks in x / range   direction: %d' % lks_x) +    print(f'multilooking file: {infile}')     print(f'multilook method: {method}') +    print(f'number of looks in y / x direction: {lks_y} / {lks_x}') + +    # box +    if search_win: +        x0 = margin + search_win[0] +        y0 = margin + search_win[1] +        # Note: this is the formula for PyCuAmpcor/cuDenseOffsets.py +        # the one for Ampcor CPU is different from the GPU version +        out_wid = (width  - 2*margin - 2*search_win[0] - xcorr_win[0]) // lks_x + 1 +        out_len = (length - 2*margin - 2*search_win[1] - xcorr_win[1]) // lks_y + 1 +        x1 = x0 + out_wid * lks_x +        y1 = y0 + out_len * lks_y +        box = (x0, y0, x1, y1) + +    elif off_file: +        # use the existing ampcor dense offsets file as reference +        # since the formula for Ampcor CPU version is unknown +        # assuming symmetrical margins in top/bottom/left/right +        atr_off = readfile.read_attribute(off_file) +        out_wid, out_len = int(atr_off['WIDTH']), int(atr_off['LENGTH']) +        x0 = (width  - out_wid * lks_x) // 2 +        y0 = (length - out_len * lks_y) // 2 +        x1 = x0 + out_wid * 
lks_x + y1 = y0 + out_len * lks_y + box = (x0, y0, x1, y1) + if not 0 <= x0 < x1 <= width or not 0 <= y0 < y1 <= length: + msg = f'Calculated bbox ({box}) exceeds input file coverage (width={width}, length={length})!' + raise ValueError(msg) - # margin --> box - if margin is not [0,0,0,0]: # top, bottom, left, right - box = (margin[2], margin[0], width - margin[3], length - margin[1]) - print(f'number of pixels to skip in top/bottom/left/right boundaries: {margin}') else: box = (0, 0, width, length) + # use GDAL if input is VRT file + if infile.endswith('.vrt'): + outfile = multilook_gdal(infile, lks_y, lks_x, box=box, out_file=outfile) + return outfile + # output file name fbase, fext = os.path.splitext(infile) if not outfile: @@ -243,11 +271,12 @@ def multilook_file(infile, lks_y, lks_x, outfile=None, method='mean', margin=[0, return outfile -def multilook_gdal(in_file, lks_y, lks_x, out_file=None): +def multilook_gdal(in_file, lks_y, lks_x, box=None, out_file=None): """Apply multilooking via gdal_translate. 
Parameters: in_file  - str, path to the input GDAL VRT file                 lks_y/x  - int, number of looks in Y/X direction +                box      - tuple(int), area of interest in x0, y0, x1, y1                 out_file - str, path to the output data file     Returns:    out_file - str, path to the output data file     """ @@ -266,16 +295,23 @@     print(f'output: {out_file}')      ds = gdal.Open(in_file, gdal.GA_ReadOnly) -    in_wid = ds.RasterXSize -    in_len = ds.RasterYSize +    if not box: +        box = (0, 0, ds.RasterXSize, ds.RasterYSize) + +    in_wid = box[2] - box[0] +    in_len = box[3] - box[1]      out_wid = int(in_wid / lks_x)     out_len = int(in_len / lks_y) + +    src_x0 = box[0] +    src_y0 = box[1] -    src_wid = out_wid * lks_x -    src_len = out_len * lks_y +    src_x1 = src_x0 + out_wid * lks_x +    src_y1 = src_y0 + out_len * lks_y      # link: https://stackoverflow.com/questions/68025043/adding-a-progress-bar-to-gdal-translate -    options_str = f'-of ENVI -a_nodata 0 -outsize {out_wid} {out_len} -srcwin 0 0 {src_wid} {src_len} ' +    options_str = f'-of ENVI -a_nodata 0 -outsize {out_wid} {out_len} ' +    options_str += f' -srcwin {src_x0} {src_y0} {src_x1 - src_x0} {src_y1 - src_y0} '     gdal.Translate(out_file, ds, options=options_str, callback=gdal.TermProgress_nocb)      # generate GDAL .vrt file diff --git a/src/mintpy/utils/readfile.py b/src/mintpy/utils/readfile.py index 6917831cf..7d9e54fc5 100644 --- a/src/mintpy/utils/readfile.py +++ b/src/mintpy/utils/readfile.py @@ -1306,7 +1306,7 @@ def auto_no_data_value(meta):     # known file types     # isce2: dense offsets from topsApp.py     if processor == 'isce' and fbase.endswith('dense_offsets') and fext == '.bil' and num_band == 2: -        no_data_value = -10000 +        no_data_value = -10000.      else:         # default value for unknown file types @@ -1551,6 +1551,13 @@ def read_isce_xml(fname):             if meta is not None:                 xmlDict['NoDataValue'] = meta.text  +    # configuration file, e.g. 
topsApp.xml + elif root.tag.endswith('App'): + for child in root[0].findall('property'): + key = child.get('name').lower() + value = child.find('value') + xmlDict[key] = value.text if value is not None else child.text + # standardize metadata keys xmlDict = standardize_metadata(xmlDict)