diff --git a/applications/alos2App.py b/applications/alos2App.py index e601bb015..5c44f15f0 100755 --- a/applications/alos2App.py +++ b/applications/alos2App.py @@ -375,6 +375,20 @@ mandatory = False, doc = 'apply polynomial fit in adaptive filtering window') +FIT_ADAPTIVE_ORDER_ION = Application.Parameter('fitAdaptiveOrderIon', + public_name='polynomial fit order in adaptive filtering window', + default=2, + type=int, + mandatory=False, + doc='polynomial fit order in adaptive filtering window') + +RM_OUTLIERS_ION = Application.Parameter('rmOutliersIon', + public_name = 'remove outliers in adaptive filtering window', + default = False, + type = bool, + mandatory = False, + doc = 'remove outliers in adaptive filtering window') + FILT_SECONDARY_ION = Application.Parameter('filtSecondaryIon', public_name = 'whether do secondary filtering of ionosphere phase', default = True, @@ -681,6 +695,8 @@ class Alos2InSAR(Application): FIT_ION, FILT_ION, FIT_ADAPTIVE_ION, + FIT_ADAPTIVE_ORDER_ION, + RM_OUTLIERS_ION, FILT_SECONDARY_ION, FILTERING_WINSIZE_MAX_ION, FILTERING_WINSIZE_MIN_ION, diff --git a/applications/alos2burstApp.py b/applications/alos2burstApp.py index 43c264f60..4b0a1e67b 100755 --- a/applications/alos2burstApp.py +++ b/applications/alos2burstApp.py @@ -358,6 +358,20 @@ mandatory = False, doc = 'apply polynomial fit in adaptive filtering window') +FIT_ADAPTIVE_ORDER_ION = Application.Parameter('fitAdaptiveOrderIon', + public_name='polynomial fit order in adaptive filtering window', + default=2, + type=int, + mandatory=False, + doc='polynomial fit order in adaptive filtering window') + +RM_OUTLIERS_ION = Application.Parameter('rmOutliersIon', + public_name = 'remove outliers in adaptive filtering window', + default = False, + type = bool, + mandatory = False, + doc = 'remove outliers in adaptive filtering window') + FILT_SECONDARY_ION = Application.Parameter('filtSecondaryIon', public_name = 'whether do secondary filtering of ionosphere phase', default = True, @@ 
-608,6 +622,8 @@ class Alos2burstInSAR(Application): FIT_ION, FILT_ION, FIT_ADAPTIVE_ION, + FIT_ADAPTIVE_ORDER_ION, + RM_OUTLIERS_ION, FILT_SECONDARY_ION, FILTERING_WINSIZE_MAX_ION, FILTERING_WINSIZE_MIN_ION, diff --git a/components/isceobj/Alos2Proc/Alos2ProcPublic.py b/components/isceobj/Alos2Proc/Alos2ProcPublic.py index d3432abcf..5691a3983 100644 --- a/components/isceobj/Alos2Proc/Alos2ProcPublic.py +++ b/components/isceobj/Alos2Proc/Alos2ProcPublic.py @@ -1198,7 +1198,7 @@ def create_multi_index2(width2, l1, l2): return ((l2 - l1) / 2.0 + np.arange(width2) * l2) / l1 -def computePhaseDiff(data1, data22, coherenceWindowSize=5, coherenceThreshold=0.85): +def computePhaseDiff(data1, data22, coherenceWindowSize=5, coherenceThreshold=0.85, coherenceThreshold1=None, coherenceThreshold2=None, diffile=None, corfile=None, corfile1=None, corfile2=None): import copy import numpy as np from isceobj.Alos2Proc.Alos2ProcPublic import cal_coherence_1 @@ -1208,7 +1208,44 @@ def computePhaseDiff(data1, data22, coherenceWindowSize=5, coherenceThreshold=0. 
dataDiff = data1 * np.conj(data2) cor = cal_coherence_1(dataDiff, win=coherenceWindowSize) - index = np.nonzero(np.logical_and(cor>coherenceThreshold, dataDiff!=0)) + cor1 = cal_coherence_1(data1, win=coherenceWindowSize) + cor2 = cal_coherence_1(data2, win=coherenceWindowSize) + + if (coherenceThreshold1 is None) and (coherenceThreshold2 is None): + tof = np.logical_and(cor>coherenceThreshold, dataDiff!=0) + + elif (coherenceThreshold1 is not None) and (coherenceThreshold2 is None): + tof = np.logical_and(np.logical_and(cor>coherenceThreshold, cor1>coherenceThreshold1), dataDiff!=0) + + elif (coherenceThreshold1 is None) and (coherenceThreshold2 is not None): + tof = np.logical_and(np.logical_and(cor>coherenceThreshold, cor2>coherenceThreshold2), dataDiff!=0) + + else: + tof = np.logical_and(np.logical_and(np.logical_and(cor>coherenceThreshold, cor1>coherenceThreshold1), cor2>coherenceThreshold2), dataDiff!=0) + + index = np.nonzero(tof) + index_invalid = np.nonzero(np.logical_not(tof)) + + print('number of invalid samples: {}'.format(index_invalid[0].size)) + print('total number of samples: {}'.format(dataDiff.size)) + print('percentage of invalid samples: {}%'.format(index_invalid[0].size/dataDiff.size*100.0)) + + length, width = dataDiff.shape + if diffile is not None: + dataDiff2 = copy.deepcopy(dataDiff) + dataDiff2[index_invalid] = 0 + dataDiff2.astype(np.complex64).tofile(diffile) + create_xml(diffile, width, length, 'int') + del dataDiff2 + if corfile is not None: + cor.astype(np.float32).tofile(corfile) + create_xml(corfile, width, length, 'float') + if corfile1 is not None: + cor1.astype(np.float32).tofile(corfile1) + create_xml(corfile1, width, length, 'float') + if corfile2 is not None: + cor2.astype(np.float32).tofile(corfile2) + create_xml(corfile2, width, length, 'float') #check if there are valid pixels if index[0].size == 0: diff --git a/components/isceobj/Alos2Proc/runFrameMosaic.py b/components/isceobj/Alos2Proc/runFrameMosaic.py index 
75fac03b9..74828db10 100644 --- a/components/isceobj/Alos2Proc/runFrameMosaic.py +++ b/components/isceobj/Alos2Proc/runFrameMosaic.py @@ -134,7 +134,7 @@ def runFrameMosaic(self): self._insar.procDoc.addAllFromCatalog(catalog) -def frameMosaic(track, inputFiles, outputfile, rangeOffsets, azimuthOffsets, numberOfRangeLooks, numberOfAzimuthLooks, updateTrack=False, phaseCompensation=False, phaseDiffFixed=None, snapThreshold=None, resamplingMethod=0): +def frameMosaic(track, inputFiles, outputfile, rangeOffsets, azimuthOffsets, numberOfRangeLooks, numberOfAzimuthLooks, updateTrack=False, phaseCompensation=False, phaseDiffFixed=None, snapThreshold=None, snapFrame=None, corFrame=None, waterBody=None, resamplingMethod=0): ''' mosaic frames @@ -149,6 +149,14 @@ def frameMosaic(track, inputFiles, outputfile, rangeOffsets, azimuthOffsets, num phaseCompensation: whether do phase compensation for each frame phaseDiffFixed: if provided, the estimated value will snap to one of these values, which is nearest to the estimated one. snapThreshold: this is used with phaseDiffFixed + snapFrame: indicate whether snap to fixed values for each frame phase diff, must be specified if phaseDiffFixed!=None. default: false + corFrame: indicate whether use individual frame coherence to exclude low quality pixels. default: false + waterBody: use water body to exclude water bodies (in these areas data upper frame and lower frame may not be consistently coregistered + and then lead to large phase differences, e.g. a whole swath is water and only geometrical offset is used). + Both corFrame and waterBody are for this purpose. waterBody definitely works better, so when it is specified, there is no + need for corFrame. + waterBody is provided in mosaicked geometry and size. + default: no resamplingMethod: 0: amp resampling. 1: int resampling.
2: slc resampling ''' import numpy as np @@ -166,6 +174,11 @@ def frameMosaic(track, inputFiles, outputfile, rangeOffsets, azimuthOffsets, num numberOfFrames = len(track.frames) frames = track.frames + if snapFrame is None: + snapFrame = [False for x in range(numberOfFrames-1)] + if corFrame is None: + corFrame = [False for x in range(numberOfFrames-1)] + rectWidth = [] rectLength = [] for i in range(numberOfFrames): @@ -179,6 +192,18 @@ def frameMosaic(track, inputFiles, outputfile, rangeOffsets, azimuthOffsets, num rangeOffsets1 = [i/numberOfRangeLooks for i in rangeOffsets] azimuthOffsets1 = [i/numberOfAzimuthLooks for i in azimuthOffsets] + #consider frame/swath azimuth sensing start differences caused by swath mosaicking + for j in range(numberOfFrames): + if j == 0: + continue + else: + swath1 = track.frames[j-1].swaths[0] + swath2 = track.frames[j].swaths[0] + frame1 = track.frames[j-1] + frame2 = track.frames[j] + delta_az = -((frame2.sensingStart - frame1.sensingStart).total_seconds() - (swath2.sensingStart - swath1.sensingStart).total_seconds()) / swath1.azimuthLineInterval + azimuthOffsets1[j] += delta_az / numberOfAzimuthLooks + #get offset relative to the first frame rangeOffsets2 = [0.0] azimuthOffsets2 = [0.0] @@ -358,42 +383,78 @@ def frameMosaic(track, inputFiles, outputfile, rangeOffsets, azimuthOffsets, num lowerframe[:,xs[i]:xe[i]+1] = readImage(rinfs[i], rectWidth[i], rectLength[i], 0, rectWidth[i]-1, 0, ye[i-1]-ys[i]) else: lowerframe[:,xs[i]:xe[i]+1] = readImageFromVrt(rinfs[i], 0, rectWidth[i]-1, 0, ye[i-1]-ys[i]) - #get a polynomial - diff = np.sum(upperframe * np.conj(lowerframe), axis=0) - (firstLine, lastLine, firstSample, lastSample) = findNonzero(np.reshape(diff, (1, outWidth))) - #here i use mean value(deg=0) in case difference is around -pi or pi. 
- #!!!!!there have been updates, now deg must be 0 - deg = 0 - p = np.polyfit(np.arange(firstSample, lastSample+1), np.angle(diff[firstSample:lastSample+1]), deg) + + if waterBody is not None: + #SRTM convention: water body. (0) --- land; (-1) --- water; (-2) --- no data. + #wbd = np.zeros((ye[i-1]-ys[i]+1, outWidth), dtype=np.int8) + wbd = readImage(waterBody, outWidth, ye[i-1]-ys[i]+1, 0, outWidth-1, ys[i], ye[i-1], dataType=np.int8) + msk = (wbd==0)+0 + upperframe *= msk + lowerframe *= msk + del wbd, msk + + # #get a polynomial + # diff = np.sum(upperframe * np.conj(lowerframe), axis=0) + # (firstLine, lastLine, firstSample, lastSample) = findNonzero(np.reshape(diff, (1, outWidth))) + # #here i use mean value(deg=0) in case difference is around -pi or pi. + # #!!!!!there have been updates, now deg must be 0 + # deg = 0 + # p = np.polyfit(np.arange(firstSample, lastSample+1), np.angle(diff[firstSample:lastSample+1]), deg) #need to use a more sophisticated method to compute the mean phase difference - (phaseDiffEst[i], numberOfValidSamples[i]) = computePhaseDiff(upperframe, lowerframe, coherenceWindowSize=9, coherenceThreshold=0.80) + #(phaseDiffEst[i], numberOfValidSamples[i]) = computePhaseDiff(upperframe, lowerframe, coherenceWindowSize=9, coherenceThreshold=0.80) - #snap phase difference to fixed values - if phaseDiffFixed is not None: - (outputValue, snapped) = snap(phaseDiffEst[i], phaseDiffFixed, snapThreshold) - if snapped == True: - phaseDiffUsed[i] = outputValue - phaseDiffSource[i] = 'estimated+snap' + DEBUG = False + if DEBUG == True: + diffile = 'phase_difference_frame{}-frame{}_valid.int'.format(i, i+1) + corfile = 'coherence_diff_frame{}-frame{}.float'.format(i, i+1) + corfile1 = 'coherence_upper_frame{}-frame{}.float'.format(i, i+1) + corfile2 = 'coherence_lower_frame{}-frame{}.float'.format(i, i+1) + corfile = None + corfile1 = None + corfile2 = None + + if corFrame[i-1] == True: + (phaseDiffEst[i], numberOfValidSamples[i]) = 
computePhaseDiff(upperframe, lowerframe, coherenceWindowSize=9, coherenceThreshold=0.80, coherenceThreshold1=0.3, coherenceThreshold2=0.3, diffile=diffile, corfile=corfile, corfile1=corfile1, corfile2=corfile2) else: - phaseDiffUsed[i] = phaseDiffEst[i] - phaseDiffSource[i] = 'estimated' + (phaseDiffEst[i], numberOfValidSamples[i]) = computePhaseDiff(upperframe, lowerframe, coherenceWindowSize=9, coherenceThreshold=0.80, diffile=diffile, corfile=corfile, corfile1=corfile1, corfile2=corfile2) + + diffDir = 'frame_mosaic' + os.makedirs(diffDir, exist_ok=True) + + for fx in [diffile, corfile, corfile1, corfile2]: + os.rename(fx, os.path.join(diffDir, fx)) + os.rename(fx+'.vrt', os.path.join(diffDir, fx+'.vrt')) + os.rename(fx+'.xml', os.path.join(diffDir, fx+'.xml')) + else: - phaseDiffUsed[i] = phaseDiffEst[i] - phaseDiffSource[i] = 'estimated' + if corFrame[i-1] == True: + (phaseDiffEst[i], numberOfValidSamples[i]) = computePhaseDiff(upperframe, lowerframe, coherenceWindowSize=9, coherenceThreshold=0.80, coherenceThreshold1=0.3, coherenceThreshold2=0.3) + else: + (phaseDiffEst[i], numberOfValidSamples[i]) = computePhaseDiff(upperframe, lowerframe, coherenceWindowSize=9, coherenceThreshold=0.80) + + + phaseDiffUsed[i] = phaseDiffEst[i] + phaseDiffSource[i] = 'estimated' + + #snap phase difference to fixed values + if phaseDiffFixed is not None: + if snapFrame[i-1] == True: + (outputValue, snapped) = snap(phaseDiffEst[i], phaseDiffFixed, snapThreshold) + if snapped == True: + phaseDiffUsed[i] = outputValue + phaseDiffSource[i] = 'estimated+snap' + #use new phase constant value - p[-1] = phaseDiffUsed[i] + # p[-1] = phaseDiffUsed[i] - phaseOffsetPolynomials.append(p) + # phaseOffsetPolynomials.append(p) #check fit result - DEBUG = False if DEBUG: - #create a dir and work in this dir - diffDir = 'frame_mosaic' - os.makedirs(diffDir, exist_ok=True) + #work in this dir os.chdir(diffDir) #dump phase difference @@ -401,21 +462,21 @@ def frameMosaic(track, inputFiles, 
outputfile, rangeOffsets, azimuthOffsets, num (upperframe * np.conj(lowerframe)).astype(np.complex64).tofile(diffFilename) create_xml(diffFilename, outWidth, ye[i-1]-ys[i]+1, 'int') - #plot phase difference vs range - import matplotlib.pyplot as plt - x = np.arange(firstSample, lastSample+1) - y = np.angle(diff[firstSample:lastSample+1]) - plt.plot(x, y, label='original phase difference') - plt.plot(x, np.polyval(p, x), label='fitted phase difference') - plt.legend() + # #plot phase difference vs range + # import matplotlib.pyplot as plt + # x = np.arange(firstSample, lastSample+1) + # y = np.angle(diff[firstSample:lastSample+1]) + # plt.plot(x, y, label='original phase difference') + # plt.plot(x, np.polyval(p, x), label='fitted phase difference') + # plt.legend() - plt.minorticks_on() - plt.tick_params('both', length=10, which='major') - plt.tick_params('both', length=5, which='minor') + # plt.minorticks_on() + # plt.tick_params('both', length=10, which='major') + # plt.tick_params('both', length=5, which='minor') - plt.xlabel('Range Sample Number [Samples]') - plt.ylabel('Phase Difference [Rad]') - plt.savefig('phase_difference_frame{}-frame{}.pdf'.format(i, i+1)) + # plt.xlabel('Range Sample Number [Samples]') + # plt.ylabel('Phase Difference [Rad]') + # plt.savefig('phase_difference_frame{}-frame{}.pdf'.format(i, i+1)) os.chdir('../') @@ -430,7 +491,8 @@ def frameMosaic(track, inputFiles, outputfile, rangeOffsets, azimuthOffsets, num cJ = np.complex64(1j) phaseOffset = np.ones(outWidth, dtype=np.complex64) for j in range(i+1): - phaseOffset *= np.exp(cJ*np.polyval(phaseOffsetPolynomials[j], np.arange(outWidth))) + #phaseOffset *= np.exp(cJ*np.polyval(phaseOffsetPolynomials[j], np.arange(outWidth))) + phaseOffset *= np.exp(cJ*phaseDiffUsed[j]) #get start line number (starts with zero) if i == 0: diff --git a/components/isceobj/Alos2Proc/runFrameOffset.py b/components/isceobj/Alos2Proc/runFrameOffset.py index 59356688d..635e94c9f 100644 --- 
a/components/isceobj/Alos2Proc/runFrameOffset.py +++ b/components/isceobj/Alos2Proc/runFrameOffset.py @@ -97,23 +97,18 @@ def frameOffset(track, image, outputfile, crossCorrelation=True, matchingMode=0) swath1 = track.frames[j-1].swaths[0] swath2 = track.frames[j].swaths[0] - #consider frame/swath azimuth sensing start differences caused by swath mosaicking - # tested with alos2App.py, alos2burstApp.py and alosStack - frame1 = track.frames[j-1] - frame2 = track.frames[j] - delta_az = -((frame2.sensingStart - frame1.sensingStart).total_seconds() - (swath2.sensingStart - swath1.sensingStart).total_seconds()) / swath1.azimuthLineInterval #offset from geometry offsetGeometrical = computeFrameOffset(swath1, swath2) rangeOffsetGeometrical.append(offsetGeometrical[0]) - azimuthOffsetGeometrical.append(offsetGeometrical[1]+delta_az) + azimuthOffsetGeometrical.append(offsetGeometrical[1]) #offset from cross-correlation if crossCorrelation: offsetMatching = estimateFrameOffset(swath1, swath2, image1, image2, matchingMode=matchingMode) if offsetMatching != None: rangeOffsetMatching.append(offsetMatching[0]) - azimuthOffsetMatching.append(offsetMatching[1]+delta_az) + azimuthOffsetMatching.append(offsetMatching[1]) else: print('******************************************************************') print('WARNING: bad matching offset, we are forced to use') diff --git a/components/isceobj/Alos2Proc/runIonFilt.py b/components/isceobj/Alos2Proc/runIonFilt.py index 751276f2f..ac1895d76 100644 --- a/components/isceobj/Alos2Proc/runIonFilt.py +++ b/components/isceobj/Alos2Proc/runIonFilt.py @@ -122,6 +122,8 @@ def runIonFilt(self): fit = self.fitIon filt = self.filtIon fitAdaptive = self.fitAdaptiveIon + fitAdaptiveOrder = self.fitAdaptiveOrderIon + rmOutliers = self.rmOutliersIon filtSecondary = self.filtSecondaryIon if (fit == False) and (filt == False): raise Exception('either fit ionosphere or filt ionosphere should be True when doing ionospheric correction\n') @@ -191,6 +193,10 
@@ def runIonFilt(self): cor[np.nonzero(cor<0)] = 0.0 cor[np.nonzero(cor>1)] = 0.0 + #remove these values so that there are no outliers in the computed std + cor[np.nonzero(cor<0.05)] = 0.05 + cor[np.nonzero(cor>0.95)] = 0.95 + #remove water body. Not helpful, just leave it here wbd = np.fromfile('wbd'+ml2+'.wbd', dtype=np.int8).reshape(length, width) cor[np.nonzero(wbd==-1)] = 0.0 @@ -289,7 +295,7 @@ def ion_std(fl, fu, numberOfLooks, cor): ion -= ion_fit * (ion!=0) #filter the rest of the ionosphere if filt: - (ion_filt, std_out, window_size_out) = adaptive_gaussian(ion, std, size_min, size_max, std_out0, fit=fitAdaptive) + (ion_filt, std_out, window_size_out) = adaptive_gaussian(ion, std, size_min, size_max, std_out0, fit=fitAdaptive, order=fitAdaptiveOrder, rm_outliers=rmOutliers) if filtSecondary: print('applying secondary filtering with window size {}'.format(size_secondary)) g2d = gaussian(size_secondary, size_secondary/2.0, scale=1.0) @@ -553,7 +559,45 @@ def polyfit_2d(data, weight, order): return (data_fit, coeff) -def adaptive_gaussian(data, std, size_min, size_max, std_out0, fit=True): +def remove_outliers(data, multiples_of_sigma=3.0, invalid_value=0, number_of_valid=25): + ''' + remove outliers in a 2d numpy array + CRL, 16-FEB-2024 + + data: input data, 2d numpy array + multiples_of_sigma: data values > multiples_of_sigma*std are outliers + this value should depend on number of valid samples? + invalid_value: invalid values in data that are excluded in the analysis + number_of_valid: if number of valid samples are larger than this value, outliers + removal is performed. 
default value: 25, 1/4 of a 10*10 array + ''' + + index = np.nonzero(data!=invalid_value) + + #if too few valid samples, do not remove outliers + if index[0].size <= number_of_valid: + std = None + mean = None + else: + #empty array not allowed in np.mean and np.std + mean = np.mean(data[index], dtype=np.float64) + std = np.std(data[index], dtype=np.float64) + + #find outliers, exclude only large outliers + #data[index] is 1d array + #index_outlier = np.nonzero(np.absolute(data[index]-mean) > multiples_of_sigma*std) + index_outlier = np.nonzero(data[index]-mean > multiples_of_sigma*std) + if index_outlier[0].size != 0: + #compute mean value without outliers, and give it to outlier samples + data[(index[0][index_outlier], index[1][index_outlier])] = invalid_value + mean = np.mean(data[np.nonzero(data!=invalid_value)], dtype=np.float64) + #std = np.std(data[np.nonzero(data!=invalid_value)], dtype=np.float64) + data[(index[0][index_outlier], index[1][index_outlier])] = mean + + return mean, std + + +def adaptive_gaussian(data, std, size_min, size_max, std_out0, fit=True, order=2, rm_outliers=False): + ''' + This program performs Gaussian filtering with adaptive window size. Cunren Liang, 11-JUN-2020 @@ -564,10 +608,22 @@ def adaptive_gaussian(data, std, size_min, size_max, std_out0, fit=True): size_max: maximum filter window size (size_min <= size_max, size_min == size_max is allowed) std_out0: standard deviation of output data fit: whether do fitting before gaussian filtering + order: fit order, 1, or 2, other orders are not allowed + 1: if ionosphere is not spatially fast changing, this is better. result is smoother. + 2: if ionosphere is spatially fast changing, this is better.
+ rm_outliers: whether remove outliers in the weight in each filtering window, pretty slow ''' import scipy.signal as ss + if fit: + if order == 1: + n_coeff = 3 + elif order == 2: + n_coeff = 6 + else: + raise Exception('order supported: 1, 2\n') + (length, width) = data.shape #assume zero-value samples are invalid @@ -670,30 +726,38 @@ def adaptive_gaussian(data, std, size_min, size_max, std_out0, fit=True): width_valid = last_column - first_column + 1 #index in filter window - if first_line == 0: - last_line2 = size - 1 - first_line2 = last_line2 - (length_valid - 1) - else: - first_line2 = 0 - last_line2 = first_line2 + (length_valid - 1) - if first_column == 0: - last_column2 = size - 1 - first_column2 = last_column2 - (width_valid - 1) - else: - first_column2 = 0 - last_column2 = first_column2 + (width_valid - 1) + # if first_line == 0: + # last_line2 = size - 1 + # first_line2 = last_line2 - (length_valid - 1) + # else: + # first_line2 = 0 + # last_line2 = first_line2 + (length_valid - 1) + # if first_column == 0: + # last_column2 = size - 1 + # first_column2 = last_column2 - (width_valid - 1) + # else: + # first_column2 = 0 + # last_column2 = first_column2 + (width_valid - 1) + + first_line2 = first_line - i + size_half + last_line2 = last_line - i + size_half + first_column2 = first_column - j + size_half + last_column2 = last_column - j + size_half #prepare data and weight within the window data_window = np.zeros((size, size)) wgt_window = np.zeros((size, size)) data_window[first_line2:last_line2+1, first_column2:last_column2+1] = data[first_line:last_line+1, first_column:last_column+1] wgt_window[first_line2:last_line2+1, first_column2:last_column2+1] = wgt[first_line:last_line+1, first_column:last_column+1] + + #this is pretty slow + if rm_outliers: + remove_outliers(wgt_window, multiples_of_sigma=5.0, invalid_value=0, number_of_valid=25) + #number of valid samples in the filtering window n_valid = np.sum(data_window!=0) #2. 
fit - #order, n_coeff = (1, 3) - order, n_coeff = (2, 6) if fit: #must have enough samples to do fitting #even if order is 2, n_coeff * 3 is much smaller than size_min*size_min in most cases. diff --git a/components/isceobj/Alos2Proc/runSwathMosaic.py b/components/isceobj/Alos2Proc/runSwathMosaic.py index 27f490cc1..7c355bf13 100644 --- a/components/isceobj/Alos2Proc/runSwathMosaic.py +++ b/components/isceobj/Alos2Proc/runSwathMosaic.py @@ -640,19 +640,21 @@ def swathMosaicParameters(frame, rangeOffsets, azimuthOffsets, numberOfRangeLook frame.azimuthLineInterval = frame.swaths[0].azimuthLineInterval -def readImage(inputfile, numberOfSamples, numberOfLines, startSample, endSample, startLine, endLine): +def readImage(inputfile, numberOfSamples, numberOfLines, startSample, endSample, startLine, endLine, dataType=np.complex64): ''' read a chunk of image the indexes (startSample, endSample, startLine, endLine) are included and start with zero memmap is not used, because it is much slower + + dataType: must be numpy data type ''' - data = np.zeros((endLine-startLine+1, endSample-startSample+1), dtype=np.complex64) + data = np.zeros((endLine-startLine+1, endSample-startSample+1), dtype=dataType) with open(inputfile,'rb') as fp: #for i in range(endLine-startLine+1): for i in range(startLine, endLine+1): - fp.seek((i*numberOfSamples+startSample)*8, 0) - data[i-startLine] = np.fromfile(fp, dtype=np.complex64, count=endSample-startSample+1) + fp.seek((i*numberOfSamples+startSample)*np.dtype(dataType).itemsize, 0) + data[i-startLine] = np.fromfile(fp, dtype=dataType, count=endSample-startSample+1) return data diff --git a/contrib/stack/alosStack/Stack.py b/contrib/stack/alosStack/Stack.py index be583cf73..b36d4c332 100644 --- a/contrib/stack/alosStack/Stack.py +++ b/contrib/stack/alosStack/Stack.py @@ -69,6 +69,20 @@ mandatory=False, doc='water body file') +NUMBER_OF_PARALLEL_THREADS = Application.Parameter('numberOfParallelThreads', + public_name='number of parallel 
threads', + default=4, + type=int, + mandatory=False, + doc="number of parallel threads") + +NUMBER_OF_PARALLEL_RUNS = Application.Parameter('numberOfParallelRuns', + public_name='number of parallel runs', + default=10, + type=int, + mandatory=False, + doc="number of parallel runs") + DATE_REFERENCE_STACK = Application.Parameter('dateReferenceStack', public_name='reference date of the stack', default=None, @@ -317,6 +331,8 @@ from alos2App import FIT_ION from alos2App import FILT_ION from alos2App import FIT_ADAPTIVE_ION +from alos2App import FIT_ADAPTIVE_ORDER_ION +from alos2App import RM_OUTLIERS_ION from alos2App import FILT_SECONDARY_ION from alos2App import FILTERING_WINSIZE_MAX_ION from alos2App import FILTERING_WINSIZE_MIN_ION @@ -340,6 +356,8 @@ class Stack(Application): DEM, DEM_GEO, WBD, + NUMBER_OF_PARALLEL_THREADS, + NUMBER_OF_PARALLEL_RUNS, DATE_REFERENCE_STACK, GRID_FRAME, GRID_SWATH, @@ -400,6 +418,8 @@ class Stack(Application): FIT_ION, FILT_ION, FIT_ADAPTIVE_ION, + FIT_ADAPTIVE_ORDER_ION, + RM_OUTLIERS_ION, FILT_SECONDARY_ION, FILTERING_WINSIZE_MAX_ION, FILTERING_WINSIZE_MIN_ION, diff --git a/contrib/stack/alosStack/StackPulic.py b/contrib/stack/alosStack/StackPulic.py index a19257eaa..52b12cb7e 100644 --- a/contrib/stack/alosStack/StackPulic.py +++ b/contrib/stack/alosStack/StackPulic.py @@ -323,3 +323,99 @@ def formInterferogram(slcReference, slcSecondary, interferogram, amplitude, numb create_xml(interferogram, width2, length2, 'int') create_xml(amplitude, width2, length2, 'amp') + +def formInterferogramStack(slcs, pairs, interferograms, amplitudes=None, numberRangeLooks=1, numberAzimuthLooks=1): + ''' + create a stack of interferograms and amplitudes + slcs: slc file list + pairs: the indices of reference and secondary of each pair in slc file list + 2-d list. format [[ref index, sec index], [ref index, sec index], [ref index, sec index]...] 
+ length of pairs = length of interferograms = length of amplitudes + there should be one-to-one relationship between them + interferograms: interferogram file list + amplitudes: amplitude file list + numberRangeLooks: number of range looks + numberAzimuthLooks: number of azimuth looks + ''' + + import os + import numpy as np + import isce, isceobj + from isceobj.Alos2Proc.Alos2ProcPublic import multilook + from isceobj.Alos2Proc.Alos2ProcPublic import create_xml + + img = isceobj.createImage() + img.load(slcs[0]+'.xml') + width = img.width + length = img.length + + width2 = int(width / numberRangeLooks) + length2 = int(length / numberAzimuthLooks) + + nslcs = len(slcs) + npairs = len(pairs) + + print('openning {} slc files'.format(nslcs)) + slcfps = [] + for i in range(nslcs): + slcfps.append(open(slcs[i],'rb')) + + print('openning {} interferogram files'.format(npairs)) + interferogramfps = [] + for i in range(npairs): + interferogramfps.append(open(interferograms[i],'wb')) + + slcDates = np.zeros((nslcs, numberAzimuthLooks, width), dtype=np.complex64) + if amplitudes is not None: + amplitudeDates = np.zeros((nslcs, length2, width2), dtype=np.float32) + + + print('forming {} interferograms'.format(npairs)) + for k in range(length2): + if (((k+1)%10) == 0): + print("processing line %6d of %6d" % (k+1, length2), end='\r', flush=True) + + for i in range(nslcs): + slcDates[i, :, :] = np.fromfile(slcfps[i], dtype=np.complex64, count=numberAzimuthLooks * width).reshape(numberAzimuthLooks, width) + if amplitudes is not None: + amplitudeDates[i, k, :] = np.sqrt(multilook(slcDates[i, :, :].real*slcDates[i, :, :].real+slcDates[i, :, :].imag*slcDates[i, :, :].imag, numberAzimuthLooks, numberRangeLooks, mean=False)).reshape(width2) + + for i in range(npairs): + inf = multilook(slcDates[pairs[i][0], :, :]*np.conjugate(slcDates[pairs[i][1], :, :]), numberAzimuthLooks, numberRangeLooks, mean=False) + inf.tofile(interferogramfps[i]) + print("processing line %6d of %6d" % 
(length2, length2)) + + print('closing {} slc files'.format(nslcs)) + for i in range(nslcs): + slcfps[i].close() + + print('closing {} interferograms'.format(npairs)) + for i in range(npairs): + interferogramfps[i].close() + + print('creating interferogram vrt and xml files') + cwd = os.getcwd() + for i in range(npairs): + os.chdir(os.path.dirname(os.path.abspath(interferograms[i]))) + create_xml(os.path.basename(interferograms[i]), width2, length2, 'int') + os.chdir(cwd) + + + #create amplitude files + if amplitudes is not None: + print('writing amplitude files') + for i in range(npairs): + print("writing amplitude file %6d of %6d" % (i+1, npairs), end='\r', flush=True) + amp = amplitudeDates[pairs[i][0], :, :] + 1j * amplitudeDates[pairs[i][1], :, :] + index = np.nonzero( (np.real(amp)==0) + (np.imag(amp)==0) ) + #it has been tested, this would not change the original values in amplitudeDates + amp[index]=0 + amp.astype(np.complex64).tofile(amplitudes[i]) + print("writing amplitude file %6d of %6d" % (npairs, npairs)) + + #create vrt and xml files + cwd = os.getcwd() + for i in range(npairs): + os.chdir(os.path.dirname(os.path.abspath(amplitudes[i]))) + create_xml(os.path.basename(amplitudes[i]), width2, length2, 'amp') + os.chdir(cwd) diff --git a/contrib/stack/alosStack/alosStack.xml b/contrib/stack/alosStack/alosStack.xml index 06ca4a4e3..289ea8589 100644 --- a/contrib/stack/alosStack/alosStack.xml +++ b/contrib/stack/alosStack/alosStack.xml @@ -113,6 +113,15 @@ IEEE Transactions on Geoscience and Remote Sensing, vol. 56, no. 8, pp. 4492-450 + + + + + @@ -356,6 +365,17 @@ FBD (indicated by ^) is the acquisition mode code. 
+ + + + + + diff --git a/contrib/stack/alosStack/alosStack_tutorial.txt b/contrib/stack/alosStack/alosStack_tutorial.txt index f8cd94268..064b2acf2 100755 --- a/contrib/stack/alosStack/alosStack_tutorial.txt +++ b/contrib/stack/alosStack/alosStack_tutorial.txt @@ -88,64 +88,57 @@ cp ${ISCE_STACK}/alosStack/alosStack.xml ./ ${ISCE_STACK}/alosStack/create_cmds.py -stack_par alosStack.xml 4. Do most of the single date processing. Run -./cmd_1.sh +cmd/cmd_1.sh -In cmd_1.sh and other command files, note that you can split the 'for loop' in each step into a number -of parallel runs. See command file for details. - -Higly recommended parallel processing steps in each command file. - -cmd_1.sh: -estimate SLC offsets -resample to a common grid (WD1 SLC size may be up to 7.2 G, so each run requires this much memory!) - -cmd_2.sh -form interferograms (does not requires a lot of computation, more parallel runs recommended) -mosaic interferograms (does not requires a lot of computation, more parallel runs recommended) - -cmd_3.sh -subband interferograms (does not requires a lot of computation, more parallel runs recommended) - -cmd_4.sh -all steps +This script runs a number of steps as can be easily seen from the content of the script. Some steps can +run parallelly. The number of runs can be set in the step script, or in the input file alosStack.xml +before generating these scripts. +Note that cmd_1_7_resample_to_a_common_grid.sh may require a lot of memeory. Each parallel run needs about +7.2 G because the WD1 SLC size may be up to 7.2 G. So total memeory required = 7.2 G * nruns, where nruns +is the number of parallel runs. 5. InSAR processing before ionosphere correction. Run -./cmd_2.sh +cmd/cmd_2.sh -6. Ionosphere correction (if do ionospheric phase estimation, by default True). If the following parameter of -the input xml file is True (default) +6. Ionosphere correction. 
Run +cmd/cmd_3.sh + +If the following parameter of the input xml file is True (default) -Run -./cmd_3.sh +Ionospheric phase estimation is performed, otherwise cmd/cmd_3.sh is empty. + -After it finishes, check the images in folder 'fig_ion' to see if ionosphere estimation is OK for each -pair. The anomalies include dense fringes or slight phase difference between adjacent swaths in ScanSAR -interferograms after removing ionosphere. There might also be dense fringes elsewhere. These are all anomalies -and the associated ionosphere estimation results should not be used in the next steps. +After it finishes, actually nothing was done in the last two step scripts as the commands in the two step +scripts are commented out. Before running them: -At the end of this command file, there is a step called 'estimate ionospheric phase for each date'. If you found -some pairs with ionosphere estimation anomalies, specify them by adding argument '-exc_pair' to the command ion_ls.py. -Make sure all dates are still connected after excluding these pairs, and then run ion_ls.py. +check the images in folder 'fig_ion' to see if ionosphere estimation is OK for each pair. The anomalies +include dense fringes or slight phase difference between adjacent swaths/frames in ScanSAR interferograms after +removing ionosphere. There might also be dense fringes elsewhere. These are all anomalies and the associated +ionosphere estimation results should not be used in cmd/cmd_3_7_estimate_ionospheric_phase_for_each_date.sh. -You can plot baselines to see if the pairs are fully connected, e.g. +If you find some pairs with ionosphere estimation anomalies, specify them by adding argument '-exc_pair' +to the command ion_ls.py in cmd/cmd_3_7_estimate_ionospheric_phase_for_each_date.sh. Make sure all dates are +still connected after excluding these pairs. You can plot baselines to see if the pairs are fully connected, e.g. 
${ISCE_STACK}/alosStack/plot_baseline.py -baseline baseline/baseline_center.txt -pairs_dir pairs_ion -pairs_exc 150520-150701 -output baselines.pdf +Then run cmd/cmd_3_7_estimate_ionospheric_phase_for_each_date.sh. + If the following parameters of the input xml file are True (default) -there is a final step called 'correct ionosphere' in cmd_3.sh, uncomment the code marked by '#uncomment to run this command' -and then run the entire step. +In cmd/cmd_3_8_correct_ionosphere.sh, uncomment the code marked by '#uncomment to run this command' +and then run it. 7. InSAR processing after ionosphere correction. Run -./cmd_4.sh +cmd/cmd_4.sh -If everything is OK, you may consider removing the huge slc files in folder dates_resampled. When you need them in -the future, you can re-run the commands in the '#resample to a common grid' step in cmd_1.sh. +If everything is OK, you may consider removing the huge slc files in folder dates_resampled. If you need them in +the future, you can re-run cmd/cmd_1_7_resample_to_a_common_grid.sh. Furthermore, you may consider removing the huge original data files you unpacked previously. @@ -160,14 +153,14 @@ Sometimes we want to add new acquistions to the already processed stack. To do t 2. Repeat the processing in #2. PROCESS DATA. -We recommend saving previous command files in a folder before new processing. Note that even the previously processed +We recommend saving previous scripts in a folder before new processing. Note that even the previously processed pairs will be reprocessed again by cmd_4.sh if the following parameters of the input xml file are True (default) -because ionospheric phase will be estimated by ion_ls.py at the end of cmd_3.sh for each date with new pairs included, -and therefore all steps after ion_ls.py should be reprocessed. +because ionospheric phase of each date will be estimated by cmd/cmd_3_7_estimate_ionospheric_phase_for_each_date.sh +with new pairs included, all steps after this step should be reprocessed. 
########################################### @@ -206,7 +199,7 @@ relies on coherence and phase unwrapping, it does not work in some cases. These In addition to the above issues, there are also data-mode-related issues. (1) ScanSAR-ScanSAR interferometry. While you can process one single subswath, it's better to process -more than one subswath if the addistional subswath has good coherence. This is good for ionospheric +more than one subswath if the additional subswath has good coherence. This is good for ionospheric correction. (2) Range distortions in JAXA product. This mostly happens in stripmap-stripmap interferometry using @@ -220,27 +213,28 @@ if your product is ordered after this time, you don't have this problem. 2. How do I improve ionospheric correction? -First of all, we recommend reading through cmd_3.sh before manually improving ionosphere estimation results. +First of all, we recommend reading through cmd/cmd_3.sh before manually improving ionosphere estimation results. Isolated areas lead to relative phase unwrapping errors, and therefore leads to significant errors in ionosphere estimation result, usually shown as dense fringes in the corrected interferograms. If your scene covers an area with two or more isolated areas and you are interested in one of the areas, you can mask out the other areas by -setting "areas masked out in ionospheric phase estimation". +setting "areas masked out in ionospheric phase estimation" in the input file. -Or if you have processed the data, you can also specify the argument -masked_areas in ion_filt.py in cmd_3.sh. -Then check the updated results following step '#check ionosphere estimation results' in cmd_3.sh +Or if you have processed the data, you can also specify the argument -masked_areas in cmd/cmd_3_4_filter_ionosphere.sh. +Then check the updated results following cmd/cmd_3_6_check_ionosphere_estimation_results.sh. 
For ScanSAR, the software uses some accurate values for removing phase difference between adjacent swaths. This, however, does not work well sometimes as a result of the inconistencies between different JAXA products, especially products processed by different versions of JAXA software. As a result of this, you may see dense fringes in the ionospheric correction result. In this case, you can try not to use aforementioned accurate -values by setting -snap in ion_subband.py in cmd_3.sh, and run this command and the remaining commands to see -if ionosphere estimation results have improvement. +values by setting -snap in cmd/cmd_3_2_subband_interferograms.sh, and run this command and the remaining commands +to see if ionosphere estimation results have improvement. You should do this only for the problematic pairs. Note that each time you updated ionosphere estimation results, you need to re-run the steps after -'#estimate ionospheric phase for each date' (including this step) in cmd_3.sh, as well as cmd_4.sh +cmd/cmd_3_7_estimate_ionospheric_phase_for_each_date.sh (including this step) in cmd/cmd_3.sh, as well as cmd/cmd_4.sh + -4. ScanSAR burst synchronization +3. ScanSAR burst synchronization For ScanSAR data acquired before February 8, 2015, chances of having enough burst synchronization for interferometry are very low. Don't include data acquired before this date in your stack processing. 
diff --git a/contrib/stack/alosStack/create_cmds.py b/contrib/stack/alosStack/create_cmds.py index e6b931438..902717709 100755 --- a/contrib/stack/alosStack/create_cmds.py +++ b/contrib/stack/alosStack/create_cmds.py @@ -257,7 +257,7 @@ def checkStackDataDir(idir): raise Exception('all acquisition modes should be the same') -def createCmds(stack, datesProcess, pairsProcess, pairsProcessIon, mode): +def createCmds(stack, datesProcess, pairsProcess, pairsProcessIon, mode, nthreads=4, nruns=10): ''' create scripts to process an InSAR stack ''' @@ -279,45 +279,60 @@ def header(txt): stackScriptPath = os.path.join(os.environ['ISCE_STACK'], 'alosStack') - def parallelSettings(array): - settings = ''' -# For parallelly processing the dates/pairs. -# Uncomment and set the following variables, put these settings and the following -# one or multiple for loops for a group (with an individual group_i) in a seperate -# bash script. Then you can run the different groups parallelly. E.g. if you have -# 38 pairs and if you want to process them in 4 parallel runs, then you may set -# group_n=10, and group_i=1 for the first bash script (and 2, 3, 4 for the other -# three bash scripts). + def stepSettings(nthreads=4, nruns=10, array='', loop=True): + if loop == False: + settings = ''' +# Number of threads +export OMP_NUM_THREADS={} -# Number of threads for this run -# export OMP_NUM_THREADS=1 +'''.format(nthreads) + else: + settings = ''' +# Number of threads +export OMP_NUM_THREADS={} -# CUDA device you want to use for this run. Only need to set if you have CUDA GPU -# installed on your computer. To find GPU IDs, run nvidia-smi -# export CUDA_VISIBLE_DEVICES=7 +# Number of parallel runs +nruns={} -# Parallel processing mode. 0: no, 1 yes. -# Must set 'parallel=1' for parallel processing! -# parallel=1 +# set the array variable used. The array can be found at the +# beginning of this command file. You can redefine it here if +# you want to process different dates or pairs. 
+array=("${{{}[@]}}") -# Group number for this run (group_i starts from 1) -# group_i=1 +'''.format(nthreads, nruns, array) + return settings -# Number of dates or pairs in a group -# group_n=10 + def forLoop(functionName, array, nruns): + txt='''for ((i=0;i<${{#{array}[@]}};i++)); do -# set the array variable used in this for loop here. The array can be found at the -# beginning of this command file. -# {}=() + {functionName} ${{{array}[i]}} & -'''.format(array) - return settings + if [[ $(((i+1) % {nruns})) -eq 0 ]]; then + wait + fi - parallelCommands = ''' if [[ ${parallel} -eq 1 ]]; then - if !(((0+(${group_i}-1)*${group_n} <= ${i})) && ((${i} <= ${group_n}-1+(${group_i}-1)*${group_n}))); then - continue + #add a wait to the final group of runs to secure the completion of the for loop before anything can proceed + #if the wait was not added previously + if [[ $((i+1)) -eq ${{#{array}[@]}} ]]; then + if [[ $((${{#{array}[@]}} % {nruns})) -ne 0 ]]; then + wait fi - fi''' + fi + +done +'''.format(functionName = functionName, + array = array, + nruns = nruns) #make it a variable, not a number, as in stepSettings + return txt + + def stepDone(): + txt=''' +echo "$(basename "$0") done" +''' + return txt + + + functionName = 'process' print(' * * *') if stack.dateReferenceStack in datesProcess: @@ -352,20 +367,25 @@ def parallelSettings(array): pairsProcessIon2 = [ipair for ipair in pairsProcessIon if ipair not in pairsProcess] + cmds = [] + step_names = [] + + #start new commands: processing each date ################################################################################# - cmd = '#!/bin/bash\n\n' - cmd += '#########################################################################\n' - cmd += '#set the environment variable before running the following steps\n' - cmd += 'dates=({})\n'.format(' '.join(datesProcess)) - cmd += 'dates2=({})\n'.format(' '.join(datesProcessSecondary)) - cmd += 
'#########################################################################\n' - cmd += '\n\n' + cmd0 = '#!/bin/bash\n\n' + cmd0 += 'dates=({})\n'.format(' '.join(datesProcess)) + cmd0 += 'dates2=({})\n'.format(' '.join(datesProcessSecondary)) + cmd0 += '\n\n' #read data + step_name = 'read data' + cmd = cmd0 + cmd += header(step_name) + cmd += stepSettings(nthreads=stack.numberOfParallelThreads, nruns=stack.numberOfParallelRuns, array='', loop=False) if datesProcess != []: - cmd += header('read data') + #cmd += header('read data') cmd += os.path.join(stackScriptPath, 'read_data.py') + ' -idir {} -odir {} -ref_date {} -sec_date {} -pol {}'.format(stack.dataDir, stack.datesProcessingDir, stack.dateReferenceStack, ' '.join(datesProcess), stack.polarization) if stack.frames is not None: cmd += ' -frames {}'.format(' '.join(stack.frames)) @@ -379,28 +399,49 @@ def parallelSettings(array): cmd += '\n' cmd += '\n' #frame and swath names use those from frame and swath dirs from now on + cmd += stepDone() + cmds.append(cmd) + step_names.append(step_name) #compute baseline + step_name = 'compute baseline' + cmd = cmd0 + cmd += header(step_name) + cmd += stepSettings(nthreads=stack.numberOfParallelThreads, nruns=stack.numberOfParallelRuns, array='', loop=False) if datesProcessSecondary != []: - cmd += header('compute baseline') + #cmd += header('compute baseline') cmd += os.path.join(stackScriptPath, 'compute_baseline.py') + ' -idir {} -odir {} -ref_date {} -sec_date {} -baseline_center baseline_center.txt -baseline_grid -baseline_grid_width 10 -baseline_grid_length 10'.format(stack.datesProcessingDir, stack.baselineDir, stack.dateReferenceStack, ' '.join(datesProcessSecondary)) cmd += '\n' cmd += '\n' cmd += '\n' + cmd += stepDone() + cmds.append(cmd) + step_names.append(step_name) #compute burst synchronization + step_name = 'compute burst synchronization' + cmd = cmd0 + cmd += header(step_name) + cmd += stepSettings(nthreads=stack.numberOfParallelThreads, 
nruns=stack.numberOfParallelRuns, array='', loop=False) spotlightModes, stripmapModes, scansarNominalModes, scansarWideModes, scansarModes = acquisitionModesAlos2() if mode in scansarNominalModes: - cmd += header('compute burst synchronization') + #cmd += header('compute burst synchronization') cmd += os.path.join(stackScriptPath, 'compute_burst_sync.py') + ' -idir {} -burst_sync_file burst_synchronization.txt -ref_date {}'.format(stack.datesProcessingDir, stack.dateReferenceStack) cmd += '\n' cmd += '\n' cmd += '\n' + cmd += stepDone() + cmds.append(cmd) + step_names.append(step_name) #estimate SLC offsets + step_name = 'estimate SLC offsets' + cmd = cmd0 + cmd += header(step_name) + cmd += stepSettings(nthreads=stack.numberOfParallelThreads, nruns=stack.numberOfParallelRuns, array='dates2', loop=True) if datesProcessSecondary != []: extraArguments = '' if insar.useWbdForNumberOffsets is not None: @@ -412,49 +453,70 @@ def parallelSettings(array): for x in insar.numberAzimuthOffsets: extraArguments += ' -num_az_offset {}'.format(' '.join(x)) - cmd += header('estimate SLC offsets') - cmd += parallelSettings('dates2') - cmd += '''for ((i=0;i<${{#dates2[@]}};i++)); do - -{extraCommands} + #cmd += header('estimate SLC offsets') + #cmd += parallelSettings('dates2') + cmd += '''{functionName} () {{ + date=$1 - {script} -idir {datesProcessingDir} -ref_date {dateReferenceStack} -sec_date ${{dates2[i]}} -wbd {wbd} -dem {dem}{extraArguments} - -done'''.format(extraCommands = parallelCommands, + {script} -idir {datesProcessingDir} -ref_date {dateReferenceStack} -sec_date ${{date}} -wbd {wbd} -dem {dem}{extraArguments} +}} +'''.format(functionName = functionName, script = os.path.join(stackScriptPath, 'estimate_slc_offset.py'), datesProcessingDir = stack.datesProcessingDir, dateReferenceStack = stack.dateReferenceStack, wbd = insar.wbd, dem = stack.dem, extraArguments = extraArguments) + cmd += forLoop(functionName, 'array', 'nruns') cmd += '\n' cmd += '\n' cmd += '\n' + cmd 
+= stepDone() + cmds.append(cmd) + step_names.append(step_name) #estimate swath offsets + step_name = 'estimate swath offsets' + cmd = cmd0 + cmd += header(step_name) + cmd += stepSettings(nthreads=stack.numberOfParallelThreads, nruns=stack.numberOfParallelRuns, array='', loop=False) if processDateReferenceStack: - cmd += header('estimate swath offsets') + #cmd += header('estimate swath offsets') cmd += os.path.join(stackScriptPath, 'estimate_swath_offset.py') + ' -idir {} -date {} -output swath_offset.txt'.format(os.path.join(stack.datesProcessingDir, stack.dateReferenceStack), stack.dateReferenceStack) if insar.swathOffsetMatching: cmd += ' -match' cmd += '\n' cmd += '\n' cmd += '\n' + cmd += stepDone() + cmds.append(cmd) + step_names.append(step_name) #estimate frame offsets + step_name = 'estimate frame offsets' + cmd = cmd0 + cmd += header(step_name) + cmd += stepSettings(nthreads=stack.numberOfParallelThreads, nruns=stack.numberOfParallelRuns, array='', loop=False) if processDateReferenceStack: - cmd += header('estimate frame offsets') + #cmd += header('estimate frame offsets') cmd += os.path.join(stackScriptPath, 'estimate_frame_offset.py') + ' -idir {} -date {} -output frame_offset.txt'.format(os.path.join(stack.datesProcessingDir, stack.dateReferenceStack), stack.dateReferenceStack) if insar.frameOffsetMatching: cmd += ' -match' cmd += '\n' cmd += '\n' cmd += '\n' + cmd += stepDone() + cmds.append(cmd) + step_names.append(step_name) #resample to a common grid + step_name = 'resample to a common grid' + cmd = cmd0 + cmd += header(step_name) + cmd += stepSettings(nthreads=stack.numberOfParallelThreads, nruns=stack.numberOfParallelRuns, array='dates', loop=True) if datesProcess != []: extraArguments = '' if stack.gridFrame is not None: @@ -464,15 +526,14 @@ def parallelSettings(array): if insar.doIon: extraArguments += ' -subband' - cmd += header('resample to a common grid') - cmd += parallelSettings('dates') - cmd += '''for ((i=0;i<${{#dates[@]}};i++)); do - 
-{extraCommands} + #cmd += header('resample to a common grid') + #cmd += parallelSettings('dates') + cmd += '''{functionName} () {{ + date=$1 - {script} -idir {datesProcessingDir} -odir {datesResampledDir} -ref_date {dateReferenceStack} -sec_date ${{dates[i]}} -nrlks1 {numberRangeLooks1} -nalks1 {numberAzimuthLooks1}{extraArguments} - -done'''.format(extraCommands = parallelCommands, + {script} -idir {datesProcessingDir} -odir {datesResampledDir} -ref_date {dateReferenceStack} -sec_date ${{date}} -nrlks1 {numberRangeLooks1} -nalks1 {numberAzimuthLooks1}{extraArguments} +}} +'''.format(functionName = functionName, script = os.path.join(stackScriptPath, 'resample_common_grid.py'), datesProcessingDir = stack.datesProcessingDir, datesResampledDir = stack.datesResampledDir, @@ -480,14 +541,22 @@ def parallelSettings(array): numberRangeLooks1 = insar.numberRangeLooks1, numberAzimuthLooks1 = insar.numberAzimuthLooks1, extraArguments = extraArguments) + cmd += forLoop(functionName, 'array', 'nruns') cmd += '\n' cmd += '\n' cmd += '\n' + cmd += stepDone() + cmds.append(cmd) + step_names.append(step_name) #mosaic parameter + step_name = 'mosaic parameter' + cmd = cmd0 + cmd += header(step_name) + cmd += stepSettings(nthreads=stack.numberOfParallelThreads, nruns=stack.numberOfParallelRuns, array='', loop=False) if datesProcess != []: - cmd += header('mosaic parameter') + #cmd += header('mosaic parameter') cmd += os.path.join(stackScriptPath, 'mosaic_parameter.py') + ' -idir {} -ref_date {} -sec_date {} -nrlks1 {} -nalks1 {}'.format(stack.datesProcessingDir, stack.dateReferenceStack, ' '.join(datesProcess), insar.numberRangeLooks1, insar.numberAzimuthLooks1) if stack.gridFrame is not None: cmd += ' -ref_frame {}'.format(stack.gridFrame) @@ -507,11 +576,18 @@ def parallelSettings(array): else: cmd += '\n' cmd += '\n' + cmd += stepDone() + cmds.append(cmd) + step_names.append(step_name) #compute lat/lon/hgt + step_name = 'compute latitude longtitude and height' + cmd = cmd0 + 
cmd += header(step_name) + cmd += stepSettings(nthreads=stack.numberOfParallelThreads, nruns=stack.numberOfParallelRuns, array='', loop=False) if processDateReferenceStack: - cmd += header('compute latitude, longtitude and height') + #cmd += header('compute latitude, longtitude and height') cmd += 'cd {}\n'.format(os.path.join(stack.datesResampledDir, stack.dateReferenceStack)) cmd += os.path.join(stackScriptPath, 'rdr2geo.py') + ' -date {} -dem {} -wbd {} -nrlks1 {} -nalks1 {}'.format(stack.dateReferenceStack, stack.dem, insar.wbd, insar.numberRangeLooks1, insar.numberAzimuthLooks1) if insar.useGPU: @@ -526,25 +602,36 @@ def parallelSettings(array): cmd += '\n' cmd += '\n' cmd += '\n' + cmd += stepDone() + cmds.append(cmd) + step_names.append(step_name) #compute geometrical offsets + step_name = 'compute geometrical offsets' + cmd = cmd0 + cmd += header(step_name) + cmd += stepSettings(nthreads=stack.numberOfParallelThreads, nruns=stack.numberOfParallelRuns, array='dates2', loop=True) if datesProcessSecondary != []: extraArguments = '' if insar.useGPU: extraArguments += ' -gpu' + cmd += ''' +# Number of parallel runs. Since it uses GPU and is fast. do not use multiple runs due to GPU memory limitations. 
+nruns=1 - cmd += header('compute geometrical offsets') - cmd += parallelSettings('dates2') - cmd += '''for ((i=0;i<${{#dates2[@]}};i++)); do +''' -{extraCommands} + #cmd += header('compute geometrical offsets') + #cmd += parallelSettings('dates2') + cmd += '''{functionName} () {{ + date=$1 cd {datesResampledDir} - {script} -date ${{dates2[i]}} -date_par_dir {datesProcessingDir} -lat {lat} -lon {lon} -hgt {hgt} -nrlks1 {numberRangeLooks1} -nalks1 {numberAzimuthLooks1}{extraArguments} + {script} -date ${{date}} -date_par_dir {datesProcessingDir} -lat {lat} -lon {lon} -hgt {hgt} -nrlks1 {numberRangeLooks1} -nalks1 {numberAzimuthLooks1}{extraArguments} cd ../../ - -done'''.format(extraCommands = parallelCommands, +}} +'''.format(functionName = functionName, script = os.path.join(stackScriptPath, 'geo2rdr.py'), datesResampledDir = os.path.join(stack.datesResampledDir, '${dates2[i]}'), datesProcessingDir = os.path.join('../../', stack.datesProcessingDir, '${dates2[i]}'), @@ -554,95 +641,129 @@ def parallelSettings(array): numberRangeLooks1 = insar.numberRangeLooks1, numberAzimuthLooks1 = insar.numberAzimuthLooks1, extraArguments = extraArguments) + cmd += forLoop(functionName, 'array', 'nruns') cmd += '\n' cmd += '\n' + cmd += stepDone() + cmds.append(cmd) + step_names.append(step_name) #save commands - cmd1 = cmd + cmds_1 = cmds + step_names_1 = step_names + + + cmds = [] + step_names = [] if pairsProcess != []: #start new commands: processing each pair before ionosphere correction ################################################################################# - cmd = '#!/bin/bash\n\n' - cmd += '#########################################################################\n' - cmd += '#set the environment variable before running the following steps\n' - cmd += 'insarpair=({})\n'.format(' '.join(pairsProcess)) - cmd += 'dates2=({})\n'.format(' '.join(datesProcessSecondary)) - cmd += '#########################################################################\n' - cmd += 
'\n\n' + cmd0 = '#!/bin/bash\n\n' + cmd0 += 'insarpair=({})\n'.format(' '.join(pairsProcess)) + cmd0 += 'dates2=({})\n'.format(' '.join(datesProcessSecondary)) + cmd0 += '\n\n' else: - cmd = '#!/bin/bash\n\n' - cmd += '#no pairs for InSAR processing.' + cmd0 = '#!/bin/bash\n\n' + cmd0 += '#no pairs for InSAR processing.' #pair up + step_name = 'pair up' + cmd = cmd0 + cmd += header(step_name) + cmd += stepSettings(nthreads=stack.numberOfParallelThreads, nruns=stack.numberOfParallelRuns, array='', loop=False) if pairsProcess != []: - cmd += header('pair up') + #cmd += header('pair up') cmd += os.path.join(stackScriptPath, 'pair_up.py') + ' -idir1 {} -idir2 {} -odir {} -ref_date {} -pairs {}'.format(stack.datesProcessingDir, stack.datesResampledDir, stack.pairsProcessingDir, stack.dateReferenceStack, ' '.join(pairsProcess)) cmd += '\n' cmd += '\n' cmd += '\n' + cmd += stepDone() + cmds.append(cmd) + step_names.append(step_name) #form interferograms + step_name = 'form interferograms' + cmd = cmd0 + cmd += header(step_name) + cmd += stepSettings(nthreads=stack.numberOfParallelThreads, nruns=stack.numberOfParallelRuns, array='insarpair', loop=True) if pairsProcess != []: - cmd += header('form interferograms') - cmd += parallelSettings('insarpair') - cmd += '''for ((i=0;i<${{#insarpair[@]}};i++)); do + #cmd += header('form interferograms') + #cmd += parallelSettings('insarpair') + cmd += '''{functionName} () {{ + pair=$1 -{extraCommands} - - IFS='-' read -ra dates <<< "${{insarpair[i]}}" + IFS='-' read -ra dates <<< "${{pair}}" ref_date=${{dates[0]}} sec_date=${{dates[1]}} cd {pairsProcessingDir} - cd ${{insarpair[i]}} + cd ${{pair}} {script} -ref_date ${{ref_date}} -sec_date ${{sec_date}} -nrlks1 {nrlks1} -nalks1 {nalks1} cd ../../ - -done'''.format(extraCommands = parallelCommands, +}} +'''.format(functionName = functionName, script = os.path.join(stackScriptPath, 'form_interferogram.py'), pairsProcessingDir = stack.pairsProcessingDir, nrlks1 = 
insar.numberRangeLooks1, nalks1 = insar.numberAzimuthLooks1) + cmd += forLoop(functionName, 'array', 'nruns') cmd += '\n' cmd += '\n' cmd += '\n' + cmd += stepDone() + cmds.append(cmd) + step_names.append(step_name) #mosaic interferograms + step_name = 'mosaic interferograms' + cmd = cmd0 + cmd += header(step_name) + cmd += stepSettings(nthreads=stack.numberOfParallelThreads, nruns=stack.numberOfParallelRuns, array='insarpair', loop=True) if pairsProcess != []: - cmd += header('mosaic interferograms') - cmd += parallelSettings('insarpair') - cmd += '''for ((i=0;i<${{#insarpair[@]}};i++)); do + wbd1 = '../../dates_resampled/{}/insar/{}_{}rlks_{}alks.wbd'.format(stack.dateReferenceStack, stack.dateReferenceStack, insar.numberRangeLooks1, insar.numberAzimuthLooks1) -{extraCommands} + #cmd += header('mosaic interferograms') + #cmd += parallelSettings('insarpair') + cmd += '''{functionName} () {{ + pair=$1 - IFS='-' read -ra dates <<< "${{insarpair[i]}}" + IFS='-' read -ra dates <<< "${{pair}}" ref_date=${{dates[0]}} sec_date=${{dates[1]}} cd {pairsProcessingDir} - cd ${{insarpair[i]}} - {script} -ref_date_stack {ref_date_stack} -ref_date ${{ref_date}} -sec_date ${{sec_date}} -nrlks1 {nrlks1} -nalks1 {nalks1} + cd ${{pair}} + {script} -ref_date_stack {ref_date_stack} -ref_date ${{ref_date}} -sec_date ${{sec_date}} -nrlks1 {nrlks1} -nalks1 {nalks1} -water_body {wbd} cd ../../ - -done'''.format(extraCommands = parallelCommands, +}} +'''.format(functionName = functionName, script = os.path.join(stackScriptPath, 'mosaic_interferogram.py'), pairsProcessingDir = stack.pairsProcessingDir, ref_date_stack = stack.dateReferenceStack, nrlks1 = insar.numberRangeLooks1, - nalks1 = insar.numberAzimuthLooks1) + nalks1 = insar.numberAzimuthLooks1, + wbd = wbd1) + cmd += forLoop(functionName, 'array', 'nruns') cmd += '\n' cmd += '\n' cmd += '\n' + cmd += stepDone() + cmds.append(cmd) + step_names.append(step_name) #estimate residual offsets between radar and DEM + step_name = 'estimate 
residual offsets between radar and DEM' + cmd = cmd0 + cmd += header(step_name) + cmd += stepSettings(nthreads=stack.numberOfParallelThreads, nruns=stack.numberOfParallelRuns, array='', loop=False) if processDateReferenceStack: #if not os.path.isfile(os.path.join(stack.datesResampledDir, stack.dateReferenceStack, 'insar', 'affine_transform.txt')): #amplitde image of any pair should work, since they are all coregistered now @@ -661,7 +782,7 @@ def parallelSettings(array): hgt = os.path.join('insar', '{}_{}rlks_{}alks.hgt'.format(stack.dateReferenceStack, insar.numberRangeLooks1, insar.numberAzimuthLooks1)) amp = os.path.join('../../', stack.pairsProcessingDir, pairToUse, 'insar', '{}_{}rlks_{}alks.amp'.format(pairToUse, insar.numberRangeLooks1, insar.numberAzimuthLooks1)) - cmd += header('estimate residual offsets between radar and DEM') + #cmd += header('estimate residual offsets between radar and DEM') cmd += 'cd {}\n'.format(os.path.join(stack.datesResampledDir, stack.dateReferenceStack)) cmd += os.path.join(stackScriptPath, 'radar_dem_offset.py') + ' -track {} -dem {} -wbd {} -hgt {} -amp {} -output affine_transform.txt -nrlks1 {} -nalks1 {}'.format(track, stack.dem, wbd, hgt, amp, insar.numberRangeLooks1, insar.numberAzimuthLooks1) if insar.numberRangeLooksSim is not None: @@ -672,87 +793,108 @@ def parallelSettings(array): cmd += 'cd ../../\n' cmd += '\n' cmd += '\n' + cmd += stepDone() + cmds.append(cmd) + step_names.append(step_name) #rectify range offsets + step_name = 'rectify range offsets' + cmd = cmd0 + cmd += header(step_name) + cmd += stepSettings(nthreads=stack.numberOfParallelThreads, nruns=stack.numberOfParallelRuns, array='dates2', loop=True) if datesProcessSecondary != []: - cmd += header('rectify range offsets') - cmd += parallelSettings('dates2') - cmd += '''for ((i=0;i<${{#dates2[@]}};i++)); do - -{extraCommands} + #cmd += header('rectify range offsets') + #cmd += parallelSettings('dates2') + cmd += '''{functionName} () {{ + date=$1 cd 
{datesResampledDir} - cd ${{dates2[i]}} + cd ${{date}} cd insar - {script} -aff {aff} -input ${{dates2[i]}}_{nrlks1}rlks_{nalks1}alks_rg.off -output ${{dates2[i]}}_{nrlks1}rlks_{nalks1}alks_rg_rect.off -nrlks1 {nrlks1} -nalks1 {nalks1} + {script} -aff {aff} -input ${{date}}_{nrlks1}rlks_{nalks1}alks_rg.off -output ${{date}}_{nrlks1}rlks_{nalks1}alks_rg_rect.off -nrlks1 {nrlks1} -nalks1 {nalks1} cd ../../../ - -done'''.format(extraCommands = parallelCommands, +}} +'''.format(functionName = functionName, script = os.path.join(stackScriptPath, 'rect_range_offset.py'), datesResampledDir = stack.datesResampledDir, aff = os.path.join('../../', stack.dateReferenceStack, 'insar', 'affine_transform.txt'), nrlks1 = insar.numberRangeLooks1, nalks1 = insar.numberAzimuthLooks1) + cmd += forLoop(functionName, 'array', 'nruns') cmd += '\n' cmd += '\n' cmd += '\n' + cmd += stepDone() + cmds.append(cmd) + step_names.append(step_name) #diff interferograms + step_name = 'diff interferograms' + cmd = cmd0 + cmd += header(step_name) + cmd += stepSettings(nthreads=stack.numberOfParallelThreads, nruns=stack.numberOfParallelRuns, array='insarpair', loop=True) if pairsProcess != []: - cmd += header('diff interferograms') - cmd += parallelSettings('insarpair') - cmd += '''for ((i=0;i<${{#insarpair[@]}};i++)); do - -{extraCommands} + #cmd += header('diff interferograms') + #cmd += parallelSettings('insarpair') + cmd += '''{functionName} () {{ + pair=$1 - IFS='-' read -ra dates <<< "${{insarpair[i]}}" + IFS='-' read -ra dates <<< "${{pair}}" ref_date=${{dates[0]}} sec_date=${{dates[1]}} cd {pairsProcessingDir} - cd ${{insarpair[i]}} + cd ${{pair}} {script} -idir {idir} -ref_date_stack {ref_date_stack} -ref_date ${{ref_date}} -sec_date ${{sec_date}} -nrlks1 {nrlks1} -nalks1 {nalks1} cd ../../ - -done'''.format(extraCommands = parallelCommands, +}} +'''.format(functionName = functionName, script = os.path.join(stackScriptPath, 'diff_interferogram.py'), pairsProcessingDir = 
stack.pairsProcessingDir, idir = os.path.join('../../', stack.datesResampledDir), ref_date_stack = stack.dateReferenceStack, nrlks1 = insar.numberRangeLooks1, nalks1 = insar.numberAzimuthLooks1) + cmd += forLoop(functionName, 'array', 'nruns') cmd += '\n' cmd += '\n' cmd += '\n' + cmd += stepDone() + cmds.append(cmd) + step_names.append(step_name) #look and coherence + step_name = 'look and coherence' + cmd = cmd0 + cmd += header(step_name) + cmd += stepSettings(nthreads=stack.numberOfParallelThreads, nruns=stack.numberOfParallelRuns, array='insarpair', loop=True) if (pairsProcess != []) or processDateReferenceStack: - cmd += header('look and coherence') + #cmd += header('look and coherence') if pairsProcess != []: - cmd += parallelSettings('insarpair') - cmd += '''for ((i=0;i<${{#insarpair[@]}};i++)); do + #cmd += parallelSettings('insarpair') + cmd += '''{functionName} () {{ + pair=$1 -{extraCommands} - - IFS='-' read -ra dates <<< "${{insarpair[i]}}" + IFS='-' read -ra dates <<< "${{pair}}" ref_date=${{dates[0]}} sec_date=${{dates[1]}} cd {pairsProcessingDir} - cd ${{insarpair[i]}} + cd ${{pair}} {script} -ref_date ${{ref_date}} -sec_date ${{sec_date}} -nrlks1 {nrlks1} -nalks1 {nalks1} -nrlks2 {nrlks2} -nalks2 {nalks2} cd ../../ - -done'''.format(extraCommands = parallelCommands, +}} +'''.format(functionName = functionName, script = os.path.join(stackScriptPath, 'look_coherence.py'), pairsProcessingDir = stack.pairsProcessingDir, nrlks1 = insar.numberRangeLooks1, nalks1 = insar.numberAzimuthLooks1, nrlks2 = insar.numberRangeLooks2, nalks2 = insar.numberAzimuthLooks2) + cmd += forLoop(functionName, 'array', 'nruns') cmd += '\n' cmd += '\n' @@ -762,73 +904,99 @@ def parallelSettings(array): cmd += '\n' cmd += 'cd ../../\n' cmd += '\n' + + cmd += stepDone() + cmds.append(cmd) + step_names.append(step_name) #save commands - cmd2 = cmd + cmds_2 = cmds + step_names_2 = step_names + + cmds = [] + step_names = [] #for ionospheric correction if insar.doIon and 
(pairsProcessIon != []): #start new commands: ionospheric phase estimation ################################################################################# - cmd = '#!/bin/bash\n\n' - cmd += '#########################################################################\n' - cmd += '#set the environment variables before running the following steps\n' - cmd += 'ionpair=({})\n'.format(' '.join(pairsProcessIon)) - cmd += 'ionpair1=({})\n'.format(' '.join(pairsProcessIon1)) - cmd += 'ionpair2=({})\n'.format(' '.join(pairsProcessIon2)) - cmd += 'insarpair=({})\n'.format(' '.join(pairsProcess)) - cmd += '#########################################################################\n' - cmd += '\n\n' + cmd0 = '#!/bin/bash\n\n' + cmd0 += 'ionpair=({})\n'.format(' '.join(pairsProcessIon)) + cmd0 += 'ionpair1=({})\n'.format(' '.join(pairsProcessIon1)) + cmd0 += 'ionpair2=({})\n'.format(' '.join(pairsProcessIon2)) + cmd0 += 'insarpair=({})\n'.format(' '.join(pairsProcess)) + cmd0 += '\n\n' #pair up - cmd += header('pair up for ionospheric phase estimation') + step_name = 'pair up for ionospheric phase estimation' + cmd = cmd0 + cmd += header(step_name) + cmd += stepSettings(nthreads=stack.numberOfParallelThreads, nruns=stack.numberOfParallelRuns, array='', loop=False) + #cmd += header('pair up for ionospheric phase estimation') cmd += os.path.join(stackScriptPath, 'pair_up.py') + ' -idir1 {} -idir2 {} -odir {} -ref_date {} -pairs {}'.format(stack.datesProcessingDir, stack.datesResampledDir, stack.pairsProcessingDirIon, stack.dateReferenceStack, ' '.join(pairsProcessIon)) cmd += '\n' cmd += '\n' cmd += '\n' + cmd += stepDone() + cmds.append(cmd) + step_names.append(step_name) #subband interferograms + step_name = 'subband interferograms' + cmd = cmd0 + cmd += header(step_name) + cmd += stepSettings(nthreads=stack.numberOfParallelThreads, nruns=stack.numberOfParallelRuns, array='ionpair', loop=True) if insar.swathPhaseDiffSnapIon is not None: snap = [[1 if y else 0 for y in x] for x in 
insar.swathPhaseDiffSnapIon] snapArgument = ' ' + ' '.join(['-snap {}'.format(' '.join([str(y) for y in x])) for x in snap]) else: snapArgument = '' - cmd += header('subband interferograms') - cmd += parallelSettings('ionpair') - cmd += '''for ((i=0;i<${{#ionpair[@]}};i++)); do + wbd1 = '../../dates_resampled/{}/insar/{}_{}rlks_{}alks.wbd'.format(stack.dateReferenceStack, stack.dateReferenceStack, insar.numberRangeLooks1, insar.numberAzimuthLooks1) -{extraCommands} + #cmd += header('subband interferograms') + #cmd += parallelSettings('ionpair') + cmd += '''{functionName} () {{ + pair=$1 - IFS='-' read -ra dates <<< "${{ionpair[i]}}" + IFS='-' read -ra dates <<< "${{pair}}" ref_date=${{dates[0]}} sec_date=${{dates[1]}} cd {pairsProcessingDir} - cd ${{ionpair[i]}} - {script} -idir {idir} -ref_date_stack {ref_date_stack} -ref_date ${{ref_date}} -sec_date ${{sec_date}} -nrlks1 {nrlks1} -nalks1 {nalks1}{snapArgument} + cd ${{pair}} + {script} -idir {idir} -ref_date_stack {ref_date_stack} -ref_date ${{ref_date}} -sec_date ${{sec_date}} -nrlks1 {nrlks1} -nalks1 {nalks1} -water_body {wbd}{snapArgument} cd ../../ - -done'''.format(extraCommands = parallelCommands, +}} +'''.format(functionName = functionName, script = os.path.join(stackScriptPath, 'ion_subband.py'), pairsProcessingDir = stack.pairsProcessingDirIon, idir = os.path.join('../../', stack.datesResampledDir), ref_date_stack = stack.dateReferenceStack, nrlks1 = insar.numberRangeLooks1, nalks1 = insar.numberAzimuthLooks1, + wbd = wbd1, snapArgument = snapArgument) + cmd += forLoop(functionName, 'array', 'nruns') cmd += '\n' cmd += '\n' cmd += '\n' + cmd += stepDone() + cmds.append(cmd) + step_names.append(step_name) #unwrap subband interferograms + step_name = 'unwrap subband interferograms' + cmd = cmd0 + cmd += header(step_name) + cmd += stepSettings(nthreads=stack.numberOfParallelThreads, nruns=stack.numberOfParallelRuns, array='ionpair', loop=True) if insar.filterSubbandInt: filtArgument = ' -filt -alpha {} -win 
{} -step {}'.format(insar.filterStrengthSubbandInt, insar.filterWinsizeSubbandInt, insar.filterStepsizeSubbandInt) if not insar.removeMagnitudeBeforeFilteringSubbandInt: @@ -836,22 +1004,21 @@ def parallelSettings(array): else: filtArgument = '' - cmd += header('unwrap subband interferograms') - cmd += parallelSettings('ionpair') - cmd += '''for ((i=0;i<${{#ionpair[@]}};i++)); do + #cmd += header('unwrap subband interferograms') + #cmd += parallelSettings('ionpair') + cmd += '''{functionName} () {{ + pair=$1 -{extraCommands} - - IFS='-' read -ra dates <<< "${{ionpair[i]}}" + IFS='-' read -ra dates <<< "${{pair}}" ref_date=${{dates[0]}} sec_date=${{dates[1]}} cd {pairsProcessingDir} - cd ${{ionpair[i]}} + cd ${{pair}} {script} -idir {idir} -ref_date_stack {ref_date_stack} -ref_date ${{ref_date}} -sec_date ${{sec_date}} -wbd {wbd} -nrlks1 {nrlks1} -nalks1 {nalks1} -nrlks_ion {nrlks_ion} -nalks_ion {nalks_ion}{filtArgument} cd ../../ - -done'''.format(extraCommands = parallelCommands, +}} +'''.format(functionName = functionName, script = os.path.join(stackScriptPath, 'ion_unwrap.py'), pairsProcessingDir = stack.pairsProcessingDirIon, idir = os.path.join('../../', stack.datesResampledDir), @@ -862,12 +1029,20 @@ def parallelSettings(array): nrlks_ion = insar.numberRangeLooksIon, nalks_ion = insar.numberAzimuthLooksIon, filtArgument = filtArgument) + cmd += forLoop(functionName, 'array', 'nruns') cmd += '\n' cmd += '\n' cmd += '\n' + cmd += stepDone() + cmds.append(cmd) + step_names.append(step_name) #filter ionosphere + step_name = 'filter ionosphere' + cmd = cmd0 + cmd += header(step_name) + cmd += stepSettings(nthreads=stack.numberOfParallelThreads, nruns=stack.numberOfParallelRuns, array='ionpair', loop=True) filtArgument = '' if insar.fitIon: filtArgument += ' -fit' @@ -875,6 +1050,8 @@ def parallelSettings(array): filtArgument += ' -filt' if insar.fitAdaptiveIon: filtArgument += ' -fit_adaptive' + if insar.rmOutliersIon: + filtArgument += ' -rm_outliers' if 
insar.filtSecondaryIon: filtArgument += ' -filt_secondary -win_secondary {}'.format(insar.filteringWinsizeSecondaryIon) if insar.filterStdIon is not None: @@ -883,22 +1060,21 @@ def parallelSettings(array): if insar.maskedAreasIon is not None: filtArgument += ''.join([' -masked_areas '+' '.join([str(y) for y in x]) for x in insar.maskedAreasIon]) - cmd += header('filter ionosphere') - cmd += parallelSettings('ionpair') - cmd += '''for ((i=0;i<${{#ionpair[@]}};i++)); do - -{extraCommands} + #cmd += header('filter ionosphere') + #cmd += parallelSettings('ionpair') + cmd += '''{functionName} () {{ + pair=$1 - IFS='-' read -ra dates <<< "${{ionpair[i]}}" + IFS='-' read -ra dates <<< "${{pair}}" ref_date=${{dates[0]}} sec_date=${{dates[1]}} cd {pairsProcessingDir} - cd ${{ionpair[i]}} - {script} -idir {idir1} -idir2 {idir2} -ref_date_stack {ref_date_stack} -ref_date ${{ref_date}} -sec_date ${{sec_date}} -nrlks1 {nrlks1} -nalks1 {nalks1} -nrlks2 {nrlks2} -nalks2 {nalks2} -nrlks_ion {nrlks_ion} -nalks_ion {nalks_ion} -win_min {win_min} -win_max {win_max}{filtArgument} + cd ${{pair}} + {script} -idir {idir1} -idir2 {idir2} -ref_date_stack {ref_date_stack} -ref_date ${{ref_date}} -sec_date ${{sec_date}} -nrlks1 {nrlks1} -nalks1 {nalks1} -nrlks2 {nrlks2} -nalks2 {nalks2} -nrlks_ion {nrlks_ion} -nalks_ion {nalks_ion} -win_min {win_min} -win_max {win_max} -fit_adaptive_order {fit_adaptive_order}{filtArgument} cd ../../ - -done'''.format(extraCommands = parallelCommands, +}} +'''.format(functionName = functionName, script = os.path.join(stackScriptPath, 'ion_filt.py'), pairsProcessingDir = stack.pairsProcessingDirIon, idir1 = os.path.join('../../', stack.datesResampledDir), @@ -912,28 +1088,36 @@ def parallelSettings(array): nalks_ion = insar.numberAzimuthLooksIon, win_min = insar.filteringWinsizeMinIon, win_max = insar.filteringWinsizeMaxIon, + fit_adaptive_order = insar.fitAdaptiveOrderIon, filtArgument = filtArgument) + cmd += forLoop(functionName, 'array', 'nruns') cmd += 
'\n' cmd += '\n' cmd += '\n' + cmd += stepDone() + cmds.append(cmd) + step_names.append(step_name) #prepare interferograms for checking ionospheric correction - cmd += header('prepare interferograms for checking ionosphere estimation results') + step_name = 'prepare interferograms for checking ionosphere estimation results' + cmd = cmd0 + cmd += header(step_name) + #cmd += header('prepare interferograms for checking ionosphere estimation results') if pairsProcessIon1 != []: - cmd += parallelSettings('ionpair1') + cmd += stepSettings(nthreads=stack.numberOfParallelThreads, nruns=stack.numberOfParallelRuns, array='ionpair1', loop=True) + #cmd += parallelSettings('ionpair1') if (insar.numberRangeLooksIon != 1) or (insar.numberAzimuthLooksIon != 1): - cmd += '''for ((i=0;i<${{#ionpair1[@]}};i++)); do - -{extraCommands} + cmd += '''{functionName} () {{ + pair=$1 - IFS='-' read -ra dates <<< "${{ionpair1[i]}}" + IFS='-' read -ra dates <<< "${{pair}}" ref_date=${{dates[0]}} sec_date=${{dates[1]}} - {script} -i {pairsProcessingDir}/${{ionpair1[i]}}/insar/diff_${{ionpair1[i]}}_{nrlks1}rlks_{nalks1}alks.int -o {pairsProcessingDirIon}/${{ionpair1[i]}}/ion/ion_cal/diff_${{ionpair1[i]}}_{nrlks}rlks_{nalks}alks_ori.int -r {nrlks_ion} -a {nalks_ion} - -done'''.format(extraCommands = parallelCommands, + {script} -i {pairsProcessingDir}/${{pair}}/insar/diff_${{pair}}_{nrlks1}rlks_{nalks1}alks.int -o {pairsProcessingDirIon}/${{pair}}/ion/ion_cal/diff_${{pair}}_{nrlks}rlks_{nalks}alks_ori.int -r {nrlks_ion} -a {nalks_ion} +}} +'''.format(functionName = functionName, script = os.path.join('', 'looks.py'), pairsProcessingDir = stack.pairsProcessingDir.strip('/'), pairsProcessingDirIon = stack.pairsProcessingDirIon.strip('/'), @@ -943,42 +1127,43 @@ def parallelSettings(array): nalks_ion = insar.numberAzimuthLooksIon, nrlks = insar.numberRangeLooks1 * insar.numberRangeLooksIon, nalks = insar.numberAzimuthLooks1 * insar.numberAzimuthLooksIon) + cmd += forLoop(functionName, 'array', 
'nruns') cmd += '\n' cmd += '\n' cmd += '\n' else: - cmd += '''for ((i=0;i<${{#ionpair1[@]}};i++)); do - -{extraCommands} + cmd += '''{functionName} () {{ + pair=$1 - IFS='-' read -ra dates <<< "${{ionpair1[i]}}" + IFS='-' read -ra dates <<< "${{pair}}" ref_date=${{dates[0]}} sec_date=${{dates[1]}} - cp {pairsProcessingDir}/${{ionpair1[i]}}/insar/diff_${{ionpair1[i]}}_{nrlks1}rlks_{nalks1}alks.int* {pairsProcessingDirIon}/${{ionpair1[i]}}/ion/ion_cal - -done'''.format(extraCommands = parallelCommands, + cp {pairsProcessingDir}/${{pair}}/insar/diff_${{pair}}_{nrlks1}rlks_{nalks1}alks.int* {pairsProcessingDirIon}/${{pair}}/ion/ion_cal +}} +'''.format(functionName = functionName, pairsProcessingDir = stack.pairsProcessingDir.strip('/'), pairsProcessingDirIon = stack.pairsProcessingDirIon.strip('/'), nrlks1 = insar.numberRangeLooks1, nalks1 = insar.numberAzimuthLooks1) + cmd += forLoop(functionName, 'array', 'nruns') cmd += '\n' cmd += '\n' cmd += '\n' if pairsProcessIon2 != []: - cmd += parallelSettings('ionpair2') - cmd += '''for ((i=0;i<${{#ionpair2[@]}};i++)); do - -{extraCommands} + cmd += stepSettings(nthreads=stack.numberOfParallelThreads, nruns=stack.numberOfParallelRuns, array='ionpair2', loop=True) + #cmd += parallelSettings('ionpair2') + cmd += '''{functionName} () {{ + pair=$1 - IFS='-' read -ra dates <<< "${{ionpair2[i]}}" + IFS='-' read -ra dates <<< "${{pair}}" ref_date=${{dates[0]}} sec_date=${{dates[1]}} cd {pairsProcessingDir} - cd ${{ionpair2[i]}} + cd ${{pair}} {script} -ref_date ${{ref_date}} -sec_date ${{sec_date}} -nrlks1 {nrlks1} -nalks1 {nalks1} cd ../../ @@ -987,66 +1172,68 @@ def parallelSettings(array): pairsProcessingDir = stack.pairsProcessingDirIon, nrlks1 = insar.numberRangeLooks1, nalks1 = insar.numberAzimuthLooks1) + cmd += forLoop(functionName, 'array', 'nruns') cmd += '\n' cmd += '\n' - cmd += '''for ((i=0;i<${{#ionpair2[@]}};i++)); do + wbd1 = '../../dates_resampled/{}/insar/{}_{}rlks_{}alks.wbd'.format(stack.dateReferenceStack, 
stack.dateReferenceStack, insar.numberRangeLooks1, insar.numberAzimuthLooks1) + cmd += '''{functionName} () {{ + pair=$1 -{extraCommands} - - IFS='-' read -ra dates <<< "${{ionpair2[i]}}" + IFS='-' read -ra dates <<< "${{pair}}" ref_date=${{dates[0]}} sec_date=${{dates[1]}} cd {pairsProcessingDir} - cd ${{ionpair2[i]}} - {script} -ref_date_stack {ref_date_stack} -ref_date ${{ref_date}} -sec_date ${{sec_date}} -nrlks1 {nrlks1} -nalks1 {nalks1} + cd ${{pair}} + {script} -ref_date_stack {ref_date_stack} -ref_date ${{ref_date}} -sec_date ${{sec_date}} -nrlks1 {nrlks1} -nalks1 {nalks1} -water_body {wbd} cd ../../ - -done'''.format(extraCommands = parallelCommands, +}} +'''.format(functionName = functionName, script = os.path.join(stackScriptPath, 'mosaic_interferogram.py'), pairsProcessingDir = stack.pairsProcessingDirIon, ref_date_stack = stack.dateReferenceStack, nrlks1 = insar.numberRangeLooks1, - nalks1 = insar.numberAzimuthLooks1) + nalks1 = insar.numberAzimuthLooks1, + wbd = wbd1) + cmd += forLoop(functionName, 'array', 'nruns') cmd += '\n' cmd += '\n' - cmd += '''for ((i=0;i<${{#ionpair2[@]}};i++)); do - -{extraCommands} + cmd += '''{functionName} () {{ + pair=$1 - IFS='-' read -ra dates <<< "${{ionpair2[i]}}" + IFS='-' read -ra dates <<< "${{pair}}" ref_date=${{dates[0]}} sec_date=${{dates[1]}} cd {pairsProcessingDir} - cd ${{ionpair2[i]}} + cd ${{pair}} {script} -idir {idir} -ref_date_stack {ref_date_stack} -ref_date ${{ref_date}} -sec_date ${{sec_date}} -nrlks1 {nrlks1} -nalks1 {nalks1} cd ../../ - -done'''.format(extraCommands = parallelCommands, +}} +'''.format(functionName = functionName, script = os.path.join(stackScriptPath, 'diff_interferogram.py'), pairsProcessingDir = stack.pairsProcessingDirIon, idir = os.path.join('../../', stack.datesResampledDir), ref_date_stack = stack.dateReferenceStack, nrlks1 = insar.numberRangeLooks1, nalks1 = insar.numberAzimuthLooks1) + cmd += forLoop(functionName, 'array', 'nruns') cmd += '\n' cmd += '\n' if 
(insar.numberRangeLooksIon != 1) or (insar.numberAzimuthLooksIon != 1): - cmd += '''for ((i=0;i<${{#ionpair2[@]}};i++)); do - -{extraCommands} + cmd += '''{functionName} () {{ + pair=$1 - IFS='-' read -ra dates <<< "${{ionpair2[i]}}" + IFS='-' read -ra dates <<< "${{pair}}" ref_date=${{dates[0]}} sec_date=${{dates[1]}} - {script} -i {pairsProcessingDir}/${{ionpair2[i]}}/insar/diff_${{ionpair2[i]}}_{nrlks1}rlks_{nalks1}alks.int -o {pairsProcessingDir}/${{ionpair2[i]}}/ion/ion_cal/diff_${{ionpair2[i]}}_{nrlks}rlks_{nalks}alks_ori.int -r {nrlks_ion} -a {nalks_ion} - -done'''.format(extraCommands = parallelCommands, + {script} -i {pairsProcessingDir}/${{pair}}/insar/diff_${{pair}}_{nrlks1}rlks_{nalks1}alks.int -o {pairsProcessingDir}/${{pair}}/ion/ion_cal/diff_${{pair}}_{nrlks}rlks_{nalks}alks_ori.int -r {nrlks_ion} -a {nalks_ion} +}} +'''.format(functionName = functionName, script = os.path.join('', 'looks.py'), pairsProcessingDir = stack.pairsProcessingDirIon.strip('/'), nrlks1 = insar.numberRangeLooks1, @@ -1055,50 +1242,58 @@ def parallelSettings(array): nalks_ion = insar.numberAzimuthLooksIon, nrlks = insar.numberRangeLooks1 * insar.numberRangeLooksIon, nalks = insar.numberAzimuthLooks1 * insar.numberAzimuthLooksIon) + cmd += forLoop(functionName, 'array', 'nruns') cmd += '\n' cmd += '\n' cmd += '\n' else: - cmd += '''for ((i=0;i<${{#ionpair2[@]}};i++)); do - -{extraCommands} + cmd += '''{functionName} () {{ + pair=$1 - IFS='-' read -ra dates <<< "${{ionpair2[i]}}" + IFS='-' read -ra dates <<< "${{pair}}" ref_date=${{dates[0]}} sec_date=${{dates[1]}} - cp {pairsProcessingDir}/${{ionpair2[i]}}/insar/diff_${{ionpair2[i]}}_{nrlks1}rlks_{nalks1}alks.int* {pairsProcessingDir}/${{ionpair2[i]}}/ion/ion_cal - -done'''.format(extraCommands = parallelCommands, + cp {pairsProcessingDir}/${{pair}}/insar/diff_${{pair}}_{nrlks1}rlks_{nalks1}alks.int* {pairsProcessingDir}/${{pair}}/ion/ion_cal +}} +'''.format(functionName = functionName, pairsProcessingDir = 
stack.pairsProcessingDirIon.strip('/'), nrlks1 = insar.numberRangeLooks1, nalks1 = insar.numberAzimuthLooks1) + cmd += forLoop(functionName, 'array', 'nruns') cmd += '\n' cmd += '\n' cmd += '\n' + cmd += stepDone() + cmds.append(cmd) + step_names.append(step_name) #check ionosphere estimation results - cmd += header('check ionosphere estimation results') - cmd += parallelSettings('ionpair') - cmd += '''for ((i=0;i<${{#ionpair[@]}};i++)); do - -{extraCommands} - - IFS='-' read -ra dates <<< "${{ionpair[i]}}" + step_name = 'check ionosphere estimation results' + cmd = cmd0 + cmd += header(step_name) + cmd += stepSettings(nthreads=stack.numberOfParallelThreads, nruns=stack.numberOfParallelRuns, array='ionpair', loop=True) + #cmd += header('check ionosphere estimation results') + #cmd += parallelSettings('ionpair') + cmd += '''{functionName} () {{ + pair=$1 + + IFS='-' read -ra dates <<< "${{pair}}" ref_date=${{dates[0]}} sec_date=${{dates[1]}} cd {pairsProcessingDir} - cd ${{ionpair[i]}} - {script} -e='a*exp(-1.0*J*b)' --a=ion/ion_cal/diff_${{ionpair[i]}}_{nrlks}rlks_{nalks}alks_ori.int --b=ion/ion_cal/filt_ion_{nrlks}rlks_{nalks}alks.ion -s BIP -t cfloat -o ion/ion_cal/diff_${{ionpair[i]}}_{nrlks}rlks_{nalks}alks.int + cd ${{pair}} + {script} -e='a*exp(-1.0*J*b)' --a=ion/ion_cal/diff_${{pair}}_{nrlks}rlks_{nalks}alks_ori.int --b=ion/ion_cal/filt_ion_{nrlks}rlks_{nalks}alks.ion -s BIP -t cfloat -o ion/ion_cal/diff_${{pair}}_{nrlks}rlks_{nalks}alks.int cd ../../ - -done'''.format(extraCommands = parallelCommands, +}} +'''.format(functionName = functionName, script = os.path.join('', 'imageMath.py'), pairsProcessingDir = stack.pairsProcessingDirIon, nrlks = insar.numberRangeLooks1*insar.numberRangeLooksIon, nalks = insar.numberAzimuthLooks1*insar.numberAzimuthLooksIon) + cmd += forLoop(functionName, 'array', 'nruns') cmd += '\n' cmd += '\n' @@ -1106,10 +1301,17 @@ def parallelSettings(array): cmd += '\n' cmd += '\n' cmd += '\n' + cmd += stepDone() + cmds.append(cmd) + 
step_names.append(step_name) #estimate ionospheric phase for each date - cmd += header('estimate ionospheric phase for each date') + step_name = 'estimate ionospheric phase for each date' + cmd = cmd0 + cmd += header(step_name) + cmd += stepSettings(nthreads=stack.numberOfParallelThreads, nruns=stack.numberOfParallelRuns, array='', loop=False) + #cmd += header('estimate ionospheric phase for each date') cmd += "#check the ionospheric phase estimation results in folder 'fig_ion', and find out the bad pairs.\n" cmd += '#these pairs should be excluded from this step by specifying parameter -exc_pair. For example:\n' cmd += '#-exc_pair 150401-150624 150401-150722\n\n' @@ -1122,11 +1324,18 @@ def parallelSettings(array): cmd += '\n' cmd += '\n' cmd += '\n' + cmd += stepDone() + cmds.append(cmd) + step_names.append(step_name) #correct ionosphere + step_name = 'correct ionosphere' + cmd = cmd0 + cmd += header(step_name) + cmd += stepSettings(nthreads=stack.numberOfParallelThreads, nruns=stack.numberOfParallelRuns, array='', loop=False) if insar.applyIon: - cmd += header('correct ionosphere') + #cmd += header('correct ionosphere') cmd += '#no need to run parallelly for this for loop, it is fast!!!\n' cmd += '''#redefine insarpair to include all processed InSAR pairs insarpair=($(ls -l {pairsProcessingDir} | grep ^d | awk '{{print $9}}')) @@ -1151,57 +1360,65 @@ def parallelSettings(array): nalks2 = insar.numberAzimuthLooks2) cmd += '\n' cmd += '\n' + cmd += stepDone() + cmds.append(cmd) + step_names.append(step_name) else: cmd = '#!/bin/bash\n\n' cmd += '#no pairs for estimating ionosphere.' 
+ #do not create steps if not doing ionospheric correction #save commands - cmd3 = cmd + cmds_3 = cmds + step_names_3 = step_names + + cmds = [] + step_names = [] #if pairsProcess != []: if True: #start new commands: processing each pair after ionosphere correction ################################################################################# - cmd = '#!/bin/bash\n\n' - cmd += '#########################################################################\n' - cmd += '#set the environment variable before running the following steps\n' + cmd0 = '#!/bin/bash\n\n' if insar.doIon and insar.applyIon: #reprocess all pairs - cmd += '''insarpair=($(ls -l {pairsProcessingDir} | grep ^d | awk '{{print $9}}'))'''.format(pairsProcessingDir = stack.pairsProcessingDir) - cmd += '\n' + cmd0 += '''insarpair=($(ls -l {pairsProcessingDir} | grep ^d | awk '{{print $9}}'))'''.format(pairsProcessingDir = stack.pairsProcessingDir) + cmd0 += '\n' else: - cmd += 'insarpair=({})\n'.format(' '.join(pairsProcess)) - cmd += '#########################################################################\n' - cmd += '\n\n' + cmd0 += 'insarpair=({})\n'.format(' '.join(pairsProcess)) + cmd0 += '\n\n' #filter interferograms + step_name = 'filter interferograms' + cmd = cmd0 + cmd += header(step_name) + cmd += stepSettings(nthreads=stack.numberOfParallelThreads, nruns=stack.numberOfParallelRuns, array='insarpair', loop=True) extraArguments = '' if not insar.removeMagnitudeBeforeFiltering: extraArguments += ' -keep_mag' if insar.waterBodyMaskStartingStep == 'filt': extraArguments += ' -wbd_msk' - cmd += header('filter interferograms') - cmd += parallelSettings('insarpair') - cmd += '''for ((i=0;i<${{#insarpair[@]}};i++)); do - -{extraCommands} + #cmd += header('filter interferograms') + #cmd += parallelSettings('insarpair') + cmd += '''{functionName} () {{ + pair=$1 - IFS='-' read -ra dates <<< "${{insarpair[i]}}" + IFS='-' read -ra dates <<< "${{pair}}" ref_date=${{dates[0]}} sec_date=${{dates[1]}} cd 
{pairsProcessingDir} - cd ${{insarpair[i]}} + cd ${{pair}} {script} -idir {idir} -ref_date_stack {ref_date_stack} -ref_date ${{ref_date}} -sec_date ${{sec_date}} -nrlks1 {nrlks1} -nalks1 {nalks1} -nrlks2 {nrlks2} -nalks2 {nalks2} -alpha {alpha} -win {win} -step {step}{extraArguments} cd ../../ - -done'''.format(extraCommands = parallelCommands, +}} +'''.format(functionName = functionName, script = os.path.join(stackScriptPath, 'filt.py'), pairsProcessingDir = stack.pairsProcessingDir, idir = os.path.join('../../', stack.datesResampledDir), @@ -1214,32 +1431,39 @@ def parallelSettings(array): win = insar.filterWinsize, step = insar.filterStepsize, extraArguments = extraArguments) + cmd += forLoop(functionName, 'array', 'nruns') cmd += '\n' cmd += '\n' cmd += '\n' + cmd += stepDone() + cmds.append(cmd) + step_names.append(step_name) #unwrap interferograms + step_name = 'unwrap interferograms' + cmd = cmd0 + cmd += header(step_name) + cmd += stepSettings(nthreads=stack.numberOfParallelThreads, nruns=stack.numberOfParallelRuns, array='insarpair', loop=True) extraArguments = '' if insar.waterBodyMaskStartingStep == 'unwrap': extraArguments += ' -wbd_msk' - cmd += header('unwrap interferograms') - cmd += parallelSettings('insarpair') - cmd += '''for ((i=0;i<${{#insarpair[@]}};i++)); do - -{extraCommands} + #cmd += header('unwrap interferograms') + #cmd += parallelSettings('insarpair') + cmd += '''{functionName} () {{ + pair=$1 - IFS='-' read -ra dates <<< "${{insarpair[i]}}" + IFS='-' read -ra dates <<< "${{pair}}" ref_date=${{dates[0]}} sec_date=${{dates[1]}} cd {pairsProcessingDir} - cd ${{insarpair[i]}} + cd ${{pair}} {script} -idir {idir} -ref_date_stack {ref_date_stack} -ref_date ${{ref_date}} -sec_date ${{sec_date}} -nrlks1 {nrlks1} -nalks1 {nalks1} -nrlks2 {nrlks2} -nalks2 {nalks2}{extraArguments} cd ../../ - -done'''.format(extraCommands = parallelCommands, +}} +'''.format(functionName = functionName, script = os.path.join(stackScriptPath, 'unwrap_snaphu.py'), 
pairsProcessingDir = stack.pairsProcessingDir, idir = os.path.join('../../', stack.datesResampledDir), @@ -1249,37 +1473,44 @@ def parallelSettings(array): nrlks2 = insar.numberRangeLooks2, nalks2 = insar.numberAzimuthLooks2, extraArguments = extraArguments) + cmd += forLoop(functionName, 'array', 'nruns') cmd += '\n' cmd += '\n' cmd += '\n' + cmd += stepDone() + cmds.append(cmd) + step_names.append(step_name) #geocode + step_name = 'geocode' + cmd = cmd0 + cmd += header(step_name) + cmd += stepSettings(nthreads=stack.numberOfParallelThreads, nruns=stack.numberOfParallelRuns, array='insarpair', loop=True) extraArguments = '' if insar.geocodeInterpMethod is not None: extraArguments += ' -interp_method {}'.format(insar.geocodeInterpMethod) if insar.bbox: # is not None: extraArguments += ' -bbox {}'.format('/'.format(insar.bbox)) - cmd += header('geocode') - cmd += parallelSettings('insarpair') - cmd += '''for ((i=0;i<${{#insarpair[@]}};i++)); do + #cmd += header('geocode') + #cmd += parallelSettings('insarpair') + cmd += '''{functionName} () {{ + pair=$1 -{extraCommands} - - IFS='-' read -ra dates <<< "${{insarpair[i]}}" + IFS='-' read -ra dates <<< "${{pair}}" ref_date=${{dates[0]}} sec_date=${{dates[1]}} cd {pairsProcessingDir} - cd ${{insarpair[i]}} + cd ${{pair}} cd insar - {script} -ref_date_stack_track ../{ref_date_stack}.track.xml -dem {dem_geo} -input ${{insarpair[i]}}_{nrlks}rlks_{nalks}alks.cor -nrlks {nrlks} -nalks {nalks}{extraArguments} - {script} -ref_date_stack_track ../{ref_date_stack}.track.xml -dem {dem_geo} -input filt_${{insarpair[i]}}_{nrlks}rlks_{nalks}alks.unw -nrlks {nrlks} -nalks {nalks}{extraArguments} - {script} -ref_date_stack_track ../{ref_date_stack}.track.xml -dem {dem_geo} -input filt_${{insarpair[i]}}_{nrlks}rlks_{nalks}alks_msk.unw -nrlks {nrlks} -nalks {nalks}{extraArguments} + {script} -ref_date_stack_track ../{ref_date_stack}.track.xml -dem {dem_geo} -input ${{pair}}_{nrlks}rlks_{nalks}alks.cor -nrlks {nrlks} -nalks 
{nalks}{extraArguments} + {script} -ref_date_stack_track ../{ref_date_stack}.track.xml -dem {dem_geo} -input filt_${{pair}}_{nrlks}rlks_{nalks}alks.unw -nrlks {nrlks} -nalks {nalks}{extraArguments} + {script} -ref_date_stack_track ../{ref_date_stack}.track.xml -dem {dem_geo} -input filt_${{pair}}_{nrlks}rlks_{nalks}alks_msk.unw -nrlks {nrlks} -nalks {nalks}{extraArguments} cd ../../../ - -done'''.format(extraCommands = parallelCommands, +}} +'''.format(functionName = functionName, script = os.path.join(stackScriptPath, 'geocode.py'), pairsProcessingDir = stack.pairsProcessingDir, ref_date_stack = stack.dateReferenceStack, @@ -1287,6 +1518,7 @@ def parallelSettings(array): nrlks = insar.numberRangeLooks1*insar.numberRangeLooks2, nalks = insar.numberAzimuthLooks1*insar.numberAzimuthLooks2, extraArguments = extraArguments) + cmd += forLoop(functionName, 'array', 'nruns') cmd += '\n' cmd += '\n' @@ -1300,16 +1532,22 @@ def parallelSettings(array): cmd += '\n' cmd += 'cd ../../../\n' cmd += '\n' + cmd += stepDone() + cmds.append(cmd) + step_names.append(step_name) else: cmd = '#!/bin/bash\n\n' cmd += '#no pairs for InSAR processing.' + #do not create steps if no pairs for InSAR processing. 
+ #save commands - cmd4 = cmd + cmds_4 = cmds + step_names_4 = step_names - return (cmd1, cmd2, cmd3, cmd4) + return (cmds_1, cmds_2, cmds_3, cmds_4, step_names_1, step_names_2, step_names_3, step_names_4) def cmdLineParse(): @@ -1470,14 +1708,32 @@ def cmdLineParse(): print('no dates and pairs need to be processed.') print('no processing script is generated.') else: - cmd1, cmd2, cmd3, cmd4 = createCmds(stack, datesProcess, pairsProcess, pairsProcessIon, mode) - with open('cmd_1.sh', 'w') as f: - f.write(cmd1) - with open('cmd_2.sh', 'w') as f: - f.write(cmd2) - with open('cmd_3.sh', 'w') as f: - f.write(cmd3) - with open('cmd_4.sh', 'w') as f: - f.write(cmd4) - - runCmd('chmod +x cmd_1.sh cmd_2.sh cmd_3.sh cmd_4.sh', silent=1) + # cmd1, cmd2, cmd3, cmd4 = createCmds(stack, datesProcess, pairsProcess, pairsProcessIon, mode) + # with open('cmd_1.sh', 'w') as f: + # f.write(cmd1) + # with open('cmd_2.sh', 'w') as f: + # f.write(cmd2) + # with open('cmd_3.sh', 'w') as f: + # f.write(cmd3) + # with open('cmd_4.sh', 'w') as f: + # f.write(cmd4) + (cmds_1, cmds_2, cmds_3, cmds_4, step_names_1, step_names_2, step_names_3, step_names_4) = createCmds(stack, datesProcess, pairsProcess, pairsProcessIon, mode) + + cmdDir = 'cmd' + os.makedirs(cmdDir, exist_ok=True) + os.chdir(cmdDir) + for i, (cmds_i, step_names_i) in enumerate(zip((cmds_1, cmds_2, cmds_3, cmds_4), (step_names_1, step_names_2, step_names_3, step_names_4))): + filename1 = 'cmd_{}.sh'.format(i+1) + cmd = '' + for j, (cmds_j, step_names_j) in enumerate(zip(cmds_i, step_names_i)): + filename2 = 'cmd_{}_{}_{}.sh'.format(i+1, j+1, step_names_j.replace(' ', '_')) + with open(filename2, 'w') as f: + f.write(cmds_j) + cmd += '{}/{}\n'.format(cmdDir, filename2) + cmd +=''' +echo "$(basename "$0") done" +''' + with open(filename1, 'w') as f: + f.write(cmd) + runCmd('chmod +x cmd_*.sh', silent=1) + os.chdir('../') diff --git a/contrib/stack/alosStack/form_interferogram_stack.py 
b/contrib/stack/alosStack/form_interferogram_stack.py new file mode 100644 index 000000000..e822859b5 --- /dev/null +++ b/contrib/stack/alosStack/form_interferogram_stack.py @@ -0,0 +1,169 @@ +#!/usr/bin/env python3 + +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +# #examples +# #regular insar +# /home/liang/software/isce/isce_current/isce/contrib/stack/alosStack/form_interferogram_stack.py -dates_dir dates_resampled -pairs_dir pairs -pairs_user 151016-170106 151016-170217 211203-220909 -nrlks1 1 -nalks1 14 -frame_user 2900 -swath_user 1 + +# #ion lower +# /home/liang/software/isce/isce_current/isce/contrib/stack/alosStack/form_interferogram_stack.py -dates_dir dates_resampled -suffix _lower -pairs_dir pairs_ion -mid_path ion/lower -nrlks1 1 -nalks1 14 + +# #ion upper +# /home/liang/software/isce/isce_current/isce/contrib/stack/alosStack/form_interferogram_stack.py -dates_dir dates_resampled -suffix _upper -pairs_dir pairs_ion -mid_path ion/upper -nrlks1 1 -nalks1 14 + + +import os +import glob +import shutil +import datetime +import numpy as np +import xml.etree.ElementTree as ET + +import isce, isceobj + +from StackPulic import datesFromPairs +from StackPulic import formInterferogramStack + + +def cmdLineParse(): + ''' + command line parser. 
+ ''' + import sys + import argparse + + parser = argparse.ArgumentParser(description='form a stack of interferograms') + parser.add_argument('-dates_dir', dest='dates_dir', type=str, required=True, + help = 'resampled slc directory') + parser.add_argument('-suffix', dest='suffix', type=str, default=None, + help = 'slc file suffix') + parser.add_argument('-pairs_dir', dest='pairs_dir', type=str, required=True, + help = 'pair directory') + parser.add_argument('-mid_path', dest='mid_path', type=str, default=None, + help = 'path between pair folder and frame folder') + parser.add_argument('-pairs_user', dest='pairs_user', type=str, nargs='+', default=None, + help = 'a number of pairs seperated by blanks. format: YYMMDD-YYMMDD YYMMDD-YYMMDD YYMMDD-YYMMDD... This argument has highest priority. When provided, only process these pairs') + parser.add_argument('-nrlks1', dest='nrlks1', type=int, default=1, + help = 'number of range looks 1. default: 1') + parser.add_argument('-nalks1', dest='nalks1', type=int, default=1, + help = 'number of azimuth looks 1. default: 1') + parser.add_argument('-frame_user', dest='frame_user', type=str, nargs='+', default=None, + help = 'a number of frames seperated by blanks. format: 2700 2750 2800... This argument has highest priority. When provided, only process these frames') + parser.add_argument('-swath_user', dest='swath_user', type=int, nargs='+', default=None, + help = 'a number of frames seperated by blanks. format: 2 3 4... This argument has highest priority. 
When provided, only process these swaths') + + + if len(sys.argv) <= 1: + print('') + parser.print_help() + sys.exit(1) + else: + return parser.parse_args() + + +if __name__ == '__main__': + + inps = cmdLineParse() + + + #get user parameters from input + dates_dir = inps.dates_dir + suffix = inps.suffix + if suffix is None: + suffix = '' + pairs_dir = inps.pairs_dir + mid_path = inps.mid_path + if mid_path is None: + #os.path.join() will ignore '' + mid_path = '' + pairs_user = inps.pairs_user + numberRangeLooks1 = inps.nrlks1 + numberAzimuthLooks1 = inps.nalks1 + frame_user = inps.frame_user + swath_user = inps.swath_user + ####################################################### + + + dates0 = sorted([os.path.basename(x) for x in glob.glob(os.path.join(dates_dir, '*')) if os.path.isdir(x)]) + pairs = sorted([os.path.basename(x) for x in glob.glob(os.path.join(pairs_dir, '*')) if os.path.isdir(x)]) + if pairs_user is not None: + pairs = sorted(pairs_user) + + #used dates + dates = sorted(datesFromPairs(pairs)) + for x in dates: + if x not in dates0: + raise Exception('date {} used in pairs, but does not exist'.format(x)) + + ndates = len(dates) + npairs = len(pairs) + + #link dates and pairs + date_pair_index = [] + for i in range(npairs): + date_pair_index.append([dates.index(x) for x in pairs[i].split('-')]) + + + ####################################################### + ml1 = '_{}rlks_{}alks'.format(numberRangeLooks1, numberAzimuthLooks1) + + #use one date to find frames and swaths. any date should work, here we use first date. 
all should be the same as those of reference date of stack + frames = sorted([x[-4:] for x in glob.glob(os.path.join(dates_dir, dates[0], 'f*_*'))]) + swaths = sorted([int(x[-1]) for x in glob.glob(os.path.join(dates_dir, dates[0], 'f1_*', 's*'))]) + + nframe = len(frames) + nswath = len(swaths) + + if frame_user is None: + frame_user = frames + if swath_user is None: + swath_user = swaths + + for i, frameNumber in enumerate(frames): + frameDir = 'f{}_{}'.format(i+1, frameNumber) + #os.chdir(frameDir) + for j, swathNumber in enumerate(range(swaths[0], swaths[-1] + 1)): + swathDir = 's{}'.format(swathNumber) + #os.chdir(swathDir) + if (frameNumber not in frame_user) or (swathNumber not in swath_user): + continue + + print('\nprocessing swath {}, frame {}'.format(swathNumber, frameNumber)) + slcs = [os.path.join(dates_dir, x, frameDir, swathDir, x+suffix+'.slc') for x in dates] + # interferograms = [os.path.join(pairs_dir, x, mid_path, frameDir, swathDir, x+ml1+'.int') for x in pairs] + # amplitudes = [os.path.join(pairs_dir, x, mid_path, frameDir, swathDir, x+ml1+'.amp') for x in pairs] + + interferograms = [] + amplitudes = [] + for x in pairs: + output_dir = os.path.join(pairs_dir, x, mid_path, frameDir, swathDir) + if not os.path.isdir(output_dir): + os.makedirs(output_dir, exist_ok=True) + interferograms.append(os.path.join(output_dir, x+ml1+'.int')) + amplitudes.append(os.path.join(output_dir, x+ml1+'.amp')) + + formInterferogramStack(slcs, date_pair_index, interferograms, amplitudes=amplitudes, numberRangeLooks=numberRangeLooks1, numberAzimuthLooks=numberAzimuthLooks1) + + + + + + + + + + + + + + + + + + + + diff --git a/contrib/stack/alosStack/ion_filt.py b/contrib/stack/alosStack/ion_filt.py index d05852232..2218a19d3 100755 --- a/contrib/stack/alosStack/ion_filt.py +++ b/contrib/stack/alosStack/ion_filt.py @@ -124,6 +124,8 @@ def ionFilt(self, referenceTrack, catalog=None): fit = self.fitIon filt = self.filtIon fitAdaptive = self.fitAdaptiveIon + 
fitAdaptiveOrder = self.fitAdaptiveOrderIon + rmOutliers = self.rmOutliersIon filtSecondary = self.filtSecondaryIon if (fit == False) and (filt == False): raise Exception('either fit ionosphere or filt ionosphere should be True when doing ionospheric correction\n') @@ -185,6 +187,10 @@ def ionFilt(self, referenceTrack, catalog=None): cor[np.nonzero(cor<0)] = 0.0 cor[np.nonzero(cor>1)] = 0.0 + #remove these values so that there are no outliers in the computed std + cor[np.nonzero(cor<0.05)] = 0.05 + cor[np.nonzero(cor>0.95)] = 0.95 + #remove water body. Not helpful, just leave it here wbd = np.fromfile('wbd'+ml2+'.wbd', dtype=np.int8).reshape(length, width) cor[np.nonzero(wbd==-1)] = 0.0 @@ -288,7 +294,7 @@ def ion_std(fl, fu, numberOfLooks, cor): ion -= ion_fit * (ion!=0) #filter the rest of the ionosphere if filt: - (ion_filt, std_out, window_size_out) = adaptive_gaussian(ion, std, size_min, size_max, std_out0, fit=fitAdaptive) + (ion_filt, std_out, window_size_out) = adaptive_gaussian(ion, std, size_min, size_max, std_out0, fit=fitAdaptive, order=fitAdaptiveOrder, rm_outliers=rmOutliers) if filtSecondary: g2d = gaussian(size_secondary, size_secondary/2.0, scale=1.0) scale = ss.fftconvolve((ion_filt!=0), g2d, mode='same') @@ -361,6 +367,10 @@ def cmdLineParse(): help='filtering ionosphere phase') parser.add_argument('-fit_adaptive', dest='fit_adaptive', action='store_true', default=False, help='apply polynomial fit in adaptive filtering window') + parser.add_argument('-fit_adaptive_order', dest='fit_adaptive_order', type=int, default=2, + help='polynomial fit order in adaptive filtering window') + parser.add_argument('-rm_outliers', dest='rm_outliers', action='store_true', default=False, + help='remove outliers in adaptive filtering window. 
very slow') parser.add_argument('-filt_secondary', dest='filt_secondary', action='store_true', default=False, help='secondary filtering of ionosphere phase') parser.add_argument('-win_min', dest='win_min', type=int, default=11, @@ -402,6 +412,8 @@ def cmdLineParse(): fitIon = inps.fit filtIon = inps.filt fitAdaptiveIon = inps.fit_adaptive + fitAdaptiveOrderIon = inps.fit_adaptive_order + rmOutliersIon = inps.rm_outliers filtSecondaryIon = inps.filt_secondary filteringWinsizeMinIon = inps.win_min filteringWinsizeMaxIon = inps.win_max @@ -435,6 +447,8 @@ def cmdLineParse(): self.fitIon = fitIon self.filtIon = filtIon self.fitAdaptiveIon = fitAdaptiveIon + self.fitAdaptiveOrderIon = fitAdaptiveOrderIon + self.rmOutliersIon = rmOutliersIon self.filtSecondaryIon = filtSecondaryIon self.filteringWinsizeMaxIon = filteringWinsizeMaxIon self.filteringWinsizeMinIon = filteringWinsizeMinIon diff --git a/contrib/stack/alosStack/ion_ls.py b/contrib/stack/alosStack/ion_ls.py index 1f73560bc..02df95331 100755 --- a/contrib/stack/alosStack/ion_ls.py +++ b/contrib/stack/alosStack/ion_ls.py @@ -39,6 +39,135 @@ def least_sqares(H, S, W=None): return Z.reshape(Z.size) +def least_sqares_insar(H0, S0, dates2, dateZero, pairs, W0=None, mode=0): + ''' + least squares for insar time series analysis. basic equation + H0 dates2 = S0 + invalid values are zeros in S0. + assuming full-rank if all pairs are valid. dateZero must be valid. + + input paramters + H0: observation matrix, 2-D numpy array + S0: insar values, 1-D numpy array + dates2: sorted dates, dateZero excluded, format: ['date1', 'date2'...] + dateZero: reference date, format: 'date' + pairs: pairs, format: ['date1-date2', 'date1-date2'...] + W0: weighting matrix 1-D numpy array + mode: solution mode + 0: only do ls if all pairs are valid + 1: only do ls if all dates are valid + 2: do ls for all cases + + output parameters + ts: deformation values of dates2, dateZero excluded, format: 1-D numpy array with length ndate-1. 
+ zero values are not valid. + + different cases: + | 1. all pairs √ + all dates | | 2. full rank √ + | a subset of pairs | + | 3. low rank × + + | 4. full rank √ + | ref date included | + some dates | | 5. low rank × + | 6. ref date not included × + ''' + + #import numpy as np + + if mode not in [0, 1, 2]: + raise Exception('unknown solution mode: {}'.format(mode)) + + (npair, ndate) = H0.shape + ndate += 1 + npairValid = np.sum(S0!=0, dtype=np.int32) + + #zero values are not valid + ts = np.zeros(ndate-1, dtype=np.float32) + + #case 1. nothing to be done + if npairValid == npair: + S = S0 + H = H0 + else: + if mode == 0: + return ts + + #get valid dates first (dates included in valid pairs). + #Assume zero values in ionPairs are not valid. + dates2Valid = [] + for k in range(npair): + if S0[k] != 0: + for datek in pairs[k].split('-'): + if datek not in dates2Valid: + dates2Valid.append(datek) + dates2Valid = sorted(dates2Valid) + ndateValid = len(dates2Valid) + if dateZero in dates2Valid: + dateZeroIncluded = True + dates2Valid.remove(dateZero) + else: + dateZeroIncluded = False + + #case: valid include all dates + if ndateValid == ndate: + #get valid + H1 = H0[np.nonzero(S0!=0)] + S1 = S0[np.nonzero(S0!=0)] + #case 2. full rank + #case 3. low rank + if np.linalg.matrix_rank(H1) < ndate-1: + return ts + S = S1 + H = H1 + else: + if mode <= 1: + return ts + + if dateZeroIncluded: + dates2ValidIndex = [dates2.index(x) for x in dates2Valid] + H1 = H0[np.nonzero(S0!=0)] + S1 = S0[np.nonzero(S0!=0)] + H2 = H1[:, dates2ValidIndex] + #case 4. full rank + #case 5. low rank + if np.linalg.matrix_rank(H2) < ndateValid-1: + return ts + S = S1 + H = H2 + else: + #case 6. 
reference date not in valid + return ts + + #adding weight + #https://stackoverflow.com/questions/19624997/understanding-scipys-least-square-function-with-irls + #https://stackoverflow.com/questions/27128688/how-to-use-least-squares-with-weight-matrix-in-python + if W0 is not None: + if npairValid == npair: + W = W0 + else: + W = W0[np.nonzero(S0!=0)] + H = H0 * W[:, None] + S = S0 * W + + #do least-squares estimation + #[theta, residuals, rank, singular] = np.linalg.lstsq(H, S) + #make W full matrix if use W here (which is a slower method) + #'using W before this' is faster + theta = least_sqares(H, S, W=None) + + if npairValid == npair: + ts = theta + else: + if ndateValid == ndate: + ts = theta + else: + ts[dates2ValidIndex] = theta + + return ts + + def cmdLineParse(): ''' command line parser. @@ -79,6 +208,9 @@ def cmdLineParse(): help='use reciprocal of window size as weight') parser.add_argument('-interp', dest='interp', action='store_true', default=False, help='interpolate ionospheric phase to nrlks2/nalks2 sample size') + parser.add_argument('-mode', dest='mode', type=int, default=1, + help = 'least squares solution mode. 0: only do ls if all pairs are valid. 1: only do ls if all dates are valid (default). 2: do ls for all cases. 
invalid values are zeros in the interferograms.') + if len(sys.argv) <= 1: print('') @@ -110,6 +242,7 @@ def cmdLineParse(): numberAzimuthLooksIon = inps.nalks_ion ww = inps.ww interp = inps.interp + mode = inps.mode ####################################################### #all pair folders in order @@ -227,43 +360,12 @@ def cmdLineParse(): print() for j in range(width): - #observed signal - S0 = ionPairs[:, i, j] - if ww == False: - #observed signal - S = S0 - H = H0 + W = None else: - #add weight - #https://stackoverflow.com/questions/19624997/understanding-scipys-least-square-function-with-irls - #https://stackoverflow.com/questions/27128688/how-to-use-least-squares-with-weight-matrix-in-python wgt = winPairs[:, i, j] W = np.sqrt(1.0/wgt) - H = H0 * W[:, None] - S = S0 * W - - #do least-squares estimation - #[theta, residuals, rank, singular] = np.linalg.lstsq(H, S) - #make W full matrix if use W here (which is a slower method) - #'using W before this' is faster - theta = least_sqares(H, S, W=None) - ts[:, i, j] = theta - - # #dump raw estimate - # cdir = os.getcwd() - # os.makedirs(odir, exist_ok=True) - # os.chdir(odir) - - # for i in range(ndate-1): - # file_name = 'filt_ion_'+dates2[i]+ml2+'.ion' - # ts[i, :, :].astype(np.float32).tofile(file_name) - # create_xml(file_name, width, length, 'float') - # file_name = 'filt_ion_'+dateZero+ml2+'.ion' - # (np.zeros((length, width), dtype=np.float32)).astype(np.float32).tofile(file_name) - # create_xml(file_name, width, length, 'float') - - # os.chdir(cdir) + ts[:, i, j] = least_sqares_insar(H0, ionPairs[:, i, j], dates2, dateZero, pairs, W0=W, mode=mode) #################################################################################### diff --git a/contrib/stack/alosStack/ion_subband.py b/contrib/stack/alosStack/ion_subband.py index 53c00eff6..e6ec2fa87 100755 --- a/contrib/stack/alosStack/ion_subband.py +++ b/contrib/stack/alosStack/ion_subband.py @@ -258,6 +258,10 @@ def runIonSubband(self, referenceTrack, idir, 
dateReferenceStack, dateReference, #These are for ALOS-2, may need to change for ALOS-4! phaseDiffFixed = [0.0, 0.4754024578084084, 0.9509913179406437, 1.4261648478671614, 2.179664007520499, 2.6766909968024932, 3.130810857] + #new value from processing d170 in Cascadia + phaseDiffFixed.append(0.7545891925607972) + + #if (referenceTrack.frames[i].processingSoftwareVersion == '2.025' and secondaryTrack.frames[i].processingSoftwareVersion == '2.023') or \ # (referenceTrack.frames[i].processingSoftwareVersion == '2.023' and secondaryTrack.frames[i].processingSoftwareVersion == '2.025'): @@ -278,7 +282,7 @@ def runIonSubband(self, referenceTrack, idir, dateReferenceStack, dateReference, # snapThreshold = None #whether snap for each swath - if self.swathPhaseDiffSnapIon == None: + if self.swathPhaseDiffSnapIon is None: snapSwath = [[True for jjj in range(numberOfSwaths-1)] for iii in range(numberOfFrames)] else: snapSwath = self.swathPhaseDiffSnapIon @@ -294,7 +298,7 @@ def runIonSubband(self, referenceTrack, idir, dateReferenceStack, dateReference, filt=False, resamplingMethod=1) #the first item is meaningless for all the following list, so only record the following items - if phaseDiff == None: + if phaseDiff is None: phaseDiff = [None for iii in range(self._insar.startingSwath, self._insar.endingSwath + 1)] #catalog.addItem('frame {} {} band swath phase diff input'.format(frameNumber, ionDir['subband'][k]), phaseDiff[1:], 'runIonSubband') #catalog.addItem('frame {} {} band swath phase diff estimated'.format(frameNumber, ionDir['subband'][k]), phaseDiffEst[1:], 'runIonSubband') @@ -404,10 +408,30 @@ def runIonSubband(self, referenceTrack, idir, dateReferenceStack, dateReference, frameMosaic(referenceTrack, inputAmplitudes, self._insar.amplitude, rangeOffsets, azimuthOffsets, self._insar.numberRangeLooks1, self._insar.numberAzimuthLooks1, updateTrack=False, phaseCompensation=False, resamplingMethod=0) + + #same phaseDiffFixed used for frames + snapThreshold = 0.2 + + 
#whether snap for each frame + if self.framePhaseDiffSnapIon is None: + snapFrame = [False for jjj in range(numberOfFrames-1)] + else: + snapFrame = self.framePhaseDiffSnapIon + if len(snapFrame) != numberOfFrames-1: + raise Exception('please specify correct number of swaths for parameter: frame phase difference snap to fixed values') + + #whether use individual coherence to exclude low quality pixels for each frame + if self.framePhaseDiffCorIon is None: + corFrame = [False for jjj in range(numberOfFrames-1)] + else: + corFrame = self.framePhaseDiffCorIon + if len(corFrame) != numberOfFrames-1: + raise Exception('using individual interferogram coherence to exclude low quality pixels...') + #mosaic interferograms (phaseDiffEst, phaseDiffUsed, phaseDiffSource, numberOfValidSamples) = frameMosaic(referenceTrack, inputInterferograms, self._insar.interferogram, rangeOffsets, azimuthOffsets, self._insar.numberRangeLooks1, self._insar.numberAzimuthLooks1, - updateTrack=False, phaseCompensation=True, resamplingMethod=1) + updateTrack=False, phaseCompensation=True, phaseDiffFixed=phaseDiffFixed, snapThreshold=snapThreshold, snapFrame=snapFrame, corFrame=corFrame, waterBody=self.waterBody, resamplingMethod=1) create_xml(self._insar.amplitude, referenceTrack.numberOfSamples, referenceTrack.numberOfLines, 'amp') create_xml(self._insar.interferogram, referenceTrack.numberOfSamples, referenceTrack.numberOfLines, 'int') @@ -527,6 +551,14 @@ def cmdLineParse(): help='swath phase difference lower band. e.g. you have 3 swaths and 2 frames. specify this parameter as: -snap -1.3 2.37 -snap 0.1 None, where None means no user input phase difference value') parser.add_argument('-phase_diff_upper', dest='phase_diff_upper', type=str, nargs='+', action='append', default=None, help='swath phase difference upper band. e.g. you have 3 swaths and 2 frames. 
specify this parameter as: -snap -1.3 2.37 -snap 0.1 None, where None means no user input phase difference value') + parser.add_argument('-water_body', dest='water_body', type=str, default=None, + help = 'water body in frame mosaicked size and coordinate for masking out water body in the frame overlap areas when these areas are used to compute frame phase difference in mosaicking frames. default: None') + parser.add_argument('-snap_frame', dest='snap_frame', type=int, nargs='+', default=None, + help='frame phase difference snap to fixed values. e.g. you have 3 frames. specify this parameter as: -snap_frame 1 0, where 0 means no snap, 1 means snap. default: no snap') + parser.add_argument('-cor_frame', dest='cor_frame', type=int, nargs='+', default=None, + help='using individual interferogram coherence to exclude low quality pixels when computing frame phase difference. e.g. you have 3 frames. specify this parameter as: -cor_frame 1 0, where 0 means do not use, 1 means use. no need to use if water_body specified. 
default: do not use') + + if len(sys.argv) <= 1: print('') @@ -553,6 +585,14 @@ def cmdLineParse(): swathPhaseDiffSnapIon = inps.snap swathPhaseDiffLowerIon = inps.phase_diff_lower swathPhaseDiffUpperIon = inps.phase_diff_upper + + if inps.water_body is not None: + waterBody = os.path.abspath(inps.water_body) + else: + waterBody = None + + framePhaseDiffSnapIon = inps.snap_frame + framePhaseDiffCorIon = inps.cor_frame ####################################################### pair = '{}-{}'.format(dateReference, dateSecondary) @@ -607,9 +647,22 @@ def cmdLineParse(): if len(swathPhaseDiffUpperIon[i]) != (nswath-1): raise Exception('please specify correct number of swaths for parameter: -phase_diff_upper') + if framePhaseDiffSnapIon is not None: + framePhaseDiffSnapIon = [True if x==1 else False for x in framePhaseDiffSnapIon] + if len(framePhaseDiffSnapIon) != nframe - 1: + raise Exception('please specify correct number of frames for parameter: -snap_frame') + + if framePhaseDiffCorIon is not None: + framePhaseDiffCorIon = [True if x==1 else False for x in framePhaseDiffCorIon] + if len(framePhaseDiffCorIon) != nframe - 1: + raise Exception('please specify correct number of frames for parameter: -cor_frame') + self.swathPhaseDiffSnapIon = swathPhaseDiffSnapIon self.swathPhaseDiffLowerIon = swathPhaseDiffLowerIon self.swathPhaseDiffUpperIon = swathPhaseDiffUpperIon + self.waterBody = waterBody + self.framePhaseDiffSnapIon = framePhaseDiffSnapIon + self.framePhaseDiffCorIon = framePhaseDiffCorIon log = runIonSubband(self, trackReferenceStack, idir, dateReferenceStack, dateReference, dateSecondary) diff --git a/contrib/stack/alosStack/mosaic_interferogram.py b/contrib/stack/alosStack/mosaic_interferogram.py index 3a86d4d52..0fdd6ab58 100755 --- a/contrib/stack/alosStack/mosaic_interferogram.py +++ b/contrib/stack/alosStack/mosaic_interferogram.py @@ -40,6 +40,8 @@ def cmdLineParse(): help = 'number of range looks 1. 
default: 1') parser.add_argument('-nalks1', dest='nalks1', type=int, default=1, help = 'number of azimuth looks 1. default: 1') + parser.add_argument('-water_body', dest='water_body', type=str, default=None, + help = 'water body in frame mosaicked size and coordinate for masking out water body in the frame overlap areas when these areas are used to compute frame phase difference in mosaicking frames. default: None') if len(sys.argv) <= 1: print('') @@ -60,6 +62,12 @@ def cmdLineParse(): dateSecondary = inps.sec_date numberRangeLooks1 = inps.nrlks1 numberAzimuthLooks1 = inps.nalks1 + + if inps.water_body is not None: + waterBody = os.path.abspath(inps.water_body) + else: + waterBody = None + ####################################################### logFile = 'process.log' @@ -186,7 +194,7 @@ def cmdLineParse(): (phaseDiffEst, phaseDiffUsed, phaseDiffSource, numberOfValidSamples) = \ frameMosaic(trackReferenceStack, inputInterferograms, interferogram, rangeOffsets, azimuthOffsets, numberRangeLooks1, numberAzimuthLooks1, - updateTrack=False, phaseCompensation=True, resamplingMethod=1) + updateTrack=False, phaseCompensation=True, waterBody=waterBody, resamplingMethod=1) create_xml(amplitude, trackReferenceStack.numberOfSamples, trackReferenceStack.numberOfLines, 'amp') create_xml(interferogram, trackReferenceStack.numberOfSamples, trackReferenceStack.numberOfLines, 'int') diff --git a/contrib/stack/alosStack/plot_baseline.py b/contrib/stack/alosStack/plot_baseline.py index 280e38f5d..97e74ba51 100755 --- a/contrib/stack/alosStack/plot_baseline.py +++ b/contrib/stack/alosStack/plot_baseline.py @@ -75,8 +75,8 @@ def cmdLineParse(): fig, ax = plt.subplots() time = [datetime.datetime.strptime(x, "%y%m%d") for x in baseline_dict] - baseline = [baseline_dict[x] for x in baseline_dict] - ax.plot(time, baseline, 'o', alpha=0.7, c='g') + #baseline = [baseline_dict[x] for x in baseline_dict] + #ax.plot(time, baseline, 'o', alpha=0.7, c='g') year_min = 
datetime.datetime(min(time).year, 1, 1) year_max = datetime.datetime(max(time).year+1, 1, 1) @@ -89,6 +89,10 @@ def cmdLineParse(): baseline = [baseline_dict[rdate], baseline_dict[sdate]] ax.plot(time, baseline, '-', lw=.5, c='b') + time = [datetime.datetime.strptime(x, "%y%m%d") for x in baseline_dict] + baseline = [baseline_dict[x] for x in baseline_dict] + ax.plot(time, baseline, 'o', alpha=0.7, c='g') + ax.xaxis.set_major_locator(mdates.YearLocator()) ax.xaxis.set_major_formatter(mdates.DateFormatter('%Y')) ax.xaxis.set_minor_locator(mdates.MonthLocator()) diff --git a/examples/input_files/alos2/alos2App.xml b/examples/input_files/alos2/alos2App.xml index 2f51bacbc..0136729fd 100644 --- a/examples/input_files/alos2/alos2App.xml +++ b/examples/input_files/alos2/alos2App.xml @@ -329,6 +329,17 @@ FBD (indicated by ^) is the acquisition mode code. + + + + + + diff --git a/examples/input_files/alos2/alos2burstApp.xml b/examples/input_files/alos2/alos2burstApp.xml index 416f65ab3..114b3a2ad 100644 --- a/examples/input_files/alos2/alos2burstApp.xml +++ b/examples/input_files/alos2/alos2burstApp.xml @@ -303,6 +303,17 @@ FBD (indicated by ^) is the acquisition mode code. + + + + + +