diff --git a/.nojekyll b/.nojekyll new file mode 100644 index 0000000..e69de29 diff --git a/doc/_modules/index.html b/doc/_modules/index.html new file mode 100644 index 0000000..cb09722 --- /dev/null +++ b/doc/_modules/index.html @@ -0,0 +1,94 @@ + + + + + + + Overview: module code — SARvey 1.0.0 documentation + + + + + + + + + + + + + +
+
+ +
+ +
+
+ + + + \ No newline at end of file diff --git a/doc/_modules/sarvey/coherence.html b/doc/_modules/sarvey/coherence.html new file mode 100644 index 0000000..32e5abc --- /dev/null +++ b/doc/_modules/sarvey/coherence.html @@ -0,0 +1,281 @@ + + + + + + + sarvey.coherence — SARvey 1.0.0 documentation + + + + + + + + + + + + + +
+
+
+
+ +

Source code for sarvey.coherence

+#!/usr/bin/env python
+
+# SARvey - A multitemporal InSAR time series tool for the derivation of displacements.
+#
+# Copyright (C) 2021-2024 Andreas Piter (IPI Hannover, piter@ipi.uni-hannover.de)
+#
+# This software was developed together with FERN.Lab (fernlab@gfz-potsdam.de) in the context
+# of the SAR4Infra project with funds of the German Federal Ministry for Digital and
+# Transport and contributions from Landesamt fuer Vermessung und Geoinformation
+# Schleswig-Holstein and Landesbetrieb Strassenbau und Verkehr Schleswig-Holstein.
+#
+# This program is free software: you can redistribute it and/or modify it under
+# the terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# Important: This package uses PyMaxFlow. The core of the PyMaxflow library is the C++
+# implementation by Vladimir Kolmogorov. It is also licensed under the GPL, but it REQUIRES that you
+# cite [BOYKOV04] (see LICENSE) in any resulting publication if you use this code for research purposes.
+# This requirement extends to SARvey.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
+# details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program. If not, see <https://www.gnu.org/licenses/>.
+
+"""Coherence module for SARvey."""
+import multiprocessing
+import time
+import numpy as np
+from numba import jit
+from scipy.signal import convolve2d
+from logging import Logger
+from miaplpy.objects.slcStack import slcStack
+from sarvey.objects import BaseStack
+from sarvey.utils import convertBboxToBlock
+
+
+
+[docs] +def computeIfgsAndTemporalCoherence(*, path_temp_coh: str, path_ifgs: str, path_slc: str, ifg_array: np.ndarray, + time_mask: np.ndarray, wdw_size: int, num_boxes: int, box_list: list, + num_cores: int, logger: Logger): + """ComputeIfgsAndTemporalCoherence. + + Compute the interferograms and temporal coherence from the SLC stack for a given set of (spatial) patches. + + Parameters + ---------- + path_temp_coh : str + Path to the temporal coherence stack. The data will be stored in this file during processing. + path_ifgs : str + Path to the interferograms stack. The data will be stored in this file during processing. + path_slc : str + Path to the SLC stack. The data will be read from this file. + ifg_array : np.ndarray + Array containing the indices of the reference and secondary images which are used to compute the interferograms. + time_mask : np.ndarray + Binary mask indicating the selected images from the SLC stack. + wdw_size : int + Size of the filter window. Has to be odd. + num_boxes : int + Number of patches to enable reading and processing of larger SLC stacks. + box_list : list + List containing the indices of each patch. + num_cores : int + Number of cores for parallel processing. + logger : Logger + Logger object. + + Returns + ------- + mean_amp_img : np.ndarray + Mean amplitude image. + """ + start_time = time.time() + filter_kernel = np.ones((wdw_size, wdw_size), dtype=np.float64) + filter_kernel[wdw_size // 2, wdw_size // 2] = 0 + + slc_stack_obj = slcStack(path_slc) + slc_stack_obj.open() + temp_coh_obj = BaseStack(file=path_temp_coh, logger=logger) + ifg_stack_obj = BaseStack(file=path_ifgs, logger=logger) + + mean_amp_img = np.zeros((slc_stack_obj.length, slc_stack_obj.width), dtype=np.float32) + num_ifgs = ifg_array.shape[0] + + for idx in range(num_boxes): + bbox = box_list[idx] + block2d = convertBboxToBlock(bbox=bbox) + + # read slc + slc = slc_stack_obj.read(datasetName='slc', box=bbox, print_msg=False) + slc = slc[time_mask, :, :] + + mean_amp = np.mean(np.abs(slc), axis=0) + mean_amp[mean_amp == 0] = np.nan + mean_amp_img[bbox[1]:bbox[3], bbox[0]:bbox[2]] = np.log10(mean_amp) + + # compute ifgs + ifgs = computeIfgs(slc=slc, ifg_array=ifg_array) + ifg_stack_obj.writeToFileBlock(data=ifgs, dataset_name="ifgs", block=block2d, print_msg=False) + del slc + + # filter ifgs + avg_neighbours = np.zeros_like(ifgs) + if num_cores == 1: + for i in range(num_ifgs): + avg_neighbours[:, :, i] = convolve2d(in1=ifgs[:, :, i], in2=filter_kernel, mode='same', boundary="symm") + else: + pool = multiprocessing.Pool(processes=num_cores) + + args = [( + idx, + ifgs[:, :, idx], + filter_kernel) for idx in range(num_ifgs)] + + results = pool.map(func=launchConvolve2d, iterable=args) + pool.close() + pool.join() + + # retrieve results + for j, avg_neigh in results: + avg_neighbours[:, :, j] = avg_neigh + del results, args, avg_neigh + + # compute temporal coherence + residual_phase = np.angle(ifgs * np.conjugate(avg_neighbours)) + del ifgs, avg_neighbours + temp_coh = np.abs(np.mean(np.exp(1j * residual_phase), axis=2)) + temp_coh_obj.writeToFileBlock(data=temp_coh, dataset_name="temp_coh", block=block2d, print_msg=False) + del residual_phase, temp_coh + logger.info(msg="Patches processed:\t {}/{}".format(idx + 1, num_boxes)) + + m, s = divmod(time.time() - start_time, 60) + logger.debug(msg='\ntime used: {:02.0f} mins {:02.1f} secs.\n'.format(m, s)) + return mean_amp_img
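The temporal coherence written to file above is the magnitude of the complex mean of the residual phase, i.e. gamma = |1/N * sum(exp(1j * phi_res))|, where the residual phase is the difference between each pixel's interferometric phase and the low-pass phase of its neighbourhood (the zeroed kernel centre excludes the pixel itself from the average). A minimal sketch of the estimator on synthetic data, with an assumed noise level:

```python
import numpy as np

rng = np.random.default_rng(seed=1)

# synthetic residual phase of one pixel over 30 interferograms (0.4 rad noise, assumed)
residual_phase = rng.normal(loc=0.0, scale=0.4, size=30)

# temporal coherence as in the function above: magnitude of the complex mean
temp_coh = np.abs(np.mean(np.exp(1j * residual_phase)))
print(f"temporal coherence: {temp_coh:.2f}")  # close to 1 for low phase noise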
+ + + +
+[docs] +@jit(nopython=True) +def computeIfgs(*, slc: np.ndarray, ifg_array: np.ndarray): + """ComputeIfgs. + + Parameters + ---------- + slc : np.ndarray + SLC stack. + ifg_array : np.ndarray + Array containing the indices of the reference and secondary images which are used to compute the interferograms. + + Returns + ------- + ifgs : np.ndarray + Interferograms. + """ + t, length, width = slc.shape + num_ifgs = ifg_array.shape[0] + ifgs = np.zeros((length, width, num_ifgs), dtype=np.complex64) + + c = 0 + for i, j in ifg_array: + ifgs[:, :, c] = slc[i, :, :] * np.conjugate(slc[j, :, :]) + c += 1 + return ifgs
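A small, hypothetical call of computeIfgs with a synthetic stack illustrates the expected shapes; each row of ifg_array holds the image indices of one reference/secondary pair:

```python
import numpy as np

rng = np.random.default_rng(seed=0)

# synthetic SLC stack: 4 acquisitions of 5 x 5 complex pixels
slc = (rng.normal(size=(4, 5, 5)) + 1j * rng.normal(size=(4, 5, 5))).astype(np.complex64)
ifg_array = np.array([[0, 1], [1, 2], [2, 3]])  # three consecutive pairs

ifgs = computeIfgs(slc=slc, ifg_array=ifg_array)
print(ifgs.shape)  # (5, 5, 3): length x width x num_ifgs
```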
+ + + +
+[docs] +def launchConvolve2d(args: tuple): + """LaunchConvolve2d. + + Parameters + ---------- + args : tuple + Tuple containing the arguments for the convolution. + Tuple contains: + + idx : int + Index of the processed interferogram. + ifg : np.ndarray + Interferogram. + filter_kernel : np.ndarray + Filter kernel. + + Returns + ------- + idx : int + Index of the processed interferogram. + avg_neighbours : np.ndarray + Low-pass filtered phase derived as average of neighbours. + """ + (idx, ifg, filter_kernel) = args + avg_neighbours = convolve2d(in1=ifg, in2=filter_kernel, mode='same', boundary="symm") + return idx, avg_neighbours
+ +
+ +
+
+
+
+ +
+
+ + + + \ No newline at end of file diff --git a/doc/_modules/sarvey/console.html b/doc/_modules/sarvey/console.html new file mode 100644 index 0000000..9a1521f --- /dev/null +++ b/doc/_modules/sarvey/console.html @@ -0,0 +1,206 @@ + + + + + + + sarvey.console — SARvey 1.0.0 documentation + + + + + + + + + + + + + +
+
+
+
+ +

Source code for sarvey.console

+#!/usr/bin/env python
+
+# SARvey - A multitemporal InSAR time series tool for the derivation of displacements.
+#
+# Copyright (C) 2021-2024 Andreas Piter (IPI Hannover, piter@ipi.uni-hannover.de)
+#
+# This software was developed together with FERN.Lab (fernlab@gfz-potsdam.de) in the context
+# of the SAR4Infra project with funds of the German Federal Ministry for Digital and
+# Transport and contributions from Landesamt fuer Vermessung und Geoinformation
+# Schleswig-Holstein and Landesbetrieb Strassenbau und Verkehr Schleswig-Holstein.
+#
+# This program is free software: you can redistribute it and/or modify it under
+# the terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# Important: This package uses PyMaxFlow. The core of the PyMaxflow library is the C++
+# implementation by Vladimir Kolmogorov. It is also licensed under the GPL, but it REQUIRES that you
+# cite [BOYKOV04] (see LICENSE) in any resulting publication if you use this code for research purposes.
+# This requirement extends to SARvey.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
+# details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program. If not, see <https://www.gnu.org/licenses/>.
+
+"""Console module for SARvey."""
+from sarvey import version
+from logging import Logger
+
+
+
+[docs] +def printStep(*, step: int, step_dict: dict, logger: Logger): + """Print the current step to console. + + Parameters + ---------- + step: int + current step number + step_dict: dict + dictionary with step numbers and names + logger: Logger + Logging handler + """ + logger.info(msg=" ---------------------------------------------------------------------------------") + logger.info(msg=f" STEP {step}: {step_dict[step]}") + logger.info(msg=" ---------------------------------------------------------------------------------")
+ + + +
+[docs] +def printCurrentConfig(*, config_section: dict, config_section_default: dict, logger: Logger): + """Print the current parameters and their default values from the config file to console. + + Parameters + ---------- + config_section: dict + Section of the configuration class which contains the selected parameters. + config_section_default: dict + Config section with default values. + logger: Logger + Logging handler. + """ + shift = " " + logger.info(msg=shift + "{:>35} {:>15} {:>10}".format("Parameter", "value", "default")) + logger.info(msg=shift + "{:>35} {:>15} {:>10}".format("_________", "_____", "_______")) + + for key in config_section.keys(): + default = config_section_default[key] + default = "None" if default is None else default + default = "True" if default is True else default + default = "False" if default is False else default + + value = config_section[key] + value = "None" if value is None else value + value = "True" if value is True else value + value = "False" if value is False else value + if default == value: + logger.info(msg=shift + "{:>35} {:>15} {:>10}".format(key, value, default)) + else: + logger.info(msg=shift + "{:>35} {:>15} <--- {:>10}".format(key, value, default)) + + logger.info(msg="")
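A hypothetical call (the parameter names below are made up) shows the behaviour: values that deviate from their default are printed with an arrow marker:

```python
import logging

logging.basicConfig(level=logging.INFO, format="%(message)s")
logger = logging.getLogger("sarvey")

printCurrentConfig(
    config_section={"num_cores": 4, "wdw_size": 9},          # current values
    config_section_default={"num_cores": 1, "wdw_size": 9},  # defaults
    logger=logger,
)
# num_cores differs from its default and is therefore printed with "<---"
```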
+ + + +
+[docs] +def showLogoSARvey(*, logger: Logger, step: str): + """ShowLogoSARvey. + + Parameters + ---------- + logger: Logger + logging handler + step: str + Name of the step or script which is shown on the logo. + """ + # generate_from: http://patorjk.com/software/taag/ - font: Big, style: default + # and https://textik.com/ + logger.info(msg=f"SARvey version: {version.__version__} - {version.__versionalias__}, {version.__versiondate__}, " + f"Run: {step}") + new_logo = rf""" . _____ _____ + +------ / \ ------ / ____| /\ | __ \ + | / / | (___ / \ | |__) |_ _____ _ _ + | / / \___ \ / /\ \ | _ /\ \ / / _ \ | | | + | /\\ / / ____) / ____ \| | \ \ \ V / __/ |_| | + | / \\/ / |_____/_/ \_\_| \_\ \_/ \___|\__, | + | / \ / __/ | + | \ / / v{version.__version__:<5} - {version.__versionalias__:<18} |___/ + \\ / /... {version.__versiondate__:<20} | + / \\/ / :... | + / / / :... {step: <20} | + / / / :... | + / / _______ :... _________| + \/ \______ :... ____________/ | + +-------------------- \________:___/ --------------------+ + """ + print(new_logo)
+ +
+ +
+
+
+
+ +
+
+ + + + \ No newline at end of file diff --git a/doc/_modules/sarvey/densification.html b/doc/_modules/sarvey/densification.html new file mode 100644 index 0000000..2041841 --- /dev/null +++ b/doc/_modules/sarvey/densification.html @@ -0,0 +1,364 @@ + + + + + + + sarvey.densification — SARvey 1.0.0 documentation + + + + + + + + + + + + + +
+
+
+
+ +

Source code for sarvey.densification

+#!/usr/bin/env python
+
+# SARvey - A multitemporal InSAR time series tool for the derivation of displacements.
+#
+# Copyright (C) 2021-2024 Andreas Piter (IPI Hannover, piter@ipi.uni-hannover.de)
+#
+# This software was developed together with FERN.Lab (fernlab@gfz-potsdam.de) in the context
+# of the SAR4Infra project with funds of the German Federal Ministry for Digital and
+# Transport and contributions from Landesamt fuer Vermessung und Geoinformation
+# Schleswig-Holstein and Landesbetrieb Strassenbau und Verkehr Schleswig-Holstein.
+#
+# This program is free software: you can redistribute it and/or modify it under
+# the terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# Important: This package uses PyMaxFlow. The core of the PyMaxflow library is the C++
+# implementation by Vladimir Kolmogorov. It is also licensed under the GPL, but it REQUIRES that you
+# cite [BOYKOV04] (see LICENSE) in any resulting publication if you use this code for research purposes.
+# This requirement extends to SARvey.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
+# details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program. If not, see <https://www.gnu.org/licenses/>.
+
+"""Densification module for SARvey."""
+import time
+import multiprocessing
+import numpy as np
+from scipy.spatial import KDTree
+from logging import Logger
+
+from mintpy.utils import ptime
+
+from sarvey.unwrapping import oneDimSearchTemporalCoherence
+from sarvey.objects import Points
+import sarvey.utils as ut
+
+
+
+[docs] +def densificationInitializer(tree_p1: KDTree, point2_obj: Points, demod_phase1: np.ndarray): + """DensificationInitializer. + + Sets values to global variables for parallel processing. + + Parameters + ---------- + tree_p1 : KDTree + KDTree of the first-order network + point2_obj : Points + Points object with second-order points + demod_phase1 : np.ndarray + demodulated phase of the first-order network + """ + global global_tree_p1 + global global_point2_obj + global global_demod_phase1 + + global_tree_p1 = tree_p1 + global_point2_obj = point2_obj + global_demod_phase1 = demod_phase1
+ + + +
+[docs] +def launchDensifyNetworkConsistencyCheck(args: tuple): + """LaunchDensifyNetworkConsistencyCheck. + + Launches the densification of the network with second-order points inside parallel processing. + + Parameters + ---------- + args : tuple + Tuple with the following parameters: + + idx_range : np.ndarray + Array with the indices of the second-order points + num_points : int + Number of second-order points + num_conn_p1 : int + Number of nearest points in the first-order network + max_dist_p1 : float + Maximum allowed distance to the nearest points in the first-order network + velocity_bound : float + Bound for the velocity estimate in temporal unwrapping + demerr_bound : float + Bound for the DEM error estimate in temporal unwrapping + num_samples : int + Number of samples for the search of the optimal parameters + + Returns + ------- + idx_range : np.ndarray + Array with the indices of the second-order points + demerr_p2 : np.ndarray + DEM error array of the second-order points + vel_p2 : np.ndarray + Velocity array of the second-order points + gamma_p2 : np.ndarray + Estimated temporal coherence array of the second-order points resulting from temporal unwrapping + """ + (idx_range, num_points, num_conn_p1, max_dist_p1, velocity_bound, demerr_bound, num_samples) = args + + counter = 0 + prog_bar = ptime.progressBar(maxValue=num_points) + + # initialize output + demerr_p2 = np.zeros((num_points,), dtype=np.float32) + vel_p2 = np.zeros((num_points,), dtype=np.float32) + gamma_p2 = np.zeros((num_points,), dtype=np.float32) + + design_mat = np.zeros((global_point2_obj.ifg_net_obj.num_ifgs, 2), dtype=np.float32) + + demerr_range = np.linspace(-demerr_bound, demerr_bound, num_samples) + vel_range = np.linspace(-velocity_bound, velocity_bound, num_samples) + + factor = 4 * np.pi / global_point2_obj.wavelength + + for idx in range(num_points): + p2 = idx_range[idx] + # nearest points in p1 + dist, nearest_p1 = global_tree_p1.query([global_point2_obj.coord_utm[p2, 0], + global_point2_obj.coord_utm[p2, 1]], k=num_conn_p1) + mask = (dist < max_dist_p1) & (dist != 0) + mask[:3] = True # ensure that always at least the three closest points are used + nearest_p1 = nearest_p1[mask] + + # compute arc observations to nearest points + arc_phase_p1 = np.angle(np.exp(1j * global_point2_obj.phase[p2, :]) * + np.conjugate(np.exp(1j * global_demod_phase1[nearest_p1, :]))) + + design_mat[:, 0] = (factor * global_point2_obj.ifg_net_obj.pbase_ifg + / (global_point2_obj.slant_range[p2] * np.sin(global_point2_obj.loc_inc[p2]))) + design_mat[:, 1] = factor * global_point2_obj.ifg_net_obj.tbase_ifg + + demerr_p2[idx], vel_p2[idx], gamma_p2[idx] = oneDimSearchTemporalCoherence( + demerr_range=demerr_range, + vel_range=vel_range, + obs_phase=arc_phase_p1, + design_mat=design_mat + ) + + prog_bar.update(counter + 1, every=np.int16(200), + suffix='{}/{} points'.format(counter + 1, num_points)) + counter += 1 + + return idx_range, demerr_p2, vel_p2, gamma_p2
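The design matrix built above encodes the standard temporal-unwrapping model phi_arc = (4*pi/lambda) * (b_perp / (R * sin(theta))) * demerr + (4*pi/lambda) * t * vel. The actual search is delegated to oneDimSearchTemporalCoherence; the following self-contained sketch (all numbers invented) shows the equivalent brute-force idea of maximising the temporal coherence of the residuals:

```python
import numpy as np

wavelength = 0.055                               # [m], C-band (assumed)
slant_range, loc_inc = 850e3, np.deg2rad(35.0)   # [m], [rad] (assumed)
pbase_ifg = np.array([30.0, -55.0, 12.0, 44.0])  # perpendicular baselines [m]
tbase_ifg = np.array([0.03, 0.06, 0.1, 0.16])    # temporal baselines [years]

factor = 4 * np.pi / wavelength
design_mat = np.column_stack([
    factor * pbase_ifg / (slant_range * np.sin(loc_inc)),  # DEM-error column
    factor * tbase_ifg,                                    # velocity column
])

# wrapped arc phase simulated from known parameters (3 m DEM error, 1 cm/year)
obs_phase = np.angle(np.exp(1j * (design_mat @ np.array([3.0, 0.01]))))

# brute-force search maximising the temporal coherence |mean(exp(1j * residual))|
best = (None, None, -1.0)
for demerr in np.linspace(-20, 20, 161):
    for vel in np.linspace(-0.05, 0.05, 101):
        res = obs_phase - design_mat @ np.array([demerr, vel])
        gamma = np.abs(np.mean(np.exp(1j * res)))
        if gamma > best[2]:
            best = (demerr, vel, gamma)
print(best)  # roughly (3.0, 0.01, ~1.0)
```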
+ + + +
+[docs] +def densifyNetwork(*, point1_obj: Points, vel_p1: np.ndarray, demerr_p1: np.ndarray, point2_obj: Points, + num_conn_p1: int, max_dist_p1: float, velocity_bound: float, demerr_bound: float, + num_samples: int, num_cores: int = 1, logger: Logger): + """DensifyNetwork. + + Densifies the network with second-order points by connecting the second-order points to the closest points in the + first-order network. + + Parameters + ---------- + point1_obj : Points + Points object with first-order points + vel_p1 : np.ndarray + Velocity array of the first-order points + demerr_p1 : np.ndarray + DEM error array of the first-order points + point2_obj : Points + Points object with second-order points + num_conn_p1 : int + Number of nearest points in the first-order network + max_dist_p1 : float + Maximum allowed distance to the nearest points in the first-order network + velocity_bound : float + Bound for the velocity estimate in temporal unwrapping + demerr_bound : float + Bound for the DEM error estimate in temporal unwrapping + num_samples : int + Number of samples for the search of the optimal parameters + num_cores : int + Number of cores for parallel processing (default: 1) + logger : Logger + Logger object + + Returns + ------- + demerr_p2 : np.ndarray + DEM error array of the second-order points + vel_p2 : np.ndarray + Velocity array of the second-order points + gamma_p2 : np.ndarray + Estimated temporal coherence array of the second-order points resulting from temporal unwrapping + """ + msg = "#" * 10 + msg += " DENSIFICATION WITH SECOND-ORDER POINTS " + msg += "#" * 10 + logger.info(msg=msg) + start_time = time.time() + + # find the closest points from first-order network + tree_p1 = KDTree(data=point1_obj.coord_utm) + + # remove parameters from wrapped phase + pred_phase_demerr, pred_phase_vel = ut.predictPhase( + obj=point1_obj, + vel=vel_p1, demerr=demerr_p1, + ifg_space=True, logger=logger + ) + pred_phase = pred_phase_demerr + pred_phase_vel + + # Note: for small baselines it does not make a difference if re-wrapping the phase difference or not. + # However, for long baselines (like in the star network) it does make a difference. Leijen (2014) does not re-wrap + # the arc double differences to be able to test the ambiguities. Kampes (2006) does re-wrap, but is testing based + # on the estimated parameters. Hence, it doesn't make a difference for him. Not re-wrapping can be a starting point + # for triangle-based temporal unwrapping. 
+ # demod_phase1 = np.angle(np.exp(1j * point1_obj.phase) * np.conjugate(np.exp(1j * pred_phase))) # re-wrapping + demod_phase1 = point1_obj.phase - pred_phase # not re-wrapping + + # initialize output + init_args = (tree_p1, point2_obj, demod_phase1) + + if num_cores == 1: + densificationInitializer(tree_p1=tree_p1, point2_obj=point2_obj, demod_phase1=demod_phase1) + args = (np.arange(point2_obj.num_points), point2_obj.num_points, num_conn_p1, max_dist_p1, + velocity_bound, demerr_bound, num_samples) + idx_range, demerr_p2, vel_p2, gamma_p2 = launchDensifyNetworkConsistencyCheck(args) + else: + with multiprocessing.Pool(num_cores, initializer=densificationInitializer, initargs=init_args) as pool: + logger.info(msg="start parallel processing with {} cores.".format(num_cores)) + num_cores = point2_obj.num_points if num_cores > point2_obj.num_points else num_cores + # avoids having less samples than cores + idx = ut.splitDatasetForParallelProcessing(num_samples=point2_obj.num_points, num_cores=num_cores) + args = [( + idx_range, + idx_range.shape[0], + num_conn_p1, + max_dist_p1, + velocity_bound, + demerr_bound, + num_samples + ) for idx_range in idx] + + results = pool.map_async(launchDensifyNetworkConsistencyCheck, args, chunksize=1) + while True: + time.sleep(5) + if results.ready(): + results = results.get() + break + # needed to make coverage work in multiprocessing (not sure what that means. copied from package Arosics). + pool.close() + pool.join() + + demerr_p2 = np.zeros((point2_obj.num_points,), dtype=np.float32) + vel_p2 = np.zeros((point2_obj.num_points,), dtype=np.float32) + gamma_p2 = np.zeros((point2_obj.num_points,), dtype=np.float32) + + # retrieve results + for i, demerr_i, vel_i, gamma_i in results: + demerr_p2[i] = demerr_i + vel_p2[i] = vel_i + gamma_p2[i] = gamma_i + + m, s = divmod(time.time() - start_time, 60) + logger.debug(msg='time used: {:02.0f} mins {:02.1f} secs.\n'.format(m, s)) + + # combine p1 and p2 parameters and bring them in correct order using point_id + sort_idx = np.argsort(np.append(point1_obj.point_id, point2_obj.point_id)) + demerr_p2 = np.append(demerr_p1, demerr_p2) # add gamma=1 for p1 pixels + vel_p2 = np.append(vel_p1, vel_p2) + gamma_p2 = np.append(np.ones_like(point1_obj.point_id), gamma_p2) # add gamma=1 for p1 pixels + + demerr_p2 = demerr_p2[sort_idx] + vel_p2 = vel_p2[sort_idx] + gamma_p2 = gamma_p2[sort_idx] + return demerr_p2, vel_p2, gamma_p2
+ +
+ +
+
+
+
+ +
+
+ + + + \ No newline at end of file diff --git a/doc/_modules/sarvey/filtering.html b/doc/_modules/sarvey/filtering.html new file mode 100644 index 0000000..45c5cca --- /dev/null +++ b/doc/_modules/sarvey/filtering.html @@ -0,0 +1,381 @@ + + + + + + + sarvey.filtering — SARvey 1.0.0 documentation + + + + + + + + + + + + + +
+
+
+
+ +

Source code for sarvey.filtering

+#!/usr/bin/env python
+
+# SARvey - A multitemporal InSAR time series tool for the derivation of displacements.
+#
+# Copyright (C) 2021-2024 Andreas Piter (IPI Hannover, piter@ipi.uni-hannover.de)
+#
+# This software was developed together with FERN.Lab (fernlab@gfz-potsdam.de) in the context
+# of the SAR4Infra project with funds of the German Federal Ministry for Digital and
+# Transport and contributions from Landesamt fuer Vermessung und Geoinformation
+# Schleswig-Holstein and Landesbetrieb Strassenbau und Verkehr Schleswig-Holstein.
+#
+# This program is free software: you can redistribute it and/or modify it under
+# the terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# Important: This package uses PyMaxFlow. The core of the PyMaxflow library is the C++
+# implementation by Vladimir Kolmogorov. It is also licensed under the GPL, but it REQUIRES that you
+# cite [BOYKOV04] (see LICENSE) in any resulting publication if you use this code for research purposes.
+# This requirement extends to SARvey.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
+# details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program. If not, see <https://www.gnu.org/licenses/>.
+
+"""Filtering module for SARvey."""
+import time
+import multiprocessing
+import matplotlib.pyplot as plt
+import numpy as np
+from scipy.interpolate import griddata
+import gstools as gs
+from logging import Logger
+
+from mintpy.utils import ptime
+
+import sarvey.utils as ut
+
+
+
+[docs] +def launchSpatialFiltering(parameters: tuple): + """Launch_spatial_filtering. + + Launches the spatial filtering to estimate the atmospheric phase screen with low-pass filtering. + + Parameters + ---------- + parameters: tuple + Tuple containing the following parameters: + + idx_range: np.ndarray + range of indices for the time series + num_time: int + number of time steps + residuals: np.ndarray + residual phase (size: num_points x num_ifgs) + coord_utm1: np.ndarray + coordinates in UTM of the first-order points for which the residuals are given (size: num_points_p1 x 2) + coord_utm2: np.ndarray + coordinates in UTM of the new points which shall be interpolated (size: num_points_p2 x 2) + bins: np.ndarray + bin edges for the variogram + bool_plot: bool + boolean flag to plot intermediate results + logger: Logger + Logging handler + + Returns + ------- + idx_range: np.ndarray + range of indices for the time series + aps1: np.ndarray + atmospheric phase screen for the known points (size: num_points_p1 x num_ifgs) + aps2: np.ndarray + atmospheric phase screen for the new points (size: num_points_p2 x num_ifgs) + """ + # Unpack the parameters + (idx_range, num_time, residuals, coord_utm1, coord_utm2, bins, bool_plot, logger) = parameters + + x = coord_utm1[:, 1] + y = coord_utm1[:, 0] + x_new = coord_utm2[:, 1] + y_new = coord_utm2[:, 0] + + aps1 = np.zeros((coord_utm1.shape[0], num_time), dtype=np.float32) + aps2 = np.zeros((coord_utm2.shape[0], num_time), dtype=np.float32) + + prog_bar = ptime.progressBar(maxValue=num_time) + + for i in range(num_time): + field = residuals[:, i].astype(np.float32) + + # 1) estimate the variogram of the field + bin_center, vario = gs.vario_estimate(pos=[x, y], field=field, bin_edges=bins) + + # 2) fit model to empirical variogram + model = gs.Stable(dim=2) + try: + model.fit_variogram(x_data=bin_center, y_data=vario, nugget=True, max_eval=1500) + except RuntimeError as err: + logger.error(msg="\nIMAGE {}: Not able to fit variogram! {}".format(idx_range[i], err)) + if bool_plot: + fig, ax = plt.subplots(2, figsize=[10, 5]) + sca1 = ax[0].scatter(x, y, c=field) + plt.colorbar(sca1, ax=ax[0], pad=0.03, shrink=0.5) + ax[0].set_title("Not able to fit variogram! - PS1 residuals") + ax[1].scatter(bin_center, vario) + ax[1].set_xlabel("distance in [m]") + ax[1].set_ylabel("semi-variogram") + plt.close(fig) + prog_bar.update(value=i + 1, every=1, suffix='{}/{} images'.format(i + 1, num_time)) + continue + + # 3) estimate parameters of kriging + sk = gs.krige.Simple( + model=model, + cond_pos=[x, y], + cond_val=field, + ) + + # 4) evaluate the kriging model at ORIGINAL locations + fld_sk, _ = sk((x, y), return_var=True) + aps1[:, i] = fld_sk + + # 5) evaluate the kriging model at NEW locations + fld_sk_new, var_sk_new = sk((x_new, y_new), return_var=True) + aps2[:, i] = fld_sk_new + + prog_bar.update(value=i + 1, every=1, suffix='{}/{} images'.format(i + 1, num_time)) + + # 6) show results + if bool_plot: + min_val = np.min(field) + max_val = np.max(field) + + fig, ax = plt.subplots(2, 2, figsize=[10, 5]) + + cur_ax = ax[0, 0] + sca1 = cur_ax.scatter(x, y, c=field, vmin=min_val, vmax=max_val) + plt.colorbar(sca1, ax=cur_ax, pad=0.03, shrink=0.5) + cur_ax.set_title("PS1 residuals") + + cur_ax = ax[0, 1] + cur_ax = model.plot(x_max=bin_center[-1], ax=cur_ax) + cur_ax.scatter(bin_center, vario) + cur_ax.set_xlabel("distance in [m]") + cur_ax.set_ylabel("semi-variogram") + + if coord_utm2 is not None: + cur_ax = ax[1, 0] + sca2 = cur_ax.scatter(x_new, y_new, c=fld_sk_new, vmin=min_val, vmax=max_val) + plt.colorbar(sca2, ax=cur_ax, pad=0.03, shrink=0.5) + cur_ax.set_title("PS2 prediction of atmospheric effect") + + cur_ax = ax[1, 1] + sca4 = cur_ax.scatter(x_new, y_new, c=var_sk_new) + plt.colorbar(sca4, ax=cur_ax, pad=0.03, shrink=0.5) + cur_ax.set_title("Variance of predicted atmospheric effect") + + plt.close(fig) + + return idx_range, aps1, aps2
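The same gstools pipeline (empirical variogram, Stable model fit, simple kriging) can be exercised in isolation; only the coordinates and the field below are invented stand-ins for the residual phase:

```python
import numpy as np
import gstools as gs

rng = np.random.default_rng(seed=2)

# 200 scattered points with a smooth signal plus noise
x, y = rng.uniform(0, 5000, 200), rng.uniform(0, 5000, 200)
field = np.sin(x / 2000.0) + 0.1 * rng.normal(size=200)

bin_center, vario = gs.vario_estimate(pos=[x, y], field=field)  # default bins
model = gs.Stable(dim=2)
model.fit_variogram(x_data=bin_center, y_data=vario, nugget=True)

sk = gs.krige.Simple(model=model, cond_pos=[x, y], cond_val=field)
x_new, y_new = rng.uniform(0, 5000, 50), rng.uniform(0, 5000, 50)
aps_new, var_new = sk((x_new, y_new), return_var=True)
print(aps_new.shape, var_new.shape)  # (50,) (50,)
```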
+ + + +
+[docs] +def estimateAtmosphericPhaseScreen(*, residuals: np.ndarray, coord_utm1: np.ndarray, coord_utm2: np.ndarray, + num_cores: int = 1, bool_plot: bool = False, + logger: Logger) -> tuple[np.ndarray, np.ndarray]: + """Estimate_atmospheric_phase_screen. + + Estimates the atmospheric phase screen from a stack of phase time series for a sparse set of points. + Kriging is used to estimate the spatial dependence and to interpolate the phase screen over a set of new points. + + Parameters + ---------- + residuals: np.ndarray + residual phase (size: num_points1 x num_images) + coord_utm1: np.ndarray + coordinates in UTM of the points for which the residuals are given (size: num_points1 x 2) + coord_utm2: np.ndarray + coordinates in UTM of the new points which shall be interpolated (size: num_points2 x 2) + num_cores: int + Number of cores + bool_plot: bool + boolean flag to plot intermediate results (default: False) + logger: Logger + Logging handler + + Returns + ------- + aps1: np.ndarray + atmospheric phase screen for the known points (size: num_points1 x num_images) + aps2: np.ndarray + atmospheric phase screen for the new points (size: num_points2 x num_images) + """ + msg = "#" * 10 + msg += " ESTIMATE ATMOSPHERIC PHASE SCREEN (KRIGING) " + msg += "#" * 10 + logger.info(msg=msg) + + start_time = time.time() + + num_points1 = residuals.shape[0] + num_points2 = coord_utm2.shape[0] + num_time = residuals.shape[1] # can be either num_ifgs or num_images + + bins = gs.variogram.standard_bins(pos=(coord_utm1[:, 1], coord_utm1[:, 0]), + dim=2, latlon=False, mesh_type='unstructured', bin_no=30, max_dist=None) + + if num_cores == 1: + args = (np.arange(0, num_time), num_time, residuals, coord_utm1, coord_utm2, bins, bool_plot, logger) + _, aps1, aps2 = launchSpatialFiltering(parameters=args) + else: + logger.info(msg="start parallel processing with {} cores.".format(num_cores)) + pool = multiprocessing.Pool(processes=num_cores) + + aps1 = np.zeros((num_points1, num_time), dtype=np.float32) + aps2 = np.zeros((num_points2, num_time), dtype=np.float32) + + num_cores = num_time if num_cores > num_time else num_cores # avoids having more samples than cores + idx = ut.splitDatasetForParallelProcessing(num_samples=num_time, num_cores=num_cores) + + args = [( + idx_range, + idx_range.shape[0], + residuals[:, idx_range], + coord_utm1, + coord_utm2, + bins, + False, + logger) for idx_range in idx] + + results = pool.map(func=launchSpatialFiltering, iterable=args) + + # retrieve results + for i, aps1_i, aps2_i in results: + aps1[:, i] = aps1_i + aps2[:, i] = aps2_i + + m, s = divmod(time.time() - start_time, 60) + logger.debug(msg='time used: {:02.0f} mins {:02.1f} secs.\n'.format(m, s)) + + return aps1, aps2
+ + + +
+[docs] +def simpleInterpolation(*, residuals: np.ndarray, coord_utm1: np.ndarray, coord_utm2: np.ndarray, + interp_method: str = "linear"): + """SimpleInterpolation. + + Simple interpolation of atmospheric phase screen using scipy's griddata function with options "linear" or "cubic". + For pixels outside the convex hull of the input points, the nearest neighbor is used. + + Parameters + ---------- + residuals: np.ndarray + residual phase (size: num_points x num_ifgs) + coord_utm1: np.ndarray + coordinates in UTM of the points for which the residuals are given (size: num_points_p1 x 2) + coord_utm2: np.ndarray + coordinates in UTM of the new points which shall be interpolated (size: num_points_p2 x 2) + interp_method: str + interpolation method (default: "linear"; options: "linear", "cubic") + + Returns + ------- + aps1: np.ndarray + atmospheric phase screen for the known points (size: num_points_p1 x num_images) + aps2: np.ndarray + atmospheric phase screen for the new points (size: num_points_p2 x num_images) + """ + num_points2 = coord_utm2.shape[0] + num_images = residuals.shape[1] + + aps1 = np.zeros_like(residuals, dtype=np.float32) + aps2 = np.zeros((num_points2, num_images), dtype=np.float32) + for i in range(num_images): + aps1[:, i] = griddata(coord_utm1, residuals[:, i], coord_utm1, method=interp_method) + aps2[:, i] = griddata(coord_utm1, residuals[:, i], coord_utm2, method=interp_method) + # interpolation with 'linear' or 'cubic' yields nan values for pixels that need to be extrapolated. + # falling back to 'nearest' for those pixels solves this problem. + mask_extrapolate = np.isnan(aps2[:, i]) + aps2[mask_extrapolate, i] = griddata( + coord_utm1, + residuals[:, i], + coord_utm2[mask_extrapolate, :], + method='nearest' + ) + + return aps1, aps2
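The nearest-neighbour fallback matters because scipy's griddata returns NaN for query points outside the convex hull of the input; a compact demonstration on synthetic coordinates:

```python
import numpy as np
from scipy.interpolate import griddata

rng = np.random.default_rng(seed=3)

coord_p1 = rng.uniform(0, 100, size=(50, 2))    # known points
coord_p2 = rng.uniform(-10, 110, size=(20, 2))  # partly outside the convex hull
vals = np.sin(coord_p1[:, 0] / 20.0)

aps = griddata(coord_p1, vals, coord_p2, method="linear")
outside = np.isnan(aps)  # linear interpolation cannot extrapolate
aps[outside] = griddata(coord_p1, vals, coord_p2[outside], method="nearest")
print(np.isnan(aps).any())  # False: all gaps filled
```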
+ +
+ +
+
+
+
+ +
+
+ + + + \ No newline at end of file diff --git a/doc/_modules/sarvey/geolocation.html b/doc/_modules/sarvey/geolocation.html new file mode 100644 index 0000000..d7f1d3a --- /dev/null +++ b/doc/_modules/sarvey/geolocation.html @@ -0,0 +1,187 @@ + + + + + + + sarvey.geolocation — SARvey 1.0.0 documentation + + + + + + + + + + + + + +
+
+
+
+ +

Source code for sarvey.geolocation

+#!/usr/bin/env python
+
+# SARvey - A multitemporal InSAR time series tool for the derivation of displacements.
+#
+# Copyright (C) 2021-2024 Andreas Piter (IPI Hannover, piter@ipi.uni-hannover.de)
+#
+# This software was developed together with FERN.Lab (fernlab@gfz-potsdam.de) in the context
+# of the SAR4Infra project with funds of the German Federal Ministry for Digital and
+# Transport and contributions from Landesamt fuer Vermessung und Geoinformation
+# Schleswig-Holstein and Landesbetrieb Strassenbau und Verkehr Schleswig-Holstein.
+#
+# This program is free software: you can redistribute it and/or modify it under
+# the terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# Important: This package uses PyMaxFlow. The core of the PyMaxflow library is the C++
+# implementation by Vladimir Kolmogorov. It is also licensed under the GPL, but it REQUIRES that you
+# cite [BOYKOV04] (see LICENSE) in any resulting publication if you use this code for research purposes.
+# This requirement extends to SARvey.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
+# details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program. If not, see <https://www.gnu.org/licenses/>.
+
+"""Module for correcting the geolocation of the scatterers."""
+import logging
+from os.path import join
+import numpy as np
+
+from miaplpy.objects.slcStack import slcStack
+
+from sarvey.objects import Points
+
+
+
+[docs] +def getHeading(input_path: str, logger: logging.Logger): + """ + Read heading angle from slcStack.h5. + + Parameters + ---------- + input_path: str + Path to directory containing 'slcStack.h5' and 'geometryRadar.h5'. + logger: Logger + Logger handle + + Returns + ------- + heading_angle: float + heading angle of the satellite in radians + for ascending ~ -12*pi/180 + for descending ~ 190*pi/180 + """ + # get heading from slcStack.h5 + slc_stack_file = join(input_path, 'slcStack.h5') + slc_stack_obj = slcStack(slc_stack_file) + try: + meta_dict = slc_stack_obj.get_metadata() + lower_case_meta_dict = {k.lower(): v for k, v in meta_dict.items()} + + heading_angle = float(lower_case_meta_dict["heading"]) + logger.info(msg=f"Heading angle of satellite: {heading_angle} deg") + heading_angle = np.deg2rad(heading_angle) + except Exception as exc: + logger.error(f'Failed to retrieve heading angle from {slc_stack_file}: {exc}') + raise + return heading_angle
+ + + +
+[docs] +def calculateGeolocationCorrection(*, path_geom: str, point_obj: Points, demerr: np.array, logger: logging.Logger): + """ + Calculate geolocation correction. + + Parameters + ---------- + path_geom: str + Path to directory containing 'slcStack.h5' or 'geometryRadar.h5'. + point_obj: Points + Point object with incidence angle for points + demerr: np.array + Array of dem error per pixel + logger: Logger + Logger handle + + Returns + ------- + coord_correction: np.array + array of geolocation corrections, two columns [x_correction, y_correction] per point. + """ + heading_angle = getHeading(input_path=path_geom, logger=logger) + + coord_correction = np.zeros_like(point_obj.coord_xy, dtype=float) + coord_correction[:, 0] = demerr * np.cos(heading_angle) / np.tan(point_obj.loc_inc) + coord_correction[:, 1] = -demerr * np.sin(heading_angle) / np.tan(point_obj.loc_inc) + + return coord_correction
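Plugging illustrative numbers into the correction shows the order of magnitude: the horizontal offset is demerr / tan(incidence), rotated into the two map axes by the heading angle (all values below are made up):

```python
import numpy as np

demerr = 5.0                        # [m] DEM error (assumed)
loc_inc = np.deg2rad(35.0)          # local incidence angle (assumed)
heading_angle = np.deg2rad(190.0)   # descending orbit (assumed)

dx = demerr * np.cos(heading_angle) / np.tan(loc_inc)
dy = -demerr * np.sin(heading_angle) / np.tan(loc_inc)
print(f"dx = {dx:.2f} m, dy = {dy:.2f} m")  # ~7 m total shift at 35 deg incidence
```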
+ +
+ +
+
+
+
+ +
+
+ + + + \ No newline at end of file diff --git a/doc/_modules/sarvey/ifg_network.html b/doc/_modules/sarvey/ifg_network.html new file mode 100644 index 0000000..fc8d1cf --- /dev/null +++ b/doc/_modules/sarvey/ifg_network.html @@ -0,0 +1,474 @@ + + + + + + + sarvey.ifg_network — SARvey 1.0.0 documentation + + + + + + + + + + + + + +
+
+
+
+ +

Source code for sarvey.ifg_network

+#!/usr/bin/env python
+
+# SARvey - A multitemporal InSAR time series tool for the derivation of displacements.
+#
+# Copyright (C) 2021-2024 Andreas Piter (IPI Hannover, piter@ipi.uni-hannover.de)
+#
+# This software was developed together with FERN.Lab (fernlab@gfz-potsdam.de) in the context
+# of the SAR4Infra project with funds of the German Federal Ministry for Digital and
+# Transport and contributions from Landesamt fuer Vermessung und Geoinformation
+# Schleswig-Holstein and Landesbetrieb Strassenbau und Verkehr Schleswig-Holstein.
+#
+# This program is free software: you can redistribute it and/or modify it under
+# the terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# Important: This package uses PyMaxFlow. The core of the PyMaxflow library is the C++
+# implementation by Vladimir Kolmogorov. It is also licensed under the GPL, but it REQUIRES that you
+# cite [BOYKOV04] (see LICENSE) in any resulting publication if you use this code for research purposes.
+# This requirement extends to SARvey.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
+# details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program. If not, see <https://www.gnu.org/licenses/>.
+
+"""IfgNetwork module for SARvey."""
+import datetime
+import h5py
+import os
+import matplotlib.pyplot as plt
+import numpy as np
+from typing import Union
+import warnings
+from logging import Logger
+from scipy.spatial import Delaunay
+
+
+
+[docs] +class IfgNetwork: + """Abstract class/interface for different types of interferogram networks.""" + + ifg_list: Union[list, np.ndarray] = None + + def __init__(self): + """Init.""" + self.pbase = None + self.tbase = None + self.num_images = None + self.ifg_list = list() # is later converted to np.array + self.pbase_ifg = None + self.tbase_ifg = None + self.num_ifgs = None + self.dates = list() + +
+[docs] + def plot(self): + """Plot the network of interferograms.""" + fig = plt.figure(figsize=(15, 5)) + axs = fig.subplots(1, 3) + dt = [datetime.date.fromisoformat(d) for d in self.dates] + axs[0].plot(dt, self.pbase, 'ko') + for idx in self.ifg_list: + xx = np.array([dt[idx[0]], dt[idx[1]]]) + yy = np.array([self.pbase[idx[0]], self.pbase[idx[1]]]) + axs[0].plot(xx, yy, 'k-') + axs[0].set_ylabel('perpendicular baseline [m]') + axs[0].set_xlabel('acquisition date') + axs[0].set_title('Network of interferograms') + fig.autofmt_xdate() + + axs[1].hist(self.tbase_ifg * 365.25, bins=100) + axs[1].set_ylabel('Absolute frequency') + axs[1].set_xlabel('temporal baseline [days]') + + axs[2].hist(self.pbase_ifg, bins=100) + axs[2].set_ylabel('Absolute frequency') + axs[2].set_xlabel('perpendicular baseline [m]') + return fig
+ + +
+[docs] + def getDesignMatrix(self): + """Compute the design matrix for the small baseline network.""" + a = np.zeros((self.num_ifgs, self.num_images)) + for i in range(len(self.ifg_list)): + a[i, self.ifg_list[i][0]] = 1 + a[i, self.ifg_list[i][1]] = -1 + return a
+ + +
+[docs] + def open(self, *, path: str): + """Read stored information from an already existing .h5 file. + + Parameters + ---------- + path: str + path to existing file to read from. + """ + with h5py.File(path, 'r') as f: + self.num_images = f.attrs["num_images"] + self.num_ifgs = f.attrs["num_ifgs"] + + self.tbase_ifg = f['tbase_ifg'][:] + self.pbase_ifg = f['pbase_ifg'][:] + self.tbase = f['tbase'][:] + self.pbase = f['pbase'][:] + self.ifg_list = f['ifg_list'][:] + try: + self.dates = f['dates'][:] + self.dates = [date.decode("utf-8") for date in self.dates] + except KeyError as ke: + self.dates = None + print(f"IfgNetwork is in old data format. Cannot read 'dates'! {ke}")
+ + +
+[docs] + def writeToFile(self, *, path: str, logger: Logger): + """Write all existing data to .h5 file. + + Parameters + ---------- + path: str + path to filename + logger: Logger + Logging handler. + """ + logger.info(msg="write IfgNetwork to {}".format(path)) + + if os.path.exists(path): + os.remove(path) + + dates = np.array(self.dates, dtype=np.string_) + + with h5py.File(path, 'w') as f: + f.attrs["num_images"] = self.num_images + f.attrs["num_ifgs"] = self.num_ifgs + + f.create_dataset('tbase_ifg', data=self.tbase_ifg) + f.create_dataset('pbase_ifg', data=self.pbase_ifg) + f.create_dataset('tbase', data=self.tbase) + f.create_dataset('pbase', data=self.pbase) + f.create_dataset('ifg_list', data=self.ifg_list) + f.create_dataset('dates', data=dates)
+
+ + + +
+[docs] +class StarNetwork(IfgNetwork): + """Star network of interferograms (single-reference).""" + +
+[docs] + def configure(self, *, pbase: np.ndarray, tbase: np.ndarray, ref_idx: int, dates: list): + """Create the list of interferograms containing the indices of the images and compute the baselines. + + Parameters + ---------- + pbase: np.ndarray + Perpendicular baselines of the SAR acquisitions. + tbase: np.ndarray + Temporal baselines of the SAR acquisitions. + ref_idx: int + Index of the reference image. + dates: list + Dates of the acquisitions. + """ + self.pbase = pbase + self.tbase = tbase / 365.25 + self.num_images = pbase.shape[0] + self.dates = dates + + for i in range(self.num_images): + if i == ref_idx: + continue + self.ifg_list.append((ref_idx, i)) + + self.pbase_ifg = np.delete(self.pbase - self.pbase[ref_idx], ref_idx) + self.tbase_ifg = np.delete(self.tbase - self.tbase[ref_idx], ref_idx) + self.num_ifgs = self.num_images - 1
+
+ + + +
+[docs] +class SmallTemporalBaselinesNetwork(IfgNetwork): + """Small temporal baselines network of interferograms without restrictions on the perpendicular baselines.""" + +
+[docs] + def configure(self, *, pbase: np.ndarray, tbase: np.ndarray, num_link: int = None, dates: list): + """Create the list of interferograms containing the indices of the images and compute the baselines. + + Parameters + ---------- + pbase: np.ndarray + Perpendicular baselines of the SAR acquisitions. + tbase: np.ndarray + Temporal baselines of the SAR acquisitions. + num_link: int + Number of consecutive links in time connecting acquisitions. + dates: list + Dates of the acquisitions. + """ + self.pbase = pbase + self.tbase = tbase / 365.25 + self.num_images = pbase.shape[0] + self.dates = dates + + for i in range(self.num_images): + for j in range(num_link): + if i + j + 1 >= self.num_images: + continue + self.ifg_list.append((i, i + j + 1)) + + self.ifg_list = [(i, j) for i, j in self.ifg_list if i != j] # remove connections to itself, e.g. (0, 0) + + self.pbase_ifg = np.array([self.pbase[idx[1]] - self.pbase[idx[0]] for idx in self.ifg_list]) + self.tbase_ifg = np.array([self.tbase[idx[1]] - self.tbase[idx[0]] for idx in self.ifg_list]) + self.num_ifgs = self.pbase_ifg.shape[0]
+
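A toy configuration of this class (hypothetical baselines and dates), together with getDesignMatrix from the base class, makes the pair/matrix layout concrete:

```python
import numpy as np

net = SmallTemporalBaselinesNetwork()
net.configure(pbase=np.array([0.0, 20.0, -35.0]),
              tbase=np.array([0.0, 12.0, 24.0]),  # [days]
              num_link=1,
              dates=["2020-01-01", "2020-01-13", "2020-01-25"])
print(net.ifg_list)          # [(0, 1), (1, 2)]
print(net.getDesignMatrix())
# [[ 1. -1.  0.]
#  [ 0.  1. -1.]]
```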
+ + + +
+[docs] +class SmallBaselineNetwork(IfgNetwork): + """Small baseline network of interferograms restricting both temporal and spatial baselines.""" + +
+[docs] + def configure(self, *, pbase: np.ndarray, tbase: np.ndarray, num_link: int, max_tbase: int, dates: list): + """Create the list of interferograms containing the indices of the images and compute the baselines. + + Parameters + ---------- + pbase: np.ndarray + perpendicular baselines of the SAR acquisitions. + tbase: np.ndarray + temporal baselines of the SAR acquisitions. + num_link: int + number of links within the range of maximum temporal baseline. + max_tbase: int + maximum temporal baseline in [days]. + dates: list + Dates of the acquisitions. + """ + self.pbase = pbase + self.tbase = tbase / 365.25 + self.num_images = pbase.shape[0] + self.dates = dates + flag_restrict_to_max_tbase = False + + # in this section use tbase in [days] (function argument, not self.) + for i in range(self.num_images - 1): + # always use one connection to the nearest neighbour in time + self.ifg_list.append((i, i + 1)) + if i + 1 == self.num_images - 1: + break + # compute index corresponding to max_tbase for current time + diff = np.abs(tbase - (tbase[i] + max_tbase)) + max_idx = np.where(diff == diff.min())[0][0] + self.ifg_list.append((i, max_idx)) + + if max_idx == i: # no further images between i and max_idx + flag_restrict_to_max_tbase = True + continue + + # spread the rest of the links over the remaining time steps in between + links = np.floor(np.arange(i, max_idx, (max_idx - i) / (num_link - 1)))[1:].astype(int) + for link in links: + self.ifg_list.append((i, link)) + self.ifg_list = np.unique(self.ifg_list, axis=0) + + if flag_restrict_to_max_tbase: + warnings.warn(f"Cannot restrict ifgs to maximum temporal baseline of {max_tbase} days.") + + self.ifg_list = [(i, j) for i, j in self.ifg_list if i != j] # remove connections to itself, e.g. (0, 0) + + self.pbase_ifg = np.array([self.pbase[idx[1]] - self.pbase[idx[0]] for idx in self.ifg_list]) + self.tbase_ifg = np.array([self.tbase[idx[1]] - self.tbase[idx[0]] for idx in self.ifg_list]) + self.num_ifgs = self.pbase_ifg.shape[0]
+
+ + + +
+[docs] +class DelaunayNetwork(IfgNetwork): + """Delaunay network of interferograms which restricts both the temporal and perpendicular baselines.""" + +
+[docs] + def configure(self, *, pbase: np.ndarray, tbase: np.ndarray, dates: list): + """Create the list of interferograms containing the indices of the images and compute the baselines. + + Parameters + ---------- + pbase: np.ndarray + perpendicular baselines of the SAR acquisitions. + tbase: np.ndarray + temporal baselines of the SAR acquisitions. + dates: list + Dates of the acquisitions. + """ + self.pbase = pbase + self.tbase = tbase / 365.25 + self.num_images = pbase.shape[0] + self.dates = dates + scale = 0.25 # weight of the temporal axis [days] relative to the perpendicular axis [m] in the triangulation + + network = Delaunay(points=np.stack([self.pbase, self.tbase * 365.25 * scale]).T) + for p1, p2, p3 in network.simplices: + self.ifg_list.append((p1, p2)) + self.ifg_list.append((p1, p3)) + self.ifg_list.append((p2, p3)) + self.ifg_list = np.unique(self.ifg_list, axis=0) + + self.pbase_ifg = np.array([self.pbase[idx[1]] - self.pbase[idx[0]] for idx in self.ifg_list]) + self.tbase_ifg = np.array([self.tbase[idx[1]] - self.tbase[idx[0]] for idx in self.ifg_list]) + self.num_ifgs = self.pbase_ifg.shape[0]
+
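For the Delaunay variant, a quick check with five invented acquisitions shows that the unique triangle edges become the interferogram list:

```python
import numpy as np

net = DelaunayNetwork()
net.configure(pbase=np.array([0.0, 40.0, -25.0, 60.0, 10.0]),
              tbase=np.array([0.0, 12.0, 24.0, 36.0, 48.0]),  # [days]
              dates=["2020-01-01", "2020-01-13", "2020-01-25",
                     "2020-02-06", "2020-02-18"])
print(net.num_ifgs)   # number of unique edges of the triangulation
print(net.ifg_list)   # unique (reference, secondary) index pairs
```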
+ + + +
+[docs] +class SmallBaselineYearlyNetwork(IfgNetwork): + """Small baseline network of interferograms with yearly connections.""" + +
+[docs] + def configure(self, *, pbase: np.ndarray, tbase: np.ndarray, num_link: int = None, dates: list): + """Create the list of interferograms containing the indices of the images and compute the baselines. + + Parameters + ---------- + pbase: np.ndarray + perpendicular baselines of the SAR acquisitions. + tbase: np.ndarray + temporal baselines of the SAR acquisitions. + num_link: int + Number of consecutive links in time connecting acquisitions. + dates: list + Dates of the acquisitions. + """ + self.pbase = pbase + self.tbase = tbase / 365.25 + self.num_images = pbase.shape[0] + self.dates = dates + + # add small temporal baselines + for i in range(self.num_images): + for j in range(num_link): + if i + j + 1 >= self.num_images: + continue + self.ifg_list.append((i, i + j + 1)) + + # add yearly ifgs + for i in range(self.num_images): + # find index of image at roughly one year distance + diff = np.abs(tbase - (tbase[i] + 365.25)) + year_idx = np.where(diff == diff.min())[0][0] + if year_idx != self.num_images - 1: # avoid connections to the last image + self.ifg_list.append((i, year_idx)) + + self.ifg_list = np.unique(self.ifg_list, axis=0) + self.ifg_list = [(i, j) for i, j in self.ifg_list if i != j] # remove connections to itself, e.g. (0, 0) + + self.pbase_ifg = np.array([self.pbase[idx[1]] - self.pbase[idx[0]] for idx in self.ifg_list]) + self.tbase_ifg = np.array([self.tbase[idx[1]] - self.tbase[idx[0]] for idx in self.ifg_list]) + self.num_ifgs = self.pbase_ifg.shape[0]
+
+ +
+ +
+
+
+
+ +
+
+ + + + \ No newline at end of file diff --git a/doc/_modules/sarvey/objects.html b/doc/_modules/sarvey/objects.html new file mode 100644 index 0000000..7f644df --- /dev/null +++ b/doc/_modules/sarvey/objects.html @@ -0,0 +1,972 @@ + + + + + + + sarvey.objects — SARvey 1.0.0 documentation + + + + + + + + + + + + + +
+
+
+
+ +

Source code for sarvey.objects

+#!/usr/bin/env python
+
+# SARvey - A multitemporal InSAR time series tool for the derivation of displacements.
+#
+# Copyright (C) 2021-2024 Andreas Piter (IPI Hannover, piter@ipi.uni-hannover.de)
+#
+# This software was developed together with FERN.Lab (fernlab@gfz-potsdam.de) in the context
+# of the SAR4Infra project with funds of the German Federal Ministry for Digital and
+# Transport and contributions from Landesamt fuer Vermessung und Geoinformation
+# Schleswig-Holstein and Landesbetrieb Strassenbau und Verkehr Schleswig-Holstein.
+#
+# This program is free software: you can redistribute it and/or modify it under
+# the terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# Important: This package uses PyMaxFlow. The core of the PyMaxflow library is the C++
+# implementation by Vladimir Kolmogorov. It is also licensed under the GPL, but it REQUIRES that you
+# cite [BOYKOV04] (see LICENSE) in any resulting publication if you use this code for research purposes.
+# This requirement extends to SARvey.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
+# details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program. If not, see <https://www.gnu.org/licenses/>.
+
+"""Objects module for SARvey."""
+import os
+from os.path import join, dirname, exists, basename
+from typing import Optional, Union
+import h5py
+import matplotlib.pyplot as plt
+import numpy as np
+from pyproj import Proj, CRS
+from pyproj.aoi import AreaOfInterest
+from pyproj.database import query_utm_crs_info
+from logging import Logger
+
+from miaplpy.objects.slcStack import slcStack
+from mintpy.utils import readfile
+from mintpy.utils.plot import auto_flip_direction
+
+from sarvey.ifg_network import IfgNetwork
+
+
+
+[docs] +class AmplitudeImage: + """AmplitudeImage.""" + + def __init__(self, *, file_path: str): + """Init. + + Parameters + ---------- + file_path: str + path to filename + """ + self.width = None + self.length = None + self.file_path = file_path + self.background_map = None + self.orbit_direction = None + +
+[docs] + def prepare(self, *, slc_stack_obj: slcStack, img: np.ndarray, logger: Logger): + """Read the SLC stack, compute the mean amplitude image and store it into a file. + + Parameters + ---------- + slc_stack_obj: slcStack + object of class slcStack from MiaplPy + img: np.ndarray + amplitude image, e.g. the mean over time + logger: Logger + Logging handler + """ + self.orbit_direction = slc_stack_obj.metadata["ORBIT_DIRECTION"] + self.length = slc_stack_obj.length + self.width = slc_stack_obj.width + + self.background_map = img + + logger.info(msg="write data to {}...".format(self.file_path)) + + if exists(self.file_path): + os.remove(self.file_path) + + with h5py.File(self.file_path, 'w') as f: + f.create_dataset('background_map', data=self.background_map) + f.attrs["ORBIT_DIRECTION"] = self.orbit_direction + f.attrs["LENGTH"] = self.length + f.attrs["WIDTH"] = self.width
+ + +
+[docs] + def open(self): + """Open.""" + # print("read from {}".format(self.file_path)) + + with h5py.File(self.file_path, 'r') as f: + self.background_map = f["background_map"][:] + self.orbit_direction = f.attrs["ORBIT_DIRECTION"] + self.length = f.attrs["LENGTH"] + self.width = f.attrs["WIDTH"]
+ + +
+[docs] + def plot(self, *, ax: plt.Axes = None, logger: Logger): + """Plot the mean amplitude image as a background map. + + Parameters + ---------- + ax: plt.Axes + axes for plotting (default: None, a new figure will be created). + logger: Logger + Logging handler. + + Return + ------ + ax: plt.Axes + axes object. + """ + if self.background_map is None: + try: + self.open() + except OSError as e: + logger.error(msg="Could not open file: {}".format(e)) + fig = plt.figure(figsize=(15, 5)) + ax = fig.add_subplot() + logger.error(msg="Orbit direction not available.") + return ax + + if ax is None: + fig = plt.figure(figsize=(15, 5)) + ax = fig.add_subplot() + ax.imshow(self.background_map, cmap=plt.cm.get_cmap("gray")) + meta = {"ORBIT_DIRECTION": self.orbit_direction} + auto_flip_direction(meta, ax=ax, print_msg=False) + + ax.set_xlabel("Range") + ax.set_ylabel("Azimuth") + + return ax
+
+ + + +
+[docs] +class CoordinatesUTM: + """Coordinates in UTM for all pixels in the radar image.""" + + def __init__(self, *, file_path: str, logger: Logger): + """Init. + + Parameters + ---------- + file_path: str + path to filename + logger: Logger + Logging handler. + """ + self.file_path = file_path + self.coord_utm = None + self.logger = logger + +
+[docs] + def prepare(self, *, input_path: str): + """Read the latitude and longitude, convert them to UTM coordinates and store them into a file. + + Parameters + ---------- + input_path: str + path to the file containing the 'latitude' and 'longitude' datasets. + """ + log = self.logger + lat = readfile.read(input_path, datasetName='latitude')[0] + lon = readfile.read(input_path, datasetName='longitude')[0] + + log.info(msg="Transform coordinates from latitude and longitude (WGS84) to North and East (UTM).") + # noinspection PyTypeChecker + utm_crs_list = query_utm_crs_info( + datum_name="WGS 84", + area_of_interest=AreaOfInterest( + west_lon_degree=np.nanmin(lon.ravel()), + south_lat_degree=np.nanmin(lat.ravel()), + east_lon_degree=np.nanmax(lon.ravel()), + north_lat_degree=np.nanmax(lat.ravel())), + contains=True) + utm_crs = CRS.from_epsg(utm_crs_list[0].code) + lola2utm = Proj(utm_crs) + self.coord_utm = np.array(lola2utm(lon, lat)) + + log.info(msg="write data to {}...".format(self.file_path)) + + if exists(self.file_path): + os.remove(self.file_path) + + with h5py.File(self.file_path, 'w') as f: + f.create_dataset('coord_utm', data=self.coord_utm)
+ + +
+[docs] + def open(self): + """Open.""" + with h5py.File(self.file_path, 'r') as f: + self.coord_utm = f["coord_utm"][:]
+
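The UTM lookup used above can be reproduced stand-alone with pyproj; the coordinates below are invented and resolve to UTM zone 32N:

```python
import numpy as np
from pyproj import CRS, Proj
from pyproj.aoi import AreaOfInterest
from pyproj.database import query_utm_crs_info

lat = np.array([52.37, 52.38])
lon = np.array([9.72, 9.73])

utm_crs_list = query_utm_crs_info(
    datum_name="WGS 84",
    area_of_interest=AreaOfInterest(
        west_lon_degree=lon.min(), south_lat_degree=lat.min(),
        east_lon_degree=lon.max(), north_lat_degree=lat.max()),
    contains=True)
lola2utm = Proj(CRS.from_epsg(utm_crs_list[0].code))
east, north = lola2utm(lon, lat)
print(east[0], north[0])  # easting/northing in metres
```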
+ + + +
+[docs] +class BaseStack: + """Class for 3D image-like data stacks.""" + + def __init__(self, *, file: str = None, logger: Logger): + """Init. + + Parameters + ---------- + file: str + path to filename + logger: Logger + Logging handler. + """ + self.file = file + self.logger = logger + self.metadata = None + self.num_time = None + self.length = None + self.width = None + self.f = None + +
+[docs]
+    def close(self, *, print_msg: bool = True):
+        """Close."""
+        try:
+            self.f.close()
+            if print_msg:
+                self.logger.info(msg='close file: {}'.format(basename(self.file)))
+        except Exception as e:
+            self.logger.exception(msg=e)
+        return None
+ + +
+[docs] + def getShape(self, *, dataset_name: str): + """Open file and read shape of dataset.""" + with h5py.File(self.file, 'r') as f: + dshape = f[dataset_name].shape + return dshape
+ + +
+[docs]
+    def read(self, *, dataset_name: str, box: Optional[tuple] = None, print_msg: bool = True):
+        """Read dataset from the file.
+
+        Parameters
+        ----------
+        dataset_name: str
+            name of dataset
+        box: tuple
+            tuple of 4 int, indicating x0,y0,x1,y1 of range, or
+            tuple of 6 int, indicating x0,y0,z0,x1,y1,z1 of range
+        print_msg: bool
+            print message.
+
+        Returns
+        -------
+        data: np.ndarray
+            2D or 3D dataset
+        """
+        if print_msg:
+            self.logger.info(msg='reading box {} from file: {} ...'.format(box, self.file))
+
+        with h5py.File(self.file, 'r') as f:
+            self.metadata = dict(f.attrs)
+
+            ds = f[dataset_name]
+            if len(ds.shape) == 3:
+                self.length, self.width, self.num_time = ds.shape
+            else:
+                self.length, self.width = ds.shape
+
+            # get the index range in the spatial (2D) and temporal (3rd) dimension
+            if box is None:
+                box = [0, 0, self.width, self.length]
+
+            if len(ds.shape) == 3:
+                if len(box) == 4:
+                    data = ds[box[1]:box[3], box[0]:box[2], :]
+                elif len(box) == 6:
+                    data = ds[box[1]:box[4], box[0]:box[3], box[2]:box[5]]
+            else:
+                if len(box) == 6:
+                    raise IndexError("Cannot read 3D box from 2D data.")
+                data = ds[box[1]:box[3], box[0]:box[2]]
+
+        for key, value in self.metadata.items():
+            try:
+                self.metadata[key] = value.decode('utf8')
+            except Exception:
+                self.metadata[key] = value
+        return data
+ + +
+[docs]
+    def prepareDataset(self, dataset_name: str, dshape: tuple, dtype: object,
+                       metadata: Optional[dict], mode: str = "w", chunks: [tuple, bool] = True):
+        """PrepareDataset. Creates a dataset in file with specified size without writing any data.
+
+        Parameters
+        ----------
+        dataset_name: str
+            name of dataset.
+        dshape: tuple
+            shape of dataset.
+        dtype: object
+            data type of dataset.
+        metadata: dict
+            metadata of dataset (e.g. WAVELENGTH, ORBIT_DIRECTION, etc.). Usually the same as in slcStack.h5.
+        mode: str
+            open mode ('w' for writing new file or 'a' for appending to existing file).
+        chunks: tuple
+            chunk size ('True'/'False' or tuple specifying the dimension of the chunks)
+        """
+        with h5py.File(self.file, mode) as f:
+            self.logger.info(msg="Prepare dataset: {d:<25} of {t:<25} in size of {s}".format(
+                d=dataset_name,
+                t=str(dtype),
+                s=dshape))
+
+            f.create_dataset(dataset_name,
+                             shape=dshape,
+                             dtype=dtype,
+                             chunks=chunks)
+
+            # write attributes (metadata is optional and may be None)
+            if metadata is not None:
+                metadata = dict(metadata)
+                for key in metadata.keys():
+                    f.attrs[key] = metadata[key]
+
+        return
+ + +
+[docs] + def writeToFileBlock(self, *, data: np.ndarray, dataset_name: str, block: Optional[tuple] = None, mode: str = 'a', + print_msg: bool = True): + """Write data to existing HDF5 dataset in disk block by block. + + Parameters + ---------- + data: np.ndarray + 1/2/3D matrix. + dataset_name: str + dataset name. + block: list + the list can contain 2, 4 or 6 integers indicating: [zStart, zEnd, yStart, yEnd, xStart, xEnd]. + mode: str + open mode ('w' for writing new file or 'a' for appending to existing file). + print_msg: bool + print message. + + Returns + -------- + file: str + path to file + """ + if block is None: + # data shape + if isinstance(data, list): + shape = (len(data),) + else: + shape = data.shape + + if len(shape) == 1: + block = [0, shape[0]] + elif len(shape) == 2: + block = [0, shape[0], + 0, shape[1]] + elif len(shape) == 3: + block = [0, shape[0], + 0, shape[1], + 0, shape[2]] + + with h5py.File(self.file, mode) as f: + + if print_msg: + self.logger.info(msg="writing dataset /{:<25} block: {}".format(dataset_name, block)) + if len(block) == 6: + f[dataset_name][block[0]:block[1], + block[2]:block[3], + block[4]:block[5]] = data + + elif len(block) == 4: + f[dataset_name][block[0]:block[1], + block[2]:block[3]] = data + + elif len(block) == 2: + f[dataset_name][block[0]:block[1]] = data + + return self.file
+ + +
+[docs]
+    def writeToFile(self, *, data: np.ndarray, dataset_name: str, metadata: Optional[dict] = None, mode: str = 'a',
+                    chunks: [tuple, bool] = True):
+        """Write the whole dataset to the file (not block-by-block).
+
+        Parameters
+        ----------
+        data: np.ndarray
+            3D data array.
+        dataset_name: str
+            name of dataset.
+        metadata: dict
+            metadata of dataset (e.g. WAVELENGTH, ORBIT_DIRECTION, etc.). Usually the same as in slcStack.h5.
+        mode: str
+            mode for opening the h5 file (e.g. write: 'w' or append: 'a')
+        chunks: tuple
+            chunk size ('True'/'False' or tuple specifying the dimension of the chunks)
+        """
+        # 3D dataset
+        self.logger.info(msg='open HDF5 file: {} with {} mode'.format(self.file, mode))
+        self.f = h5py.File(self.file, mode)
+        if dataset_name not in self.f:
+            self.logger.info(msg='create dataset /{n} of {t:<10} in size of {s}.'.format(n=dataset_name,
+                                                                                         t=str(data.dtype),
+                                                                                         s=data.shape))
+            self.f.create_dataset(dataset_name, data=data, chunks=chunks)
+        else:
+            self.logger.info(msg='overwrite dataset /{n} of {t:<10} in size of {s}.'.format(n=dataset_name,
+                                                                                            t=str(data.dtype),
+                                                                                            s=data.shape))
+            self.f[dataset_name][...] = data  # overwrite the existing dataset in place
+
+        # Attributes
+        if metadata is not None:
+            metadata = dict(metadata)
+            for key, value in metadata.items():
+                self.f.attrs[key] = str(value)
+
+        self.f.close()
+        self.logger.info(msg='finished writing to {}'.format(self.file))
+        return
+
+ + + +
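+
+# A minimal usage sketch for BaseStack (illustrative): create an empty 3D dataset and
+# fill it block by block, the pattern used by computeIfgsAndTemporalCoherence().
+# File name, shape and metadata are example values.
+def exampleUsageBaseStack():
+    """Prepare a chunked HDF5 dataset and write one block into it."""
+    import logging
+    import numpy as np
+    from sarvey.objects import BaseStack
+
+    log = logging.getLogger("sarvey_example")
+    stack_obj = BaseStack(file="ifg_stack.h5", logger=log)
+    stack_obj.prepareDataset(dataset_name="ifgs", dshape=(100, 200, 30), dtype=np.complex64,
+                             metadata={"WAVELENGTH": "0.055"})
+    block = (0, 50, 0, 200, 0, 30)  # start/stop indices along the three dataset axes
+    stack_obj.writeToFileBlock(data=np.zeros((50, 200, 30), dtype=np.complex64),
+                               dataset_name="ifgs", block=block)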
+[docs]
+class Points:
+    """Points class for storing information about the selected scatterers."""
+
+    file_path: str
+    point_id: np.array
+    coord_xy: np.array
+    num_points: int
+    phase: np.array
+    wavelength: float
+    length: int
+    width: int
+    times: None
+
+    # etc.
+
+    def __init__(self, *, file_path: str, logger: Logger):
+        """Init.
+
+        Parameters
+        ----------
+        file_path: str
+            path to filename
+        logger: Logger
+            Logging handler.
+        """
+        self.ifg_net_obj = IfgNetwork()  # use parent class here which doesn't know and care about 'star' or 'sb'
+        self.coord_utm = None
+        self.coord_lalo = None
+        self.height = None
+        self.slant_range = None
+        self.loc_inc = None
+        self.file_path = file_path
+        self.logger = logger
+[docs] + def prepare(self, *, point_id: np.ndarray, coord_xy: np.ndarray, input_path: str): + """Assign point_id and radar coordinates to the object. + + Store the point_id and radar coordinates of the scatterers in the object (not file) and read further + attributes from external files (ifg_network.h5, slcStack.h5, geometryRadar.h5, coordinates_utm.h5). + + Parameters + ---------- + point_id: np.ndarray + point_id of the scatterers. + coord_xy: np.ndarray + radar coordinates of the scatterers. + input_path: str + path to input files (slcStack.h5, geometryRadar.h5). + """ + self.point_id = point_id + self.coord_xy = coord_xy + self.num_points = self.coord_xy.shape[0] + self.phase = None + self.openExternalData(input_path=input_path)
+ + +
+[docs] + def writeToFile(self): + """Write data to .h5 file (num_points, coord_xy, point_id, phase).""" + self.logger.info(msg="write data to {}...".format(self.file_path)) + + if exists(self.file_path): + os.remove(self.file_path) + + with h5py.File(self.file_path, 'w') as f: + f.attrs["num_points"] = self.num_points + f.create_dataset('coord_xy', data=self.coord_xy) + f.create_dataset('point_id', data=self.point_id) + f.create_dataset('phase', data=self.phase)
+ + +
+[docs] + def open(self, input_path: str, other_file_path: str = None): + """Read data from file. + + Read stored information from already existing .h5 file. This can be the file of the object itself. If the + data should be read from another file, the path to this file can be given as 'other_file_path'. Thereby, a new + Points object can be created with the data of another Points object. + + Parameters + ---------- + input_path: str + path to input files (slcStack.h5, geometryRadar.h5). + other_file_path: str + path to other .h5 file (default: None). + """ + # 1) read own data: coord_xy, phase, point_id, num_points, reference_point_idx + if other_file_path is not None: + path = other_file_path + else: + path = self.file_path + self.logger.info(msg="read from {}".format(path)) + + with h5py.File(path, 'r') as f: + self.num_points = f.attrs["num_points"] + self.coord_xy = f["coord_xy"][:] + self.point_id = f["point_id"][:] + self.phase = f["phase"][:] + + self.openExternalData(input_path=input_path)
+ + +
+[docs]
+    def openExternalData(self, *, input_path: str):
+        """Load data which is stored in slcStack.h5, geometryRadar.h5, ifg_network.h5 and coordinates_utm.h5."""
+        # 1) read IfgNetwork
+        self.ifg_net_obj.open(path=join(dirname(self.file_path), "ifg_network.h5"))
+
+        # 2) read metadata from slcStack
+        slc_stack_obj = slcStack(join(input_path, "slcStack.h5"))
+        slc_stack_obj.open(print_msg=False)
+        self.wavelength = np.float64(slc_stack_obj.metadata["WAVELENGTH"])
+        self.length = slc_stack_obj.length  # y-coordinate axis (azimuth)
+        self.width = slc_stack_obj.width  # x-coordinate axis (range)
+
+        # 3) read from geometry file
+        mask = self.createMask()
+
+        geom_path = join(input_path, "geometryRadar.h5")
+
+        # load geometry data
+        loc_inc, meta = readfile.read(geom_path, datasetName='incidenceAngle')
+        loc_inc *= np.pi / 180  # in [rad]
+        slant_range = readfile.read(geom_path, datasetName='slantRangeDistance')[0]
+        height = readfile.read(geom_path, datasetName='height')[0]
+        lat = readfile.read(geom_path, datasetName='latitude')[0]
+        lon = readfile.read(geom_path, datasetName='longitude')[0]
+
+        self.loc_inc = loc_inc[mask].ravel()
+        self.slant_range = slant_range[mask].ravel()
+        self.height = height[mask].ravel()
+        self.coord_lalo = np.array([lat[mask].ravel(), lon[mask].ravel()]).transpose()
+
+        # 4) read UTM coordinates
+        coord_utm_obj = CoordinatesUTM(file_path=join(dirname(self.file_path), "coordinates_utm.h5"),
+                                       logger=self.logger)
+        coord_utm_obj.open()
+        self.coord_utm = coord_utm_obj.coord_utm[:, mask].transpose()
+ + +
+[docs] + def createMask(self): + """Create a mask. + + Create a mask in the size of the radar image which is used to read the geometry and SLC data for the selected + scatterers. + """ + mask = np.zeros((self.length, self.width), dtype=np.bool_) + tmp = [tuple([c[0], c[1]]) for c in self.coord_xy] + for i in tmp: + mask[i] = True + return mask
+ + +
+[docs] + def addPointsFromObj(self, *, new_point_id: np.ndarray, new_coord_xy: np.ndarray, new_phase: np.ndarray, + new_num_points: int, input_path: str): + """Add new points and their attributes to the existing data. + + Parameters + ---------- + new_point_id: np.ndarray + point_id of the new scatterers. + new_coord_xy: np.ndarray + radar coordinates of the new scatterers. + new_phase: np.ndarray + phase of the new scatterers. + new_num_points: int + number of new points. + input_path: str + path to input files (slcStack.h5, geometryRadar.h5). + """ + self.point_id = np.append(self.point_id, new_point_id) + self.coord_xy = np.append(self.coord_xy, new_coord_xy, axis=0) + self.phase = np.append(self.phase, new_phase, axis=0) + self.num_points += new_num_points + + # all data must be ordered, so that all external data can be loaded correctly + sort_idx = np.argsort(self.point_id) + self.point_id = self.point_id[sort_idx] + self.coord_xy = self.coord_xy[sort_idx, :] + self.phase = self.phase[sort_idx, :] + # refresh by reopening all external data + self.openExternalData(input_path=input_path)
+ + +
+[docs]
+    def removePoints(self, mask: np.ndarray = None, *, keep_id: [np.ndarray, list], input_path: str):
+        """Remove all entries from specified points.
+
+        Two options exist for selecting the points to keep:
+        a) Keep all points which are set to True in a 'mask' with size (num_points x 1). Or
+        b) Keep all points whose ID is listed in keep_id. The rest of the points will be removed.
+
+        Parameters
+        ----------
+        mask: np.ndarray
+            mask to select points to be kept, rest will be removed (default: None).
+        keep_id: np.ndarray
+            list of point_id to keep.
+        input_path: str
+            path to input files (slcStack.h5, geometryRadar.h5).
+        """
+        if mask is None:
+            mask = np.ones((self.num_points,), dtype=np.bool_)
+            for p in self.point_id:
+                if p not in keep_id:
+                    mask[self.point_id == p] = False
+        self.point_id = self.point_id[mask]
+        self.coord_xy = self.coord_xy[mask, :]
+        self.phase = self.phase[mask, :]
+        self.num_points = mask[mask].shape[0]
+        # refresh by reopening all external data
+        self.openExternalData(input_path=input_path)
+
+ + + +
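+
+# A minimal usage sketch for Points (illustrative): prepare() reads ifg_network.h5,
+# slcStack.h5, geometryRadar.h5 and coordinates_utm.h5, so these files are assumed to
+# exist; all paths and file names are example values.
+def exampleUsagePoints():
+    """Create a Points object for two scatterers and store it."""
+    import logging
+    import numpy as np
+    from sarvey.objects import Points
+
+    log = logging.getLogger("sarvey_example")
+    point_obj = Points(file_path="outputs/points.h5", logger=log)
+    point_obj.prepare(point_id=np.array([0, 1]),
+                      coord_xy=np.array([[10, 20], [30, 40]]),  # (y, x) radar coordinates
+                      input_path="inputs/")
+    point_obj.phase = np.zeros((point_obj.num_points, point_obj.ifg_net_obj.num_ifgs))
+    point_obj.writeToFile()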
+[docs] +class Network: + """Spatial network of PS candidates.""" + + def __init__(self, *, file_path: str, logger: Logger): + """Init. + + Parameters + ---------- + file_path: str + absolute path to working directory for creating/loading 'psNetwork.h5' + logger: Logger + Logging handler. + """ + self.num_arcs = None + self.gamma = None + self.arcs = None + self.slant_range = None + self.loc_inc = None + self.phase = None + self.vel = None + self.demerr = None + self.ifg_net_obj = None + self.width = None + self.length = None + self.wavelength = None + self.file_path = file_path + self.logger = logger + +
+[docs] + def writeToFile(self): + """Write all existing data to psNetwork.h5 file.""" + self.logger.info(msg="write data to {}...".format(self.file_path)) + + if exists(self.file_path): + os.remove(self.file_path) + + with h5py.File(self.file_path, 'w') as f: + f.attrs["num_arcs"] = self.num_arcs + f.create_dataset('arcs', data=self.arcs) + f.create_dataset('phase', data=self.phase) + f.create_dataset('loc_inc', data=self.loc_inc) + f.create_dataset('slant_range', data=self.slant_range)
+ + +
+[docs] + def open(self, *, input_path: str): + """Read stored information from existing .h5 file.""" + with h5py.File(self.file_path, 'r') as f: + self.num_arcs = f.attrs["num_arcs"] + self.arcs = f["arcs"][:] + self.phase = f["phase"][:] + self.loc_inc = f["loc_inc"][:] + self.slant_range = f["slant_range"][:] + self.openExternalData(input_path=input_path)
+ + +
+[docs]
+    def openExternalData(self, *, input_path: str):
+        """Read data from slcStack.h5 and IfgNetwork.h5 files."""
+        # 1) read metadata from slcStack
+        slc_stack_obj = slcStack(join(input_path, "slcStack.h5"))
+        slc_stack_obj.open(print_msg=False)
+        self.wavelength = np.float64(slc_stack_obj.metadata["WAVELENGTH"])
+        self.length = slc_stack_obj.length  # y-coordinate axis (azimuth)
+        self.width = slc_stack_obj.width  # x-coordinate axis (range)
+
+        # 2) read IfgNetwork
+        self.ifg_net_obj = IfgNetwork()
+        self.ifg_net_obj.open(path=join(dirname(self.file_path), "ifg_network.h5"))
+ + +
+[docs] + def computeArcObservations(self, *, point_obj: Points, arcs: np.ndarray): + """Compute the phase observations for each arc. + + Compute double difference phase observations, i.e. the phase differences for each arc in the network from the + phase of the two scatterers connected by the arc. + + Parameters + ---------- + point_obj: Points + object of class Points. + arcs: np.ndarray + Array with the indices of the points connected by an arc. + """ + self.arcs = arcs + self.num_arcs = self.arcs.shape[0] + self.logger.info(msg="no. arcs:\t{}".format(self.num_arcs)) + + self.phase = np.zeros((self.num_arcs, point_obj.ifg_net_obj.num_ifgs)) + self.loc_inc = np.zeros((self.num_arcs,)) + self.slant_range = np.zeros((self.num_arcs,)) + for idx, arc in enumerate(self.arcs): + self.phase[idx, :] = np.angle( + np.exp(1j * point_obj.phase[arc[0], :]) * np.conjugate(np.exp(1j * point_obj.phase[arc[1], :]))) + self.loc_inc[idx] = np.mean([point_obj.loc_inc[arc[0]], point_obj.loc_inc[arc[1]]]) + self.slant_range[idx] = np.mean([point_obj.slant_range[arc[0]], point_obj.slant_range[arc[1]]]) + + self.logger.info(msg="ifg arc observations created.")
+ + +
+[docs]
+    def removeArcs(self, *, mask: np.ndarray):
+        """Remove arcs from the list of arcs in the network.
+
+        Parameters
+        ----------
+        mask: np.ndarray
+            mask to select arcs to be kept, rest will be removed.
+        """
+        self.demerr = self.demerr[mask]
+        self.vel = self.vel[mask]
+        self.phase = self.phase[mask, :]
+        self.loc_inc = self.loc_inc[mask]
+        self.slant_range = self.slant_range[mask]
+        self.arcs = np.array(self.arcs)
+        self.arcs = self.arcs[mask, :]
+        self.gamma = self.gamma[mask]
+        self.num_arcs = mask[mask].shape[0]
+
+ + + +
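+
+# The double-difference phase in computeArcObservations() is formed on the unit circle,
+# so the arc phase stays wrapped to (-pi, pi]. A small standalone check (illustrative):
+def exampleDoubleDifferencePhase():
+    """Show that the arc phase is the rewrapped difference of the point phases."""
+    import numpy as np
+
+    phase_p0 = np.array([3.0, -3.0])   # phase of first point per interferogram [rad]
+    phase_p1 = np.array([-3.0, 3.0])   # phase of second point per interferogram [rad]
+    arc_phase = np.angle(np.exp(1j * phase_p0) * np.conjugate(np.exp(1j * phase_p1)))
+    print(arc_phase)  # approx. [-0.2832  0.2832]: the 6 rad difference is rewrapped, not +/-6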
+[docs] +class NetworkParameter(Network): + """Spatial Network with the estimated parameters of each arc in the network.""" + + def __init__(self, *, file_path: str, logger: Logger): + """Init.""" + super().__init__(file_path=file_path, logger=logger) + self.gamma = None + self.vel = None + self.demerr = None + self.slant_range = None + self.loc_inc = None + self.phase = None + self.arcs = None + self.num_arcs = None + self.logger = logger + +
+[docs]
+    def prepare(self, *, net_obj: Network, demerr: np.ndarray, vel: np.ndarray, gamma: np.ndarray):
+        """Prepare.
+
+        Parameters
+        ----------
+        net_obj: Network
+            object of class Network.
+        demerr: np.ndarray
+            estimated DEM error for each arc in the network.
+        vel: np.ndarray
+            estimated velocity for each arc in the network.
+        gamma: np.ndarray
+            estimated temporal coherence for each arc in the network.
+        """
+        self.num_arcs = net_obj.num_arcs
+        self.arcs = net_obj.arcs
+        self.phase = net_obj.phase
+        self.loc_inc = net_obj.loc_inc
+        self.slant_range = net_obj.slant_range
+        self.demerr = demerr
+        self.vel = vel
+        self.gamma = gamma
+ + +
+[docs] + def writeToFile(self): + """Write DEM error, velocity and temporal coherence to file.""" + super().writeToFile() + + with h5py.File(self.file_path, 'r+') as f: # append existing file + f.create_dataset('demerr', data=self.demerr) + f.create_dataset('vel', data=self.vel) + f.create_dataset('gamma', data=self.gamma)
+ + +
+[docs] + def open(self, *, input_path: str): + """Read data from file.""" + super().open(input_path=input_path) + + with h5py.File(self.file_path, 'r') as f: + self.demerr = f["demerr"][:] + self.vel = f["vel"][:] + self.gamma = f["gamma"][:]
+
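+
+# A minimal usage sketch for NetworkParameter (illustrative): attach estimated per-arc
+# parameters to an existing network and store them. File names are example values and
+# the zero/one arrays stand in for results of an actual parameter estimation step.
+def exampleUsageNetworkParameter():
+    """Wrap a Network with estimated parameters and write them to file."""
+    import logging
+    import numpy as np
+    from sarvey.objects import Network, NetworkParameter
+
+    log = logging.getLogger("sarvey_example")
+    net_obj = Network(file_path="outputs/network.h5", logger=log)
+    net_obj.open(input_path="inputs/")
+    net_par_obj = NetworkParameter(file_path="outputs/network_parameter.h5", logger=log)
+    net_par_obj.prepare(net_obj=net_obj,
+                        demerr=np.zeros(net_obj.num_arcs),
+                        vel=np.zeros(net_obj.num_arcs),
+                        gamma=np.ones(net_obj.num_arcs))
+    net_par_obj.writeToFile()  # writes arcs/phase plus demerr, vel and gamma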
+ +
+ +
+
+
+
+ +
+
+ + + + \ No newline at end of file diff --git a/doc/_modules/sarvey/osm_utils.html b/doc/_modules/sarvey/osm_utils.html new file mode 100644 index 0000000..180aaa9 --- /dev/null +++ b/doc/_modules/sarvey/osm_utils.html @@ -0,0 +1,275 @@ + + + + + + + sarvey.osm_utils — SARvey 1.0.0 documentation + + + + + + + + + + + + + +
+
+
+
+ +

Source code for sarvey.osm_utils

+#!/usr/bin/env python
+
+# SARvey - A multitemporal InSAR time series tool for the derivation of displacements.
+#
+# Copyright (C) 2021-2024 Andreas Piter (IPI Hannover, piter@ipi.uni-hannover.de)
+#
+# This software was developed together with FERN.Lab (fernlab@gfz-potsdam.de) in the context
+# of the SAR4Infra project with funds of the German Federal Ministry for Digital and
+# Transport and contributions from Landesamt fuer Vermessung und Geoinformation
+# Schleswig-Holstein and Landesbetrieb Strassenbau und Verkehr Schleswig-Holstein.
+#
+# This program is free software: you can redistribute it and/or modify it under
+# the terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# Important: This package uses PyMaxFlow. The core of PyMaxflows library is the C++
+# implementation by Vladimir Kolmogorov. It is also licensed under the GPL, but it REQUIRES that you
+# cite [BOYKOV04] (see LICENSE) in any resulting publication if you use this code for research purposes.
+# This requirement extends to SARvey.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with this program. If not, see <https://www.gnu.org/licenses/>.
+
+"""Osm utils module for SARvey."""
+import numpy as np
+import overpy
+from logging import Logger
+from shapely import Point
+
+from mintpy.utils import readfile, utils as ut
+
+
+
+[docs]
+def getSpatialExtend(*, geom_file: str, logger: Logger):
+    """Get the spatial extent of the radar image.
+
+    Parameters
+    ----------
+    geom_file: str
+        path of geometryRadar.h5 file
+    logger: Logger
+        Logging handler.
+
+    Returns
+    -------
+    ll_bbox: list
+        coordinates of the lower-left corner of the radar image in WGS84 coordinates.
+    ur_bbox: list
+        coordinates of the upper-right corner of the radar image in WGS84 coordinates.
+    img_ext: list
+        list of shapely Points marking the four corners of the radar image in WGS84.
+    coord: ut.coordinate
+        coordinate object for all pixels in the radar image.
+    atr: dict
+        metadata dictionary from geometryRadar.h5.
+    """
+    logger.info(msg='read spatial extent from geometryRadar.h5')
+    _, atr = readfile.read(geom_file)
+    coord = ut.coordinate(atr, lookup_file=geom_file)
+    lat, atr = readfile.read(geom_file, datasetName='latitude')
+    lon, _ = readfile.read(geom_file, datasetName='longitude')
+
+    # radar image is flipped up-down
+    # unclear: check if bounding box fits. Otherwise, change to max and min values of lat and lon
+    ll_bbox = [np.nanmin(lat), np.nanmin(lon)]
+    ur_bbox = [np.nanmax(lat), np.nanmax(lon)]
+
+    img_ext = [
+        Point(lon[0, 0], lat[0, 0]),
+        Point(lon[-1, 0], lat[-1, 0]),
+        Point(lon[-1, -1], lat[-1, -1]),
+        Point(lon[0, -1], lat[0, -1])
+    ]
+    return ll_bbox, ur_bbox, img_ext, coord, atr
+ + + +
+[docs] +def runOsmQuery(*, ll_corner_wgs: np.ndarray, ur_corner_wgs: np.ndarray, type_list: list, + logger: Logger) -> overpy.Result: + """Query OSM database for transport infrastructure within the spatial extent of the radar image. + + Parameters + ---------- + ll_corner_wgs: np.ndarray + coordinates of the lower-left corner of the radar image in WGS84 coordinates. + ur_corner_wgs: np.ndarray + coordinates of the upper-right corner of the radar image in WGS84 coordinates. + type_list: list + List of street types that shall be queried at the OSM database. + logger: Logger + Logging handler. + + Returns + ------- + result: overpy.Result + results of the overpy query to OSM database. + """ + # Initialize overpass connection + api = overpy.Overpass() + + # Request data from API + logger.info(msg='querying OSM database for infra types...') + # query_cmd = "way({},{},{},{}) [""highway=motorway_link""]; (._;>;); out body;" + + query_cmd = "[bbox: {},{},{},{}];(" + for infra_type in type_list: + logger.info(msg='\t - {}'.format(infra_type)) + if infra_type == 'rail': + query_cmd += "way[railway={}];".format(infra_type) + else: + query_cmd += "way[highway={}];".format(infra_type) + + query_cmd += ");(._; >;); out body;" # skel + + cmd = query_cmd.format(ll_corner_wgs[0], ll_corner_wgs[1], + ur_corner_wgs[0], ur_corner_wgs[1]) + logger.info(msg="\n" + cmd + "\n") + result = api.query(cmd) + + if len(result.ways) == 0: + logger.error(msg='Empty OSM query results. No roads or railway tracks found.') + raise ValueError + + logger.info(msg='...done.') + return result
+ + + +
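+
+# For illustration: with ll_corner_wgs=(53.3, 9.8), ur_corner_wgs=(53.7, 10.2) and
+# type_list=["rail", "motorway"] (example values), runOsmQuery() assembles the
+# following Overpass QL request:
+#
+#   [bbox: 53.3,9.8,53.7,10.2];(way[railway=rail];way[highway=motorway];);(._; >;); out body;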
+[docs] +def runOsmQueryBridge(*, ll_corner_wgs: np.ndarray, ur_corner_wgs: np.ndarray, bridge_highway: bool, + bridge_railway: bool, logger: Logger) -> overpy.Result: + """Query OSM database for bridges of transport infrastructure within the spatial extent of the radar image. + + Parameters + ---------- + ll_corner_wgs: np.ndarray + coordinates of the lower-left corner of the radar image in WGS84 coordinates. + ur_corner_wgs: np.ndarray + coordinates of the upper-right corner of the radar image in WGS84 coordinates. + bridge_highway: bool + Set true to query highway bridges. + bridge_railway: bool + Set true to query railway bridges. + logger: Logger + Logging handler. + + Returns + ------- + result: overpy.Result + results of the overpy query to OSM database. + """ + # Initialize overpass connection + api = overpy.Overpass() + + # Request data from API + logger.info(msg='querying OSM database for infra types...') + # query_cmd = "way({},{},{},{}) [""highway=motorway_link""]; (._;>;); out body;" + + query_cmd = "[bbox: {},{},{},{}];(" + + if bridge_highway: + logger.info(msg='\t - bridge_highway') + query_cmd += 'way[highway~"^(motorway|motorway_link|trunk|trunk_link)$"][bridge];' + + if bridge_railway: + logger.info(msg='\t - bridge_railway') + query_cmd += 'way[railway=rail][bridge];' + + if (bridge_highway is False) & (bridge_railway is False): + logger.info(msg='\t - all bridges') + query_cmd += 'way[bridge];' + + query_cmd += ");(._; >;); out body;" # skel + + cmd = query_cmd.format(ll_corner_wgs[0], ll_corner_wgs[1], + ur_corner_wgs[0], ur_corner_wgs[1]) + logger.info(msg="\n" + cmd + "\n") + result = api.query(cmd) + + if len(result.ways) == 0: + logger.error(msg='Empty OSM query results. No bridges found.') + raise ValueError + + logger.info(msg='...done.') + return result
+ +
+ +
+
+
+
+ +
+
+ + + + \ No newline at end of file diff --git a/doc/_modules/sarvey/preparation.html b/doc/_modules/sarvey/preparation.html new file mode 100644 index 0000000..bbddbd6 --- /dev/null +++ b/doc/_modules/sarvey/preparation.html @@ -0,0 +1,373 @@ + + + + + + + sarvey.preparation — SARvey 1.0.0 documentation + + + + + + + + + + + + + +
+
+
+
+ +

Source code for sarvey.preparation

+#!/usr/bin/env python
+
+# SARvey - A multitemporal InSAR time series tool for the derivation of displacements.
+#
+# Copyright (C) 2021-2024 Andreas Piter (IPI Hannover, piter@ipi.uni-hannover.de)
+#
+# This software was developed together with FERN.Lab (fernlab@gfz-potsdam.de) in the context
+# of the SAR4Infra project with funds of the German Federal Ministry for Digital and
+# Transport and contributions from Landesamt fuer Vermessung und Geoinformation
+# Schleswig-Holstein and Landesbetrieb Strassenbau und Verkehr Schleswig-Holstein.
+#
+# This program is free software: you can redistribute it and/or modify it under
+# the terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# Important: This package uses PyMaxFlow. The core of PyMaxflows library is the C++
+# implementation by Vladimir Kolmogorov. It is also licensed under the GPL, but it REQUIRES that you
+# cite [BOYKOV04] (see LICENSE) in any resulting publication if you use this code for research purposes.
+# This requirement extends to SARvey.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with this program. If not, see <https://www.gnu.org/licenses/>.
+
+"""Preparation module for SARvey."""
+import datetime
+import matplotlib.pyplot as plt
+import numpy as np
+from logging import Logger
+from os.path import join
+
+import mintpy.utils.readfile as readfile
+
+from sarvey import viewer
+import sarvey.utils as ut
+from sarvey.objects import CoordinatesUTM, AmplitudeImage, BaseStack, Points
+from sarvey.triangulation import PointNetworkTriangulation
+
+
+
+[docs]
+def createTimeMaskFromDates(*, start_date: str, stop_date: str, date_list: list, logger: Logger):
+    """Create a mask with selected dates within given time frame.
+
+    Parameters
+    ----------
+    start_date: str
+        Start date in ISO format (YYYY-MM-DD).
+    stop_date: str
+        Stop date in ISO format (YYYY-MM-DD).
+    date_list: list
+        all available dates in the slcStack.h5.
+    logger: Logger
+        Logging handler.
+
+    Returns
+    -------
+    time_mask: np.ndarray
+        mask with True for selected dates.
+    num_slc: int
+        number of selected images.
+    result_date_list: list
+        list of selected dates.
+    """
+    time_mask = np.ones((len(date_list)), dtype=np.bool_)
+    date_list = [datetime.date(year=int(d[:4]), month=int(d[4:6]), day=int(d[6:])) for d in date_list]
+
+    if (start_date is None) and (stop_date is None):
+        # use all images.
+        result_date_list = [date.isoformat() for date in date_list]
+        return time_mask, time_mask.shape[0], result_date_list
+
+    if start_date is None:
+        start_date = min(date_list)
+    else:
+        start_date = datetime.date.fromisoformat(start_date)
+
+    if stop_date is None:
+        stop_date = max(date_list)
+    else:
+        stop_date = datetime.date.fromisoformat(stop_date)
+
+    if start_date >= stop_date:
+        logger.error(msg="Choose start date < stop date!")
+        raise ValueError
+
+    if stop_date < min(date_list):
+        logger.error(msg="Stop date is before the first acquired image. Choose a later stop date!")
+        raise ValueError
+
+    if start_date > max(date_list):
+        logger.error(msg="Start date is after the last acquired image. Choose an earlier start date!")
+        raise ValueError
+
+    shift = " "
+    logger.debug(msg=shift + "{:>10} {:>10}".format(" Date ", "Selected"))
+    logger.debug(msg=shift + "{:>10} {:>10}".format("__________", "________"))
+
+    result_date_list = list()
+    for i, date in enumerate(date_list):
+        if (date < start_date) or (date > stop_date):
+            time_mask[i] = False
+        else:
+            result_date_list.append(date.isoformat())
+        val = " x" if time_mask[i] else ""
+        logger.debug(msg=shift + "{:>10} {:>3}".format(date.isoformat(), val))
+
+    num_slc = time_mask[time_mask].shape[0]
+    return time_mask, num_slc, result_date_list
+ + + +
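+
+# A small worked example (illustrative): keep only the acquisitions of 2021.
+def exampleCreateTimeMask():
+    """Select a date range from a list of acquisition dates."""
+    import logging
+
+    log = logging.getLogger("sarvey_example")
+    dates = ["20201220", "20210101", "20210113", "20220101"]  # format as in slcStack.h5
+    time_mask, num_slc, date_list = createTimeMaskFromDates(
+        start_date="2021-01-01", stop_date="2021-12-31", date_list=dates, logger=log)
+    print(time_mask)   # [False  True  True False]
+    print(num_slc)     # 2
+    print(date_list)   # ['2021-01-01', '2021-01-13']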
+[docs] +def readSlcFromMiaplpy(*, path: str, box: tuple = None, logger: Logger) -> np.ndarray: + """Read SLC data from phase-linking results of Miaplpy. + + Parameters + ---------- + path: str + Path to the phase_series.h5 file. + box: tuple + Bounding Box to read from. + logger: Logger + Logging handler. + + Returns + ------- + slc: np.ndarray + slc stack created from phase-linking results. + """ + logger.info(msg="read phase from MiaplPy results...") + phase = readfile.read(path, datasetName='phase', box=box)[0] + + logger.info(msg="read amplitude from MiaplPy results...") + amp = readfile.read(path, datasetName='amplitude', box=box)[0] + + logger.info(msg="combine phase and amplitude to slc...") + slc = amp * np.exp(phase * 1j) + return slc
+ + + +
+[docs]
+def readCoherenceFromMiaplpy(*, path: str, box: tuple = None, logger: Logger) -> np.ndarray:
+    """Read the coherence image from phase-linking of MiaplPy.
+
+    Parameters
+    ----------
+    path: str
+        Path to phase_series.h5 file.
+    box: tuple
+        Bounding box to read from.
+    logger: Logger
+        Logging handler.
+
+    Returns
+    -------
+    temp_coh: np.ndarray
+        temporal coherence image from phase-linking results of MiaplPy.
+    """
+    logger.info(msg="read quality from MiaplPy results...")
+    temp_coh = readfile.read(path, datasetName='temporalCoherence', box=box)[0][1, :, :]
+    return temp_coh
+ + + +
+[docs] +def selectPixels(*, path: str, selection_method: str, thrsh: float, + grid_size: int = None, bool_plot: bool = False, logger: Logger): + """Select pixels based on temporal coherence. + + Parameters + ---------- + path: str + Path to the directory with the temporal_coherence.h5 file. + selection_method: str + Pixel selection method. Currently, only "temp_coh" is implemented. + thrsh: float + Threshold for pixel selection. + grid_size: int + Grid size for sparse pixel selection. + bool_plot: bool + Plot the selected pixels. + logger: Logger + Logging handler. + + Returns + ------- + cand_mask: np.ndarray + Mask with selected pixels. + """ + quality = None + grid_min_val = None + cand_mask = None + unit = None + cmap = None + # compute candidates + if selection_method == "temp_coh": + temp_coh_obj = BaseStack(file=join(path, "temporal_coherence.h5"), logger=logger) + quality = temp_coh_obj.read(dataset_name="temp_coh") + cand_mask = quality >= thrsh + grid_min_val = False + unit = "Temporal\nCoherence [ ]" + cmap = "autumn" + + if selection_method == "miaplpy": + raise NotImplementedError("This part is not developed yet. MiaplPy data is read in another way.") + # pl_coherence = readCoherenceFromMiaplpy(path=join(path, 'inverted', 'phase_series.h5'), box=None, + # logger=logger) + # cand_mask = pl_coherence >= thrsh + # quality = pl_coherence + # grid_min_val = False + # unit = "Phase-Linking\nCoherence [ ]" + # cmap = "autumn" + + if grid_size is not None: # -> sparse pixel selection + coord_utm_obj = CoordinatesUTM(file_path=join(path, "coordinates_utm.h5"), logger=logger) + coord_utm_obj.open() + box_list = ut.createSpatialGrid(coord_utm_img=coord_utm_obj.coord_utm, + length=coord_utm_obj.coord_utm.shape[1], + width=coord_utm_obj.coord_utm.shape[2], + grid_size=grid_size)[0] + cand_mask_sparse = ut.selectBestPointsInGrid(box_list=box_list, quality=quality, sel_min=grid_min_val) + cand_mask &= cand_mask_sparse + + if bool_plot: + coord_xy = np.array(np.where(cand_mask)).transpose() + bmap_obj = AmplitudeImage(file_path=join(path, "background_map.h5")) + viewer.plotScatter(value=quality[cand_mask], coord=coord_xy, bmap_obj=bmap_obj, ttl="Selected pixels", + unit=unit, s=2, cmap=cmap, vmin=0, vmax=1, logger=logger) + # if grid_size is not None: + # psViewer.plotGridFromBoxList(box_list, ax=ax, edgecolor="k", linewidth=0.2) + plt.tight_layout() + plt.gcf().savefig(join(path, "pic", "selected_pixels_{}_{}.png".format(selection_method, thrsh)), + dpi=300) + plt.close(plt.gcf()) + + return cand_mask
+ + + +
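+
+# A minimal usage sketch (illustrative): select pixels with temporal coherence >= 0.8 and
+# thin them out on a 200 m grid. Assumes temporal_coherence.h5 and coordinates_utm.h5 in
+# the given directory; all values are examples.
+def exampleSelectPixels():
+    """Run the coherence-based pixel selection."""
+    import logging
+
+    log = logging.getLogger("sarvey_example")
+    cand_mask = selectPixels(path="outputs/", selection_method="temp_coh", thrsh=0.8,
+                             grid_size=200, bool_plot=False, logger=log)
+    print(cand_mask.sum(), "pixels selected")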
+[docs]
+def createArcsBetweenPoints(*, point_obj: Points, knn: int = None, max_arc_length: float = np.inf,
+                            logger: Logger) -> np.ndarray:
+    """Create a spatial network of arcs to triangulate the points.
+
+    All points are triangulated with a Delaunay triangulation. If knn is given, the triangulation is done with the k
+    nearest neighbors. Arcs longer than max_arc_length are removed from the network. If the network is not connected
+    afterwards, a Delaunay triangulation is performed again to ensure connectivity in the network.
+
+    Parameters
+    ----------
+    point_obj: Points
+        Point object.
+    knn: int
+        Number of nearest neighbors to consider (default: None).
+    max_arc_length: float
+        Maximum length of an arc. Longer arcs will be removed. Default: np.inf.
+    logger: Logger
+        Logging handler.
+
+    Returns
+    -------
+    arcs: np.ndarray
+        Arcs of the triangulation containing the indices of the points for each arc.
+    """
+    triang_obj = PointNetworkTriangulation(coord_xy=point_obj.coord_xy, coord_utmxy=point_obj.coord_utm, logger=logger)
+
+    if knn is not None:
+        triang_obj.triangulateKnn(k=knn)
+
+    triang_obj.triangulateGlobal()
+
+    logger.info(msg="remove arcs with length > {}.".format(max_arc_length))
+    triang_obj.removeLongArcs(max_dist=max_arc_length)
+
+    if not triang_obj.isConnected():
+        triang_obj.triangulateGlobal()
+
+    logger.info(msg="retrieve arcs from adjacency matrix.")
+    arcs = triang_obj.getArcsFromAdjMat()
+    return arcs
+ +
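+
+# A minimal usage sketch (illustrative): triangulate a prepared Points object with k
+# nearest neighbours and drop arcs longer than 1 km; parameter values are examples.
+def exampleCreateArcs(point_obj: Points):
+    """Build the spatial arc network for a Points object."""
+    import logging
+
+    log = logging.getLogger("sarvey_example")
+    arcs = createArcsBetweenPoints(point_obj=point_obj, knn=30, max_arc_length=1000.0, logger=log)
+    print(arcs.shape)  # (num_arcs, 2): indices of the two points forming each arc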
+ +
+
+
+
+ +
+
+ + + + \ No newline at end of file diff --git a/doc/_modules/sarvey/sarvey_mask.html b/doc/_modules/sarvey/sarvey_mask.html new file mode 100644 index 0000000..b87bd4d --- /dev/null +++ b/doc/_modules/sarvey/sarvey_mask.html @@ -0,0 +1,757 @@ + + + + + + + sarvey.sarvey_mask — SARvey 1.0.0 documentation + + + + + + + + + + + + + +
+
+
+
+ +

Source code for sarvey.sarvey_mask

+#!/usr/bin/env python
+
+# SARvey - A multitemporal InSAR time series tool for the derivation of displacements.
+#
+# Copyright (C) 2021-2024 Andreas Piter (IPI Hannover, piter@ipi.uni-hannover.de)
+#
+# This software was developed together with FERN.Lab (fernlab@gfz-potsdam.de) in the context
+# of the SAR4Infra project with funds of the German Federal Ministry for Digital and
+# Transport and contributions from Landesamt fuer Vermessung und Geoinformation
+# Schleswig-Holstein and Landesbetrieb Strassenbau und Verkehr Schleswig-Holstein.
+#
+# This program is free software: you can redistribute it and/or modify it under
+# the terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# Important: This package uses PyMaxFlow. The core of PyMaxflows library is the C++
+# implementation by Vladimir Kolmogorov. It is also licensed under the GPL, but it REQUIRES that you
+# cite [BOYKOV04] (see LICENSE) in any resulting publication if you use this code for research purposes.
+# This requirement extends to SARvey.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with this program. If not, see <https://www.gnu.org/licenses/>.
+
+"""Generate mask from shape file."""
+import argparse
+import os
+import sys
+import time
+from os.path import join
+import PIL.Image as Image
+import PIL.ImageDraw as ImageDraw
+import matplotlib
+import matplotlib.pyplot as plt
+import numpy as np
+from scipy import spatial
+import logging
+from logging import Logger
+import geopandas as gpd
+
+from mintpy.utils import writefile, ptime, utils
+
+from sarvey.osm_utils import getSpatialExtend
+
+try:
+    matplotlib.use('TkAgg')
+except ImportError as e:
+    print(e)
+
+EXAMPLE = """Example:
+  sarvey_mask path/to/file.shp --geom ./geometryRadar.h5 --width 6 -o mask_infra.h5
+"""
+
+
+
+[docs] +def create_parser(): + """Create_parser.""" + parser = argparse.ArgumentParser( + description='Create transport infrastructure mask from shp-file.', + formatter_class=argparse.RawTextHelpFormatter, + epilog=EXAMPLE) + + parser.add_argument(dest='input_file', help='path to input shp-file.') + + parser.add_argument('-w', '--work_dir', dest='work_dir', default=None, + help='absolute path to working directory\n' + + '(default: current directory).') + + parser.add_argument('--geom', dest='geom_file', default=None, + help='path to existing geometryRadar.h5 file.') + + parser.add_argument('--width', dest='width', default=6, type=int, + help='Width of the mask in pixel (default: 6).') + + parser.add_argument('-o', dest='out_file_name', default='mask_infra.h5', + help="name of output file. (default: 'mask_infra.h5').") + + return parser
+ + + +
+[docs] +class Node: + """Define simple class for a node at a road (similar to overpy.Node).""" + + def __init__(self, *, lat: float = None, lon: float = None): + """Init.""" + self.lat = lat + self.lon = lon
+ + + +
+[docs] +class CoordinateSearch: + """CoordinateSearch.""" + + def __init__(self): + """Init.""" + self.search_tree = None + self.yidx = None + self.xidx = None + self.lon = None + self.lat = None + self.coord = None + +
+[docs] + def createSearchTree(self, *, coord: utils.coordinate, logger: Logger): + """Create search tree. + + Parameters + ---------- + coord: utils.coordinate + Coordinates + logger: Logger + Logging handler. + """ + self.coord = coord + logger.info(msg='create kd-tree for efficient search...') + + if self.coord.lut_y is None or self.coord.lut_x is None: + self.coord.open() + lat, lon = self.coord.read_lookup_table(print_msg=False) + self.lat = lat.ravel() + self.lon = lon.ravel() + + # create the 2D coordinate arrays for fast indexing + x = np.arange(self.coord.lut_x.shape[1]) + y = np.arange(self.coord.lut_x.shape[0]) + xx, yy = np.meshgrid(x, y) + self.xidx = xx.ravel() + self.yidx = yy.ravel() + + start_time = time.time() + self.search_tree = spatial.KDTree(data=np.array([self.lon, self.lat]).transpose()) + + logger.info(msg='... done.') + m, s = divmod(time.time() - start_time, 60) + logger.debug(msg='time used: {:02.0f} mins {:02.1f} secs.'.format(m, s))
+ + +
+[docs] + def getMeanDistanceBetweenPixels(self): + """Compute mean distance between adjacent pixels.""" + distances = self.search_tree.query([self.lon[0], self.lat[0]], k=10)[0] + mean_dist = np.mean(distances[1:]) + return mean_dist
+ + +
+[docs]
+    def getNearestNeighbour(self, *, node: Node):
+        """Query the kd-tree for the nearest neighbour.
+
+        Parameters
+        ----------
+        node: Node
+            Node object with lat/lon coordinates.
+        """
+        # find nearest neighbour
+        dist, idx = self.search_tree.query([node.lon, node.lat])
+        found_node = Node(lat=self.lat[idx], lon=self.lon[idx])
+        # return index of NN in radar coordinates
+        return dist, (self.yidx[idx], self.xidx[idx]), found_node
+
+ + + +
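+
+# A minimal usage sketch (illustrative): build the kd-tree from the radar geometry and
+# look up the pixel closest to a given lat/lon; file name and coordinates are examples.
+def exampleCoordinateSearch():
+    """Find the radar image pixel nearest to a geographic coordinate."""
+    import logging
+
+    log = logging.getLogger("sarvey_example")
+    _, _, _, coord, _ = getSpatialExtend(geom_file="geometryRadar.h5", logger=log)
+    csearch = CoordinateSearch()
+    csearch.createSearchTree(coord=coord, logger=log)
+    dist, (y, x), found_node = csearch.getNearestNeighbour(node=Node(lat=53.5, lon=10.0))
+    print(y, x, dist)  # row/column of the nearest pixel and its distance in degrees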
+[docs]
+def findLastRoadPixel(*, csearch: CoordinateSearch, cur_node: Node, prev_node: Node, dist_thrsh: float):
+    """Find the index of the last road pixel that is within the image extent.
+
+    Idea: the pixel with the shortest distance to the current node of a road is not necessarily on the road, if the
+    current node is outside the image extent. Split the road in further linear parts and find the last road pixel
+    recursively that is still inside the image.
+    Hint: all nodes are instances from class Node
+
+    Parameters
+    ----------
+    csearch: CoordinateSearch
+        Search tree for efficient spatial search of the coordinate of a pixel in the radar image.
+    cur_node: Node
+        Current node of the road that is outside the image extent.
+    prev_node: Node
+        Previous node of the road that is inside the image extent.
+    dist_thrsh: float
+        Distance threshold for stop criterion (derived from average distance between two pixels in the image).
+
+    Returns
+    -------
+    node_idx: tuple
+        Image coordinates (y, x) of the pixel which is the last pixel on the road inside the image.
+    """
+    # create a new node at half of the road distance between previous and current node
+    mid_lat = prev_node.lat + (cur_node.lat - prev_node.lat) / 2
+    mid_lon = prev_node.lon + (cur_node.lon - prev_node.lon) / 2
+    mid_node = Node(lat=mid_lat, lon=mid_lon)
+
+    dist, node_idx = csearch.getNearestNeighbour(node=mid_node)[0:2]
+    if dist < dist_thrsh:
+        return node_idx
+    else:
+        # midpoint is still outside the image: bisect between the previous (inside) node and the midpoint
+        node_idx = findLastRoadPixel(csearch=csearch, cur_node=mid_node, prev_node=prev_node, dist_thrsh=dist_thrsh)
+        return node_idx
+ + + +
+[docs]
+def euclDist(*, node1: Node, node2: Node):
+    """Compute the Euclidean distance between two nodes."""
+    return np.sqrt((node1.lat - node2.lat) ** 2 + (node1.lon - node2.lon) ** 2)
+ + + +
+[docs] +def computeLastRoadPixel(*, cur_node: Node, prev_node: Node, found_node: Node): + """Compute the location of the pixel at the border of the radar image that is part of the road. + + Parameters + ---------- + cur_node: Node + Current node of the road. + prev_node: Node + Previous node of the road. + found_node: Node + Found node of the road. + + Returns + ------- + new_lon: float + Longitude of the pixel at the border of the radar image that is part of the road. + new_lat: float + Latitude of the pixel at the border of the radar image that is part of the road. + """ + a = euclDist(node1=prev_node, node2=found_node) + b = euclDist(node1=cur_node, node2=found_node) + c = euclDist(node1=prev_node, node2=cur_node) + alpha = np.arccos((- a ** 2 + b ** 2 + c ** 2) / (2 * b * c)) + d = b / np.sin(np.pi / 2 - alpha) + new_lat = cur_node.lat + (prev_node.lat - cur_node.lat) / c * d + new_lon = cur_node.lon + (prev_node.lon - cur_node.lon) / c * d + return new_lon, new_lat
+ + + +
+[docs] +def convertToRadarCoordPolygon(*, gdf_infra: gpd.geodataframe, csearch: CoordinateSearch, logger: Logger): + """Convert Polygon to a mask in shape of radar image. + + Parameters + ---------- + gdf_infra: gpd.geodataframe + The queried infrastructures containing polygons. + csearch: CoordinateSearch + The coordinate search object. + logger: Logger + Logging handler. + + Returns + ------- + img_np: np.ndarray + Mask image. + """ + # create a new image + logger.info(msg='create mask image...') + img_pil = Image.new(mode="1", + size=(int(csearch.coord.src_metadata['LENGTH']), int(csearch.coord.src_metadata['WIDTH']))) + img_pil_draw = ImageDraw.Draw(im=img_pil) + + num_ways = gdf_infra.shape[0] + way_iter = 0 + prog_bar = ptime.progressBar(maxValue=num_ways) + + dist_thrsh = 1.3 * csearch.getMeanDistanceBetweenPixels() + lines = [geom.boundary.coords for geom in gdf_infra.geometry if geom is not None] + way_list = list() + for coo in lines: + way_list.append([Node(lat=point[1], lon=point[0]) for point in coo]) + + # plt.ion() + fig = plt.figure() + ax = fig.add_subplot() + ax.set_xlabel("lon") + ax.set_ylabel("lat") + lat, lon = csearch.coord.read_lookup_table(print_msg=False) + ax.plot([lon[0, 0], lon[-1, 0]], [lat[0, 0], lat[-1, 0]], '-k') + ax.plot([lon[0, 0], lon[0, -1]], [lat[0, 0], lat[0, -1]], '-k') + ax.plot([lon[0, -1], lon[-1, -1]], [lat[0, -1], lat[-1, -1]], '-k') + ax.plot([lon[-1, 0], lon[-1, -1]], [lat[-1, 0], lat[-1, -1]], '-k') + # ax.plot(lon.ravel(), lat.ravel(), '.k', markersize=0.5) + + while way_iter < num_ways: + way = way_list[way_iter] + poly_line_way = [] + + # perform a preliminary search to check if polygon is partly outside image extend + outside = np.zeros(len(way)) + for i in range(len(way)): + cur_node = way[i] + + # convert node coordinates (lat, lon) to image coordinates + dist, _, _ = csearch.getNearestNeighbour(node=cur_node) + + # check if node is outside the image + if dist > dist_thrsh: + outside[i] = 1 + + if np.sum(outside) == 0: # all road nodes inside image extend + for i in range(len(way)): + cur_node = way[i] + + # convert node coordinates (lat, lon) to image coordinates + dist, node_idx, found_node = csearch.getNearestNeighbour(node=cur_node) + + # Fill list of current way with node coordinates + poly_line_way.append(node_idx) + ax.plot(cur_node.lon, cur_node.lat, '*k') + ax.plot(found_node.lon, found_node.lat, 'ok') + + else: # some polygon nodes outside image extend + if np.sum(outside) == outside.size: # all nodes outside, skip + way_iter += 1 + continue + + # polygon nodes partly inside and partly outside + prev_p = outside[-2] == 1 # last point == first point (due to closed polygon). Select second last. 
+ for i in range(outside.shape[0]): + cur_p = outside[i] == 1 + cur_node = way[i] + + # convert node coordinates (lat, lon) to image coordinates + dist, node_idx, found_node = csearch.getNearestNeighbour(node=cur_node) + + # check if transition happens + # yes: check if current point is inside or outside + # if outside: find transition point, but do not add current point + # if inside: find transition point, then add current point + # no: if point inside: add point + # if point outside: skip point + + if not (prev_p == cur_p): # transition + stored_idx = None + if i - 1 < 0: + prev_node = way[-2] + else: + prev_node = way[i - 1] + + if cur_p: # transition: in -> out + # find transition point, but do not add current point + ax.plot(cur_node.lon, cur_node.lat, '*y') + + if prev_p: # transition: out -> in + # find and add transition point, then add current point. + stored_idx = node_idx # store current point for adding it later. + ax.plot(cur_node.lon, cur_node.lat, '*r') # plot now, because variables will be overwritten + # the 'found_node' has to be computed from the last point outside, i.e. from 'prev_node' + ax.plot(found_node.lon, found_node.lat, 'or') + _, _, found_node = csearch.getNearestNeighbour(node=prev_node) + + new_lon, new_lat = computeLastRoadPixel( + cur_node=cur_node, + prev_node=prev_node, + found_node=found_node + ) + + dist, node_idx, found_node = csearch.getNearestNeighbour(node=Node(lon=new_lon, lat=new_lat)) + ax.plot(cur_node.lon, cur_node.lat, '*b') + ax.plot(found_node.lon, found_node.lat, 'ob') + ax.plot(new_lon, new_lat, '+b') + + # add the transition point + poly_line_way.append(node_idx) + if prev_p: # transition: out -> in + # transition point found and added, now add stored current point. + poly_line_way.append(stored_idx) + prev_p = cur_p # prepare for next iteration + + elif cur_p: # no transition, current point is outside -> do not add point + ax.plot(cur_node.lon, cur_node.lat, '*y') + prev_p = cur_p # prepare for next iteration + + else: # no transition, current points is inside -> add point + ax.plot(cur_node.lon, cur_node.lat, '*r') + ax.plot(found_node.lon, found_node.lat, 'or') + poly_line_way.append(node_idx) + prev_p = cur_p # prepare for next iteration + + prog_bar.update(value=way_iter + 1, every=10, suffix='{}/{} polygons'.format(way_iter + 1, num_ways)) + + # if first point is outside image, the polygon will not be closed. However, it still works to create a polygon. + img_pil_draw.polygon(poly_line_way, fill=255) + # plt.figure() + # plt.imshow(np.array(img_pil.getdata()).reshape(img_pil.size[1], img_pil.size[0]).astype(int)) + + way_iter += 1 + + img_np = np.array(img_pil.getdata()).reshape(img_pil.size[1], img_pil.size[0]).astype(int) + return img_np
+ + + +
+[docs]
+def convertToRadarCoord(*, gdf_infra: gpd.geodataframe, csearch: CoordinateSearch, width: int, logger: Logger):
+    """Convert Polyline to a mask in shape of radar image. Apply a buffer of size 'width' in pixels.
+
+    Parameters
+    ----------
+    gdf_infra: gpd.geodataframe
+        The queried infrastructures containing line geometries.
+    csearch: CoordinateSearch
+        The coordinate search object.
+    width: int
+        Width of the mask in pixel.
+    logger: Logger
+        Logging handler.
+
+    Returns
+    -------
+    img_np: np.ndarray
+        Mask image.
+    """
+    # create a new image
+    logger.info(msg='create mask image...')
+    img_pil = Image.new(mode="1",
+                        size=(int(csearch.coord.src_metadata['LENGTH']), int(csearch.coord.src_metadata['WIDTH'])))
+    img_pil_draw = ImageDraw.Draw(im=img_pil)
+
+    num_roads = gdf_infra.shape[0]
+    prog_bar = ptime.progressBar(maxValue=num_roads)
+
+    dist_thrsh = 1.3 * csearch.getMeanDistanceBetweenPixels()
+    lines = [ls.coords for ls in gdf_infra.geometry if ls is not None]  # enables to append to list
+    way_list = list()
+    for coo in lines:
+        way_list.append([Node(lat=point[1], lon=point[0]) for point in coo])
+
+    num_ways = len(way_list)  # changes during iteration
+    way_iter = 0
+
+    # plt.ion()
+    fig = plt.figure()
+    ax = fig.add_subplot()
+    ax.set_xlabel("lon")
+    ax.set_ylabel("lat")
+    lat, lon = csearch.coord.read_lookup_table(print_msg=False)
+    ax.plot([lon[0, 0], lon[-1, 0]], [lat[0, 0], lat[-1, 0]], '-k')
+    ax.plot([lon[0, 0], lon[0, -1]], [lat[0, 0], lat[0, -1]], '-k')
+    ax.plot([lon[0, -1], lon[-1, -1]], [lat[0, -1], lat[-1, -1]], '-k')
+    ax.plot([lon[-1, 0], lon[-1, -1]], [lat[-1, 0], lat[-1, -1]], '-k')
+    # ax.plot(lon.ravel(), lat.ravel(), '.k', markersize=0.5)
+
+    while way_iter < num_ways:
+        way = way_list[way_iter]
+        poly_line_way = []
+
+        # perform a preliminary search to check if road is partly outside image extent
+        outside = np.zeros(len(way))
+        for i in range(len(way)):
+            cur_node = way[i]
+
+            # convert node coordinates (lat, lon) to image coordinates
+            dist, _, _ = csearch.getNearestNeighbour(node=cur_node)
+
+            # check if node is outside the image
+            if dist > dist_thrsh:
+                outside[i] = 1
+
+        if np.sum(outside) == 0:  # all road nodes inside image extent
+            for i in range(len(way)):
+                cur_node = way[i]
+
+                # convert node coordinates (lat, lon) to image coordinates
+                dist, node_idx, found_node = csearch.getNearestNeighbour(node=cur_node)
+
+                # Fill list of current way with node coordinates
+                poly_line_way.append(node_idx)
+                ax.plot(cur_node.lon, cur_node.lat, '*k')
+                ax.plot(found_node.lon, found_node.lat, 'ok')
+
+        else:  # some road nodes outside image extent
+            if np.sum(outside) == outside.size:  # all nodes outside, skip
+                way_iter += 1
+                continue
+            # split the way into sub parts based on in-out / out-in transition
+            # find first node inside the image
+            first_inside_idx = np.where(outside == 0)[0][0]
+            if first_inside_idx > 0:  # this is a transition into the image
+                start_idx = first_inside_idx - 1
+            else:
+                start_idx = first_inside_idx
+
+            # find first node which is again outside the image
+            outside_idx = np.where(outside[first_inside_idx:] == 1)[0]
+            if outside_idx.size == 0:  # no more transition to outside the image
+                stop_idx = len(way)
+            else:
+                stop_idx = outside_idx[0] + first_inside_idx + 1
+                if stop_idx != len(way):  # split the current way and add a new way at the end of the way_list
+                    # to handle it later
+                    way_list.append(way[stop_idx:])
+                    num_ways += 1
+
+            for i in range(start_idx, stop_idx):
+                cur_node = way[i]
+
+                # convert node coordinates (lat, lon) to image coordinates
+                dist, node_idx, found_node = csearch.getNearestNeighbour(node=cur_node)
+
+                if dist > dist_thrsh:
+                    if i == start_idx:  # there is no previous node, but a next node.
+                        prev_node = way[i + 1]
+                    else:
+                        prev_node = way[i - 1]
+                    new_lon, new_lat = computeLastRoadPixel(cur_node=cur_node, prev_node=prev_node,
+                                                            found_node=found_node)
+                    dist, node_idx, found_node = csearch.getNearestNeighbour(node=Node(lon=new_lon, lat=new_lat))
+                    ax.plot(cur_node.lon, cur_node.lat, '*b')
+                    ax.plot(found_node.lon, found_node.lat, 'ob')
+                    ax.plot(new_lon, new_lat, '+b')
+                else:
+                    ax.plot(cur_node.lon, cur_node.lat, '*r')
+                    ax.plot(found_node.lon, found_node.lat, 'or')
+                # Fill list of current way with node coordinates
+                poly_line_way.append(node_idx)
+
+        prog_bar.update(value=way_iter + 1, every=10, suffix='{}/{} road segments'.format(way_iter + 1, num_roads))
+
+        img_pil_draw.line(poly_line_way, fill=255, width=width)
+        # img_pil_draw.polygon(poly_line_way, fill=255)
+
+        way_iter += 1
+
+    img_np = np.array(img_pil.getdata()).reshape(img_pil.size[1], img_pil.size[0]).astype(int)
+    return img_np
+ + + +
+[docs]
+def saveMask(*, work_dir: str, mask: np.ndarray, atr: dict, out_file_name: str):
+    """Save the mask to an HDF5 file in the working directory.
+
+    Parameters
+    ----------
+    work_dir: str
+        Working directory.
+    mask: np.ndarray
+        Mask image.
+    atr: dict
+        Metadata, e.g. from the geometryRadar.h5 file.
+    out_file_name: str
+        Output file name.
+    """
+    # create the right attributes
+    ds_dict = dict()
+    ds_dict['mask'] = mask.transpose().astype('float32')
+    atr["FILE_TYPE"] = "mask"
+
+    writefile.write(datasetDict=ds_dict, out_file=os.path.join(work_dir, out_file_name), metadata=atr)
+ + + +
+[docs]
+def createMask(*, input_file: str, width: int, work_dir: str, out_file_name: str, geom_file: str,
+               logger: logging.Logger):
+    """Create a mask for the radar image from a shapefile containing lines or polygons.
+
+    Parameters
+    ----------
+    input_file: str
+        Path to input file.
+    width: int
+        Width of the mask in pixel. Applied to the lines only.
+    work_dir: str
+        Working directory.
+    out_file_name: str
+        Output file name.
+    geom_file: str
+        Path to geometryRadar.h5 file.
+    logger: logging.Logger
+        Logging handler.
+    """
+    logger.info(msg="Start creating mask file based on openstreetmap data.")
+
+    # get bounding box
+    _, _, _, coord, atr = getSpatialExtend(geom_file=geom_file, logger=logger)
+
+    # create search tree
+    csearch = CoordinateSearch()
+    csearch.createSearchTree(coord=coord, logger=logger)
+
+    logger.info(f"Read from input file: {input_file}")
+    gdf_infra = gpd.read_file(input_file)
+
+    if gdf_infra.geometry[0].geom_type == "LineString":
+        mask_img = convertToRadarCoord(gdf_infra=gdf_infra, csearch=csearch, width=width, logger=logger)
+
+    elif gdf_infra.geometry[0].geom_type == "Polygon":
+        # polygons are filled completely, so no buffer width is needed here
+        mask_img = convertToRadarCoordPolygon(gdf_infra=gdf_infra, csearch=csearch, logger=logger)
+    else:
+        logger.error(msg=f"Geometry type is {gdf_infra.geometry[0].geom_type}. "
+                         f"Only 'LineString' and 'Polygon' supported!")
+        raise TypeError
+
+    if '.h5' not in out_file_name:
+        out_file_name += ".h5"
+    saveMask(work_dir=work_dir, mask=mask_img, atr=atr, out_file_name=out_file_name)
+
+    logger.info(msg="Masking finished.")
+ + + +
+[docs] +def main(iargs=None): + """Create mask from lines or polygons given in geographic coordinates (EPSG:4326). Input as shp or gpkg.""" + # check input + parser = create_parser() + inps = parser.parse_args(args=iargs) + + # initiate logger + logging_level = logging.getLevelName('DEBUG') + + log_format = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s') + logger = logging.getLogger(__name__) + + current_datetime = time.strftime("%Y-%m-%d-%H-%M-%S", time.localtime()) + log_filename = f"sarvey_mask_{current_datetime}.log" + if not os.path.exists(os.path.join(os.getcwd(), "logfiles")): + os.mkdir(os.path.join(os.getcwd(), "logfiles")) + file_handler = logging.FileHandler(filename=os.path.join(os.getcwd(), "logfiles", log_filename)) + file_handler.setFormatter(log_format) + logger.addHandler(file_handler) + + console_handler = logging.StreamHandler(sys.stdout) + console_handler.setFormatter(log_format) + logger.addHandler(console_handler) + logger.setLevel(logging_level) + + if inps.work_dir is None: + work_dir = os.getcwd() + else: + work_dir = inps.work_dir + if not os.path.exists(path=work_dir): + logger.info(msg='create output folder: ' + work_dir) + os.mkdir(path=work_dir) + logger.info(msg='working directory: {}'.format(work_dir)) + + input_file = join(work_dir, inps.input_file) + out_file_name = join(work_dir, inps.out_file_name) + + createMask( + input_file=input_file, + width=inps.width, + work_dir=work_dir, + out_file_name=out_file_name, + logger=logger, + geom_file=inps.geom_file + )
+ + + +if __name__ == '__main__': + main() +
+ +
+
+
+
+ +
+
+ + + + \ No newline at end of file diff --git a/doc/_modules/sarvey/sarvey_osm.html b/doc/_modules/sarvey/sarvey_osm.html new file mode 100644 index 0000000..d9f9917 --- /dev/null +++ b/doc/_modules/sarvey/sarvey_osm.html @@ -0,0 +1,310 @@ + + + + + + + sarvey.sarvey_osm — SARvey 1.0.0 documentation + + + + + + + + + + + + + +

Source code for sarvey.sarvey_osm

+#!/usr/bin/env python
+
+# SARvey - A multitemporal InSAR time series tool for the derivation of displacements.
+#
+# Copyright (C) 2021-2024 Andreas Piter (IPI Hannover, piter@ipi.uni-hannover.de)
+#
+# This software was developed together with FERN.Lab (fernlab@gfz-potsdam.de) in the context
+# of the SAR4Infra project with funds of the German Federal Ministry for Digital and
+# Transport and contributions from Landesamt fuer Vermessung und Geoinformation
+# Schleswig-Holstein and Landesbetrieb Strassenbau und Verkehr Schleswig-Holstein.
+#
+# This program is free software: you can redistribute it and/or modify it under
+# the terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# Important: This package uses PyMaxFlow. The core of PyMaxflows library is the C++
+# implementation by Vladimir Kolmogorov. It is also licensed under the GPL, but it REQUIRES that you
+# cite [BOYKOV04] (see LICENSE) in any resulting publication if you use this code for research purposes.
+# This requirement extends to SARvey.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with this program. If not, see <https://www.gnu.org/licenses/>.
+
+"""Download openstreetmap data for area of interest."""
+import argparse
+import logging
+import os
+import sys
+import time
+from os.path import join
+import geopandas as gpd
+from shapely import ops, Point
+import matplotlib
+
+from sarvey.osm_utils import runOsmQueryBridge, runOsmQuery, getSpatialExtend
+
+try:
+    matplotlib.use('TkAgg')
+except ImportError as e:
+    print(e)
+
+
+EXAMPLE = """Example:
+  sarvey_osm --geom ./geometryRadar.h5 --railway                       # download railway
+  sarvey_osm --geom ./geometryRadar.h5 --highway                       # download highway
+  sarvey_osm --geom ./geometryRadar.h5 --railway --bridge              # download railway bridge
+  sarvey_osm --geom ./geometryRadar.h5 --railway -o mask_railway.shp   # specify output path
+"""
+
+
+
+[docs] +def create_parser(): + """Create_parser.""" + parser = argparse.ArgumentParser( + description='Download transport infrastructure information from openstreetmap and store as shp-file.', + formatter_class=argparse.RawTextHelpFormatter, + epilog=EXAMPLE) + + parser.add_argument('-w', '--work_dir', dest='work_dir', default=None, + help='absolute path to working directory\n' + + '(default: current directory).') + + parser.add_argument('--geom', dest='geom_file', default=None, + help='path to existing geometryRadar.h5 file') + + parser.add_argument('--railway', dest='railway', action="store_true", default=False, + help='Set true to query railways.') + + parser.add_argument('--highway', dest='highway', action="store_true", default=False, + help='Set true to query highways.') + + parser.add_argument('--bridge', dest='bridge', action="store_true", default=False, + help='Set true to mask bridges.\n' + + 'If --railway or --highway set true, only railway/highway bridges are queried.') + + parser.add_argument('-o', dest='out_file_name', default='osm_infra.shp', + help="name of output file. (default: 'osm_infra.shp')") + + return parser
+ + + +
+[docs]
+def downloadOSM(*, railway: bool, highway: bool, bridge: bool,
+                work_dir: str, out_file_name: str, logger: logging.Logger, geom_file: str):
+    """Download openstreetmap data and store to file.
+
+    Parameters
+    ----------
+    railway: bool
+        Download railway data.
+    highway: bool
+        Download highway data.
+    bridge: bool
+        Download bridge data.
+    work_dir: str
+        Working directory.
+    out_file_name: str
+        Output file name.
+    logger: logging.Logger
+        Logging handler.
+    geom_file: str
+        Path to geometryRadar.h5 file.
+    """
+    logger.info(msg="Start creating mask file based on openstreetmap data.")
+
+    # get bounding box
+    ll_bbox, ur_bbox, img_ext, coord, atr = getSpatialExtend(geom_file=geom_file, logger=logger)
+
+    # store image extent
+    gdf = gpd.GeoDataFrame({"geometry": gpd.geoseries.GeoSeries(img_ext)})
+    gdf = gdf.dissolve().convex_hull
+    gdf.to_file(join(work_dir, "img_extend.gpkg"))
+
+    # store bounding box
+    bbox_points = [
+        Point(ll_bbox[1], ll_bbox[0]),
+        Point(ur_bbox[1], ll_bbox[0]),
+        Point(ur_bbox[1], ur_bbox[0]),
+        Point(ll_bbox[1], ur_bbox[0])
+    ]
+
+    gdf = gpd.GeoDataFrame({"geometry": gpd.geoseries.GeoSeries(bbox_points)})
+    gdf = gdf.dissolve().convex_hull
+    gdf.to_file(join(work_dir, "bounding_box.gpkg"))
+
+    if (not railway) and (not highway) and (not bridge):
+        logger.error(msg="No infrastructure type was specified.")
+        return
+
+    if bridge:
+        # get requested OSM layer
+        query_result = runOsmQueryBridge(
+            ll_corner_wgs=ll_bbox, ur_corner_wgs=ur_bbox,
+            bridge_highway=highway, bridge_railway=railway,
+            logger=logger
+        )
+    else:
+        type_list = list()
+        if railway:
+            type_list += ["rail"]
+        if highway:
+            type_list += ["motorway", "motorway_link", "trunk", "trunk_link"]
+
+        # get requested OSM layer
+        query_result = runOsmQuery(ll_corner_wgs=ll_bbox, ur_corner_wgs=ur_bbox,
+                                   type_list=type_list, logger=logger)
+
+    multi_line_list = list()
+    for way in query_result.ways:
+        if "area" in way.tags:
+            if way.tags["area"] == "yes":
+                logger.info('Area flag is true')
+                continue
+        else:
+            # keep coordinates in lat/lon. They are needed in the masking step.
+            coord = [[float(way.nodes[i].lon), float(way.nodes[i].lat)] for i in range(len(way.nodes))]
+            multi_line_list.append(coord)
+
+    # merge all road segments
+    merged_road = list(ops.linemerge(multi_line_list).geoms)
+    gdf = gpd.GeoDataFrame({"geometry": gpd.GeoSeries(merged_road)})
+    # gdf = gdf.set_crs(crs=utm_crs)  # set appropriate CRS
+    # todo: add attributes if required
+
+    # todo: check ending of output file name
+    gdf.to_file(join(work_dir, out_file_name))
+    logger.info(msg="OSM download finished.")
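A minimal usage sketch (not part of the module source; assumes a configured logger and an existing geometryRadar.h5 file):

    downloadOSM(
        railway=True,                   # query railway lines
        highway=False,
        bridge=False,
        work_dir=".",
        out_file_name="osm_infra.shp",  # default name from create_parser()
        logger=logger,
        geom_file="./geometryRadar.h5",
    )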
+ + + +
+[docs] +def main(iargs=None): + """Download openstreetmap data and store to file.""" + # check input + parser = create_parser() + inps = parser.parse_args(args=iargs) + + # initiate logger + logging_level = logging.getLevelName('DEBUG') + + log_format = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s') + logger = logging.getLogger(__name__) + + current_datetime = time.strftime("%Y-%m-%d-%H-%M-%S", time.localtime()) + log_filename = f"sarvey_osm_{current_datetime}.log" + if not os.path.exists(os.path.join(os.getcwd(), "logfiles")): + os.mkdir(os.path.join(os.getcwd(), "logfiles")) + file_handler = logging.FileHandler(filename=os.path.join(os.getcwd(), "logfiles", log_filename)) + file_handler.setFormatter(log_format) + logger.addHandler(file_handler) + + console_handler = logging.StreamHandler(sys.stdout) + console_handler.setFormatter(log_format) + logger.addHandler(console_handler) + logger.setLevel(logging_level) + + if inps.work_dir is None: + work_dir = os.getcwd() + else: + work_dir = inps.work_dir + if not os.path.exists(path=work_dir): + logger.info(msg='create output folder: ' + work_dir) + os.mkdir(path=work_dir) + logger.info(msg='working directory: {}'.format(work_dir)) + + downloadOSM( + railway=inps.railway, + highway=inps.highway, + bridge=inps.bridge, + work_dir=work_dir, + out_file_name=inps.out_file_name, + logger=logger, + geom_file=inps.geom_file + )
+ + + +if __name__ == '__main__': + main() +
+ + + + \ No newline at end of file diff --git a/doc/_modules/sarvey/triangulation.html b/doc/_modules/sarvey/triangulation.html new file mode 100644 index 0000000..bc47975 --- /dev/null +++ b/doc/_modules/sarvey/triangulation.html @@ -0,0 +1,243 @@ + + + + + + + sarvey.triangulation — SARvey 1.0.0 documentation + + + + + + + + + + + + + +

Source code for sarvey.triangulation

+#!/usr/bin/env python
+
+# SARvey - A multitemporal InSAR time series tool for the derivation of displacements.
+#
+# Copyright (C) 2021-2024 Andreas Piter (IPI Hannover, piter@ipi.uni-hannover.de)
+#
+# This software was developed together with FERN.Lab (fernlab@gfz-potsdam.de) in the context
+# of the SAR4Infra project with funds of the German Federal Ministry for Digital and
+# Transport and contributions from Landesamt fuer Vermessung und Geoinformation
+# Schleswig-Holstein and Landesbetrieb Strassenbau und Verkehr Schleswig-Holstein.
+#
+# This program is free software: you can redistribute it and/or modify it under
+# the terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# Important: This package uses PyMaxFlow. The core of PyMaxflows library is the C++
+# implementation by Vladimir Kolmogorov. It is also licensed under the GPL, but it REQUIRES that you
+# cite [BOYKOV04] (see LICENSE) in any resulting publication if you use this code for research purposes.
+# This requirement extends to SARvey.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with this program. If not, see <https://www.gnu.org/licenses/>.
+
+"""Triangulation module for SARvey."""
+import time
+from typing import Optional
+import numpy as np
+from scipy.spatial import Delaunay, distance_matrix, KDTree
+from scipy.sparse import lil_matrix, csr_matrix
+from scipy.sparse.csgraph import connected_components
+from logging import Logger
+
+from mintpy.utils import ptime
+
+
+
+[docs] +class PointNetworkTriangulation: + """PointNetworkTriangulation.""" + + def __init__(self, *, coord_xy: np.ndarray, coord_utmxy: Optional[np.ndarray], logger: Logger): + """Triangulate points in space based on distance. + + Parameters + ---------- + coord_xy: np.ndarray + Radar coordinates of the points. + coord_utmxy: np.ndarray + UTM coordinates of the points. + logger: Logger + Logging handler. + """ + self.coord_xy = coord_xy + num_points = self.coord_xy.shape[0] + self.logger = logger + + # create sparse matrix with dim (num_points x num_points), add 1 if connected. + # create network afterwards once. reduces time. + self.adj_mat = lil_matrix((num_points, num_points), dtype=np.bool_) + + if coord_utmxy is not None: + logger.info(msg="create distance matrix between all points...") + self.dist_mat = distance_matrix(coord_utmxy, coord_utmxy) + # todo: check out alternatives: + # scipy.spatial.KDTree.sparse_distance_matrix + else: # if only global delaunay shall be computed without memory issues + self.dist_mat = None + +
+[docs] + def getArcsFromAdjMat(self): + """Convert the adjacency matrix into a list of arcs. + + Returns + ------- + arcs: np.ndarray + List of arcs with indices of the start and end point. + """ + a = self.adj_mat.copy() + # copy entries from lower to upper triangular matrix + b = (a + a.T) + # remove entries from diagonal and lower part of matrix + arc_tmp = [[i, b.indices[b.indptr[i]:b.indptr[i + 1]]] for i in range(b.shape[0])] + arc_tmp = [[s, e_list[np.where(e_list < s)[0]]] for s, e_list in arc_tmp] + + arcs = list() + for s, e_list in arc_tmp: + for e in e_list: + arcs.append([s, e]) + arcs = np.array(arcs) + return arcs
+ + +
+[docs]
+    def removeLongArcs(self, *, max_dist: float):
+        """Remove arcs from network which are longer than the given threshold.
+
+        Parameters
+        ----------
+        max_dist: float
+            Distance threshold on arc length in [m].
+        """
+        mask = self.dist_mat > max_dist
+        self.adj_mat[mask] = False
+ + +
+[docs] + def isConnected(self): + """Check if the network is connected.""" + n_components = connected_components(csgraph=csr_matrix(self.adj_mat), directed=False, return_labels=False) + if n_components == 1: + return True + else: + return False
+ + +
+[docs] + def triangulateGlobal(self): + """Connect the points with a GLOBAL delaunay triangulation.""" + self.logger.info(msg="Triangulate points with global delaunay.") + + network = Delaunay(points=self.coord_xy) + for p1, p2, p3 in network.simplices: + self.adj_mat[p1, p2] = True + self.adj_mat[p1, p3] = True + self.adj_mat[p2, p3] = True
+ + +
+[docs]
+    def triangulateKnn(self, *, k: int):
+        """Connect points to their k nearest neighbours."""
+        self.logger.info(msg="Triangulate points with {}-nearest neighbours.".format(k))
+        num_points = self.coord_xy.shape[0]
+        prog_bar = ptime.progressBar(maxValue=num_points)
+        start_time = time.time()
+        count = 0
+        tree = KDTree(data=self.coord_xy)
+
+        if k > num_points:
+            k = num_points
+            self.logger.info(msg="k > number of points. Connect all points with each other.")
+        for p1 in range(num_points):
+            idx = tree.query(self.coord_xy[p1, :], k)[1]
+            self.adj_mat[p1, idx] = True
+            count += 1
+            prog_bar.update(value=count, every=max(1, num_points // 250),
+                            suffix='{}/{} points triangulated'.format(count, num_points))
+        prog_bar.close()
+        m, s = divmod(time.time() - start_time, 60)
+        self.logger.debug(msg='time used: {:02.0f} mins {:02.1f} secs.'.format(m, s))
+
+ +
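A short sketch of the intended workflow of the class above (synthetic coordinates; the UTM coordinates are placeholders, and a basic logger is used):

    import logging
    import numpy as np

    logging.basicConfig(level=logging.INFO)
    logger = logging.getLogger("triangulation_example")

    rng = np.random.default_rng(seed=0)
    coord_xy = rng.uniform(0, 1000, size=(50, 2))  # radar coordinates
    coord_utm = coord_xy * 5.0                     # hypothetical UTM coordinates in metres

    tri = PointNetworkTriangulation(coord_xy=coord_xy, coord_utmxy=coord_utm, logger=logger)
    tri.triangulateGlobal()               # global Delaunay network
    tri.removeLongArcs(max_dist=500.0)    # drop arcs longer than 500 m
    if not tri.isConnected():
        tri.triangulateKnn(k=5)           # densify if the network became disconnected
    arcs = tri.getArcsFromAdjMat()        # (num_arcs, 2) start/end point indices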
+ + + + \ No newline at end of file diff --git a/doc/_modules/sarvey/unwrapping.html b/doc/_modules/sarvey/unwrapping.html new file mode 100644 index 0000000..4f9984c --- /dev/null +++ b/doc/_modules/sarvey/unwrapping.html @@ -0,0 +1,1174 @@ + + + + + + + sarvey.unwrapping — SARvey 1.0.0 documentation + + + + + + + + + + + + + +

Source code for sarvey.unwrapping

+#!/usr/bin/env python
+
+# SARvey - A multitemporal InSAR time series tool for the derivation of displacements.
+#
+# Copyright (C) 2021-2024 Andreas Piter (IPI Hannover, piter@ipi.uni-hannover.de)
+#
+# This software was developed together with FERN.Lab (fernlab@gfz-potsdam.de) in the context
+# of the SAR4Infra project with funds of the German Federal Ministry for Digital and
+# Transport and contributions from Landesamt fuer Vermessung und Geoinformation
+# Schleswig-Holstein and Landesbetrieb Strassenbau und Verkehr Schleswig-Holstein.
+#
+# This program is free software: you can redistribute it and/or modify it under
+# the terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# Important: This package uses PyMaxFlow. The core of PyMaxflows library is the C++
+# implementation by Vladimir Kolmogorov. It is also licensed under the GPL, but it REQUIRES that you
+# cite [BOYKOV04] (see LICENSE) in any resulting publication if you use this code for research purposes.
+# This requirement extends to SARvey.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with this program. If not, see <https://www.gnu.org/licenses/>.
+
+"""Unwrapping module for SARvey."""
+import multiprocessing
+from os.path import join, dirname
+import time
+from typing import Union
+
+import matplotlib.pyplot as plt
+import numpy as np
+from kamui import unwrap_arbitrary
+from scipy.sparse import csr_matrix
+from scipy.sparse.csgraph import structural_rank
+from scipy.sparse.linalg import lsqr
+from scipy.optimize import minimize
+from logging import Logger
+
+from mintpy.utils import ptime
+
+import sarvey.utils as ut
+from sarvey.ifg_network import IfgNetwork
+from sarvey.objects import Network, NetworkParameter, AmplitudeImage
+
+
+
+[docs]
+def objFuncTemporalCoherence(x, *args):
+    """Compute temporal coherence from parameters and phase. To be used as objective function for optimization.
+
+    Parameters
+    ----------
+    x: np.ndarray
+        Current solution (DEM error, velocity), scaled to [-1, 1].
+    args: tuple
+        Additional arguments: (design_mat, obs_phase, scale_vel, scale_demerr).
+
+    Returns
+    -------
+    1 - gamma: float
+    """
+    (design_mat, obs_phase, scale_vel, scale_demerr) = args
+
+    # equalize the gradients in both directions (without modifying x in place)
+    x = x * np.array([scale_demerr, scale_vel])
+
+    pred_phase = np.matmul(design_mat, x)
+    res = (obs_phase - pred_phase.T).ravel()
+    gamma = np.abs(np.mean(np.exp(1j * res)))
+    return 1 - gamma
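The coherence measure at the heart of this objective is gamma = |mean(exp(1j * res))|: residuals clustered near zero give gamma close to 1, uniformly random residuals give gamma close to 0. A self-contained sketch with synthetic residual phases:

    import numpy as np

    rng = np.random.default_rng(seed=1)
    res_small = rng.normal(0.0, 0.1, size=1000)         # residual phase close to zero
    res_random = rng.uniform(-np.pi, np.pi, size=1000)  # completely random residual phase

    gamma_small = np.abs(np.mean(np.exp(1j * res_small)))    # ~0.995
    gamma_random = np.abs(np.mean(np.exp(1j * res_random)))  # close to 0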
+ + + +
+[docs]
+def gridSearchTemporalCoherence(*, demerr_grid: np.ndarray, vel_grid: np.ndarray, design_mat: np.ndarray,
+                                obs_phase: np.ndarray):
+    """Grid search which maximizes the temporal coherence as the objective function.
+
+    Parameters
+    ----------
+    demerr_grid: np.ndarray
+        Search space for the DEM error in a 2D grid.
+    vel_grid: np.ndarray
+        Search space for the velocity in a 2D grid.
+    design_mat: np.ndarray
+        Design matrix for estimating parameters from arc phase.
+    obs_phase: np.ndarray
+        Observed phase of the arc.
+
+    Returns
+    -------
+    demerr: float
+        Estimated DEM error.
+    vel: float
+        Estimated velocity.
+    gamma: np.ndarray
+        Temporal coherence evaluated over the whole search grid (same shape as demerr_grid).
+    """
+    demerr_grid_flat = demerr_grid.flatten()
+    vel_grid_flat = vel_grid.flatten()
+    gamma_flat = np.array(
+        [1 - objFuncTemporalCoherence(np.array([demerr_grid_flat[i], vel_grid_flat[i]]),
+                                      design_mat, obs_phase, 1, 1)
+         for i in range(demerr_grid_flat.shape[0])])
+    gamma = gamma_flat.reshape(demerr_grid.shape)
+    idx_max_gamma = np.argmax(gamma_flat)
+
+    return demerr_grid_flat[idx_max_gamma], vel_grid_flat[idx_max_gamma], gamma
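A sketch of building the search grids and running the function above. The design matrix and parameter values are synthetic (the two columns stand for the DEM-error and velocity terms), so the recovered optimum should sit at the injected parameters:

    import numpy as np

    num_ifgs = 30
    rng = np.random.default_rng(seed=2)
    design_mat = rng.normal(size=(num_ifgs, 2))          # synthetic design matrix

    true_x = np.array([5.0, 0.01])                       # DEM error [m], velocity [m/year]
    obs_phase = np.matmul(design_mat, true_x)            # noise-free arc phase

    demerr_grid, vel_grid = np.meshgrid(np.linspace(-20, 20, 81),
                                        np.linspace(-0.05, 0.05, 81))
    demerr, vel, gamma = gridSearchTemporalCoherence(
        demerr_grid=demerr_grid, vel_grid=vel_grid,
        design_mat=design_mat, obs_phase=obs_phase
    )
    # demerr ~ 5.0 and vel ~ 0.01 where gamma peaks (gamma = 1 for noise-free phase)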
+ + + +
+[docs]
+def findOptimum(*, obs_phase: np.ndarray, design_mat: np.ndarray, val_range: np.ndarray):
+    """Find the optimal value within a one-dimensional search space that fits the observed phase.
+
+    Parameters
+    ----------
+    obs_phase: np.ndarray
+        Observed phase of the arc.
+    design_mat: np.ndarray
+        Design matrix for estimating parameters from arc phase.
+    val_range: np.ndarray
+        Range of possible values for the solution. Can be either for DEM error or velocity.
+
+    Returns
+    -------
+    opt_val: float
+        Optimal value from val_range.
+    gamma: float
+        Temporal coherence at the optimum.
+    pred_phase: np.ndarray
+        Predicted phase at the optimum.
+    """
+    pred_phase = design_mat[:, np.newaxis] * val_range[np.newaxis, :]  # broadcasting
+    if len(obs_phase.shape) == 2:
+        # step densification
+        res = obs_phase[:, np.newaxis, :] - pred_phase.T
+        res = np.moveaxis(res, 0, 1)
+        res = res.reshape((pred_phase.shape[1], -1))  # combine residuals from all arcs
+    else:
+        # step consistency check
+        res = obs_phase - pred_phase.T
+
+    gamma = np.abs(np.mean(np.exp(1j * res), axis=1))
+    max_idx = np.argmax(gamma)
+    opt_val = val_range[max_idx]
+    return opt_val, gamma[max_idx], pred_phase[:, max_idx]
+ + + +
+[docs] +def oneDimSearchTemporalCoherence(*, demerr_range: np.ndarray, vel_range: np.ndarray, obs_phase: np.ndarray, + design_mat: np.ndarray): + """One dimensional search for maximum temporal coherence that fits the observed arc phase. + + Parameters + ---------- + demerr_range: np.ndarray + Search space for the DEM error in a 1D grid. + vel_range: np.ndarray + Search space for the velocity in a 1D grid. + design_mat: np.ndarray + Design matrix for estimating parameters from arc phase. + obs_phase: np.ndarray + Observed phase of the arc. + + Returns + ------- + demerr: float + vel: float + gamma: float + """ + demerr, gamma_demerr, pred_phase_demerr = findOptimum( + obs_phase=obs_phase, + design_mat=design_mat[:, 0], + val_range=demerr_range + ) + + vel, gamma_vel, pred_phase_vel = findOptimum( + obs_phase=obs_phase, + design_mat=design_mat[:, 1], + val_range=vel_range + ) + + if gamma_vel > gamma_demerr: + demerr, gamma_demerr, pred_phase_demerr = findOptimum( + obs_phase=obs_phase - pred_phase_vel, + design_mat=design_mat[:, 0], + val_range=demerr_range + ) + vel, gamma_vel, pred_phase_vel = findOptimum( + obs_phase=obs_phase - pred_phase_demerr, + design_mat=design_mat[:, 1], + val_range=vel_range + ) + else: + vel, gamma_vel, pred_phase_vel = findOptimum( + obs_phase=obs_phase - pred_phase_demerr, + design_mat=design_mat[:, 1], + val_range=vel_range + ) + demerr, gamma_demerr, pred_phase_demerr = findOptimum( + obs_phase=obs_phase - pred_phase_vel, + design_mat=design_mat[:, 0], + val_range=demerr_range + ) + + # improve initial estimate with gradient descent approach + scale_demerr = demerr_range.max() + scale_vel = vel_range.max() + + demerr, vel, gamma = gradientSearchTemporalCoherence( + scale_vel=scale_vel, + scale_demerr=scale_demerr, + obs_phase=obs_phase, + design_mat=design_mat, + x0=np.array([demerr / scale_demerr, + vel / scale_vel]).T + ) + + pred_phase = np.matmul(design_mat, np.array([demerr, vel])) + res = (obs_phase - pred_phase.T).ravel() + gamma = np.abs(np.mean(np.exp(1j * res))) + return demerr, vel, gamma
+ + + +
+[docs] +def gradientSearchTemporalCoherence(*, scale_vel: float, scale_demerr: float, obs_phase: np.ndarray, + design_mat: np.ndarray, x0: np.ndarray): + """GradientSearchTemporalCoherence. + + Parameters + ---------- + scale_demerr: float + Scaling factor for DEM error to equalize the axis of the search space. + scale_vel: float + Scaling factor for velocity to equalize the axis of the search space. + design_mat: np.ndarray + Design matrix for estimating parameters from arc phase. + obs_phase: np.ndarray + Observed phase of the arc. + x0: np.ndarray + Initial values for optimization. + + Returns + ------- + demerr: float + vel: float + gamma: float + """ + opt_res = minimize( + objFuncTemporalCoherence, + x0, + args=(design_mat, obs_phase, scale_vel, scale_demerr), + bounds=((-1, 1), (-1, 1)), + method='L-BFGS-B' + ) + gamma = 1 - opt_res.fun + demerr = opt_res.x[0] * scale_demerr + vel = opt_res.x[1] * scale_vel + return demerr, vel, gamma
+ + + +
+[docs] +def launchAmbiguityFunctionSearch(parameters: tuple): + """Wrap for launching ambiguity function for temporal unwrapping in parallel. + + Parameters + ---------- + parameters: tuple + Arguments for temporal unwrapping in parallel. + + Returns + ------- + arc_idx_range: np.ndarray + demerr: np.ndarray + vel: np.ndarray + gamma: np.ndarray + """ + (arc_idx_range, num_arcs, phase, slant_range, loc_inc, ifg_net_obj, wavelength, velocity_bound, demerr_bound, + num_samples) = parameters + + demerr = np.zeros((num_arcs, 1), dtype=np.float32) + vel = np.zeros((num_arcs, 1), dtype=np.float32) + gamma = np.zeros((num_arcs, 1), dtype=np.float32) + + design_mat = np.zeros((ifg_net_obj.num_ifgs, 2), dtype=np.float32) + + demerr_range = np.linspace(-demerr_bound, demerr_bound, num_samples) + vel_range = np.linspace(-velocity_bound, velocity_bound, num_samples) + + # prog_bar = ptime.progressBar(maxValue=num_arcs) + + factor = 4 * np.pi / wavelength + + for k in range(num_arcs): + design_mat[:, 0] = factor * ifg_net_obj.pbase_ifg / (slant_range[k] * np.sin(loc_inc[k])) + design_mat[:, 1] = factor * ifg_net_obj.tbase_ifg + + demerr[k], vel[k], gamma[k] = oneDimSearchTemporalCoherence( + demerr_range=demerr_range, + vel_range=vel_range, + obs_phase=phase[k, :], + design_mat=design_mat + ) + + return arc_idx_range, demerr, vel, gamma
+ + + +
+[docs]
+def temporalUnwrapping(*, ifg_net_obj: IfgNetwork, net_obj: Network, wavelength: float, velocity_bound: float,
+                       demerr_bound: float, num_samples: int, num_cores: int = 1, logger: Logger) -> \
+        tuple[np.ndarray, np.ndarray, np.ndarray]:
+    """Solve ambiguities for every arc in spatial Network object.
+
+    Parameters
+    ----------
+    ifg_net_obj: IfgNetwork
+        The IfgNetwork object.
+    net_obj: Network
+        The Network object.
+    wavelength: float
+        The wavelength.
+    velocity_bound: float
+        The velocity bound.
+    demerr_bound: float
+        The DEM error bound.
+    num_samples: int
+        The number of samples for the search space.
+    num_cores: int
+        Number of cores to be used. Default is 1.
+    logger: Logger
+        Logging handler.
+
+    Returns
+    -------
+    demerr: np.ndarray
+    vel: np.ndarray
+    gamma: np.ndarray
+    """
+    msg = "#" * 10
+    msg += " TEMPORAL UNWRAPPING: AMBIGUITY FUNCTION "
+    msg += "#" * 10
+    logger.info(msg=msg)
+
+    start_time = time.time()
+
+    if num_cores == 1:
+        args = (
+            np.arange(net_obj.num_arcs), net_obj.num_arcs, net_obj.phase,
+            net_obj.slant_range, net_obj.loc_inc, ifg_net_obj, wavelength, velocity_bound, demerr_bound, num_samples)
+        arc_idx_range, demerr, vel, gamma = launchAmbiguityFunctionSearch(parameters=args)
+    else:
+        logger.info(msg="start parallel processing with {} cores.".format(num_cores))
+        pool = multiprocessing.Pool(processes=num_cores)
+
+        demerr = np.zeros((net_obj.num_arcs, 1), dtype=np.float32)
+        vel = np.zeros((net_obj.num_arcs, 1), dtype=np.float32)
+        gamma = np.zeros((net_obj.num_arcs, 1), dtype=np.float32)
+
+        # avoid having more chunks than cores
+        num_cores = net_obj.num_arcs if num_cores > net_obj.num_arcs else num_cores
+        idx = ut.splitDatasetForParallelProcessing(num_samples=net_obj.num_arcs, num_cores=num_cores)
+
+        args = [(
+            idx_range,
+            idx_range.shape[0],
+            net_obj.phase[idx_range, :],
+            net_obj.slant_range[idx_range],
+            net_obj.loc_inc[idx_range],
+            ifg_net_obj,
+            wavelength,
+            velocity_bound,
+            demerr_bound,
+            num_samples) for idx_range in idx]
+
+        results = pool.map(func=launchAmbiguityFunctionSearch, iterable=args)
+
+        # retrieve results
+        for i, demerr_i, vel_i, gamma_i in results:
+            demerr[i] = demerr_i
+            vel[i] = vel_i
+            gamma[i] = gamma_i
+
+    m, s = divmod(time.time() - start_time, 60)
+    logger.info(msg="Finished temporal unwrapping.")
+    logger.debug(msg='time used: {:02.0f} mins {:02.1f} secs.'.format(m, s))
+    return demerr, vel, gamma
+ + + +
+[docs]
+def launchSpatialUnwrapping(parameters: tuple) -> tuple[np.ndarray, np.ndarray]:
+    """LaunchSpatialUnwrapping.
+
+    Parameters
+    ----------
+    parameters: tuple
+        idx_range, num_ifgs, num_points, method, edges, phase
+
+    Returns
+    -------
+    idx_range: np.ndarray
+    unw_phase: np.ndarray
+    """
+    # unpack the parameters
+    (idx_range, num_ifgs, num_points, method, edges, phase) = parameters
+
+    prog_bar = ptime.progressBar(maxValue=num_ifgs)
+
+    unw_phase = np.zeros((num_points, num_ifgs), dtype=np.float32)
+
+    # perform the phase unwrapping with PUMA or ILP
+    for i in range(num_ifgs):
+        if method == "puma":
+            unw_phase[:, i] = unwrap_arbitrary(
+                psi=phase[:, i],
+                edges=edges,
+                simplices=None,
+                method="gc",
+                period=2*np.pi,
+                start_i=0,
+                p=0.2
+            )
+        else:
+            unw_phase[:, i] = unwrap_arbitrary(
+                psi=phase[:, i],
+                edges=edges,
+                simplices=None,  # todo: compute simplices for ILP
+                method="ilp",
+                period=2*np.pi,
+                start_i=0,
+            )
+        prog_bar.update(value=i + 1, every=1,
+                        suffix='{}/{} ifgs unwrapped. '.format(i + 1, num_ifgs))
+
+    # remove the mean to obtain a common reference
+    unw_phase = unw_phase - np.mean(unw_phase, axis=0)
+    return idx_range, unw_phase
+ + + +
+[docs] +def spatialUnwrapping(*, num_ifgs: int, num_points: int, phase: np.ndarray, edges: np.ndarray, method: str, + num_cores: int, logger: Logger): + """Spatial unwrapping of interferograms for a set of points. + + Parameters + ---------- + num_ifgs: int + Number of interferograms. + num_points: int + Number of points. + phase: np.ndarray + Phase of the interferograms at the points. + edges: np.ndarray + Edges/arcs of the graph. + method: str + Method for spatial unwrapping (puma or ilp). + num_cores: int + Number of cores to be used in multiprocessing. + logger: Logger + Logging handler. + + Returns + ------- + unw_phase: np.ndarray + Unwrapped phase of the interferograms at the points. + """ + msg = "#" * 10 + msg += f" SPATIAL UNWRAPPING: {method} " + msg += "#" * 10 + logger.info(msg=msg) + + start_time = time.time() + + if num_cores == 1: + parameters = ( + np.arange(num_ifgs), + num_ifgs, + num_points, + method, + edges, + phase + ) + idx_range, unw_phase = launchSpatialUnwrapping(parameters=parameters) + else: + logger.info(msg="start parallel processing with {} cores.".format(num_cores)) + pool = multiprocessing.Pool(processes=num_cores) + + unw_phase = np.zeros((num_points, num_ifgs), dtype=np.float32) + num_cores = num_ifgs if num_cores > num_ifgs else num_cores + # avoids having more samples than cores + idx = ut.splitDatasetForParallelProcessing(num_samples=num_ifgs, num_cores=num_cores) + + args = [( + idx_range, + idx_range.shape[0], + num_points, + method, + edges, + phase[:, idx_range]) for idx_range in idx] + results = pool.map(func=launchSpatialUnwrapping, iterable=args) + + # retrieve results + for i, phase in results: + unw_phase[:, i] = phase + + m, s = divmod(time.time() - start_time, 60) + logger.debug(msg='time used: {:02.0f} mins {:02.1f} secs.'.format(m, s)) + + return unw_phase
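A toy example of the function above on a four-point chain network with a single synthetic interferogram (a configured logger is assumed; values are illustrative only):

    import logging
    import numpy as np

    logger = logging.getLogger("unwrapping_example")

    edges = np.array([[0, 1], [1, 2], [2, 3]])            # arcs of the point network
    true_phase = np.array([[0.0], [2.0], [4.0], [6.0]])   # smooth ramp, one ifg
    wrapped = np.angle(np.exp(1j * true_phase))           # wrap to (-pi, pi]

    unw = spatialUnwrapping(num_ifgs=1, num_points=4, phase=wrapped,
                            edges=edges, method="puma", num_cores=1, logger=logger)
    # for this smooth toy ramp, PUMA should recover the ramp up to the removed mean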
+ + + +
+[docs] +def spatialParameterIntegrationIterative(*, + val_arcs: np.ndarray, + all_arcs: np.ndarray, + coord_xy: np.ndarray, + all_weights: np.ndarray, + spatial_ref_idx: int = 0, + res_tol: float = 1e-3, + max_rm_fraction: float = 0.001, + logger: Logger): + """Unwrapping double-difference arc parameters spatially. + + The parameters at the arcs are integrated spatially to the points. The integration is done iteratively using + least-squares by removing the arcs with the highest residuals in each iteration. + The integration stops when the sum of the residuals is below a threshold. + Function is adopted from StaMPS software (Hooper et al., 2007). + + Parameters + ---------- + val_arcs: np.ndarray + Value at the arcs (e.g. DEM error, velocity). + all_arcs: np.ndarray + Arcs of the spatial network. + coord_xy: np.ndarray + Radar coordinates of the points in the spatial network. + all_weights: np.ndarray + Weights of the arcs (e.g. temporal coherence from temporal unwrapping) + spatial_ref_idx: int + Index of the spatial reference point (default = 0). Can be arbitrary. + res_tol: float + Threshold on the sum of the residual phase (default = 1e-3). Convergence criterion. + max_rm_fraction: float + Fraction of the arcs that are removed in each iteration (default = 0.001). + logger: Logger + Logging handler + + Returns + ------- + val_points: np.ndarray + Estimated parameters at the points resulting from the integration of the parameters at the arcs. + """ + all_arcs = np.array(all_arcs) + num_points = coord_xy.shape[0] + num_arcs = all_arcs.shape[0] + + # create design matrix + a = np.zeros((num_arcs, num_points)) + for i in range(num_arcs): + a[i, all_arcs[i][0]] = 1 + a[i, all_arcs[i][1]] = -1 + + # find the number of arcs per point + arcs_per_point = np.zeros(num_points, ) + + for i in range(num_points): + arcs_per_point[i] = np.where(a[:, i] != 0)[0].shape[0] + + # remove reference point from design matrix + all_a = csr_matrix(all_weights * np.delete(a, spatial_ref_idx, 1)) + + # don't even start if the network is not connected + if structural_rank(all_a) < all_a.shape[1]: + logger.exception(msg="Spatial point network is not connected. Phase cannot be unwrapped!") + raise Exception + + # set n_bad to maximum fraction of bad edges that can be removed + n_bad = np.ceil(num_arcs * max_rm_fraction).astype(np.int16) + + # initialize output + val_points = np.zeros((num_points,)) + points_idx = np.ones((num_points,), dtype=bool) + points_idx[spatial_ref_idx] = False + x_hat = np.zeros((num_points - 1,)) + + start_time = time.time() + + arcs = all_arcs + obv_vec = val_arcs.reshape(-1, ) * all_weights.reshape(-1, ) + a = all_a + weights = all_weights + num_arcs = obv_vec.size + + r = None + num_arcs_save = None + arcs_save = None + a_save = None + weights_save = None + obv_vec_save = None + i = 0 + while True: + if structural_rank(a) >= a.shape[1]: + x_hat[:] = lsqr(a, obv_vec)[0] + + # store the current version of variables, being able to go back to previous iteration if too many arcs + # removed + a_save = a + obv_vec_save = obv_vec + weights_save = weights + arcs_save = arcs + num_arcs_save = num_arcs + + # compute residuals + r = obv_vec - np.matmul(a.toarray(), x_hat) + + else: # network is not connected anymore, remove less psPoints and try again + # x_hat = np.linalg.lstsq(a_save, obv_vec_save, rcond=None)[0] # unclear: I think it is not necessary to + # recompute the inversion. 
+            n_bad = np.ceil(n_bad / 10).astype(np.int16)  # remove fewer points per iteration
+
+        if np.all(np.abs(r) < res_tol):
+            break
+        else:
+            # drop the arcs with the highest residuals, but drop at most one arc per point
+            ps_w_dropped_arc = np.zeros((num_points,))
+            good_arc_idx = np.ones((num_arcs_save,), dtype=bool)
+            r_sort_idx = np.abs(r).argsort()[::-1]  # descending order, makes the for loop easier
+
+            for j in range(n_bad):  # remove arcs one by one
+                bad_arc_idx = r_sort_idx[j]
+                ps_idx0 = arcs_save[bad_arc_idx][0]
+                ps_idx1 = arcs_save[bad_arc_idx][1]
+                # drop the current arc only if no arc was dropped yet for either of its two points
+                if (ps_w_dropped_arc[ps_idx0] == 0) and (ps_w_dropped_arc[ps_idx1] == 0):
+                    good_arc_idx[bad_arc_idx] = False
+                    # mark both points of the arc as having an arc dropped
+                    ps_w_dropped_arc[ps_idx0] = 1
+                    ps_w_dropped_arc[ps_idx1] = 1
+
+            # update all variables for the next iteration
+            arcs = arcs_save[good_arc_idx, :]
+            obv_vec = obv_vec_save[good_arc_idx]
+            a = a_save[good_arc_idx, :]
+            weights = weights_save[good_arc_idx]
+            num_arcs = obv_vec.size
+
+        i += 1
+
+    val_points[points_idx] = x_hat
+
+    m, s = divmod(time.time() - start_time, 60)
+    logger.debug(msg='time used: {:02.0f} mins {:02.1f} secs.\n'.format(m, s))
+
+    return val_points
+ + + +
+[docs] +def spatialParameterIntegration(*, + val_arcs: np.ndarray, + arcs: np.ndarray, + coord_xy: np.ndarray, + weights: np.ndarray, + spatial_ref_idx: int = 0, + logger: Logger): + """Unwrapping double-difference arc parameters spatially. + + The parameters at the arcs are integrated spatially to the points. The integration is done using least-squares. + + Parameters + ---------- + val_arcs: np.ndarray + Value at the arcs (e.g. DEM error, velocity). + arcs: np.ndarray + Arcs of the spatial network. + coord_xy: np.ndarray + Radar coordinates of the points in the spatial network. + weights: np.ndarray + Weights of the arcs (e.g. temporal coherence from temporal unwrapping) + spatial_ref_idx: int + Index of the spatial reference point (default = 0). Can be arbitrary. + logger: Logger + Logging handler + + Returns + ------- + val_points: np.ndarray + Estimated parameters at the points resulting from the integration of the parameters at the arcs. + """ + arcs = np.array(arcs) + num_points = coord_xy.shape[0] + num_arcs = arcs.shape[0] + + # create design matrix + design_mat = np.zeros((num_arcs, num_points)) + for i in range(num_arcs): + design_mat[i, arcs[i][0]] = 1 + design_mat[i, arcs[i][1]] = -1 + + # remove reference point from design matrix + design_mat = csr_matrix(weights * np.delete(design_mat, spatial_ref_idx, 1)) + + # don't even start if the network is not connected + if structural_rank(design_mat) < design_mat.shape[1]: + raise Exception("Spatial point network is not connected. Cannot integrate parameters spatially!") + + start_time = time.time() + + obv_vec = val_arcs.reshape(-1, ) * weights.reshape(-1, ) + + x_hat = lsqr(design_mat, obv_vec)[0] + + m, s = divmod(time.time() - start_time, 60) + logger.debug(msg='time used: {:02.0f} mins {:02.1f} secs.'.format(m, s)) + + val_points = np.zeros((num_points,)) + points_idx = np.ones((num_points,), dtype=bool) + points_idx[spatial_ref_idx] = False + val_points[points_idx] = x_hat + + return val_points
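A toy integration example (synthetic, not part of the module): the arc values are differences value[start] - value[end] of a per-point parameter, matching the design matrix built inside the function, so the integration recovers the point values relative to the reference point:

    import logging
    import numpy as np

    logger = logging.getLogger("integration_example")

    # three points with true parameter values [0, 3, 5] (e.g. velocity)
    arcs = np.array([[0, 1], [1, 2], [0, 2]])
    val_arcs = np.array([-3.0, -2.0, -5.0])   # value[start] - value[end]
    coord_xy = np.zeros((3, 2))               # only the number of points is used here
    weights = np.ones((3, 1))                 # e.g. temporal coherence per arc

    val_points = spatialParameterIntegration(val_arcs=val_arcs, arcs=arcs, coord_xy=coord_xy,
                                             weights=weights, spatial_ref_idx=0, logger=logger)
    # val_points -> approximately [0., 3., 5.]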
+ + + +
+[docs]
+def computeNumArcsPerPoints(*, net_obj: Network, point_id: np.ndarray,
+                            logger: Logger) -> tuple[np.ndarray, np.ndarray]:
+    """Compute the design matrix of the network and the number of arcs each point is connected with.
+
+    Parameters
+    ----------
+    net_obj: Network
+        The spatial Network object.
+    point_id: np.ndarray
+        ID of the points in the network.
+    logger: Logger
+        Logging handler.
+
+    Returns
+    -------
+    design_mat: np.ndarray
+        Design matrix of the spatial network.
+    arcs_per_point: np.ndarray
+        Number of arcs that each point is connected with.
+    """
+    logger.info(msg="Compute the number of arcs per point.")
+
+    num_points = point_id.shape[0]
+
+    # create design matrix
+    design_mat = np.zeros((net_obj.num_arcs, num_points))
+    for i in range(net_obj.num_arcs):
+        design_mat[i, net_obj.arcs[i][0]] = 1
+        design_mat[i, net_obj.arcs[i][1]] = -1
+
+    # find the number of arcs per point
+    arcs_per_point = np.zeros(num_points, )
+
+    for i in range(num_points):
+        arcs_per_point[i] = np.where(design_mat[:, i] != 0)[0].shape[0]
+
+    return design_mat, arcs_per_point
+ + + +
+[docs]
+def computeAvgCoherencePerPoint(*, net_obj: Network, point_id: np.ndarray, logger: Logger) -> np.ndarray:
+    """Compute the average coherence from all arcs that a point is connected with. Used to remove incoherent points.
+
+    Parameters
+    ----------
+    net_obj: Network
+        The Network object.
+    point_id: np.ndarray
+        ID of the points in the network.
+    logger: Logger
+        Logging handler.
+
+    Returns
+    -------
+    mean_gamma_point: np.ndarray
+        Average coherence per point.
+    """
+    logger.info(msg="Removal of points whose arcs are incoherent on average.")
+
+    num_points = point_id.shape[0]
+
+    # create design matrix
+    a = np.zeros((net_obj.num_arcs, num_points))
+    for i in range(net_obj.num_arcs):
+        a[i, net_obj.arcs[i][0]] = net_obj.gamma[i]
+        a[i, net_obj.arcs[i][1]] = net_obj.gamma[i]
+
+    a[a == 0] = np.nan
+    mean_gamma_point = np.nanmean(a, axis=0)
+
+    return mean_gamma_point
+ + + +
+[docs] +def removeArcsByPointMask(*, net_obj: Union[Network, NetworkParameter], point_id: np.ndarray, coord_xy: np.ndarray, + p_mask: np.ndarray, design_mat: np.ndarray, + logger: Logger) -> tuple[Network, np.ndarray, np.ndarray, np.ndarray]: + """Remove all entries related to the arc observations connected to the points which have a False value in p_mask. + + Parameters + ---------- + net_obj: Network + The Network object. + point_id: np.ndarray + ID of the points in the network. + coord_xy: np.ndarray + Radar coordinates of the points in the spatial network. + p_mask: np.ndarray + Boolean mask with True for points to keep, and False for points to remove. + design_mat: np.ndarray + Design matrix describing the relation between arcs and points. + logger: Logger + Logging handler. + + Returns + ------- + net_obj: Network + Network object without the removed arcs and points. + point_id: np.ndarray + ID of the points in the network without the removed points. + coord_xy: np.ndarray + Radar coordinates of the points in the spatial network without the removed points. + design_mat: np.ndarray + Design matrix describing the relation between arcs and points without the removed points and arcs. + """ + # find respective arcs + a_idx = list() + for p_idx in np.where(~p_mask)[0]: + a_idx.append(np.where(design_mat[:, p_idx] != 0)[0]) + + if len(a_idx) != 0: + a_idx = np.hstack(a_idx) + a_mask = np.ones((net_obj.num_arcs,), dtype=np.bool_) + a_mask[a_idx] = False + net_obj.removeArcs(mask=a_mask) + design_mat = design_mat[a_mask, :] + else: + a_idx = np.array(a_idx) # so I can check the size + + # remove psPoints + point_id = point_id[p_mask] + design_mat = design_mat[:, p_mask] + coord_xy = coord_xy[p_mask, :] + + # beside removing the arcs in "arcs", the tuple indices have to be changed to make them fit to new point indices + for p_idx in np.sort(np.where(~p_mask)[0])[::-1]: + net_obj.arcs[np.where((net_obj.arcs[:, 0] > p_idx)), 0] -= 1 + net_obj.arcs[np.where((net_obj.arcs[:, 1] > p_idx)), 1] -= 1 + + logger.info(msg="Removed {} arc(s) connected to the removed point(s)".format(a_idx.size)) + return net_obj, point_id, coord_xy, design_mat
+ + + +
+[docs] +def removeGrossOutliers(*, net_obj: Network, point_id: np.ndarray, coord_xy: np.ndarray, min_num_arc: int = 3, + quality_thrsh: float = 0.0, + logger: Logger) -> tuple[Network, np.ndarray, np.ndarray, np.ndarray]: + """Remove both gross outliers which have many low quality arcs and points which are not well connected. + + Parameters + ---------- + net_obj: Network + The spatial Network object. + point_id: np.ndarray + ID of the points in the network. + coord_xy: np.ndarray + Radar coordinates of the points in the spatial network. + min_num_arc: int + Threshold on the minimal number of arcs per point. Default = 3. + quality_thrsh: float + Threshold on the temporal coherence of the arcs. Default = 0.0. + logger: Logger + Logging handler. + + Returns + ------- + net_obj: Network + Network object without the removed arcs and points. + point_id: np.ndarray + ID of the points in the network without the removed points. + coord_xy: np.ndarray + Radar coordinates of the points in the spatial network without the removed points. + a: np.ndarray + Design matrix describing the relation between arcs and points without the removed points and arcs. + """ + logger.info(msg="Detect points with low quality arcs (mean): < {}".format(quality_thrsh)) + mean_gamma_point = computeAvgCoherencePerPoint(net_obj=net_obj, + point_id=point_id, logger=logger) + # not yet removed, because arcs are removed separately + p_mask_mean_coh = (mean_gamma_point >= quality_thrsh).ravel() + logger.info(msg="Detected {} point(s) with mean coherence of all connected arcs < {} ".format( + p_mask_mean_coh[~p_mask_mean_coh].shape[0], quality_thrsh)) + + logger.info(msg="Removal of low quality arcs: < {}".format(quality_thrsh)) + a_mask = (net_obj.gamma >= quality_thrsh).ravel() + logger.info(msg="Removed {} arc(s)".format(a_mask[~a_mask].shape[0])) + net_obj.removeArcs(mask=a_mask) + + design_mat, arcs_per_point = computeNumArcsPerPoints(net_obj=net_obj, point_id=point_id, logger=logger) + + p_mask_num_arcs = (arcs_per_point >= min_num_arc).ravel() + logger.info(msg="Detected {} point(s) with less than {} arcs".format(p_mask_num_arcs[~p_mask_num_arcs].shape[0], + min_num_arc)) + + # remove them jointly + p_mask = p_mask_num_arcs & p_mask_mean_coh + logger.info(msg="Remove {} point(s)".format(p_mask[~p_mask].shape[0])) + net_obj, point_id, coord_xy, design_mat = removeArcsByPointMask(net_obj=net_obj, point_id=point_id, + coord_xy=coord_xy, p_mask=p_mask, + design_mat=design_mat, logger=logger) + return net_obj, point_id, coord_xy, design_mat
+ + + +
+[docs] +def parameterBasedNoisyPointRemoval(*, net_par_obj: NetworkParameter, point_id: np.ndarray, coord_xy: np.ndarray, + design_mat: np.ndarray, rmse_thrsh: float = 0.02, num_points_remove: int = 1, + bmap_obj: AmplitudeImage = None, bool_plot: bool = False, + logger: Logger): + """Remove Points during spatial integration step if residuals at many connected arcs are high. + + The idea is similar to outlier removal in DePSI, but without hypothesis testing. + It can be used as a preprocessing step to spatial integration. + The points are removed based on the RMSE computed from the residuals of the parameters (DEM error, velocity) per + arc. The point with the highest RMSE is removed in each iteration. The process stops when the maximum RMSE is below + a threshold. + + + Parameters + ---------- + net_par_obj: NetworkParameter + The spatial NetworkParameter object containing the parameters estimates at each arc. + point_id: np.ndarray + ID of the points in the network. + coord_xy: np.ndarray + Radar coordinates of the points in the spatial network. + design_mat: np.ndarray + Design matrix describing the relation between arcs and points. + rmse_thrsh: float + Threshold for the RMSE of the residuals per point. Default = 0.02. + num_points_remove: int + Number of points to remove in each iteration. Default = 1. + bmap_obj: AmplitudeImage + Basemap object for plotting. Default = None. + bool_plot: bool + Plot the RMSE per point. Default = False. + logger: Logger + Logging handler. + + Returns + ------- + spatial_ref_id: int + ID of the spatial reference point. + point_id: np.ndarray + ID of the points in the network without the removed points. + net_par_obj: NetworkParameter + The NetworkParameter object without the removed points. + """ + msg = "#" * 10 + msg += " NOISY POINT REMOVAL BASED ON ARC PARAMETERS " + msg += "#" * 10 + logger.info(msg=msg) + + num_points = point_id.shape[0] + + logger.info(msg="Selection of the reference PSC") + # select one of the two pixels which are connected via the arc with the highest quality + spatial_ref_idx = np.where(design_mat[np.argmax(net_par_obj.gamma), :] != 0)[0][0] + coord_xy = np.delete(arr=coord_xy, obj=spatial_ref_idx, axis=0) + spatial_ref_id = point_id[spatial_ref_idx] + point_id = np.delete(arr=point_id, obj=spatial_ref_idx, axis=0) + num_points -= 1 + + # remove reference point from design matrix + design_mat = net_par_obj.gamma * np.delete(arr=design_mat, obj=spatial_ref_idx, axis=1) + + logger.info(msg="Spatial integration to detect noisy point") + start_time = time.time() + + it_count = 0 + while True: + logger.info(msg="ITERATION: {}".format(it_count)) + design_mat = csr_matrix(design_mat) + + if structural_rank(design_mat) < design_mat.shape[1]: + logger.error(msg="Singular normal matrix. 
Network is no longer connected!") + # point_id = np.sort(np.hstack([spatial_ref_id, point_id])) + # return spatial_ref_id, point_id, net_par_obj + raise ValueError + # demerr + obv_vec = net_par_obj.demerr.reshape(-1, ) + demerr_points = lsqr(design_mat.toarray(), obv_vec * net_par_obj.gamma.reshape(-1, ))[0] + r_demerr = obv_vec - np.matmul(design_mat.toarray(), demerr_points) + + # vel + obv_vec = net_par_obj.vel.reshape(-1, ) + vel_points = lsqr(design_mat.toarray(), obv_vec * net_par_obj.gamma.reshape(-1, ))[0] + r_vel = obv_vec - np.matmul(design_mat.toarray(), vel_points) + + rmse_demerr = np.zeros((num_points,)) + rmse_vel = np.zeros((num_points,)) + for p in range(num_points): + r_mask = design_mat[:, p].toarray() != 0 + rmse_demerr[p] = np.sqrt(np.mean(r_demerr[r_mask.ravel()].ravel() ** 2)) + rmse_vel[p] = np.sqrt(np.mean(r_vel[r_mask.ravel()].ravel() ** 2)) + + rmse = rmse_vel.copy() + max_rmse = np.max(rmse.ravel()) + logger.info(msg="Maximum RMSE DEM correction: {:.2f} m".format(np.max(rmse_demerr.ravel()))) + logger.info(msg="Maximum RMSE velocity: {:.4f} m / year".format(np.max(rmse_vel.ravel()))) + + if bool_plot: + # vel + ax = bmap_obj.plot(logger=logger) + sc = ax.scatter(coord_xy[:, 1], coord_xy[:, 0], c=rmse_vel * 1000, s=3.5, + cmap=plt.cm.get_cmap("autumn_r"), vmin=0, vmax=rmse_thrsh * 1000) + plt.colorbar(sc, pad=0.03, shrink=0.5) + ax.set_title("{}. iteration\nmean velocity - RMSE per point in [mm / year]".format(it_count)) + fig = ax.get_figure() + plt.tight_layout() + fig.savefig(join(dirname(net_par_obj.file_path), "pic", f"step_1_rmse_vel_{it_count}th_iter.png"), + dpi=300) + plt.close(fig) + + # demerr + ax = bmap_obj.plot(logger=logger) + sc = ax.scatter(coord_xy[:, 1], coord_xy[:, 0], c=rmse_demerr, s=3.5, + cmap=plt.cm.get_cmap("autumn_r")) + plt.colorbar(sc, pad=0.03, shrink=0.5) + ax.set_title("{}. iteration\nDEM correction - RMSE per point in [m]".format(it_count)) + fig = ax.get_figure() + plt.tight_layout() + fig.savefig(join(dirname(net_par_obj.file_path), "pic", + f"step_1_rmse_dem_correction_{it_count}th_iter.png"), + dpi=300) + plt.close(fig) + + if max_rmse <= rmse_thrsh: + logger.info(msg="No noisy pixels detected.") + break + + # remove point with highest rmse + p_mask = np.ones((num_points,), dtype=np.bool_) + p_mask[np.argsort(rmse)[::-1][:num_points_remove]] = False # see description of function removeArcsByPointMask + net_par_obj, point_id, coord_xy, design_mat = removeArcsByPointMask(net_obj=net_par_obj, point_id=point_id, + coord_xy=coord_xy, p_mask=p_mask, + design_mat=design_mat.toarray(), + logger=logger) + num_points -= num_points_remove + it_count += 1 + + m, s = divmod(time.time() - start_time, 60) + logger.debug(msg='time used: {:02.0f} mins {:02.1f} secs.'.format(m, s)) + + # add spatialRefIdx back to point_id + point_id = np.sort(np.hstack([spatial_ref_id, point_id])) + return spatial_ref_id, point_id, net_par_obj
+ +
+ + + + \ No newline at end of file diff --git a/doc/_modules/sarvey/utils.html b/doc/_modules/sarvey/utils.html new file mode 100644 index 0000000..0a9dd97 --- /dev/null +++ b/doc/_modules/sarvey/utils.html @@ -0,0 +1,994 @@ + + + + + + + sarvey.utils — SARvey 1.0.0 documentation + + + + + + + + + + + + + +

Source code for sarvey.utils

+#!/usr/bin/env python
+
+# SARvey - A multitemporal InSAR time series tool for the derivation of displacements.
+#
+# Copyright (C) 2021-2024 Andreas Piter (IPI Hannover, piter@ipi.uni-hannover.de)
+#
+# This software was developed together with FERN.Lab (fernlab@gfz-potsdam.de) in the context
+# of the SAR4Infra project with funds of the German Federal Ministry for Digital and
+# Transport and contributions from Landesamt fuer Vermessung und Geoinformation
+# Schleswig-Holstein and Landesbetrieb Strassenbau und Verkehr Schleswig-Holstein.
+#
+# This program is free software: you can redistribute it and/or modify it under
+# the terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# Important: This package uses PyMaxFlow. The core of PyMaxflows library is the C++
+# implementation by Vladimir Kolmogorov. It is also licensed under the GPL, but it REQUIRES that you
+# cite [BOYKOV04] (see LICENSE) in any resulting publication if you use this code for research purposes.
+# This requirement extends to SARvey.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with this program. If not, see <https://www.gnu.org/licenses/>.
+
+"""Utils module for SARvey."""
+import multiprocessing
+import time
+from os.path import exists, join
+
+import numpy as np
+from scipy.sparse.linalg import lsqr
+from typing import Union
+from logging import Logger
+
+from mintpy.utils import ptime
+
+from sarvey.objects import Points, NetworkParameter, Network, BaseStack, AmplitudeImage
+from sarvey.ifg_network import IfgNetwork
+
+
+
+[docs]
+def convertBboxToBlock(*, bbox: tuple):
+    """Convert a bounding box into a block, since 'read' boxes and 'write2hdf5_block' blocks use different index orders."""
+    block = None
+    if len(bbox) == 4:
+        block = (bbox[1], bbox[3], bbox[0], bbox[2])
+    if len(bbox) == 6:
+        block = (bbox[2], bbox[5], bbox[1], bbox[4], bbox[0], bbox[3])
+    return block
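For example, a 4-element read box in (x0, y0, x1, y1) order is reordered into a write block with rows first:

    block = convertBboxToBlock(bbox=(10, 20, 110, 220))
    # -> (20, 220, 10, 110): row range first, then column range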
+ + + +
+[docs]
+def invertIfgNetwork(*, phase: np.ndarray, num_points: int, ifg_net_obj: IfgNetwork, num_cores: int, ref_idx: int,
+                     logger: Logger):
+    """Wrap the ifg network inversion running in parallel.
+
+    Parameters
+    ----------
+    phase: np.ndarray
+        Interferometric phases of the points.
+    num_points: int
+        Number of points.
+    ifg_net_obj: IfgNetwork
+        Instance of class IfgNetwork.
+    num_cores: int
+        Number of cores to use for multiprocessing.
+    ref_idx: int
+        Index of temporal reference date for interferogram network inversion.
+    logger: Logger
+        Logging handler.
+
+    Returns
+    -------
+    phase_ts: np.ndarray
+        Inverted phase time series of the points.
+    """
+    msg = "#" * 10
+    msg += " INVERT IFG NETWORK "
+    msg += "#" * 10
+    logger.info(msg=msg)
+
+    start_time = time.time()
+    design_mat = ifg_net_obj.getDesignMatrix()
+
+    if num_cores == 1:
+        args = (np.arange(num_points), num_points, phase, design_mat, ifg_net_obj.num_images, ref_idx)
+        idx_range, phase_ts = launchInvertIfgNetwork(parameters=args)
+    else:
+        # use only 10 percent of the cores, because scipy.sparse.linalg.lsqr is already running in parallel,
+        # but keep at least one core
+        num_cores = max(1, int(np.floor(num_cores / 10)))
+        logger.info(msg="start parallel processing with {} cores.".format(num_cores))
+        pool = multiprocessing.Pool(processes=num_cores)
+
+        phase_ts = np.zeros((num_points, ifg_net_obj.num_images), dtype=np.float32)
+
+        num_cores = num_points if num_cores > num_points else num_cores  # avoids having more chunks than cores
+        idx = splitDatasetForParallelProcessing(num_samples=num_points, num_cores=num_cores)
+        args = [(
+            idx_range,
+            idx_range.shape[0],
+            phase[idx_range, :],
+            design_mat,
+            ifg_net_obj.num_images,
+            ref_idx) for idx_range in idx]
+
+        results = pool.map(func=launchInvertIfgNetwork, iterable=args)
+
+        # retrieve results
+        for i, phase_i in results:
+            phase_ts[i, :] = phase_i
+
+    m, s = divmod(time.time() - start_time, 60)
+    logger.debug(msg='time used: {:02.0f} mins {:02.1f} secs.'.format(m, s))
+    return phase_ts
+ + + +
+[docs] +def launchInvertIfgNetwork(parameters: tuple): + """Launch the inversion of the interferogram network in parallel. + + Parameters + ---------- + parameters: tuple + parameters for inversion + + Tuple contains: + idx_range: np.ndarray + range of point indices to be processed + num_points: int + number of points + phase: np.ndarray + interferometric phases of the points + design_mat: np.ndarray + design matrix + num_images: int + number of images + ref_idx: int + index of temporal reference date for interferogram network inversion + + Returns + ------- + idx_range: np.ndarray + range of indices of the points processed + phase_ts: np.ndarray + inverted phase time series + """ + # Unpack the parameters + (idx_range, num_points, phase, design_mat, num_images, ref_idx) = parameters + + design_mat = np.delete(arr=design_mat, obj=ref_idx, axis=1) # remove reference date + idx = np.ones((num_images,), dtype=np.bool_) + idx[ref_idx] = False + phase_ts = np.zeros((num_points, num_images), dtype=np.float32) + + prog_bar = ptime.progressBar(maxValue=num_points) + for i in range(num_points): + phase_ts[i, idx] = lsqr(design_mat, phase[i, :])[0] + prog_bar.update(value=i + 1, every=np.ceil(num_points / 100), + suffix='{}/{} points'.format(i + 1, num_points)) + + return idx_range, phase_ts
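The inversion solves, per point, the linear system design_mat * phase_ts = phase_ifg after removing the reference-date column. A toy version with three images and three interferograms; the design matrix is written out by hand here, not taken from the IfgNetwork API:

    import numpy as np
    from scipy.sparse.linalg import lsqr

    # rows: ifgs (0-1, 0-2, 1-2); columns: images 0, 1, 2
    design_mat = np.array([[-1., 1., 0.],
                           [-1., 0., 1.],
                           [0., -1., 1.]])

    phase_ts_true = np.array([0.0, 0.3, 0.8])   # phase per image, image 0 as reference
    phase_ifg = design_mat @ phase_ts_true      # interferometric phases

    a = np.delete(design_mat, 0, axis=1)        # remove reference date (image 0)
    phase_ts = lsqr(a, phase_ifg)[0]            # -> approximately [0.3, 0.8]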
+ + + +
+[docs]
+def predictPhase(*, obj: Union[NetworkParameter, Points], vel: np.ndarray = None, demerr: np.ndarray = None,
+                 ifg_space: bool = True, logger: Logger):
+    """Predict the phase time series based on the estimated parameters DEM error and mean velocity.
+
+    Can be used for both arc phase or point phase. Wrapper function for 'predictPhaseCore(...)'.
+
+    Parameters
+    ----------
+    obj: Union[NetworkParameter, Points]
+        Object of either 'NetworkParameter' or 'Points'. If an instance of 'Points' is given, 'vel' and 'demerr'
+        also need to be specified.
+    vel: np.ndarray
+        Velocity for each sample (default: None).
+    demerr: np.ndarray
+        DEM error for each sample (default: None).
+    ifg_space: bool
+        Set to True if the phase shall be predicted in interferogram space. If False, phase will be predicted
+        in acquisition space. (default: True)
+    logger: Logger
+        Logging handler.
+
+    Returns
+    -------
+    pred_phase_demerr: np.ndarray
+        Predicted phase from DEM error.
+    pred_phase_vel: np.ndarray
+        Predicted phase from velocity.
+
+    Raises
+    ------
+    ValueError
+        If 'vel' or 'demerr' is None although 'obj' is an instance of 'Points'.
+    TypeError
+        If 'obj' is of the wrong type.
+    """
+    if isinstance(obj, Points):
+        if (vel is None) or (demerr is None):
+            logger.error(msg="Both 'vel' and 'demerr' are needed if 'obj' is an instance of class 'Points'!")
+            raise ValueError
+        pred_phase_demerr, pred_phase_vel = predictPhaseCore(
+            ifg_net_obj=obj.ifg_net_obj,
+            wavelength=obj.wavelength,
+            vel=vel,
+            demerr=demerr,
+            slant_range=obj.slant_range,
+            loc_inc=obj.loc_inc,
+            ifg_space=ifg_space
+        )
+    elif isinstance(obj, NetworkParameter):
+        pred_phase_demerr, pred_phase_vel = predictPhaseCore(
+            ifg_net_obj=obj.ifg_net_obj,
+            wavelength=obj.wavelength,
+            vel=obj.vel,
+            demerr=obj.demerr,
+            slant_range=obj.slant_range,
+            loc_inc=obj.loc_inc,
+            ifg_space=ifg_space
+        )
+    else:
+        logger.error(msg="'obj' must be an instance of 'Points' or 'NetworkParameter'!")
+        raise TypeError
+    return pred_phase_demerr, pred_phase_vel
+ + + +
+[docs] +def predictPhaseCore(*, ifg_net_obj: IfgNetwork, wavelength: float, vel: np.ndarray, + demerr: np.ndarray, slant_range: np.ndarray, loc_inc: np.ndarray, ifg_space: bool = True): + """Predicts the phase time series based on the estimated parameters DEM error and mean velocity. + + Can be used for both arc phase or point phase. + + Parameters + ---------- + ifg_net_obj: IfgNetwork + instance of class ifgNetwork + wavelength: float + wavelength in [m] + vel: np.ndarray + velocity for each sample + demerr: np.ndarray + dem error for each sample + slant_range: np.ndarray + slant range distance for each sample + loc_inc: np.ndarray + local incidence angle for each sample + ifg_space: bool + set to True if the phase shall be predicted in interferogram space. If False, phase will be + predicted in acquisition space. (default: True) + + Returns + ------- + pred_phase_demerr: np.ndarray + predicted phase from DEM error + pred_phase_vel: np.ndarray + predicted phase from velocity + """ + factor = 4 * np.pi / wavelength + + if ifg_space: + tbase = ifg_net_obj.tbase_ifg + pbase = ifg_net_obj.pbase_ifg + else: + tbase = ifg_net_obj.tbase + pbase = ifg_net_obj.pbase + + # compute phase due to DEM error + pred_phase_demerr = factor * pbase[:, np.newaxis] / (slant_range * np.sin(loc_inc))[np.newaxis, :] * demerr + + # compute phase due to velocity + pred_phase_vel = factor * tbase[:, np.newaxis] * vel + + return pred_phase_demerr.T, pred_phase_vel.T
+ + + +
+[docs] +def predictPhaseSingle(*, demerr: float, vel: float, slant_range: float, loc_inc: float, + ifg_net_obj: IfgNetwork, wavelength: float, only_vel: bool = False, ifg_space: bool = True): + """Predict the phase time series for only one point based on the estimated parameters DEM error and mean velocity. + + Can be used for both arc phase or point phase. + + Parameters + ---------- + demerr: float + DEM error (scalar) + vel: float + mean velocity (scalar) + slant_range: float + slant range distance in [m] (scalar) + loc_inc: float + local incidence angle in [rad] (scalar) + ifg_net_obj: IfgNetwork + object of class IfgNetwork + wavelength: float + radar wavelength in [m] + only_vel: bool + set to True if only the mean velocity shall be predicted (default: False) + ifg_space: bool + set to True if the phase shall be predicted in interferogram space. If False, phase will be predicted in + acquisition space. (default: True) + + Returns + ------- + pred_phase: np.ndarray + predicted phase + """ + factor = 4 * np.pi / wavelength + + if ifg_space: + tbase = ifg_net_obj.tbase_ifg + pbase = ifg_net_obj.pbase_ifg + num_time = ifg_net_obj.num_ifgs + else: + tbase = ifg_net_obj.tbase + pbase = ifg_net_obj.pbase + num_time = ifg_net_obj.num_images + + if only_vel: + a = np.zeros((num_time, 1)) + else: + a = np.zeros((num_time, 2)) + a[:, 0] = factor * tbase + + if only_vel: + pred_phase = np.matmul(a, np.array([vel])).reshape((-1,)) + else: + a[:, 1] = factor * pbase / (slant_range * np.sin(loc_inc)) + pred_phase = np.matmul(a, np.array([vel, demerr])).reshape((-1,)) + + return pred_phase
+ + + +
+[docs]
+def estimateParameters(*, obj: Union[Points, Network], estimate_ref_atmo: bool = True, ifg_space: bool = True):
+    """Estimate the parameters either per point or per arc.
+
+    Parameters are velocity and DEM error (or additionally reference APS).
+
+    Parameters
+    ----------
+    obj: Union[Points, Network]
+        object of either 'Points' or 'Network'
+    estimate_ref_atmo: bool
+        set to True if the APS of the reference date shall be estimated. Corresponds to the offset of the
+        linear motion model (default: True).
+    ifg_space: bool
+        set to True if the phase shall be predicted in interferogram space. If False, phase will be
+        predicted in acquisition space. (default: True)
+
+    Returns
+    -------
+    vel: np.ndarray
+        velocity for each point
+    demerr: np.ndarray
+        dem error for each point
+    ref_atmo: np.ndarray
+        reference APS for each point (None if 'estimate_ref_atmo' is False)
+    coherence: np.ndarray
+        temporal coherence of the residuals for each point
+    omega: np.ndarray
+        sum of squared residuals for each point
+    v_hat: np.ndarray
+        residuals
+    """
+    num = obj.phase.shape[0]  # either number of points or number of arcs
+
+    if ifg_space:
+        tbase = obj.ifg_net_obj.tbase_ifg
+        pbase = obj.ifg_net_obj.pbase_ifg
+        num_time = obj.ifg_net_obj.num_ifgs
+    else:
+        tbase = obj.ifg_net_obj.tbase
+        pbase = obj.ifg_net_obj.pbase
+        num_time = obj.ifg_net_obj.num_images
+
+    vel = np.zeros((num,), dtype=np.float32)
+    demerr = np.zeros((num,), dtype=np.float32)
+    omega = np.zeros((num,), dtype=np.float32)
+    coherence = np.zeros((num,), dtype=np.float32)
+    v_hat = np.zeros((num, num_time), dtype=np.float32)
+
+    ref_atmo = None
+    if estimate_ref_atmo:
+        ref_atmo = np.zeros((num,), dtype=np.float32)
+        a = np.zeros((num_time, 3), dtype=np.float32)
+        a[:, 2] = 4 * np.pi / obj.wavelength  # atmospheric delay at reference acquisition
+    else:
+        a = np.zeros((num_time, 2), dtype=np.float32)
+
+    a[:, 1] = 4 * np.pi / obj.wavelength * tbase  # velocity
+
+    for p in range(num):
+        obv_vec = obj.phase[p, :]
+        a[:, 0] = 4 * np.pi / obj.wavelength * pbase / (obj.slant_range[p] * np.sin(obj.loc_inc[p]))  # demerr
+
+        x_hat, omega[p] = np.linalg.lstsq(a, obv_vec, rcond=None)[0:2]
+        demerr[p] = x_hat[0]
+        vel[p] = x_hat[1]
+        if estimate_ref_atmo:
+            ref_atmo[p] = x_hat[2]
+        v_hat[p, :] = obv_vec - np.matmul(a, x_hat)
+        coherence[p] = np.abs(np.mean(np.exp(1j * v_hat[p, :])))
+
+    return vel, demerr, ref_atmo, coherence, omega, v_hat
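+
+# Usage sketch (hypothetical; not part of SARvey): any object exposing the
+# attributes accessed above can be passed, e.g. a stand-in built from
+# SimpleNamespace with synthetic data.
+from types import SimpleNamespace
+
+num_points, num_images = 3, 10
+obj = SimpleNamespace(
+    phase=0.1 * np.random.randn(num_points, num_images).astype(np.float32),
+    wavelength=0.055,                                # [m]
+    slant_range=np.full(num_points, 850e3),          # [m]
+    loc_inc=np.full(num_points, np.deg2rad(35.0)),   # [rad]
+    num_points=num_points,
+    ifg_net_obj=SimpleNamespace(tbase=np.linspace(0.0, 1.0, num_images),
+                                pbase=50.0 * np.random.randn(num_images),
+                                num_images=num_images),
+)
+vel, demerr, ref_atmo, coherence, omega, v_hat = estimateParameters(obj=obj, ifg_space=False)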
+ + + +
+[docs]
+def splitImageIntoBoxesRngAz(*, length: int, width: int, num_box_az: int, num_box_rng: int):
+    """Split the image into several boxes.
+
+    (adapted from mintpy.ifgram_inversion.split2boxes)
+
+    Parameters
+    ----------
+    num_box_rng: int
+        Number of boxes in range direction
+    num_box_az: int
+        Number of boxes in azimuth direction
+    length: int
+        length of the image
+    width: int
+        width of the image
+
+    Returns
+    -------
+    box_list: list
+        list of tuples of 4 int (xmin, ymin, xmax, ymax)
+    num_box: int
+        number of boxes
+    """
+    y_step = int(np.rint((length / num_box_rng) / 10) * 10)
+    x_step = int(np.rint((width / num_box_az) / 10) * 10)
+
+    box_list = []
+    y0 = 0
+    y1 = 0
+    while y1 != length:
+        x0 = 0
+        x1 = 0
+        if y0 + y_step + int(np.rint(y_step / 2)) > length:  # avoid a too small last row of boxes
+            y1 = length
+        else:
+            y1 = y0 + y_step
+        while x1 != width:
+            if x0 + x_step + int(np.rint(x_step / 2)) > width:  # avoid a too small last column of boxes
+                x1 = width
+            else:
+                x1 = x0 + x_step
+            box = (x0, y0, x1, y1)
+            box_list.append(box)
+            x0 = x1
+        y0 = y1
+
+    num_box = len(box_list)
+    return box_list, num_box
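+
+# Example (sketch): split a 100 x 60 pixel image into 2 x 2 boxes. Box edges
+# are snapped to multiples of 10 pixels by the rounding above.
+box_list, num_box = splitImageIntoBoxesRngAz(length=100, width=60, num_box_az=2, num_box_rng=2)
+# box_list -> [(0, 0, 30, 50), (30, 0, 60, 50), (0, 50, 30, 100), (30, 50, 60, 100)]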
+ + + +
+[docs]
+def preparePatches(*, num_patches: int, width: int, length: int, logger: Logger):
+    """Create patches to subset the image stack for parallel processing to reduce memory usage.
+
+    Parameters
+    ----------
+    num_patches: int
+        number of patches to split the image into
+    width: int
+        width of the image
+    length: int
+        length of the image
+    logger: Logger
+        logging handler
+
+    Returns
+    -------
+    box_list: list
+        tuples with the radar coordinates of the boxes
+    num_patches: int
+        number of actual patches created by the function
+    """
+    patch_size_lut = {
+        1: (1, 1),
+        2: (1, 2),
+        3: (1, 3),
+        4: (2, 2),
+        6: (2, 3),
+        8: (2, 4),
+        10: (2, 5),
+        12: (3, 4),
+        15: (3, 5),
+        28: (4, 7),
+    }
+    if num_patches == 1:
+        box_list = [(0, 0, width, length)]
+    else:
+        if num_patches > max(patch_size_lut.keys()):
+            num_patches = max(patch_size_lut.keys())
+            logger.info(msg=f"Number of patches is higher than expected. Reducing to {num_patches} patches.")
+        else:
+            while num_patches not in patch_size_lut:  # round up to the next patch layout in the LUT
+                num_patches += 1
+        box_list, num_patches = splitImageIntoBoxesRngAz(length=length,
+                                                         width=width,
+                                                         num_box_az=patch_size_lut[num_patches][1],
+                                                         num_box_rng=patch_size_lut[num_patches][0])
+        logger.info(msg=f"Process {num_patches} patches " +
+                        f"({patch_size_lut[num_patches][1]} x {patch_size_lut[num_patches][0]}).")
+    return box_list, num_patches
+ + + +
+[docs]
+def splitDatasetForParallelProcessing(*, num_samples: int, num_cores: int):
+    """Split the dataset into chunks of similar size for processing them in parallel.
+
+    Parameters
+    ----------
+    num_samples: int
+        number of samples to be split
+    num_cores: int
+        number of cores to split among
+
+    Returns
+    -------
+    idx: list
+        list of sample ranges for each core
+    """
+    rest = np.mod(num_samples, num_cores)
+    avg_num_samples_per_core = int((num_samples - rest) / num_cores)
+    num_samples_per_core = np.zeros((num_cores,), dtype=np.int64)  # int64 avoids overflow for large datasets
+    num_samples_per_core[:] = avg_num_samples_per_core
+    num_samples_per_core[:rest] += 1  # distribute the remainder over the first cores
+
+    idx = list()
+    cur_idx = 0
+    for i in range(num_cores):
+        idx.append([cur_idx, cur_idx + num_samples_per_core[i]])
+        cur_idx += num_samples_per_core[i]
+
+    idx = [np.arange(s, e) for s, e in idx]
+    return idx
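+
+# Example (sketch): distribute 10 samples over 3 cores; the remainder goes to
+# the first core(s).
+idx = splitDatasetForParallelProcessing(num_samples=10, num_cores=3)
+# idx -> [array([0, 1, 2, 3]), array([4, 5, 6]), array([7, 8, 9])]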
+ + + +
+[docs] +def createSpatialGrid(*, coord_utm_img: np.ndarray, length: int, width: int, grid_size: int): + """Create a spatial grid over the image. + + Parameters + ---------- + coord_utm_img: np.ndarray + coordinates of all image pixels in UTM + length: int + number of pixels in length of the image + width: int + number of pixels in width of the image + grid_size: int + size of the grid in [m] + + Returns + ------- + box_list: list + of tuples with the radar coordinates of the boxes + num_box: int + actual number of boxes created by the function + """ + p0 = coord_utm_img[:, 0, 0] + p1 = coord_utm_img[:, 0, -1] + p2 = coord_utm_img[:, -1, 0] + dist_width = np.linalg.norm(p0 - p1) + dist_length = np.linalg.norm(p0 - p2) + num_box_az = int(np.round(dist_width / grid_size)) + num_box_rng = int(np.round(dist_length / grid_size)) + + # split image into different parts + box_list, num_box = splitImageIntoBoxesRngAz(length=length, width=width, + num_box_az=num_box_az, num_box_rng=num_box_rng) + + return box_list, num_box
+ + + +
+[docs]
+def selectBestPointsInGrid(*, box_list: list, quality: np.ndarray, sel_min: bool = True):
+    """Select the best point inside a grid.
+
+    If several pixels fulfil the criteria, the first one is selected.
+
+    Parameters
+    ----------
+    box_list: list
+        of tuples with the radar coordinates of the boxes
+    quality: np.ndarray
+        quality of the pixels
+    sel_min: bool
+        set to True if the minimum value shall be selected (default: True)
+
+    Returns
+    -------
+    cand_mask_sparse: np.ndarray
+        boolean mask of the selected pixels
+    """
+    cand_mask_sparse = np.zeros_like(quality).astype(np.bool_)
+
+    for box in box_list:
+        qual_box = quality[box[1]:box[3], box[0]:box[2]]
+        if sel_min:
+            min_val = np.min(qual_box)
+            if min_val == np.inf:  # no minimum value exists in this box
+                continue
+            idx_box = np.where(qual_box == min_val)
+        else:  # max
+            idx_box = np.where(qual_box == np.max(qual_box))
+
+        if idx_box[0].shape[0] > 1:  # more than one index might be found, due to quality(PS) = 1 in MiaplPy
+            idx_box_tmp = [idx_box[0][0], idx_box[1][0]]
+            idx_box = idx_box_tmp
+        idx_img = (idx_box[0] + box[1], idx_box[1] + box[0])
+        cand_mask_sparse[idx_img] = True
+    return cand_mask_sparse
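+
+# Example (sketch): keep the pixel with the lowest quality value (e.g. the
+# smallest residual) per grid cell of a random quality image.
+quality = np.random.rand(100, 60)
+box_list, _ = splitImageIntoBoxesRngAz(length=100, width=60, num_box_az=2, num_box_rng=2)
+cand_mask = selectBestPointsInGrid(box_list=box_list, quality=quality, sel_min=True)
+# cand_mask is a boolean (100, 60) array with one selected pixel per box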
+ + + +
+[docs] +def spatiotemporalConsistency(*, coord_utm: np.ndarray, phase: np.ndarray, wavelength: float, min_dist: int = 15, + max_dist: float = np.inf, knn: int = 50): + """Spatiotemporal consistency proposed by Hanssen et al. (2008) and implemented in DePSI (van Leijen, 2014). + + Parameters + ---------- + coord_utm: np.ndarray + UTM coordinates of the points + phase: np.ndarray + phase time series of the points + wavelength: float + radar wavelength in [m] + min_dist: int + minimum distance to other points in [m] (default: 15) + max_dist: float + maximum distance to other points in [m] (default: np.inf) + knn: int + number of nearest neighbors to consider (default: 50) + + Returns + ------- + stc: np.ndarray + spatiotemporal consistency of the points + """ + from scipy.spatial import KDTree + + num_samples, num_time = phase.shape + tree = KDTree(data=coord_utm) + + stc = np.zeros((num_samples,), np.float64) + + for p in range(num_samples): + dist, idx = tree.query([coord_utm[p, 0], coord_utm[p, 1]], k=knn) + mask = (dist < max_dist) & (dist > min_dist) & (dist != 0) + rho = list() + for i in idx[mask]: + diff = (phase[i, :-1] - phase[p, :-1]) - (phase[i, 1:] - phase[p, 1:]) + rho.append(wavelength / (4 * np.pi) * np.sqrt((1 / (num_time - 1) * np.sum(diff ** 2)))) + if not rho: + stc[p] = np.nan + else: + stc[p] = np.min(rho) + return stc
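+
+# Example (sketch): points with identical phase histories are perfectly
+# consistent, so the minimum double-difference RMS, and hence the STC, is 0.
+coord_utm = np.array([[0.0, 0.0], [20.0, 0.0], [0.0, 20.0]])
+phase = np.tile(np.random.randn(30), (3, 1))
+stc = spatiotemporalConsistency(coord_utm=coord_utm, phase=phase, wavelength=0.055)
+# stc -> array([0., 0., 0.])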
+ + + +
+[docs] +def temporalAutoCorrelation(*, residuals: np.ndarray, lag: int): + """Compute the temporal autocorrelation for given time lag from the residuals. + + Parameters + ---------- + residuals: np.ndarray + residual phase time series (dim: num_points x num_time_steps) + lag: int + time lag used for computing the correlation + + Returns + ------- + auto_corr: np.ndarray + auto-correlation of each point (dim: num_points x lag) + """ + num_points = residuals.shape[0] + auto_corr = np.zeros((num_points, lag)) + for lag_num in range(1, lag + 1): + for p in range(num_points): + auto_corr[p, lag_num - 1] = abs(np.corrcoef( + np.array([residuals[p, :-lag_num], residuals[p, lag_num:]]))[0][1]) + return auto_corr
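+
+# Example (sketch): the lag-1 and lag-2 autocorrelation of white-noise
+# residuals should be close to zero; values near 1 would indicate a remaining
+# systematic signal in the residuals.
+residuals = np.random.randn(5, 50).astype(np.float32)
+auto_corr = temporalAutoCorrelation(residuals=residuals, lag=2)
+# auto_corr has shape (5, 2) with entries close to zero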
+ + + +
+[docs] +def readPhasePatchwise(*, stack_obj: BaseStack, dataset_name: str, num_patches: int, cand_mask: np.ndarray, + point_id_img: np.ndarray, + logger: Logger): + """Read the phase from a file in a patchwise manner to reduce memory usage. + + Parameters + ---------- + stack_obj: BaseStack + instance of class BaseStack + dataset_name: str + name of the dataset to read (e.g. 'ifgs' or 'phase') + num_patches: int + number of patches to split the image into + cand_mask: np.ndarray + boolean mask of the selected pixels + point_id_img: np.ndarray + image with point IDs for each pixel + logger: Logger + logging handler + + Returns + ------- + phase_points: np.ndarray + phase time series of the selected pixels + """ + if dataset_name == "ifgs": + length, width, num_images = stack_obj.getShape(dataset_name=dataset_name) + elif dataset_name == "phase": # result from miaplpy + num_images, length, width = stack_obj.getShape(dataset_name=dataset_name) + else: + logger.error(f"Reading '{dataset_name}' is not supported.") + raise NotImplementedError + + if num_patches == 1: + phase_img = stack_obj.read(dataset_name=dataset_name) + if dataset_name == "phase": # result from miaplpy + phase_img = np.moveaxis(phase_img, 0, -1) + phase_points = phase_img[cand_mask, :] + else: + phase_points = np.angle(phase_img[cand_mask, :]) + else: + box_list, num_patches = preparePatches(num_patches=num_patches, + width=width, + length=length, + logger=logger) + num_points = cand_mask[cand_mask].shape[0] + phase_points = np.zeros((num_points, num_images), dtype=np.float32) + start_idx = 0 + point_id_order = list() + for idx in range(num_patches): + bbox = box_list[idx] + if dataset_name == "phase": # result from miaplpy + # slcStack has different order: starts with num_images. Adjust bbox (x0, y0, z0, x1, y1, z1) + # read whole slcStack and subset to time span outside this function. + box = (bbox[1], 0, bbox[0], bbox[3], num_images, bbox[2]) + phase_img = stack_obj.read(dataset_name=dataset_name, box=box, print_msg=False) + phase_img = np.moveaxis(phase_img, 0, -1) + else: + phase_img = stack_obj.read(dataset_name=dataset_name, box=bbox, print_msg=False) + cur_cand_mask = cand_mask[bbox[1]:bbox[3], bbox[0]:bbox[2]] + + # extract the wrapped phase for the selected pixels in the patch + cur_num_points = cur_cand_mask[cur_cand_mask].shape[0] + stop_idx = start_idx + cur_num_points + if dataset_name == "phase": + phase_points[start_idx:stop_idx, :] = phase_img[cur_cand_mask, :] # miaplpy results are phases + else: + phase_points[start_idx:stop_idx, :] = np.angle(phase_img[cur_cand_mask, :]) + start_idx = stop_idx + + # store order of IDs to sort the points after loading all ifgs + cur_point_id = point_id_img[bbox[1]:bbox[3], bbox[0]:bbox[2]] + cur_point_id = cur_point_id[cur_cand_mask] + point_id_order.append(cur_point_id) + logger.info(msg="\r\033[KPatches read:\t {}/{}".format(idx + 1, num_patches)) + # reorder points to fit to the same structure for all datasets + idx = np.argsort(np.hstack(point_id_order)) + phase_points = phase_points[idx, :] + + return phase_points
+ + + +
+[docs] +def detectValidAreas(*, bmap_obj: AmplitudeImage, logger: Logger): + """Detect valid areas based on amplitude image. + + Parameters + ---------- + bmap_obj: AmplitudeImage + instance of class AmplitudeImage + logger: Logger + logging handler + + Returns + ------- + mask_valid_area: np.ndarray + boolean mask of the valid areas + """ + bmap_obj.open() + mask_valid_area = (10 ** (bmap_obj.background_map / 10)) > 0 + num_invalid = mask_valid_area[~mask_valid_area].shape[0] + if num_invalid > 0: + logger.info(msg=f"Number of invalid pixels found in image: {num_invalid}") + return mask_valid_area
+ + + +
+[docs]
+def setReferenceToPeakOfHistogram(*, phase: np.ndarray, vel: np.ndarray, num_bins: int = 100):
+    """Set reference phase value to peak of the velocity histogram.
+
+    It assumes that zero velocity (i.e. stable areas) occurs most frequently.
+
+    Parameters
+    ----------
+    phase: np.ndarray
+        phase time series of the points
+    vel: np.ndarray
+        velocity of the points
+    num_bins: int
+        number of bins for the histogram (default: 100)
+
+    Returns
+    -------
+    phase: np.ndarray
+        phase time series adjusted by the new reference phase
+    """
+    if phase.shape[0] < 40:  # the method will not give meaningful results if too few points are available
+        num_bins = 10
+
+    # find most frequent velocity
+    hist, bin_edges = np.histogram(vel, bins=num_bins, density=True)
+    max_idx = np.argmax(hist)
+
+    # find a set of points which have the most frequent velocity
+    mask = (vel >= bin_edges[max_idx]) & (vel < bin_edges[max_idx + 1])
+
+    # determine reference phase from mean of the phase time series of the selected points
+    ref_phase = np.mean(phase[mask, :], axis=0)
+
+    # adjust the phases by the reference phase
+    phase -= ref_phase
+
+    return phase
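+
+# Example (sketch): 80 of 100 synthetic points are stable (zero velocity), so
+# the histogram peak selects them and the mean of their phase time series
+# becomes the new reference phase.
+vel = np.concatenate([np.zeros(80), 0.01 * np.ones(20)])
+phase = 0.1 * np.random.randn(100, 30).astype(np.float32)
+phase = setReferenceToPeakOfHistogram(phase=phase, vel=vel, num_bins=20)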
+ + + +
+[docs] +def checkIfRequiredFilesExist(*, path_to_files: str, required_files: list, logger: Logger): + """ + Check if all required files exist from previous processing steps. + + Parameters + ---------- + path_to_files: str + path to the files + required_files: list + list of required files which are all checked + logger: Logger + logging handler + + Raises + ------ + FileNotFoundError + if a required file is missing + """ + # loop over all required files and check if they exist, if not: raise error + for file in required_files: + if not exists(join(path_to_files, file)): + logger.error(f"File from previous step(s) is missing: {file}.") + raise FileNotFoundError
+ +
+ +
+
+
+
+ +
+
+ + + + \ No newline at end of file diff --git a/doc/_modules/sarvey/viewer.html b/doc/_modules/sarvey/viewer.html new file mode 100644 index 0000000..88b71fa --- /dev/null +++ b/doc/_modules/sarvey/viewer.html @@ -0,0 +1,679 @@ + + + + + + + sarvey.viewer — SARvey 1.0.0 documentation + + + + + + + + + + + + + +
+
+
+
+ +

Source code for sarvey.viewer

+#!/usr/bin/env python
+
+# SARvey - A multitemporal InSAR time series tool for the derivation of displacements.
+#
+# Copyright (C) 2021-2024 Andreas Piter (IPI Hannover, piter@ipi.uni-hannover.de)
+#
+# This software was developed together with FERN.Lab (fernlab@gfz-potsdam.de) in the context
+# of the SAR4Infra project with funds of the German Federal Ministry for Digital and
+# Transport and contributions from Landesamt fuer Vermessung und Geoinformation
+# Schleswig-Holstein and Landesbetrieb Strassenbau und Verkehr Schleswig-Holstein.
+#
+# This program is free software: you can redistribute it and/or modify it under
+# the terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# Important: This package uses PyMaxFlow. The core of PyMaxflows library is the C++
+# implementation by Vladimir Kolmogorov. It is also licensed under the GPL, but it REQUIRES that you
+# cite [BOYKOV04] (see LICENSE) in any resulting publication if you use this code for research purposes.
+# This requirement extends to SARvey.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with this program. If not, see <https://www.gnu.org/licenses/>.
+
+"""Viewer Module for SARvey."""
+import os
+from typing import Any
+from logging import Logger
+import matplotlib.cm as cm
+import matplotlib.patches as patches
+import matplotlib.pyplot as plt
+from matplotlib import colormaps, widgets
+from matplotlib.backend_bases import MouseButton
+from matplotlib.colors import Normalize
+import numpy as np
+from scipy.spatial import KDTree
+import datetime
+
+from mintpy.objects.colors import ColormapExt
+from mintpy.utils import readfile
+from mintpy.utils.plot import auto_flip_direction
+
+from sarvey.objects import AmplitudeImage, Points, BaseStack
+import sarvey.utils as ut
+
+
+
+[docs]
+def plotIfgs(*, phase: np.ndarray, coord: np.ndarray, spatial_ref_idx: int = None, ttl: str = None, cmap: str = "cmy"):
+    """Plot one interferogram per subplot.
+
+    Parameters
+    ----------
+    phase: np.ndarray
+        phase per point and ifg, e.g. wrapped or unwrapped phase (dim: no. psPoints x no. ifgs)
+    coord: np.ndarray
+        coordinates of the psPoints, e.g. pixel or lat lon (dim: no. psPoints x 2)
+    spatial_ref_idx: int
+        index of the spatial reference point (default: None)
+    ttl: str
+        title for the figure (default: None)
+    cmap: str
+        colormap, use "cmy" for wrapped phase data (default) or any other matplotlib colormap name
+        (e.g. "jet") for unwrapped or residual phase
+    """
+    if cmap == "cmy":
+        cmap = ColormapExt('cmy').colormap
+    else:
+        cmap = plt.get_cmap(cmap)
+
+    num_ifgs = phase.shape[1]
+    min_val = np.min(phase)
+    max_val = np.max(phase)
+    fig, axs = plt.subplots(np.ceil(np.sqrt(num_ifgs + 1)).astype(np.int32),
+                            np.ceil(np.sqrt(num_ifgs + 1)).astype(np.int32))
+    sc = None
+    for i, ax in enumerate(axs.flat):
+        if i < num_ifgs:
+            sc = ax.scatter(coord[:, 1], coord[:, 0], c=phase[:, i],
+                            vmin=min_val, vmax=max_val, s=1, cmap=cmap)
+            ax.axes.set_xticks([])
+            ax.axes.set_yticks([])
+            if spatial_ref_idx is not None:
+                ax.plot(coord[spatial_ref_idx, 1],
+                        coord[spatial_ref_idx, 0], 'k*')
+        elif i == num_ifgs:
+            plt.colorbar(sc, cax=ax)
+        else:
+            ax.set_visible(False)
+    if ttl is not None:
+        fig.suptitle(ttl)
+ + + +
+[docs]
+def plotScatter(*, value: np.ndarray, coord: np.ndarray, bmap_obj: AmplitudeImage = None, ttl: str = None,
+                unit: str = None, s: float = 5.0, cmap: colormaps = colormaps["jet_r"], symmetric: bool = False,
+                logger: Logger, **kwargs: Any):
+    """Plot a scatter map for given value.
+
+    Parameters
+    ----------
+    value: np.ndarray
+        value to be plotted per point giving the colour of the point (dim: no. points x 1)
+    coord: np.ndarray
+        coordinates of the points, e.g. radar or lat lon (dim: no. points x 2). If bmap_obj is given,
+        the coordinates must be radar coordinates!
+    bmap_obj: AmplitudeImage
+        instance of AmplitudeImage for plotting the background image (default: None)
+    ttl: str
+        title for the figure (default: None)
+    unit: str
+        unit as title for the colorbar axis (default: None)
+    s: float
+        size of the scatter points (default: 5.0)
+    cmap: Colormap
+        matplotlib colormap object or name (default: colormaps["jet_r"])
+    symmetric: bool
+        plot symmetric colormap extend, i.e. abs(vmin) == abs(vmax) (default: False)
+    logger: Logger
+        logging handler
+    kwargs: Any
+        additional keyword arguments for scatter plot
+
+    Returns
+    -------
+    fig: plt.Figure
+        current figure
+    ax: plt.Axes
+        current axis
+    cb: plt.colorbar
+        current colorbar
+    """
+    if bmap_obj is not None:
+        ax = bmap_obj.plot(logger=logger)
+        fig = plt.gcf()
+    else:
+        fig = plt.figure()
+        ax = fig.add_subplot()
+
+    if symmetric:
+        v_range = np.max(np.abs(value.ravel()))
+        sc = ax.scatter(coord[:, 1], coord[:, 0], c=value, s=s, cmap=plt.get_cmap(cmap),
+                        vmin=-v_range, vmax=v_range)
+    else:
+        sc = ax.scatter(coord[:, 1], coord[:, 0], c=value, s=s, cmap=plt.get_cmap(cmap), **kwargs)
+    cb = plt.colorbar(sc, ax=ax, pad=0.03, shrink=0.5)
+    cb.ax.set_title(unit)
+    ax.set_title(ttl)
+    plt.tight_layout()
+    return fig, ax, cb
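+
+# Usage sketch (synthetic values; in SARvey the coordinates usually come from
+# a Points object in radar geometry):
+import logging
+logger = logging.getLogger(__name__)
+coord = np.random.rand(200, 2) * 100        # (azimuth, range) point coordinates
+value = np.random.randn(200)                # e.g. velocity in [mm/year]
+fig, ax, cb = plotScatter(value=value, coord=coord, ttl="Velocity",
+                          unit="[mm/year]", symmetric=True, logger=logger)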
+ + + +
+[docs] +def plotColoredPointNetwork(*, x: np.ndarray, y: np.ndarray, arcs: np.ndarray, val: np.ndarray, ax: plt.Axes = None, + linewidth: float = 2, cmap_name: str = "seismic", clim: tuple = None): + """Plot a network of points with colored arcs. + + Parameters + ---------- + x: np.ndarray + x-coordinates of the points (dim: no. points x 1) + y: np.ndarray + y-coordinates of the points (dim: no. points x 1) + arcs: np.ndarray + indices of the points to be connected (dim: no. arcs x 2) + val: np.ndarray + values for the color of the arcs (dim: no. arcs x 1) + ax: plt.Axes + axis for plotting (default: None) + linewidth: float + line width of the arcs (default: 2) + cmap_name: str + name of the colormap (default: "seismic") + clim: tuple + color limits for the colormap (default: None) + + Returns + ------- + ax: plt.Axes + current axis + cbar: plt.colorbar + current colorbar + """ + if ax is None: + fig = plt.figure(figsize=[15, 5]) + ax = fig.add_subplot() + else: + fig = ax.get_figure() + ax.scatter(x, y, s=3.5, c=np.ones_like(x)) + + if clim is None: + norm = Normalize(vmin=min(val), vmax=max(val)) + else: + norm = Normalize(vmin=clim[0], vmax=clim[1]) + + mapper = cm.ScalarMappable(norm=norm, cmap=cm.get_cmap(cmap_name)) + mapper_list = [mapper.to_rgba(v) for v in val] + for m in range(arcs.shape[0]): + x_val = [x[arcs[m, 0]], x[arcs[m, 1]]] + y_val = [y[arcs[m, 0]], y[arcs[m, 1]]] + + ax.plot(x_val, y_val, linewidth=linewidth, c=mapper_list[m]) + cbar = fig.colorbar(mapper, ax=ax, pad=0.03, shrink=0.5) + + return ax, cbar
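+
+# Usage sketch: visualize arc-wise values (e.g. arc coherence) of a tiny
+# three-point network.
+x = np.array([0.0, 1.0, 2.0])
+y = np.array([0.0, 1.0, 0.5])
+arcs = np.array([[0, 1], [1, 2], [0, 2]])
+val = np.array([0.9, 0.5, 0.7])
+ax, cbar = plotColoredPointNetwork(x=x, y=y, arcs=arcs, val=val, clim=(0.0, 1.0))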
+ + + +
+[docs] +def plotGridFromBoxList(*, box_list: list, ax: plt.Axes = None, edgecolor: str = "k", linewidth: float = 1): + """Plot a grid into an axis. + + Parameters + ---------- + box_list: list + boxes to be plotted. box_list can be created with 'splitImageIntoBoxesRngAz' or 'splitImageIntoBoxes' + ax: plt.Axes + axis for plotting (default: None) + edgecolor: str + edge color for the boxes (default: "k") + linewidth: float + line width for the boxes (default: 1) + + Returns + ------- + ax: plt.Axes + current axis + """ + if ax is None: + fig = plt.figure() + ax = fig.add_subplot() + + for box in box_list: + rect = patches.Rectangle((box[0], box[1]), box[2] - box[0], box[3] - box[1], linewidth=linewidth, + edgecolor=edgecolor, facecolor="none") + ax.add_patch(rect) + return ax
+ + + +
+[docs] +class TimeSeriesViewer: + """TimeSeriesViewer.""" + + def __init__(self, *, point_obj: Points, vel_scale: str = "mm", input_path: str, logger: Logger): + """Init.""" + self.sc = None + self.point_obj = point_obj + self.ts_point_marker = None # for ts point marker + self.ts_point_idx = 0 # index of ts_point + self.ts_refpoint_marker = None # for reference point marker + self.logger = logger + self.ts_refpoint_idx = None # index of reference point + self.vel_scale = vel_scale + scale_dict = {"mm": 1000, "cm": 100, "dm": 10, "m": 1} + if self.vel_scale not in scale_dict.keys(): + raise ValueError(f"Invalid argument: '{self.vel_scale}'") + self.scale = scale_dict[self.vel_scale] + self.tree = KDTree(self.point_obj.coord_xy) + if point_obj.ifg_net_obj.dates is not None: + self.times = [datetime.date.fromisoformat(date) for date in point_obj.ifg_net_obj.dates] + else: # backwards compatible, if ifg_net_obj does not contain dates + self.times = point_obj.ifg_net_obj.tbase + + vel, demerr, ref_atmo, coherence, omega, v_hat = ut.estimateParameters(obj=self.point_obj, ifg_space=False) + self.vel = vel + self.demerr = demerr + self.ref_atmo = ref_atmo + + self.bmap_obj = AmplitudeImage(file_path=os.path.join(os.path.dirname(self.point_obj.file_path), + "background_map.h5")) + self.bmap_obj.open() + self.height = readfile.read(os.path.join(input_path, "geometryRadar.h5"), datasetName='height')[0] + + temp_coh_obj = BaseStack( + file=os.path.join(os.path.dirname(self.point_obj.file_path), "temporal_coherence.h5"), + logger=logger) + self.temp_coh_img = temp_coh_obj.read(dataset_name="temp_coh") + + self.font_size = 10 + plt.rc('font', size=self.font_size) # controls default text size + plt.rc('axes', titlesize=self.font_size) # fontsize of the title + plt.rc('axes', labelsize=self.font_size) # fontsize of the x and y labels + plt.rc('xtick', labelsize=self.font_size) # fontsize of the x tick labels + plt.rc('ytick', labelsize=self.font_size) # fontsize of the y tick labels + plt.rc('legend', fontsize=self.font_size) # fontsize of the legend + + self.initFigureMap() + self.initFigureTimeseries() + self.plotMap(val=None) + self.plotPointTimeseries(val=None) # just any point + self.fig1.canvas.mpl_connect('button_press_event', self.onClick) + plt.show() + +
+[docs] + def initFigureMap(self): + """InitFigureMap.""" + self.fig1 = plt.figure() + self.ax_img = self.fig1.subplots(1, 1) + + self.ax_cb = self.fig1.add_axes([0.93, 0.6, 0.015, 0.15]) # (left, bottom, width, height) + self.cb = self.fig1.colorbar(self.sc, + cax=self.ax_cb, + ax=self.ax_img, + pad=0.03, + shrink=0.8, + aspect=10, + orientation='vertical') + + # add button to select reference point + self.set_reference_point = False + self.ax_button = self.fig1.add_axes([0.125, 0.9, 0.1, 0.08]) # (left, bottom, width, height) + self.button_mask = widgets.Button(ax=self.ax_button, label='Select\nReference', image=None, color='1') + self.button_mask.on_clicked(self.updateButtonStatus) + + # add radiobutton to select parameter + self.ax_radio_par = self.fig1.add_axes([0.225, 0.9, 0.2, 0.08]) # (left, bottom, width, height) + self.rb_par = widgets.RadioButtons(self.ax_radio_par, labels=['Velocity', 'DEM error', 'None'], active=0) + self.rb_par.on_clicked(self.plotMap) + + # add radiobutton to select background image + self.ax_radio_backgr = self.fig1.add_axes([0.425, 0.9, 0.2, 0.08]) # (left, bottom, width, height) + self.rb_backgr = widgets.RadioButtons(self.ax_radio_backgr, labels=['Amplitude', 'DEM', 'Coherence', 'None'], + active=0) + self.rb_backgr.on_clicked(self.plotMap) + + # add info box with info about velocity and DEM error of selected pixel + self.ax_info_box = self.fig1.add_axes([0.625, 0.9, 0.2, 0.08]) # (left, bottom, width, height) + self.text_obj_time = self.ax_info_box.text(0.1, 0.1, "") + self.ax_info_box.set_xticks([], []) + self.ax_info_box.set_yticks([], []) + + # add variable for axis of slider controlling the visualized coherence background image + self.ax_slide_coh = None + self.sl_last_val = 0.0 + self.sl_coh = None
+ + +
+[docs] + def initFigureTimeseries(self): + """InitFigureTimeseries.""" + self.fig2 = plt.figure(figsize=(15, 5)) + self.ax_ts = self.fig2.subplots(1, 1) + + # add radiobutton for fitting linear model + self.ax_radio_fit = self.fig2.add_axes([0.125, 0.9, 0.2, 0.08]) # (left, bottom, width, height) + self.rb_fit = widgets.RadioButtons(self.ax_radio_fit, labels=['None', 'Linear fit'], active=0) + + # add radiobutton for selecting baseline type + self.ax_radio_baselines = self.fig2.add_axes([0.325, 0.9, 0.2, 0.08]) # (left, bottom, width, height) + self.rb_baselines = widgets.RadioButtons( + self.ax_radio_baselines, + labels=['Temporal baseline', 'Perpendicular baseline'], + active=0 + ) + + # add check box for removing phase due to parameters + self.ax_cbox_par = self.fig2.add_axes([0.525, 0.9, 0.2, 0.08]) # (left, bottom, width, height) + self.cbox_par = widgets.CheckButtons( + self.ax_cbox_par, + ["Velocity", "DEM error"], + actives=[True, False] + ) + self.rb_fit.on_clicked(self.plotPointTimeseries) + self.rb_baselines.on_clicked(self.plotPointTimeseries) + self.cbox_par.on_clicked(self.plotPointTimeseries)
+ + +
+[docs]
+    def plotMap(self, val: object):  # val seems to be unused, but it's necessary for the function to work.
+        """Plot velocity map and time series."""
+        flag_initial_plot = (0.0, 1.0) == self.ax_img.get_xlim()
+        ax_img_xlim = None
+        ax_img_ylim = None
+        if not flag_initial_plot:
+            ax_img_xlim = self.ax_img.get_xlim()
+            ax_img_ylim = self.ax_img.get_ylim()
+
+        self.ax_img.cla()
+
+        # get selected background from radiobutton
+        if self.rb_backgr.value_selected == "Amplitude":
+            self.ax_img = self.bmap_obj.plot(ax=self.ax_img, logger=self.logger)
+            if self.ax_slide_coh is not None:
+                self.sl_last_val = self.sl_coh.val
+                self.ax_slide_coh.remove()
+                self.ax_slide_coh = None
+        if self.rb_backgr.value_selected == "DEM":
+            self.ax_img.imshow(self.height, cmap=ColormapExt('DEM_print').colormap)
+            meta = {"ORBIT_DIRECTION": self.bmap_obj.orbit_direction}
+            auto_flip_direction(meta, ax=self.ax_img, print_msg=False)
+            self.ax_img.set_xlabel("Range")
+            self.ax_img.set_ylabel("Azimuth")
+            if self.ax_slide_coh is not None:
+                self.sl_last_val = self.sl_coh.val
+                self.ax_slide_coh.remove()
+                self.ax_slide_coh = None
+        if self.rb_backgr.value_selected == "Coherence":
+            if self.ax_slide_coh is None:
+                # add slider to change value of coherence for background map
+                self.ax_slide_coh = self.fig1.add_axes([0.425, 0.85, 0.2, 0.03])  # (left, bottom, width, height)
+                self.sl_coh = widgets.Slider(self.ax_slide_coh,
+                                             label='Coherence',
+                                             valmin=0.0,
+                                             valmax=1.0,
+                                             valinit=self.sl_last_val,
+                                             valfmt="%.1f")
+
+            self.ax_img.imshow(self.temp_coh_img,
+                               cmap=plt.get_cmap("gray"),
+                               vmin=np.round(self.sl_coh.val, decimals=1),
+                               vmax=1)
+            meta = {"ORBIT_DIRECTION": self.bmap_obj.orbit_direction}
+            auto_flip_direction(meta, ax=self.ax_img, print_msg=False)
+            self.ax_img.set_xlabel("Range")
+            self.ax_img.set_ylabel("Azimuth")
+        if self.rb_backgr.value_selected == "None":
+            self.ax_img.imshow(np.ones_like(self.height, dtype=np.int8), cmap=plt.get_cmap("gray"), vmin=0, vmax=1)
+            meta = {"ORBIT_DIRECTION": self.bmap_obj.orbit_direction}
+            auto_flip_direction(meta, ax=self.ax_img, print_msg=False)
+            self.ax_img.set_xlabel("Range")
+            self.ax_img.set_ylabel("Azimuth")
+            if self.ax_slide_coh is not None:
+                self.sl_last_val = self.sl_coh.val
+                self.ax_slide_coh.remove()
+                self.ax_slide_coh = None
+
+        par = None
+        v_range = None
+        cb_ttl = ""
+        if self.rb_par.value_selected == "Velocity":  # show velocity
+            v_range = np.max(np.abs(self.vel * self.scale))
+            par = self.vel * self.scale
+            cb_ttl = f"[{self.vel_scale}/\nyear]"
+        elif self.rb_par.value_selected == "DEM error":  # show demerr
+            v_range = np.max(np.abs(self.demerr))
+            par = self.demerr
+            cb_ttl = "[m]"
+
+        if self.rb_par.value_selected != "None":
+            self.sc = self.ax_img.scatter(self.point_obj.coord_xy[:, 1],
+                                          self.point_obj.coord_xy[:, 0],
+                                          c=par,
+                                          s=5,
+                                          cmap=colormaps["jet_r"],
+                                          vmin=-v_range,
+                                          vmax=v_range)
+
+        self.cb.ax.set_title(cb_ttl, fontsize=self.font_size)
+        self.cb = self.fig1.colorbar(self.sc, cax=self.ax_cb, ax=self.ax_img, pad=0.03, shrink=0.8, aspect=10,
+                                     orientation='vertical')
+
+        # add back the location of the selected point and the current reference point
+        if self.ts_refpoint_idx is not None:  # initial value is None
+            y, x = self.point_obj.coord_xy[self.ts_refpoint_idx, :]
+            self.ts_refpoint_marker = self.ax_img.scatter(x, y, marker='^', facecolors='none', edgecolors='k')
+
+        y, x = self.point_obj.coord_xy[self.ts_point_idx, :]
+        self.ts_point_marker = self.ax_img.scatter(x, y, facecolors='none', edgecolors='k')
+
+        if not flag_initial_plot:
+            self.ax_img.set_xlim(ax_img_xlim)
+            self.ax_img.set_ylim(ax_img_ylim)
+
+        plt.draw()
+ + +
+[docs]
+    def updateButtonStatus(self, val: object):  # val seems to be unused, but it's necessary for the function to work.
+        """Toggle the reference point selection mode."""
+        if self.set_reference_point:
+            self.set_reference_point = False
+            self.button_mask.color = '1'
+        else:
+            self.set_reference_point = True
+            self.button_mask.color = '0.5'
+ + +
+[docs] + def onClick(self, event): + """Event function to get y/x from button press.""" + if event.inaxes is None: + return + + if not plt.fignum_exists(self.fig2.number): + self.initFigureTimeseries() + plt.show() + + if event.button is MouseButton.RIGHT: + if event.inaxes == self.ax_img: + y, x = int(event.ydata + 0.5), int(event.xdata + 0.5) + idx = self.tree.query([y, x])[-1] + y, x = self.point_obj.coord_xy[idx, :] + + if self.set_reference_point: # update reference point + self.ts_refpoint_idx = idx + self.updateReference() + self.updateButtonStatus(val=None) + # if self.ts_refpoint_marker is not None: # initial value is None + # self.ts_refpoint_marker.remove() + # self.ts_refpoint_marker = self.ax_img.scatter(x, y, marker='^', facecolors='none', edgecolors='k') + else: + self.ts_point_idx = idx + + if self.ts_point_marker is not None: # initial value is None + self.ts_point_marker.remove() + y, x = self.point_obj.coord_xy[self.ts_point_idx, :] + self.ts_point_marker = self.ax_img.scatter(x, y, facecolors='none', edgecolors='k') + self.plotPointTimeseries(val=None) + return
+ + +
+[docs] + def updateReference(self): + """Change the phase of all points according to the new reference point. + + Update the plot of the velocity and time series. + """ + self.logger.info(msg="changed reference to ID: {}".format(self.point_obj.point_id[self.ts_refpoint_idx])) + self.point_obj.phase -= self.point_obj.phase[self.ts_refpoint_idx, :] + vel, demerr, ref_atmo, coherence, omega, v_hat = ut.estimateParameters(obj=self.point_obj, ifg_space=False) + self.vel = vel + self.demerr = demerr + self.ref_atmo = ref_atmo + self.plotMap(val=None)
+ + +
+[docs]
+    def plotPointTimeseries(self, val: object):  # val seems to be unused, but it's necessary for the function to work.
+        """Plot the time series of the currently selected point."""
+        self.ax_ts.cla()
+
+        # transform phase time series into meters
+        resulting_ts = self.point_obj.wavelength / (4 * np.pi) * self.point_obj.phase[self.ts_point_idx, :]
+        cbox_status = self.cbox_par.get_status()
+        if not cbox_status[0]:  # Displacement
+            resulting_ts = resulting_ts - self.point_obj.ifg_net_obj.tbase * self.vel[self.ts_point_idx]
+        if not cbox_status[1]:  # DEM error
+            phase_topo = (self.point_obj.ifg_net_obj.pbase / (self.point_obj.slant_range[self.ts_point_idx] *
+                          np.sin(self.point_obj.loc_inc[self.ts_point_idx])) *
+                          self.demerr[self.ts_point_idx])
+            resulting_ts = resulting_ts - phase_topo
+
+        self.ax_ts.set_ylabel(f"Displacement [{self.vel_scale}]")
+
+        # add trend
+        if self.rb_fit.value_selected == "Linear fit":
+            if self.rb_baselines.value_selected == "Temporal baseline":
+                line = self.point_obj.ifg_net_obj.tbase * self.vel[self.ts_point_idx] + self.ref_atmo[self.ts_point_idx]
+                self.ax_ts.plot(self.times, line * self.scale, '-k')
+            elif self.rb_baselines.value_selected == "Perpendicular baseline":
+                line = (self.point_obj.ifg_net_obj.pbase / (self.point_obj.slant_range[self.ts_point_idx] *
+                        np.sin(self.point_obj.loc_inc[self.ts_point_idx])) *
+                        self.demerr[self.ts_point_idx] + self.ref_atmo[self.ts_point_idx])
+                self.ax_ts.plot(self.point_obj.ifg_net_obj.pbase, line * self.scale, '-k')
+
+        # set y-lim to [-20, 20] mm except if it exceeds this scale
+        y_max = max([0.02, resulting_ts.max() + 0.005])
+        y_min = min([-0.02, resulting_ts.min() - 0.005])
+
+        self.ax_ts.set_ylim(y_min * self.scale, y_max * self.scale)
+        if self.rb_baselines.value_selected == "Temporal baseline":
+            self.ax_ts.plot(self.times, resulting_ts * self.scale, '.')
+            self.ax_ts.set_xlabel("Time [years]")
+        if self.rb_baselines.value_selected == "Perpendicular baseline":
+            self.ax_ts.plot(self.point_obj.ifg_net_obj.pbase, resulting_ts * self.scale, '.')
+            self.ax_ts.set_xlabel("Perpendicular Baseline [m]")
+
+        self.text_obj_time.remove()
+        point_info = "DEM error: {:.0f} m\nVelocity: {:.0f} {:s}/year".format(
+            self.demerr[self.ts_point_idx],
+            self.vel[self.ts_point_idx] * self.scale,
+            self.vel_scale,
+        )
+        self.text_obj_time = self.ax_info_box.text(0.5, 0.5, point_info, ha='center', va='center')
+
+        # update figure
+        self.fig1.canvas.draw()
+        self.fig2.canvas.draw()
+
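+
+# Usage sketch (assumption: file names follow the SARvey output conventions
+# shown in the documentation; the exact loading API of 'Points' may differ):
+# import logging
+# logger = logging.getLogger(__name__)
+# point_obj = Points(file_path="outputs/p2_coh80_ts.h5", logger=logger)
+# point_obj.open(input_path="inputs/")
+# TimeSeriesViewer(point_obj=point_obj, input_path="inputs/", logger=logger)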
+ +
+ +
+
+
+
+ +
+
+
+
+
+
\ No newline at end of file
diff --git a/doc/_sources/authors.rst.txt b/doc/_sources/authors.rst.txt
new file mode 100644
index 0000000..e122f91
--- /dev/null
+++ b/doc/_sources/authors.rst.txt
@@ -0,0 +1 @@
+.. include:: ../AUTHORS.rst
diff --git a/doc/_sources/contributing.rst.txt b/doc/_sources/contributing.rst.txt
new file mode 100644
index 0000000..e582053
--- /dev/null
+++ b/doc/_sources/contributing.rst.txt
@@ -0,0 +1 @@
+.. include:: ../CONTRIBUTING.rst
diff --git a/doc/_sources/demo/demo_masjed_dam.rst.txt b/doc/_sources/demo/demo_masjed_dam.rst.txt
new file mode 100644
index 0000000..7648286
--- /dev/null
+++ b/doc/_sources/demo/demo_masjed_dam.rst.txt
@@ -0,0 +1,44 @@
+.. _demo_masjed_dam:
+
+
+Masjed Soleyman dam
+-------------------
+
+This tutorial focuses on measuring the post-construction settlement of the `Masjed Soleyman Dam `_, a rock-fill dam on the Karun river, opened in 2002. Previous investigations using GNSS and high-resolution TerraSAR-X data, as detailed in `Emadali et al., 2017 `_, have shown post-construction settlement of the dam. TerraSAR-X data indicates that the dam undergoes a maximum deformation rate of 13 cm/year in the radar line-of-sight.
+
+
+Dataset
+^^^^^^^
+
+The dataset used in this tutorial is a **Sentinel-1** stack of 100 images. The details are provided in the table below.
+
+
++------------------------+-------------------------------------+
+| Number of SLC images   | 100                                 |
++------------------------+-------------------------------------+
+| Start date             | 2015-01-05                          |
++------------------------+-------------------------------------+
+| End date               | 2018-09-04                          |
++------------------------+-------------------------------------+
+| Sensor                 | Sentinel-1                          |
++------------------------+-------------------------------------+
+| Orbit direction        | Descending                          |
++------------------------+-------------------------------------+
+| InSAR processor        | GAMMA                               |
++------------------------+-------------------------------------+
+
+
+There are two tutorials for this demo dataset: one with a comprehensive description for beginners, and one with minimal description for advanced users.
+
+.. toctree::
+    :maxdepth: 1
+    :caption: Tutorials:
+
+    demo_masjed_dam_detailed_guide.rst
+    demo_masjed_dam_fast_track.rst
+
+
+Literature
+^^^^^^^^^^
+
+* Emadali L, Motagh M, Haghighi MH (2017). Characterizing post-construction settlement of the Masjed-Soleyman embankment dam, Southwest Iran, using TerraSAR-X SpotLight radar imagery. Engineering Structures 143:261-273, DOI 10.1016/j.engstruct.2017.04.009. `Link to paper. `_
diff --git a/doc/_sources/demo/demo_masjed_dam_detailed_guide.rst.txt b/doc/_sources/demo/demo_masjed_dam_detailed_guide.rst.txt
new file mode 100644
index 0000000..a43f624
--- /dev/null
+++ b/doc/_sources/demo/demo_masjed_dam_detailed_guide.rst.txt
@@ -0,0 +1,353 @@
+.. _demo_masjed_dam_detailed_guide:
+
+Detailed Guide for Masjed Soleyman Dam
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+This tutorial provides a comprehensive guide to SARvey processing. If you are an advanced user, you can proceed directly to the :ref:`fast track for advanced users `.
+
+.. note::
+
+    This instruction is based on SARvey version 1.0.0 (Strawberry Pie). Newer versions may differ slightly.
+
+Step 1: Before Running SARvey
+"""""""""""""""""""""""""""""
+
+Step 1.1: Download the Data
+"""""""""""""""""""""""""""
+
+Download the data by running the following command in the console:
+
+.. 
code-block:: bash + + wget https://zenodo.org/records/12189041/files/SARvey_input_data_Masjed_Soleyman_dam_S1_dsc_2015_2018.zip + + +Unzip the downloaded file and change the directory. + +.. code-block:: bash + + unzip SARvey_input_data_Masjed_Soleyman_dam_S1_dsc_2015_2018.zip + cd SARvey_input_data_Masjed_Soleyman_dam_S1_dsc_2015_2018 + + +Check the downloaded data using `info.py` and `view.py`. For example: + +.. code-block:: bash + + info.py inputs/slcStack.h5 + +.. code-block:: bash + + view.py inputs/geometryRadar.h5 + + +Step 1.2: Activate SARvey and Change Directory +""""""""""""""""""""""""""""""""""""""""""""""" + +If you have not installed SARvey, refer to the `installation instructions `_. Activate the SARvey environment: + +.. code-block:: bash + + conda activate sarvey + +Ensure SARvey can be called from the console. + +.. code-block:: bash + + sarvey -h + +If you see the following command, it indicates that SARvey cannot be called. Ensure it is installed correctly and the conda environment is activated. + +.. code-block:: none + + command not found: sarvey + +Step 1.3: Create a Config File +"""""""""""""""""""""""""""""" + +Create a config file, which is a JSON file containing the parameters for `sarvey`. The config file can be created using the following command: + +.. code-block:: bash + + sarvey -f config.json 0 0 -g + +Note: The above command only generates a configuration file. Although step 0 is specified, it will not be executed. + +Step 1.4: Modify the config.json File +""""""""""""""""""""""""""""""""""""" + +1.4.1. Open the config.json file and check the parameters. The first parameters to specify in the config file are **input_path** and **output_path**. For this example dataset, the `slcStack.h5` and `geometryRadar.h5` files are in the `inputs/` directory, which is the default value in the config file. Therefore, you do not need to change it. The **output_path** should be `outputs/` for this example. + +.. code-block:: json + + { + "general": { + "input_path": "inputs/", + "output_path": "outputs/" + } + // other parameters + } + +1.4.2. Specify the **num_cores**. You can check the number of cores on your computer using the following commands. + +In Linux, run: + +.. code-block:: bash + + nproc --all + +In MacOS, run: + +.. code-block:: bash + + sysctl -n hw.ncpu + +It is a good practice to specify a number lower than the number of available cores in the config file. + +.. code-block:: json + + { + // other parameters + "general": { + "num_cores": 5, + // other parameters + }, + //other parameters + } + + + +Step 2: Running SARvey +"""""""""""""""""""""" + +SARvey consists of five steps as detailed in :ref:`processing`. You can run all steps by specifying starting step `0` and ending step `4`. In this tutorial, however, we will run the steps separately as follows. + +When running `sarvey`, if it finishes normally, you will see a message like the following in the command line: + +.. code-block:: none + + 2024-06-19 11:05:10,305 - INFO - MTI finished normally. + +.. note:: + If you encounter an error, first read all the prompts in the console and carefully track all error and warning messages. If the issue is not clear from the console messages, check the log files stored in the directory specified in the config file. If the error persists and you need assistance, sharing the corresponding log file will help. 
+
+
+Step 2.0: Run Step 0 of SARvey: Preparation
+'''''''''''''''''''''''''''''''''''''''''''
+
+The first step creates an interferogram network and calculates the temporal coherence for all pixels. Run the following command:
+
+.. code-block:: bash
+
+    sarvey -f config.json 0 0
+
+In the command line, you will see a list of parameters used by SARvey to run step 0. All parameters that have been changed from the default are indicated:
+
+.. code-block:: none
+
+    ...
+    2024-06-19 11:04:28,137 - INFO - Parameter                      value          default
+    2024-06-19 11:04:28,137 - INFO - _________                      _____          _______
+    2024-06-19 11:04:28,138 - INFO - num_cores                      5         <--- 50
+    2024-06-19 11:04:28,138 - INFO - num_patches                    1              1
+    2024-06-19 11:04:28,138 - INFO - apply_temporal_unwrapping      True           True
+    2024-06-19 11:04:28,138 - INFO - spatial_unwrapping_method      puma           puma
+    2024-06-19 11:04:28,138 - INFO -
+    2024-06-19 11:04:28,138 - INFO - ---------------------------------------------------------------------------------
+    2024-06-19 11:04:28,138 - INFO - STEP 0: PREPARATION
+    2024-06-19 11:04:28,138 - INFO - ---------------------------------------------------------------------------------
+    2024-06-19 11:04:28,138 - INFO - Parameter                      value          default
+    2024-06-19 11:04:28,139 - INFO - _________                      _____          _______
+    2024-06-19 11:04:28,139 - INFO - start_date                     None           None
+    2024-06-19 11:04:28,139 - INFO - end_date                       None           None
+    2024-06-19 11:04:28,139 - INFO - ifg_network_type               sb        <--- delaunay
+    2024-06-19 11:04:28,139 - INFO - num_ifgs                       3              3
+    2024-06-19 11:04:28,139 - INFO - max_tbase                      100            100
+    2024-06-19 11:04:28,139 - INFO - filter_window_size             9              9
+    ...
+
+After running this step, the output directory (`outputs/` in this tutorial, as specified by **output_path** in the config file) is created. Inside this directory, you can find the following files:
+
+.. code-block:: none
+
+    outputs/
+    ├── temporal_coherence.h5
+    ├── ifg_stack.h5
+    ├── ifg_network.h5
+    ├── coordinates_utm.h5
+    ├── config.json
+    ├── background_map.h5
+    └── pic/
+        ├── step_0_temporal_phase_coherence.png
+        ├── step_0_interferogram_network.png
+        └── step_0_amplitude_image.png
+
+
+Check the PNG files inside the `outputs/pic` directory and ensure the amplitude image, interferogram network, and temporal coherence look fine. If you are not satisfied with the interferogram network, you can modify the corresponding parameters in the `config.json` file and run step 0 again.
+
+Use the following command to plot the interferograms:
+
+.. code-block:: bash
+
+    sarvey_plot outputs/ifg_stack.h5 -i
+
+This command creates the interferograms as PNG files in the following directory:
+
+.. code-block:: none
+
+    outputs/
+    └── pic/
+        └── ifgs/
+            ├── 0_ifg.png
+            ├── 1_ifg.png
+            └── ...
+
+Check the interferograms one by one and ensure they look reasonable. In various interferograms, there are fringes associated with deformation, approximately at range 100-200 and azimuth 40-60.
+
+
+Step 2.1: Run Step 1 of SARvey
+''''''''''''''''''''''''''''''
+
+.. code-block:: bash
+
+    sarvey -f config.json 1 1
+
+Outputs of this step are:
+
+.. code-block:: none
+
+    outputs/
+    ├── point_network.h5
+    ├── p1_ifg_wr.h5
+    ├── point_network_parameter.h5
+    └── pic/
+        ├── selected_pixels_temp_coh_0.8.png
+        ├── step_1_mask_p1.png
+        ├── step_1_arc_coherence.png
+        ├── step_1_arc_coherence_reduced.png
+        ├── step_1_rmse_vel_0th_iter.png
+        └── step_1_rmse_dem_error_0th_iter.png
+
+
+Step 2.2: Run Step 2 of SARvey
+''''''''''''''''''''''''''''''
+
+.. code-block:: bash
+
+    sarvey -f config.json 2 2
+
+
+Outputs of this step are:
+
+.. 
code-block:: none + + outputs/ + ├── p1_ifg_unw.h5 + ├── p1_ts.h5 + └── pic/ + ├── step_2_estimation_dem_error.png + └── step_2_estimation_velocity.png + +Step 2.3: Run Step 3 of SARvey +'''''''''''''''''''''''''''''' + +.. code-block:: bash + + sarvey -f config.json 3 3 + + +Outputs of this step are: + +.. code-block:: none + + outputs/ + ├── p2_coh80_ifg_wr.h5 + ├── p2_coh80_aps.h5 + ├── p1_aps.h5 + ├── p1_ts_filt.h5 + └── pic/ + ├── step_3_temporal_autocorrelation.png + ├── step_3_stable_points.png + ├── selected_pixels_temp_coh_0.8.png + └── step_3_mask_p2_coh80.png + + +Step 2.4: Run Step 4 of SARvey +'''''''''''''''''''''''''''''' + +.. code-block:: bash + + sarvey -f config.json 4 4 + +.. outputs directory structure to be added + + +The results of step 4 of SARvey, including the time series, are stored in the `p2_coh80_ts.h5` file. The file is named based on the `coherence_p2` parameter in the config.json file. + + +Step 3: Plot Time Series Results +"""""""""""""""""""""""""""""""" + +Check the instruction on how to use the `sarvey_plot`. + +.. code-block:: bash + + sarvey_plot -h + + +Plot the time series using the following command. Flag `-t` indicates that you want to plot the time series. + +.. code-block:: bash + + sarvey_plot outputs/p2_coh80_ts.h5 -t + + +You can visualize velocity and DEM error estimation of second-order points. You can also visualize amplitude, DEM, or temporal coherence as the background. Right-click on any point to see its time series. As you will see in the plot, the density of measurement points on the dam is relatively low. In the next section, you will learn how to modify the config file to increase the density of points. + + +Step 4: Modify Config File and Rerun SARvey +""""""""""""""""""""""""""""""""""""""""""" + +Modify the config.json file and change **coherence_p2** from 0.8 to 0.7. + +Run steps 3 and 4 using the following command: + +.. code-block:: bash + + sarvey -f config.json 3 4 + + +A new file `p2_coh70_ts.h5` is created. You can now visualize this file that has a higher point density. + +.. code-block:: bash + + sarvey_plot outputs/p2_coh70_ts.h5 -t + + +.. note:: + Be cautious that reducing the value of **coherence_p2** too much may include noisy points of low quality in the analysis, potentially leading to poor final results. + + You should carefully read the :ref:`processing` documentation to understand the meaning of each parameter and carefully choose reasonable values. You should also check the details of all parameters using the -p flag in `sarvey` and decide how to tune them. + +.. code-block:: bash + + sarvey -f config.json 0 0 -p + + +Step 5: Export to GIS Format +"""""""""""""""""""""""""""" + +Export the data to Shapefiles using the following command: + +.. code-block:: bash + + sarvey_export outputs/p2_coh70_ts.h5 -o outputs/shp/p2_coh70_ts.shp + +You can open the exported data in any GIS software. If you use QGIS, you can use the `PS Time Series Viewer `_ plugin to draw the time series. + + +Step 6: Validate Your Results +""""""""""""""""""""""""""""" + +You can download a copy of the final SARvey products from `this link `_. Use these files to compare your results and ensure everything worked correctly. + diff --git a/doc/_sources/demo/demo_masjed_dam_fast_track.rst.txt b/doc/_sources/demo/demo_masjed_dam_fast_track.rst.txt new file mode 100644 index 0000000..7565eb0 --- /dev/null +++ b/doc/_sources/demo/demo_masjed_dam_fast_track.rst.txt @@ -0,0 +1,92 @@ +.. 
_demo_masjed_dam_fast_track:
+
+Fast Track Guide for Masjed Soleyman Dam
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+If you are an advanced user, you can proceed with this fast track tutorial. If you prefer a more detailed, step-by-step guide, please refer to the :ref:`detailed guide ` for this example.
+
+.. note::
+
+    These instructions are based on SARvey version 1.0.0 (Strawberry Pie). Newer versions may differ slightly.
+
+
+Download the Data
+"""""""""""""""""
+
+In this tutorial, a processed stack of data is provided. If you wish to generate data for other areas, please refer to the :ref:`preparation` section.
+
+.. code-block:: bash
+
+    wget https://zenodo.org/records/12189041/files/SARvey_input_data_Masjed_Soleyman_dam_S1_dsc_2015_2018.zip
+    unzip SARvey_input_data_Masjed_Soleyman_dam_S1_dsc_2015_2018.zip
+    cd SARvey_input_data_Masjed_Soleyman_dam_S1_dsc_2015_2018
+
+
+Activate SARvey environment
+"""""""""""""""""""""""""""
+
+.. code-block:: bash
+
+    conda activate sarvey
+
+
+Create a Config File
+""""""""""""""""""""
+
+.. code-block:: bash
+
+    sarvey -f config.json 0 0 -g
+
+Specify parameters in the config file. Set a reasonable value for **num_cores**.
+
+Run **SARvey**
+""""""""""""""
+
+You can run each step individually or a range of steps by specifying the first and last step.
+
+.. code-block:: bash
+
+    sarvey -f config.json 0 4
+
+Check Outputs
+"""""""""""""
+
+First, check the output snapshots in the `outputs/pic` directory. You can also use **`sarvey_plot`** to plot various products to assess the quality of the results and decide how to adjust parameters. Modify the parameters in the config file and rerun the corresponding steps of `sarvey` to improve the results. For instance, changing **`coherence_p2`** from 0.8 to 0.7 and rerunning steps 3 and 4 can increase the density of the final set of points. However, be cautious that reducing the value too much may include noisy points of low quality in the analysis, potentially leading to poor final results. You can check the details of all parameters using the -p flag in `sarvey` and decide how to tune them. For more explanations, please refer to :ref:`processing`.
+
+
+
+Plot Time Series Results
+""""""""""""""""""""""""
+
+The final products, including the time series, are stored in the coh\*\*_ts.h5 file. The file is named based on the coherence_p2 parameter you used. Plot the time series using the following command:
+
+.. code-block:: bash
+
+    sarvey_plot outputs/p2_coh80_ts.h5 -t
+
+You can visualize velocity and DEM error estimation of second-order points. You can also visualize amplitude, DEM, or temporal coherence as the background. Right-click on any point to see its time series.
+
+.. description of time series options to be added.
+
+
+
+
+Export to GIS Format
+""""""""""""""""""""
+
+Export the data to Shapefiles using the following command.
+
+
+.. code-block:: bash
+
+    sarvey_export outputs/p2_coh80_ts.h5 -o outputs/shp/p2_coh80_ts.shp
+
+You can visualize the data in any GIS software. If you use QGIS, you can use the `PS Time Series Viewer `_ plugin to draw the time series.
+
+
+
+Validate Your Results
+"""""""""""""""""""""
+
+You can download a copy of the final SARvey products from `this link `_. Use these files to compare your results and ensure everything worked correctly.
+
diff --git a/doc/_sources/demo_datasets.rst.txt b/doc/_sources/demo_datasets.rst.txt
new file mode 100644
index 0000000..4affd08
--- /dev/null
+++ b/doc/_sources/demo_datasets.rst.txt
@@ -0,0 +1,17 @@
+.. 
_example_datasets: + +============= +Demo Datasets +============= + +Several demo datasets are available to help you learn how to perform SARvey processing effectively. + +.. note:: + The demo datasets and instructions provided serve as a practical guide for using SARvey. They do not cover all the software details or offer the best processing strategies for every specific dataset. + +.. toctree:: + :maxdepth: 1 + :caption: Demo Datasets: + + demo/demo_masjed_dam.rst + diff --git a/doc/_sources/history.rst.txt b/doc/_sources/history.rst.txt new file mode 100644 index 0000000..2506499 --- /dev/null +++ b/doc/_sources/history.rst.txt @@ -0,0 +1 @@ +.. include:: ../HISTORY.rst diff --git a/doc/_sources/index.rst.txt b/doc/_sources/index.rst.txt new file mode 100644 index 0000000..afb0dbc --- /dev/null +++ b/doc/_sources/index.rst.txt @@ -0,0 +1,25 @@ +==================== +SARvey documentation +==================== + +.. toctree:: + :maxdepth: 2 + :caption: Contents: + + readme + Source code repository + installation + usage + preparation + processing + demo_datasets + modules + contributing + authors + history + +Indices and tables +================== +* :ref:`genindex` +* :ref:`modindex` +* :ref:`search` diff --git a/doc/_sources/installation.rst.txt b/doc/_sources/installation.rst.txt new file mode 100644 index 0000000..3c575b5 --- /dev/null +++ b/doc/_sources/installation.rst.txt @@ -0,0 +1,186 @@ +.. _installation: + +============ +Installation +============ + +SARvey is a cross-platform python-based software and can be installed on + * `Linux`_ + * `MacOS ARM (Apple Silicon M2)`_ + * `Windows using WSL`_ + + +Linux +----- + +On Linux, SARvey can be installed `Using Mamba (recommended)`_ or `Using Anaconda or Miniconda`_. + +Using Mamba (recommended) +^^^^^^^^^^^^^^^^^^^^^^^^^ + +Using mamba_ (latest version recommended), **SARvey** is installed as follows: + + +1. Clone the SARvey source code and install SARvey and all dependencies from the environment_sarvey.yml file: + + .. code-block:: bash + + git clone git@gitlab.projekt.uni-hannover.de:ipi-sar4infra/timeseries.git + cd timeseries + + +2. Create virtual environment for **SARvey** (optional but recommended): + + .. code-block:: bash + + pip install conda-merge + wget https://raw.githubusercontent.com/insarlab/MiaplPy/main/conda-env.yml + conda-merge conda-env.yml tests/CI_docker/context/environment_sarvey.yml > env.yml + mamba env create -n sarvey -f env.yml + rm env.yml conda-env.yml + mamba activate sarvey + pip install git+https://github.com/insarlab/MiaplPy.git + pip install . + + +This is the preferred method to install **SARvey**, as it always installs the most recent stable release and +automatically resolves all the dependencies. + + +Using Anaconda or Miniconda +^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Using conda_ (latest version recommended), **SARvey** is installed as follows: + + +1. Then clone the **SARvey** source code and install **SARvey** and all dependencies from the environment_sarvey.yml file: + + .. code-block:: bash + + git clone git@gitlab.projekt.uni-hannover.de:ipi-sar4infra/timeseries.git + cd timeseries + + +1. Create virtual environment for **SARvey** (optional but recommended): + + .. 
        pip install conda-merge
        wget https://raw.githubusercontent.com/insarlab/MiaplPy/main/conda-env.yml
        conda-merge conda-env.yml tests/CI_docker/context/environment_sarvey.yml > env.yml
        conda env create -n sarvey -f env.yml
        rm env.yml conda-env.yml
        conda activate sarvey
        pip install git+https://github.com/insarlab/MiaplPy.git
        pip install .


MacOS ARM (Apple Silicon M2)
----------------------------

This guide provides instructions for installing SARvey on MacOS ARM M2 using conda_.
If you do not have Conda, install `Conda for Mac`_.
Using conda_ (latest version recommended), SARvey is installed as follows:

0. **Create a directory for the SARvey package and navigate to it in the terminal. You can choose any other directory if you prefer.**

    .. code-block:: bash

        mkdir -p ~/software/sarvey

1. **Install MiaplPy before installing SARvey, in the same environment where you want to install SARvey.**

    .. code-block:: bash

        cd ~/software/sarvey
        git clone https://github.com/insarlab/MiaplPy.git
        cd MiaplPy

    1.1 Open `conda-env.yml` in an editor of your choice and comment out the line `isce2`. Alternatively, you can run the following command:

    .. code-block:: bash

        sed -i '' '/isce2/s/^/# /' conda-env.yml

    1.2 Install the package using Conda.

    .. code-block:: bash

        conda env update --name sarvey --file conda-env.yml
        conda activate sarvey
        python -m pip install .

2. **Install SARvey**

    2.1 Download the source code of the SARvey package.

    .. code-block:: bash

        cd ~/software/sarvey
        git clone git@gitlab.projekt.uni-hannover.de:ipi-sar4infra/timeseries.git
        cd timeseries

    2.2 Open `tests/CI_docker/context/environment_sarvey.yml` in an editor of your choice and comment out the lines `isce2` and `gcc_linux-64`. Alternatively, you can run the following commands.

    .. code-block:: bash

        sed -i '' '/isce2/s/^/# /' tests/CI_docker/context/environment_sarvey.yml
        sed -i '' '/gcc_linux-64/s/^/# /' tests/CI_docker/context/environment_sarvey.yml

    Note: As of the time of creation of this document, `isce2` for MacOS ARM64 is not available in the Conda repositories. Therefore, it is skipped, but this should not cause any problems for running SARvey. Also, `gcc_linux-64` is not required on ARM64.

    2.3 Install SARvey (the `timeseries` repository) using the same environment that you used to install MiaplPy.

    .. code-block:: bash

        conda env update --name sarvey -f tests/CI_docker/context/environment_sarvey.yml
        conda activate sarvey
        pip install .

3. **Set up the PATH for MiaplPy and SARvey.**

    3.1 Run the following commands to set up the path in `~/source_sarvey.sh`.

    .. code-block:: bash

        echo 'export miaplpy_path=~/software/sarvey/MiaplPy/src/' > ~/source_sarvey.sh
        echo 'export PYTHONPATH=${PYTHONPATH:+$PYTHONPATH:}$miaplpy_path' >> ~/source_sarvey.sh
        echo 'export timeseries_path=~/software/sarvey/timeseries' >> ~/source_sarvey.sh
        echo 'export PATH=${PATH}:$timeseries_path:$timeseries_path/sarvey' >> ~/source_sarvey.sh
        echo 'export PYTHONPATH=${PYTHONPATH:+$PYTHONPATH:}:$timeseries_path' >> ~/source_sarvey.sh

4. **Test the installation**

    4.1 Open a new terminal and activate the software.

    .. code-block:: bash

        conda activate sarvey
        source ~/source_sarvey.sh

    4.2 Run the following command. If the help message of SARvey is shown, the installation was done correctly.

    .. code-block:: bash
        sarvey -h


Windows using WSL
-----------------

On Windows, SARvey is tested on the Windows Subsystem for Linux (WSL_) version 2. Please follow the `Linux`_ installation.


.. note::

    SARvey has been tested with Python 3.6+, i.e., it should be fully compatible with all Python versions from 3.6 onwards.


.. _pip: https://pip.pypa.io
.. _Python installation guide: http://docs.python-guide.org/en/latest/starting/installation/
.. _conda: https://conda.io/docs
.. _mamba: https://github.com/mamba-org/mamba
.. _Conda for Mac: https://docs.conda.io/projects/conda/en/latest/user-guide/install/macos.html
.. _WSL: https://learn.microsoft.com/en-us/windows/wsl/

diff --git a/doc/_sources/modules.rst.txt b/doc/_sources/modules.rst.txt
new file mode 100644
index 0000000..7257096
--- /dev/null
+++ b/doc/_sources/modules.rst.txt
@@ -0,0 +1,7 @@
Python API reference
====================

.. toctree::
    :maxdepth: 4

    sarvey
diff --git a/doc/_sources/preparation.rst.txt b/doc/_sources/preparation.rst.txt
new file mode 100644
index 0000000..141a38d
--- /dev/null
+++ b/doc/_sources/preparation.rst.txt
@@ -0,0 +1,188 @@
.. _preparation:

===========
Preparation
===========

SARvey requires a coregistered stack of SLC images and the related geometry information in the MiaplPy_ data format.
The coregistered stack of SLC can be created using an InSAR processor, such as ISCE, GAMMA, or SNAP.
Currently, MiaplPy only supports ISCE_. Support for GAMMA and SNAP_ is planned for the future.
After creating the coregistered stack of SLC, run the “load_data” step from MiaplPy to create the “inputs” directory, which contains “slcStack.h5” and “geometryRadar.h5”.


Preprocessing
-------------

ISCE
^^^^
... ISCE brief processing to be added

The ISCE products should have the following directory structure, which is used later in the `Loading Data into MiaplPy`_ step.

::

    ISCE_processed_data
    ├─ reference
    │  ├─ IW*.xml
    │  └─ ...
    ├─ merged
    │  ├─ SLC
    │  │  ├─ YYYYMMDD
    │  │  │  ├─ YYYYMMDD.slc.full
    │  │  │  └─ ...
    │  │  ├─ YYYYMMDD
    │  │  ├─ YYYYMMDD
    │  ├─ geom_reference
    │  │  ├─ hgt.rdr.full
    │  │  ├─ lat.rdr.full
    │  │  ├─ lon.rdr.full
    │  │  ├─ los.rdr.full
    │  │  └─ ...
    └─ baselines
       └─ YYYYMMDD_YYYYMMDD
          └─ YYYYMMDD_YYYYMMDD.txt


GAMMA
^^^^^
Support is in progress.


SNAP
^^^^
Support is planned for the future.


Loading Data to MiaplPy Format
------------------------------

**Loading Data into MiaplPy**
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

Run the `load_data` step of MiaplPy to convert the preprocessed stack of SLC to `slcStack.h5` and `geometryRadar.h5`.
Refer to the MiaplPy_ instructions on how to prepare the stack of coregistered SLC and modify the template file.

.. code-block:: bash

    miaplpyApp miaplpy_template_file.txt --dostep load_data

The output includes the following directory structure, which is later used as input in SARvey processing:

::

    inputs
    ├── slcStack.h5
    └── geometryRadar.h5


**Check the data**
^^^^^^^^^^^^^^^^^^

Use `info.py` from MintPy_ to check the files' information.

.. code-block:: bash

    info.py inputs/slcStack.h5
    info.py inputs/geometryRadar.h5


Use `view.py` from MintPy_ to visualize the files and make sure they look fine.

.. code-block:: bash

    view.py inputs/slcStack.h5
    view.py inputs/geometryRadar.h5


Optional Steps
--------------


**Phase Linking**
^^^^^^^^^^^^^^^^^


This step is optional. You can run it if you wish to perform distributed scatterer (DS) analysis.
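If you run this step and later want SARvey to use its results, the phase_linking section of the SARvey config file has to be adapted accordingly. A minimal sketch (the path value is a placeholder; see the :ref:`processing` section for the full parameter description):

.. code-block:: json

    {
        "phase_linking": {
            "use_phase_linking_results": true,
            "inverted_path": "path/to/miaplpy/inverted"
        }
    }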
**Caution:** This step is computationally heavy and might be time-consuming for large datasets.

.. code-block:: bash

    miaplpyApp miaplpy_template_file.txt --dostep phase_linking
    miaplpyApp miaplpy_template_file.txt --dostep concatenate_patches

The output includes the following directory structure, which is later used as additional input in SARvey processing if the config file is modified to include DS analysis.

::

    MiaplPy working directory
    ├─ inverted
    │  ├── phase_series.h5
    │  ├── ...
    ├── maskPS.h5
    └── ...


Subset Data
^^^^^^^^^^^

Data loaded into MiaplPy can be subset using MintPy_'s subset function.
This is particularly useful if you have a dataset in MiaplPy format and want to crop a small area of it.
Both slcStack.h5 and geometryRadar.h5 should be subset with the same range and azimuth coordinate ranges.
The Phase Linking results (phase_series.h5 and maskPS.h5) should also be subset if they have been created.
Please refer to MintPy_ for more instructions on subsetting.
Run `subset.py -h` for information about the parameters.
The following example crops the data between 500 and 800 in range and 100 and 1000 in azimuth coordinates.


.. code-block:: bash

    subset.py -h

    subset.py inputs/slcStack.h5 -x 500 800 -y 100 1000 -o inputs_crop/slcStack.h5
    subset.py inputs/geometryRadar.h5 -x 500 800 -y 100 1000 -o inputs_crop/geometryRadar.h5

    subset.py inverted/phase_series.h5 -x 500 800 -y 100 1000 -o inverted_crop/phase_series.h5
    subset.py maskPS.h5 -x 500 800 -y 100 1000 -o inverted_crop/maskPS.h5


`Check the data`_ after subsetting it and make sure all products look correct.


Create Manual Mask
^^^^^^^^^^^^^^^^^^
A mask can be created manually using MintPy's `generate_mask.py` tool.
This is particularly useful if you want to limit the MTInSAR processing to certain areas.
Run `generate_mask.py -h` for information about the parameters.
The following example allows you to draw a polygon on top of the DEM to create a mask.

.. code-block:: bash

    generate_mask.py -h

    generate_mask.py inputs/geometryRadar.h5 height -o mask.h5 --roipoly  # draw polygon on top of the DEM

Alternatively, a mask can be drawn on top of the temporal coherence map, in case step 0 (preparation) of `sarvey` has already been executed.

.. code-block:: bash

    generate_mask.py results_dir/temporal_coherence.h5 -o mask.h5 --roipoly  # draw polygon on top of the temporal coherence image

Follow the instructions in the terminal:

    Select points in the figure by enclosing them within a polygon.
    Press the 'esc' key to start a new polygon.
    Try holding the left mouse button to move a single vertex.
    After completing the selection, close the figure/window to continue.


.. _MiaplPy: https://github.com/insarlab/MiaplPy
.. _ISCE: https://github.com/isce-framework/isce2
.. _SNAP: https://step.esa.int/main/toolboxes/snap
.. _MintPy: https://github.com/insarlab/MintPy

diff --git a/doc/_sources/processing.rst.txt b/doc/_sources/processing.rst.txt
new file mode 100644
index 0000000..128a826
--- /dev/null
+++ b/doc/_sources/processing.rst.txt
@@ -0,0 +1,344 @@
.. _processing:

=======================================
Multitemporal InSAR processing workflow
=======================================

The `sarvey` command line interface executes the multitemporal InSAR processing workflow.
The workflow is described in the paper

    Piter, A., Haghshenas Haghighi, M., Motagh, M. (2024). An in-depth study on Sentinel-1 InSAR for transport infrastructure monitoring.
    PFG - Journal of Photogrammetry, Remote Sensing and Geoinformation Science. (paper currently under review)

All processing steps are described in detail in the following sections.
Two processing strategies are provided, with either one- or two-step unwrapping.
The workflow should be chosen based on the characteristics of the displacement (spatial extent, magnitude, temporal behaviour).
The parameters of each step are handled via the `configuration file`_; the relevant parameter names are given within the description of each step.


Configuration file
------------------
The configuration file is a JSON file containing all the parameters required to run `sarvey`.
This file can be generated using the `sarvey` command with the **"-g"** flag, where you can specify your desired filename.


.. code-block:: bash

    sarvey -f config.json 0 0 -g

Note: The above command only generates a configuration file. Although step 0 is specified, it will not be executed.

The configuration file has various sections, as detailed below:


* General

    This section includes top-level parameters such as the number of cores and the unwrapping method.
    It specifies the paths to the input and output data. The paths can be either absolute or relative.
    Further, it defines the logging level displayed in the command line and the directory path where log files will be stored.


* phase_linking

    This section specifies the Phase Linking parameters. By default, `"use_phase_linking_results": false`.
    If you wish to perform DS analysis, change it to `true`. Note: If `"use_phase_linking_results": true`, you must complete the corresponding step of MiaplPy as described in `preparation `_. In the configuration file, set `inverted_path` to the path of the inverted directory of the MiaplPy data.


* preparation

    This section includes network parameters, such as the start and end dates, the network type, and `filter_window_size`, which specifies the window size used to estimate the temporal coherence for each pixel.


* consistency_check

    This section contains parameters related to the first-order points.

* unwrapping

    This section specifies parameters related to the unwrapping process.

* filtering

    This section defines the parameters for atmospheric estimation and filtering. Atmospheric filtering is enabled by default. To skip it, set `"apply_aps_filtering": false`.


* densification

    This section includes the settings for the second-order points.


Processing steps for two-step unwrapping workflow
-------------------------------------------------

Step 0: Preparation
^^^^^^^^^^^^^^^^^^^

- Loading the resampled SLC data:
    The resampled SLC (Single Look Complex) data is read from the inputs/slcStack.h5 file.
    This data is complex-valued and contains both amplitude and phase information.
    The data is subsetted to the specified time span (via **preparation:start_date** and **preparation:end_date** in the config file).
    How to prepare the data and make a spatial subset of it is described in `data preparation in MiaplPy `_.

- Designing the interferogram network:
    From the stack of SLC images, the interferogram network is designed.
    The network of interferograms is designed based on the temporal and perpendicular baselines of the SLC images.
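    As a rough illustration of the pair selection idea (a sketch only, not SARvey's actual implementation; dates and threshold are made up), a small temporal baseline network could be formed like this:

    .. code-block:: python

        # Sketch: form interferogram pairs with a temporal-baseline threshold.
        from datetime import date
        from itertools import combinations

        acq_dates = [date(2021, 1, 4), date(2021, 1, 16), date(2021, 1, 28), date(2021, 2, 9)]
        max_btemp_days = 30  # maximum temporal baseline in days (illustrative)

        ifg_pairs = [
            (ref, sec)
            for ref, sec in combinations(range(len(acq_dates)), 2)
            if (acq_dates[sec] - acq_dates[ref]).days <= max_btemp_days
        ]
        print(ifg_pairs)  # index pairs (reference, secondary) forming the network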
    Different networks can be created (via **preparation:ifg_network_type** in the config file) and should be chosen based on the characteristics of the displacement (spatial extent, magnitude, temporal behaviour).
    Currently, five types of networks are supported:

    a) small baseline network ('sb') (Berardino et al. 2002),
    b) small temporal baseline network ('stb') (only consecutive images are used to form interferograms)
    c) small temporal baselines + yearly interferograms ('stb_yearly')
    d) Delaunay network ('delaunay')
    e) star network ('star', single-reference network) (Ferretti et al. 2001)


- Generating a stack of interferograms:
    The stack of interferograms is generated based on the specified interferogram network.

- Estimating the temporal coherence:
    The phase noise of each pixel is approximated by the estimation of the temporal phase coherence (Zhao and Mallorqui 2019).
    For this, a low-pass filter with a certain window size is used (**preparation:filter_window_size**).
    The temporal coherence is used to select the first- and second-order points in the later steps (**consistency_check:coherence_p1** and **filtering:coherence_p2**).

- Output of this step
    - background_map.h5
    - ifg_stack.h5
    - coordinates_utm.h5
    - ifg_network.h5
    - temporal_coherence.h5


Step 1: Consistency Check
^^^^^^^^^^^^^^^^^^^^^^^^^


- Selecting candidates for first-order points:
    Candidates for the first-order points are selected based on the temporal coherence threshold (**consistency_check:coherence_p1**).
    However, not all points with a coherence above the threshold are selected, but only those which have the highest coherence within a grid cell of size **consistency_check:grid_size** (in [m]).
    A mask file can be specified (**consistency_check:mask_p1_file**) to limit the first-order points to the given area of interest.

- Creating a spatial network:
    After selecting the candidates for first-order points, the method creates a spatial network to connect the first-order points.
    For each arc in the network, the double difference phase time series is calculated.
    A Delaunay network ensures the connectivity of the spatial network, and k-nearest neighbours (**consistency_check:num_nearest_neighbours**) can be used to increase the redundancy in the network.
    Arcs with a distance above a threshold (**consistency_check:max_arc_length**) are removed from the network to reduce the impact of atmospheric effects.

- Temporal unwrapping:
    All arcs in the spatial network are temporally unwrapped based on a phase model consisting of the DEM error difference and velocity difference between the two points of the arc.
    The temporal coherence derived from the model fit is maximized by searching within a search space of given bounds (**consistency_check:velocity_bound** and **consistency_check:dem_error_bound**).
    Within the bounds, the search space is discretized (**consistency_check:num_optimization_samples**).
    The final parameters for each arc are derived from a gradient descent refinement of the discrete search space result.

- Performing a consistency check on the data:
    During the atmospheric filtering in step 3, only high-quality first-order points are supposed to be used.
    Therefore, outliers among the candidates are removed with a consistency check.
    The consistency check is based on the estimated temporal coherence of the temporal unwrapping of each arc.
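    As a hedged sketch of this measure (the standard ensemble coherence; SARvey's implementation may differ in details), the temporal coherence of an arc can be written as

    .. math::

        \gamma = \frac{1}{N} \left| \sum_{i=1}^{N} \exp\left( j \left( \phi_i - \hat{\phi}_i \right) \right) \right| ,

    where :math:`\phi_i` is the double difference phase of the arc in interferogram :math:`i`, :math:`\hat{\phi}_i` is the phase predicted by the DEM error/velocity model, and :math:`N` is the number of interferograms.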
    A point is assumed to be an outlier if it is connected by many arcs having a low temporal coherence from temporal unwrapping.
    Arcs with a temporal coherence below a threshold are removed (**consistency_check:arc_unwrapping_coherence**).
    Similarly, points whose mean coherence over all connected arcs falls below the threshold are removed (specified by the same parameter **consistency_check:arc_unwrapping_coherence**).
    Moreover, points which are connected by fewer arcs than a threshold (**consistency_check:min_num_arc**) are removed.
    Afterwards, the consistency within the spatial network is checked.
    For this purpose, the parameters (DEM error difference and velocity difference) of all arcs are integrated in the spatial network relative to an arbitrary reference point using least squares.
    The residuals of the integration are used to identify outliers.

- Output of this step
    - point_network.h5
    - point_network_parameter.h5
    - p1_ifg_wr.h5

Step 2: Unwrapping
^^^^^^^^^^^^^^^^^^

Two unwrapping options (**general:apply_temporal_unwrapping**, also applies to step 4) are implemented and should be chosen based on the characteristics of the displacement (spatial extent, magnitude, temporal behaviour).

- Output of this step
    - p1_ifg_unw.h5
    - p1_ifg_ts.h5

Option 1) Unwrapping in time and space
""""""""""""""""""""""""""""""""""""""

- Integrating parameters from arcs to points:
    The temporal unwrapping results of the spatial network from the consistency check in step 1 are used in this step.
    The parameters of the arcs are integrated relative to an arbitrary reference point from the arcs to the points using least squares.

- Removing phase contributions (mean velocity and DEM error):
    After integrating the parameters, the phase contributions are removed from the wrapped interferometric phase of the first-order points.

- Spatial unwrapping of the residuals:
    The residuals in each interferogram are unwrapped in space using a sparse point network unwrapping method (**general:spatial_unwrapping_method**) (Bioucas-Dias and Valadao 2007, Boykov and Kolmogorov 2004).
    The spatial neighbourhood for unwrapping is defined by the arcs of the spatial network.
    There are two options (**unwrapping:use_arcs_from_temporal_unwrapping**).
    Either the spatial network from the consistency check (step 1) can be used for unwrapping, i.e. the spatial network after removing arcs with a low temporal coherence from temporal unwrapping.
    Or, the spatial network is re-created with a Delaunay network.

- Restore phase contributions to the spatially unwrapped residual phase:
    Finally, the phase contributions are added back to the spatially unwrapped residual phase of each point.

- Adjust reference:
    All restored unwrapped interferograms are referenced to the peak of the velocity histogram derived from all points.

- Inverting the interferogram network:
    The interferogram network is inverted for each point to retrieve the displacement time series relative to the first acquisition.

Option 2) Unwrapping in space
"""""""""""""""""""""""""""""

- Spatial unwrapping:
    The interferograms are unwrapped independently in space with a sparse point network unwrapping method (**general:spatial_unwrapping_method**) (Bioucas-Dias and Valadao 2007, Boykov and Kolmogorov 2004).
    The spatial neighbourhood for unwrapping is defined by the arcs of the spatial network.
    There are two options (**unwrapping:use_arcs_from_temporal_unwrapping**).
    Either the spatial network from the consistency check (step 1) can be used for unwrapping, i.e. the spatial network after removing arcs with a low temporal coherence from temporal unwrapping.
    Or, the spatial network is re-created with a Delaunay network.

- Adjust reference:
    All unwrapped interferograms are referenced to the peak of the velocity histogram derived from all points.

- Inverting the interferogram network:
    The interferogram network is inverted for each point to retrieve the displacement time series relative to the first acquisition.

Step 3: Filtering
^^^^^^^^^^^^^^^^^

In this step, the atmospheric phase screen (APS) is estimated from the displacement time series of the first-order points.
Afterwards, the APS is interpolated to the location of the second-order points.
The filtering can be skipped by setting **filtering:apply_aps_filtering** to False.
However, step 3 still has to be executed, as the second-order points are selected during this step.

- Selecting pixels with no or linear displacement:
    Among the first-order points, the points with no or merely linear displacement are selected (**filtering:use_moving_points**).
    It is assumed that, for these points, the phase consists only of atmospheric effects and noise after removing the mean velocity and DEM error.
    Points with a non-linear displacement behaviour are removed by a threshold on the temporal autocorrelation of the displacement time series (**filtering:max_temporal_autocorrelation**) (Crosetto et al. 2018).
    A regular grid (**filtering:grid_size** in [m]) is applied to select the first-order points with the lowest temporal autocorrelation to reduce the computational complexity during filtering.

- Selecting second-order points:
    Second-order points are selected based on a temporal coherence threshold (**filtering:coherence_p2**) on the temporal phase coherence computed during step 0.
    A mask file can be specified (**filtering:mask_p2_file**) to limit the second-order points to the given area of interest.
    Second-order points can also be selected based on the results of phase-linking (set **phase_linking:use_phase_linking_results** to True) implemented in MiaplPy (Mirzaee et al. 2023).
    More information on MiaplPy and phase-linking can be found `here `_.
    The number of siblings (**phase_linking:num_siblings**) used during phase-linking within the MiaplPy processing needs to be specified to identify the distributed scatterers (DS) among the pixels selected by MiaplPy.
    A mask file can be specified (**phase_linking:mask_phase_linking_file**) to limit the phase-linking to the given area of interest.
    MiaplPy also provides a selection of persistent scatterers (PS), which can be included as second-order points (set **phase_linking:use_ps** to True); in this case, also specify the path to maskPS.h5 (**phase_linking:mask_ps_file**), which is also an output of MiaplPy.
    In case the second-order points are selected among the results from MiaplPy, the filtered interferometric phase (MiaplPy result) is used for the respective points.
    The DS pixels from MiaplPy and the pixels selected with the temporal phase coherence from step 0 are both selected with the same coherence threshold (**filtering:coherence_p2**).

- Estimating the atmospheric phase screen (APS):
    The estimation of the APS takes place in the time domain and not in the interferogram domain to reduce the computational time.
    The phase contributions are removed from the first-order points which were selected for atmospheric filtering.
    Their residual time series contains atmospheric phase contributions and noise.
    As the APS is assumed to be spatially correlated, the residuals of all points are spatially filtered (**filtering:interpolation_method**) independently for each time step.
    After filtering, the estimated APS is interpolated to the location of the second-order points.

- Output of this step
    - p1_ts_filt.h5
    - p1_aps.h5
    - p2_cohXX_aps.h5
    - p2_cohXX_ifg_wr.h5

The placeholder XX depends on the threshold for the temporal coherence used for selecting the second-order points.
For example, a threshold of 0.8 would result in p2_coh80_aps.h5 and p2_coh80_ifg_wr.h5.

Step 4: Densification
^^^^^^^^^^^^^^^^^^^^^

Two unwrapping options (**general:apply_temporal_unwrapping**, also applies to step 2) are implemented and should be chosen based on the characteristics of the displacement (spatial extent, magnitude, temporal behaviour).

- Output of this step
    - p2_cohXX_ifg_unw.h5
    - p2_cohXX_ts.h5

The placeholder XX depends on the threshold for the temporal coherence used for selecting the second-order points during filtering in step 3.
For example, a threshold of 0.8 would result in p2_coh80_ifg_unw.h5 and p2_coh80_ts.h5.

Option 1: Unwrapping in time and space
""""""""""""""""""""""""""""""""""""""

- Removing APS from interferograms:
    The wrapped interferograms are corrected for the interpolated APS for both the first- and second-order points.

- Densify network:
    The parameters (DEM error and velocity) of each second-order point are estimated independently from the other second-order points.
    The parameters are estimated by temporal unwrapping with respect to the closest first-order points (**densification:num_connections_to_p1**, **densification:max_distance_to_p1**) with a phase model consisting of DEM error and velocity (**densification:velocity_bound**, **densification:dem_error_bound**, and **densification:num_optimization_samples**).
    The densification is similar to the approach described by Van Leijen (2014), but jointly maximizes the temporal coherence to find the parameters that fit best to all arcs connecting the second-order point to the first-order points.

- Remove outliers:
    Second-order points which could not be temporally unwrapped with respect to the closest first-order points are removed.
    For this purpose, a threshold on the joint temporal coherence, considering the residuals of all arcs connecting the respective second-order point to the closest first-order points, is applied (**densification:arc_unwrapping_coherence**).
    First-order points receive a joint temporal coherence value of 1.0 to avoid their removal from the final set of points.

- Removing phase contributions (mean velocity and DEM error):
    After estimating the parameters of the second-order points, the phase contributions are removed from the wrapped interferometric phase of the first- and second-order points.

- Spatial unwrapping of the residuals:
    The residuals in each interferogram are unwrapped in space using a sparse point network unwrapping method (**general:spatial_unwrapping_method**) (Bioucas-Dias and Valadao 2007, Boykov and Kolmogorov 2004).
    The spatial neighbourhood for unwrapping is defined by a spatial network including both first- and second-order points.
    It is created with a Delaunay network.

- Restore phase contributions to the spatially unwrapped residual phase:
    Finally, the phase contributions are added back to the spatially unwrapped residual phase of each point.
- Adjust reference:
    All restored unwrapped interferograms are referenced to the peak of the velocity histogram derived from all points.

- Inverting the interferogram network:
    The interferogram network is inverted for each point to retrieve the displacement time series relative to the first acquisition.

Option 2: Unwrapping in space
"""""""""""""""""""""""""""""

- Removing APS from interferograms:
    The wrapped interferograms are corrected for the interpolated APS for both the first- and second-order points.

Afterwards, the processing is the same as in the spatial unwrapping during step 2.


Handling big datasets
---------------------
The processing of large datasets can be computationally expensive and time-consuming.
In particular, the estimation of the temporal phase coherence in step 0 is a bottleneck, also in terms of memory consumption.
Therefore, it is recommended to set **general:num_cores** for parallel processing.
By setting **general:num_patches**, the data is split into spatial patches which are processed sequentially to fit into memory.


Processing steps for one-step unwrapping workflow
-------------------------------------------------
The one-step unwrapping workflow is an alternative to the two-step unwrapping workflow.
The steps are similar to the workflow described above, but it is only executed up to step 2.
This workflow is meant for processing small areas where the atmospheric filtering is not required, as the reference point will be selected close to the area of interest.
The idea behind the one-step unwrapping workflow is to apply the consistency check based on the temporal unwrapping (step 1) to all pixels, without differentiating between first- and second-order points.
This can yield better unwrapping results compared to the two-step unwrapping in case the DEM error and/or velocity vary highly in space.
For this purpose, the pixels are selected without gridding (set **preparation:grid_size** to zero), i.e. all pixels above the specified coherence threshold are selected as final points.
Since the densification step is not performed, you should reduce the coherence threshold (**consistency_check:coherence_p1**) to select the desired number of points.


Literature
----------

* Piter, A., Haghshenas Haghighi, M., Motagh, M. (2024). An in-depth study on Sentinel-1 InSAR for transport infrastructure monitoring. PFG - Journal of Photogrammetry, Remote Sensing and Geoinformation Science. (paper currently under review).

* Zhao F, Mallorqui JJ (2019). A Temporal Phase Coherence Estimation Algorithm and Its Application on DInSAR Pixel Selection. IEEE Transactions on Geoscience and Remote Sensing 57(11):8350–8361, DOI 10.1109/TGRS.2019.2920536

* Ferretti A, Prati C, Rocca F (2001). Permanent scatterers in SAR interferometry. IEEE Transactions on Geoscience and Remote Sensing 39(1):8–20

* Berardino P, Fornaro G, Lanari R, Sansosti E (2002). A new algorithm for surface deformation monitoring based on small baseline differential SAR interferograms. IEEE Transactions on Geoscience and Remote Sensing 40(11):2375–2383

* Bioucas-Dias JM, Valadao G (2007). Phase Unwrapping via Graph Cuts. IEEE Transactions on Image Processing 16(3):698–709, DOI 10.1109/TIP.2006.888351

* Mirzaee S, Amelung F, Fattahi H (2023). Non-linear phase linking using joined distributed and persistent scatterers.
  Computers & Geosciences 171:105291, DOI 10.1016/j.cageo.2022.105291

* Crosetto M, Devanthéry N, Monserrat O, Barra A, Cuevas-González M, Mróz M, Botey-Bassols J, Vázquez-Suné E, Crippa B (2018). A persistent scatterer interferometry procedure based on stable areas to filter the atmospheric component. Remote Sensing 10(11):1780

* Van Leijen FJ (2014). Persistent scatterer interferometry based on geodetic estimation theory. PhD thesis

* Boykov Y, Kolmogorov V (2004). An experimental comparison of min-cut/max-flow algorithms for energy minimization in vision. IEEE Transactions on Pattern Analysis and Machine Intelligence 26(9):1124–1137, DOI 10.1109/TPAMI.2004.60
diff --git a/doc/_sources/readme.rst.txt b/doc/_sources/readme.rst.txt
new file mode 100644
index 0000000..72a3355
--- /dev/null
+++ b/doc/_sources/readme.rst.txt
@@ -0,0 +1 @@
+.. include:: ../README.rst
diff --git a/doc/_sources/sarvey.rst.txt b/doc/_sources/sarvey.rst.txt
new file mode 100644
index 0000000..2c45c78
--- /dev/null
+++ b/doc/_sources/sarvey.rst.txt
@@ -0,0 +1,203 @@
sarvey package
==============

Submodules
----------

sarvey.coherence module
-----------------------

.. automodule:: sarvey.coherence
    :members:
    :undoc-members:
    :show-inheritance:
    :private-members:

sarvey.config module
--------------------

.. automodule:: sarvey.config
    :members:
    :undoc-members:
    :show-inheritance:
    :private-members:

sarvey.console module
---------------------

.. automodule:: sarvey.console
    :members:
    :undoc-members:
    :show-inheritance:
    :private-members:

sarvey.densification module
---------------------------

.. automodule:: sarvey.densification
    :members:
    :undoc-members:
    :show-inheritance:
    :private-members:

sarvey.filtering module
-----------------------

.. automodule:: sarvey.filtering
    :members:
    :undoc-members:
    :show-inheritance:
    :private-members:

sarvey.geolocation module
-------------------------

.. automodule:: sarvey.geolocation
    :members:
    :undoc-members:
    :show-inheritance:
    :private-members:

sarvey.ifg\_network module
--------------------------

.. automodule:: sarvey.ifg_network
    :members:
    :undoc-members:
    :show-inheritance:
    :private-members:

sarvey.objects module
---------------------

.. automodule:: sarvey.objects
    :members:
    :undoc-members:
    :show-inheritance:
    :private-members:

sarvey.osm\_utils module
------------------------

.. automodule:: sarvey.osm_utils
    :members:
    :undoc-members:
    :show-inheritance:
    :private-members:

sarvey.preparation module
-------------------------

.. automodule:: sarvey.preparation
    :members:
    :undoc-members:
    :show-inheritance:
    :private-members:

sarvey.processing module
------------------------

.. automodule:: sarvey.processing
    :members:
    :undoc-members:
    :show-inheritance:
    :private-members:

sarvey.sarvey\_export module
----------------------------

.. automodule:: sarvey.sarvey_export
    :members:
    :undoc-members:
    :show-inheritance:
    :private-members:

sarvey.sarvey\_mask module
--------------------------

.. automodule:: sarvey.sarvey_mask
    :members:
    :undoc-members:
    :show-inheritance:
    :private-members:

sarvey.sarvey\_mti module
-------------------------

.. automodule:: sarvey.sarvey_mti
    :members:
    :undoc-members:
    :show-inheritance:
    :private-members:

sarvey.sarvey\_osm module
-------------------------

.. automodule:: sarvey.sarvey_osm
    :members:
    :undoc-members:
    :show-inheritance:
    :private-members:

sarvey.sarvey\_plot module
--------------------------

.. automodule:: sarvey.sarvey_plot
    :members:
    :undoc-members:
    :show-inheritance:
    :private-members:

sarvey.triangulation module
---------------------------

.. automodule:: sarvey.triangulation
    :members:
    :undoc-members:
    :show-inheritance:
    :private-members:

sarvey.unwrapping module
------------------------

.. automodule:: sarvey.unwrapping
    :members:
    :undoc-members:
    :show-inheritance:
    :private-members:

sarvey.utils module
-------------------

.. automodule:: sarvey.utils
    :members:
    :undoc-members:
    :show-inheritance:
    :private-members:

sarvey.version module
---------------------

.. automodule:: sarvey.version
    :members:
    :undoc-members:
    :show-inheritance:
    :private-members:

sarvey.viewer module
--------------------

.. automodule:: sarvey.viewer
    :members:
    :undoc-members:
    :show-inheritance:
    :private-members:

Module contents
---------------

.. automodule:: sarvey
    :members:
    :undoc-members:
    :show-inheritance:
    :private-members:
diff --git a/doc/_sources/usage.rst.txt b/doc/_sources/usage.rst.txt
new file mode 100644
index 0000000..af5ec35
--- /dev/null
+++ b/doc/_sources/usage.rst.txt
@@ -0,0 +1,75 @@
.. _usage:

=====
Usage
=====

.. image:: https://seafile.projekt.uni-hannover.de/f/39209355cabc4607bf0a/?dl=1
    :alt: SARvey workflow
    :width: 600px
    :align: center

Processing workflow for using the SARvey software to derive displacement time series. The minimal required processing steps and datasets are depicted in grey. All other steps are optional.


Command-line tools
------------------

The following command-line tools are available and can be run directly in the terminal.

`sarvey`
    A tool to derive displacements from the SLC stack with Multi-Temporal InSAR (MTI).
    A detailed description of the processing steps is given `here `_.

`sarvey_plot`
    A tool to plot the results from `sarvey` processing.

`sarvey_export`
    A tool to export the results from `sarvey` processing to a shapefile or geopackage.

`sarvey_mask`
    A tool to create a mask from a shapefile containing the area of interest, which can be used in `sarvey` processing.
    The tool reads from an input file, which is a shapefile or geopackage containing the geographic data.
    It supports both 'LineString' and 'Polygon' geometries.
    The tool first gets the spatial extent of the geographic data and searches for the location of the polygon/line nodes in the image coordinates of the radar image.
    A buffer around the polygon/line is created, specified by a width in pixels.
    The buffer is then used to create the mask.

    Here is an example of how to use the `sarvey_mask` tool:

    .. code-block:: bash

        sarvey_mask --input_file my_shapefile.shp --geom_file ./inputs/geometryRadar.h5 --out_file_name my_mask.h5 --width 5


`sarvey_osm`
    A tool to download OpenStreetMap data for the area of interest, specified by the spatial extent of the SLC stack.
    The tool first gets the spatial extent of the SAR image from the geometry file.
    It then uses this spatial extent to download the OpenStreetMap data for the corresponding area.
    The download of railway tracks, highways and bridges is supported.
    After downloading the data, the tool saves it to a shapefile.
+ + After downloading the OpenStreetMap data with `sarvey_osm`, you can use the `sarvey_mask` tool to create a mask from the shapefile. + + Here is an example of how to use the `sarvey_osm` tool: + + .. code-block:: bash + + sarvey_osm --geom ./geometryRadar.h5 --railway # download railway + sarvey_osm --geom ./geometryRadar.h5 --highway # download highway + sarvey_osm --geom ./geometryRadar.h5 --railway --bridge # download railway bridge + sarvey_osm --geom ./geometryRadar.h5 --railway -o mask_railway.shp # specify output path + + +Usage of the Python API +----------------------- + +To use SARvey in a project: + + .. code-block:: python + + import sarvey + diff --git a/doc/_static/basic.css b/doc/_static/basic.css new file mode 100644 index 0000000..f316efc --- /dev/null +++ b/doc/_static/basic.css @@ -0,0 +1,925 @@ +/* + * basic.css + * ~~~~~~~~~ + * + * Sphinx stylesheet -- basic theme. + * + * :copyright: Copyright 2007-2024 by the Sphinx team, see AUTHORS. + * :license: BSD, see LICENSE for details. + * + */ + +/* -- main layout ----------------------------------------------------------- */ + +div.clearer { + clear: both; +} + +div.section::after { + display: block; + content: ''; + clear: left; +} + +/* -- relbar ---------------------------------------------------------------- */ + +div.related { + width: 100%; + font-size: 90%; +} + +div.related h3 { + display: none; +} + +div.related ul { + margin: 0; + padding: 0 0 0 10px; + list-style: none; +} + +div.related li { + display: inline; +} + +div.related li.right { + float: right; + margin-right: 5px; +} + +/* -- sidebar --------------------------------------------------------------- */ + +div.sphinxsidebarwrapper { + padding: 10px 5px 0 10px; +} + +div.sphinxsidebar { + float: left; + width: 230px; + margin-left: -100%; + font-size: 90%; + word-wrap: break-word; + overflow-wrap : break-word; +} + +div.sphinxsidebar ul { + list-style: none; +} + +div.sphinxsidebar ul ul, +div.sphinxsidebar ul.want-points { + margin-left: 20px; + list-style: square; +} + +div.sphinxsidebar ul ul { + margin-top: 0; + margin-bottom: 0; +} + +div.sphinxsidebar form { + margin-top: 10px; +} + +div.sphinxsidebar input { + border: 1px solid #98dbcc; + font-family: sans-serif; + font-size: 1em; +} + +div.sphinxsidebar #searchbox form.search { + overflow: hidden; +} + +div.sphinxsidebar #searchbox input[type="text"] { + float: left; + width: 80%; + padding: 0.25em; + box-sizing: border-box; +} + +div.sphinxsidebar #searchbox input[type="submit"] { + float: left; + width: 20%; + border-left: none; + padding: 0.25em; + box-sizing: border-box; +} + + +img { + border: 0; + max-width: 100%; +} + +/* -- search page ----------------------------------------------------------- */ + +ul.search { + margin: 10px 0 0 20px; + padding: 0; +} + +ul.search li { + padding: 5px 0 5px 20px; + background-image: url(file.png); + background-repeat: no-repeat; + background-position: 0 7px; +} + +ul.search li a { + font-weight: bold; +} + +ul.search li p.context { + color: #888; + margin: 2px 0 0 30px; + text-align: left; +} + +ul.keywordmatches li.goodmatch a { + font-weight: bold; +} + +/* -- index page ------------------------------------------------------------ */ + +table.contentstable { + width: 90%; + margin-left: auto; + margin-right: auto; +} + +table.contentstable p.biglink { + line-height: 150%; +} + +a.biglink { + font-size: 1.3em; +} + +span.linkdescr { + font-style: italic; + padding-top: 5px; + font-size: 90%; +} + +/* -- general index 
--------------------------------------------------------- */ + +table.indextable { + width: 100%; +} + +table.indextable td { + text-align: left; + vertical-align: top; +} + +table.indextable ul { + margin-top: 0; + margin-bottom: 0; + list-style-type: none; +} + +table.indextable > tbody > tr > td > ul { + padding-left: 0em; +} + +table.indextable tr.pcap { + height: 10px; +} + +table.indextable tr.cap { + margin-top: 10px; + background-color: #f2f2f2; +} + +img.toggler { + margin-right: 3px; + margin-top: 3px; + cursor: pointer; +} + +div.modindex-jumpbox { + border-top: 1px solid #ddd; + border-bottom: 1px solid #ddd; + margin: 1em 0 1em 0; + padding: 0.4em; +} + +div.genindex-jumpbox { + border-top: 1px solid #ddd; + border-bottom: 1px solid #ddd; + margin: 1em 0 1em 0; + padding: 0.4em; +} + +/* -- domain module index --------------------------------------------------- */ + +table.modindextable td { + padding: 2px; + border-collapse: collapse; +} + +/* -- general body styles --------------------------------------------------- */ + +div.body { + min-width: 360px; + max-width: 800px; +} + +div.body p, div.body dd, div.body li, div.body blockquote { + -moz-hyphens: auto; + -ms-hyphens: auto; + -webkit-hyphens: auto; + hyphens: auto; +} + +a.headerlink { + visibility: hidden; +} + +a:visited { + color: #551A8B; +} + +h1:hover > a.headerlink, +h2:hover > a.headerlink, +h3:hover > a.headerlink, +h4:hover > a.headerlink, +h5:hover > a.headerlink, +h6:hover > a.headerlink, +dt:hover > a.headerlink, +caption:hover > a.headerlink, +p.caption:hover > a.headerlink, +div.code-block-caption:hover > a.headerlink { + visibility: visible; +} + +div.body p.caption { + text-align: inherit; +} + +div.body td { + text-align: left; +} + +.first { + margin-top: 0 !important; +} + +p.rubric { + margin-top: 30px; + font-weight: bold; +} + +img.align-left, figure.align-left, .figure.align-left, object.align-left { + clear: left; + float: left; + margin-right: 1em; +} + +img.align-right, figure.align-right, .figure.align-right, object.align-right { + clear: right; + float: right; + margin-left: 1em; +} + +img.align-center, figure.align-center, .figure.align-center, object.align-center { + display: block; + margin-left: auto; + margin-right: auto; +} + +img.align-default, figure.align-default, .figure.align-default { + display: block; + margin-left: auto; + margin-right: auto; +} + +.align-left { + text-align: left; +} + +.align-center { + text-align: center; +} + +.align-default { + text-align: center; +} + +.align-right { + text-align: right; +} + +/* -- sidebars -------------------------------------------------------------- */ + +div.sidebar, +aside.sidebar { + margin: 0 0 0.5em 1em; + border: 1px solid #ddb; + padding: 7px; + background-color: #ffe; + width: 40%; + float: right; + clear: right; + overflow-x: auto; +} + +p.sidebar-title { + font-weight: bold; +} + +nav.contents, +aside.topic, +div.admonition, div.topic, blockquote { + clear: left; +} + +/* -- topics ---------------------------------------------------------------- */ + +nav.contents, +aside.topic, +div.topic { + border: 1px solid #ccc; + padding: 7px; + margin: 10px 0 10px 0; +} + +p.topic-title { + font-size: 1.1em; + font-weight: bold; + margin-top: 10px; +} + +/* -- admonitions ----------------------------------------------------------- */ + +div.admonition { + margin-top: 10px; + margin-bottom: 10px; + padding: 7px; +} + +div.admonition dt { + font-weight: bold; +} + +p.admonition-title { + margin: 0px 10px 5px 0px; + font-weight: bold; +} 
+ +div.body p.centered { + text-align: center; + margin-top: 25px; +} + +/* -- content of sidebars/topics/admonitions -------------------------------- */ + +div.sidebar > :last-child, +aside.sidebar > :last-child, +nav.contents > :last-child, +aside.topic > :last-child, +div.topic > :last-child, +div.admonition > :last-child { + margin-bottom: 0; +} + +div.sidebar::after, +aside.sidebar::after, +nav.contents::after, +aside.topic::after, +div.topic::after, +div.admonition::after, +blockquote::after { + display: block; + content: ''; + clear: both; +} + +/* -- tables ---------------------------------------------------------------- */ + +table.docutils { + margin-top: 10px; + margin-bottom: 10px; + border: 0; + border-collapse: collapse; +} + +table.align-center { + margin-left: auto; + margin-right: auto; +} + +table.align-default { + margin-left: auto; + margin-right: auto; +} + +table caption span.caption-number { + font-style: italic; +} + +table caption span.caption-text { +} + +table.docutils td, table.docutils th { + padding: 1px 8px 1px 5px; + border-top: 0; + border-left: 0; + border-right: 0; + border-bottom: 1px solid #aaa; +} + +th { + text-align: left; + padding-right: 5px; +} + +table.citation { + border-left: solid 1px gray; + margin-left: 1px; +} + +table.citation td { + border-bottom: none; +} + +th > :first-child, +td > :first-child { + margin-top: 0px; +} + +th > :last-child, +td > :last-child { + margin-bottom: 0px; +} + +/* -- figures --------------------------------------------------------------- */ + +div.figure, figure { + margin: 0.5em; + padding: 0.5em; +} + +div.figure p.caption, figcaption { + padding: 0.3em; +} + +div.figure p.caption span.caption-number, +figcaption span.caption-number { + font-style: italic; +} + +div.figure p.caption span.caption-text, +figcaption span.caption-text { +} + +/* -- field list styles ----------------------------------------------------- */ + +table.field-list td, table.field-list th { + border: 0 !important; +} + +.field-list ul { + margin: 0; + padding-left: 1em; +} + +.field-list p { + margin: 0; +} + +.field-name { + -moz-hyphens: manual; + -ms-hyphens: manual; + -webkit-hyphens: manual; + hyphens: manual; +} + +/* -- hlist styles ---------------------------------------------------------- */ + +table.hlist { + margin: 1em 0; +} + +table.hlist td { + vertical-align: top; +} + +/* -- object description styles --------------------------------------------- */ + +.sig { + font-family: 'Consolas', 'Menlo', 'DejaVu Sans Mono', 'Bitstream Vera Sans Mono', monospace; +} + +.sig-name, code.descname { + background-color: transparent; + font-weight: bold; +} + +.sig-name { + font-size: 1.1em; +} + +code.descname { + font-size: 1.2em; +} + +.sig-prename, code.descclassname { + background-color: transparent; +} + +.optional { + font-size: 1.3em; +} + +.sig-paren { + font-size: larger; +} + +.sig-param.n { + font-style: italic; +} + +/* C++ specific styling */ + +.sig-inline.c-texpr, +.sig-inline.cpp-texpr { + font-family: unset; +} + +.sig.c .k, .sig.c .kt, +.sig.cpp .k, .sig.cpp .kt { + color: #0033B3; +} + +.sig.c .m, +.sig.cpp .m { + color: #1750EB; +} + +.sig.c .s, .sig.c .sc, +.sig.cpp .s, .sig.cpp .sc { + color: #067D17; +} + + +/* -- other body styles ----------------------------------------------------- */ + +ol.arabic { + list-style: decimal; +} + +ol.loweralpha { + list-style: lower-alpha; +} + +ol.upperalpha { + list-style: upper-alpha; +} + +ol.lowerroman { + list-style: lower-roman; +} + +ol.upperroman { + list-style: 
upper-roman; +} + +:not(li) > ol > li:first-child > :first-child, +:not(li) > ul > li:first-child > :first-child { + margin-top: 0px; +} + +:not(li) > ol > li:last-child > :last-child, +:not(li) > ul > li:last-child > :last-child { + margin-bottom: 0px; +} + +ol.simple ol p, +ol.simple ul p, +ul.simple ol p, +ul.simple ul p { + margin-top: 0; +} + +ol.simple > li:not(:first-child) > p, +ul.simple > li:not(:first-child) > p { + margin-top: 0; +} + +ol.simple p, +ul.simple p { + margin-bottom: 0; +} + +aside.footnote > span, +div.citation > span { + float: left; +} +aside.footnote > span:last-of-type, +div.citation > span:last-of-type { + padding-right: 0.5em; +} +aside.footnote > p { + margin-left: 2em; +} +div.citation > p { + margin-left: 4em; +} +aside.footnote > p:last-of-type, +div.citation > p:last-of-type { + margin-bottom: 0em; +} +aside.footnote > p:last-of-type:after, +div.citation > p:last-of-type:after { + content: ""; + clear: both; +} + +dl.field-list { + display: grid; + grid-template-columns: fit-content(30%) auto; +} + +dl.field-list > dt { + font-weight: bold; + word-break: break-word; + padding-left: 0.5em; + padding-right: 5px; +} + +dl.field-list > dd { + padding-left: 0.5em; + margin-top: 0em; + margin-left: 0em; + margin-bottom: 0em; +} + +dl { + margin-bottom: 15px; +} + +dd > :first-child { + margin-top: 0px; +} + +dd ul, dd table { + margin-bottom: 10px; +} + +dd { + margin-top: 3px; + margin-bottom: 10px; + margin-left: 30px; +} + +.sig dd { + margin-top: 0px; + margin-bottom: 0px; +} + +.sig dl { + margin-top: 0px; + margin-bottom: 0px; +} + +dl > dd:last-child, +dl > dd:last-child > :last-child { + margin-bottom: 0; +} + +dt:target, span.highlighted { + background-color: #fbe54e; +} + +rect.highlighted { + fill: #fbe54e; +} + +dl.glossary dt { + font-weight: bold; + font-size: 1.1em; +} + +.versionmodified { + font-style: italic; +} + +.system-message { + background-color: #fda; + padding: 5px; + border: 3px solid red; +} + +.footnote:target { + background-color: #ffa; +} + +.line-block { + display: block; + margin-top: 1em; + margin-bottom: 1em; +} + +.line-block .line-block { + margin-top: 0; + margin-bottom: 0; + margin-left: 1.5em; +} + +.guilabel, .menuselection { + font-family: sans-serif; +} + +.accelerator { + text-decoration: underline; +} + +.classifier { + font-style: oblique; +} + +.classifier:before { + font-style: normal; + margin: 0 0.5em; + content: ":"; + display: inline-block; +} + +abbr, acronym { + border-bottom: dotted 1px; + cursor: help; +} + +.translated { + background-color: rgba(207, 255, 207, 0.2) +} + +.untranslated { + background-color: rgba(255, 207, 207, 0.2) +} + +/* -- code displays --------------------------------------------------------- */ + +pre { + overflow: auto; + overflow-y: hidden; /* fixes display issues on Chrome browsers */ +} + +pre, div[class*="highlight-"] { + clear: both; +} + +span.pre { + -moz-hyphens: none; + -ms-hyphens: none; + -webkit-hyphens: none; + hyphens: none; + white-space: nowrap; +} + +div[class*="highlight-"] { + margin: 1em 0; +} + +td.linenos pre { + border: 0; + background-color: transparent; + color: #aaa; +} + +table.highlighttable { + display: block; +} + +table.highlighttable tbody { + display: block; +} + +table.highlighttable tr { + display: flex; +} + +table.highlighttable td { + margin: 0; + padding: 0; +} + +table.highlighttable td.linenos { + padding-right: 0.5em; +} + +table.highlighttable td.code { + flex: 1; + overflow: hidden; +} + +.highlight .hll { + display: block; +} + 
+div.highlight pre, +table.highlighttable pre { + margin: 0; +} + +div.code-block-caption + div { + margin-top: 0; +} + +div.code-block-caption { + margin-top: 1em; + padding: 2px 5px; + font-size: small; +} + +div.code-block-caption code { + background-color: transparent; +} + +table.highlighttable td.linenos, +span.linenos, +div.highlight span.gp { /* gp: Generic.Prompt */ + user-select: none; + -webkit-user-select: text; /* Safari fallback only */ + -webkit-user-select: none; /* Chrome/Safari */ + -moz-user-select: none; /* Firefox */ + -ms-user-select: none; /* IE10+ */ +} + +div.code-block-caption span.caption-number { + padding: 0.1em 0.3em; + font-style: italic; +} + +div.code-block-caption span.caption-text { +} + +div.literal-block-wrapper { + margin: 1em 0; +} + +code.xref, a code { + background-color: transparent; + font-weight: bold; +} + +h1 code, h2 code, h3 code, h4 code, h5 code, h6 code { + background-color: transparent; +} + +.viewcode-link { + float: right; +} + +.viewcode-back { + float: right; + font-family: sans-serif; +} + +div.viewcode-block:target { + margin: -1px -10px; + padding: 0 10px; +} + +/* -- math display ---------------------------------------------------------- */ + +img.math { + vertical-align: middle; +} + +div.body div.math p { + text-align: center; +} + +span.eqno { + float: right; +} + +span.eqno a.headerlink { + position: absolute; + z-index: 1; +} + +div.math:hover a.headerlink { + visibility: visible; +} + +/* -- printout stylesheet --------------------------------------------------- */ + +@media print { + div.document, + div.documentwrapper, + div.bodywrapper { + margin: 0 !important; + width: 100%; + } + + div.sphinxsidebar, + div.related, + div.footer, + #top-link { + display: none; + } +} \ No newline at end of file diff --git a/doc/_static/classic.css b/doc/_static/classic.css new file mode 100644 index 0000000..5530147 --- /dev/null +++ b/doc/_static/classic.css @@ -0,0 +1,269 @@ +/* + * classic.css_t + * ~~~~~~~~~~~~~ + * + * Sphinx stylesheet -- classic theme. + * + * :copyright: Copyright 2007-2024 by the Sphinx team, see AUTHORS. + * :license: BSD, see LICENSE for details. 
+ * + */ + +@import url("basic.css"); + +/* -- page layout ----------------------------------------------------------- */ + +html { + /* CSS hack for macOS's scrollbar (see #1125) */ + background-color: #FFFFFF; +} + +body { + font-family: sans-serif; + font-size: 100%; + background-color: #11303d; + color: #000; + margin: 0; + padding: 0; +} + +div.document { + display: flex; + background-color: #1c4e63; +} + +div.documentwrapper { + float: left; + width: 100%; +} + +div.bodywrapper { + margin: 0 0 0 230px; +} + +div.body { + background-color: #ffffff; + color: #000000; + padding: 0 20px 30px 20px; +} + +div.footer { + color: #ffffff; + width: 100%; + padding: 9px 0 9px 0; + text-align: center; + font-size: 75%; +} + +div.footer a { + color: #ffffff; + text-decoration: underline; +} + +div.related { + background-color: #133f52; + line-height: 30px; + color: #ffffff; +} + +div.related a { + color: #ffffff; +} + +div.sphinxsidebar { +} + +div.sphinxsidebar h3 { + font-family: 'Trebuchet MS', sans-serif; + color: #ffffff; + font-size: 1.4em; + font-weight: normal; + margin: 0; + padding: 0; +} + +div.sphinxsidebar h3 a { + color: #ffffff; +} + +div.sphinxsidebar h4 { + font-family: 'Trebuchet MS', sans-serif; + color: #ffffff; + font-size: 1.3em; + font-weight: normal; + margin: 5px 0 0 0; + padding: 0; +} + +div.sphinxsidebar p { + color: #ffffff; +} + +div.sphinxsidebar p.topless { + margin: 5px 10px 10px 10px; +} + +div.sphinxsidebar ul { + margin: 10px; + padding: 0; + color: #ffffff; +} + +div.sphinxsidebar a { + color: #98dbcc; +} + +div.sphinxsidebar input { + border: 1px solid #98dbcc; + font-family: sans-serif; + font-size: 1em; +} + + + +/* -- hyperlink styles ------------------------------------------------------ */ + +a { + color: #355f7c; + text-decoration: none; +} + +a:visited { + color: #551a8b; + text-decoration: none; +} + +a:hover { + text-decoration: underline; +} + + + +/* -- body styles ----------------------------------------------------------- */ + +div.body h1, +div.body h2, +div.body h3, +div.body h4, +div.body h5, +div.body h6 { + font-family: 'Trebuchet MS', sans-serif; + background-color: #f2f2f2; + font-weight: normal; + color: #20435c; + border-bottom: 1px solid #ccc; + margin: 20px -20px 10px -20px; + padding: 3px 0 3px 10px; +} + +div.body h1 { margin-top: 0; font-size: 200%; } +div.body h2 { font-size: 160%; } +div.body h3 { font-size: 140%; } +div.body h4 { font-size: 120%; } +div.body h5 { font-size: 110%; } +div.body h6 { font-size: 100%; } + +a.headerlink { + color: #c60f0f; + font-size: 0.8em; + padding: 0 4px 0 4px; + text-decoration: none; +} + +a.headerlink:hover { + background-color: #c60f0f; + color: white; +} + +div.body p, div.body dd, div.body li, div.body blockquote { + text-align: justify; + line-height: 130%; +} + +div.admonition p.admonition-title + p { + display: inline; +} + +div.admonition p { + margin-bottom: 5px; +} + +div.admonition pre { + margin-bottom: 5px; +} + +div.admonition ul, div.admonition ol { + margin-bottom: 5px; +} + +div.note { + background-color: #eee; + border: 1px solid #ccc; +} + +div.seealso { + background-color: #ffc; + border: 1px solid #ff6; +} + +nav.contents, +aside.topic, +div.topic { + background-color: #eee; +} + +div.warning { + background-color: #ffe4e4; + border: 1px solid #f66; +} + +p.admonition-title { + display: inline; +} + +p.admonition-title:after { + content: ":"; +} + +pre { + padding: 5px; + background-color: unset; + color: unset; + line-height: 120%; + border: 1px solid #ac9; + border-left: 
none; + border-right: none; +} + +code { + background-color: #ecf0f3; + padding: 0 1px 0 1px; + font-size: 0.95em; +} + +th, dl.field-list > dt { + background-color: #ede; +} + +.warning code { + background: #efc2c2; +} + +.note code { + background: #d6d6d6; +} + +.viewcode-back { + font-family: sans-serif; +} + +div.viewcode-block:target { + background-color: #f4debf; + border-top: 1px solid #ac9; + border-bottom: 1px solid #ac9; +} + +div.code-block-caption { + color: #efefef; + background-color: #1c4e63; +} \ No newline at end of file diff --git a/doc/_static/custom.css b/doc/_static/custom.css new file mode 100644 index 0000000..1e71926 --- /dev/null +++ b/doc/_static/custom.css @@ -0,0 +1,3 @@ +.wy-nav-content { +max-width: 1200px !important; +} diff --git a/doc/_static/doctools.js b/doc/_static/doctools.js new file mode 100644 index 0000000..4d67807 --- /dev/null +++ b/doc/_static/doctools.js @@ -0,0 +1,156 @@ +/* + * doctools.js + * ~~~~~~~~~~~ + * + * Base JavaScript utilities for all Sphinx HTML documentation. + * + * :copyright: Copyright 2007-2024 by the Sphinx team, see AUTHORS. + * :license: BSD, see LICENSE for details. + * + */ +"use strict"; + +const BLACKLISTED_KEY_CONTROL_ELEMENTS = new Set([ + "TEXTAREA", + "INPUT", + "SELECT", + "BUTTON", +]); + +const _ready = (callback) => { + if (document.readyState !== "loading") { + callback(); + } else { + document.addEventListener("DOMContentLoaded", callback); + } +}; + +/** + * Small JavaScript module for the documentation. + */ +const Documentation = { + init: () => { + Documentation.initDomainIndexTable(); + Documentation.initOnKeyListeners(); + }, + + /** + * i18n support + */ + TRANSLATIONS: {}, + PLURAL_EXPR: (n) => (n === 1 ? 0 : 1), + LOCALE: "unknown", + + // gettext and ngettext don't access this so that the functions + // can safely bound to a different name (_ = Documentation.gettext) + gettext: (string) => { + const translated = Documentation.TRANSLATIONS[string]; + switch (typeof translated) { + case "undefined": + return string; // no translation + case "string": + return translated; // translation exists + default: + return translated[0]; // (singular, plural) translation tuple exists + } + }, + + ngettext: (singular, plural, n) => { + const translated = Documentation.TRANSLATIONS[singular]; + if (typeof translated !== "undefined") + return translated[Documentation.PLURAL_EXPR(n)]; + return n === 1 ? 
singular : plural; + }, + + addTranslations: (catalog) => { + Object.assign(Documentation.TRANSLATIONS, catalog.messages); + Documentation.PLURAL_EXPR = new Function( + "n", + `return (${catalog.plural_expr})` + ); + Documentation.LOCALE = catalog.locale; + }, + + /** + * helper function to focus on search bar + */ + focusSearchBar: () => { + document.querySelectorAll("input[name=q]")[0]?.focus(); + }, + + /** + * Initialise the domain index toggle buttons + */ + initDomainIndexTable: () => { + const toggler = (el) => { + const idNumber = el.id.substr(7); + const toggledRows = document.querySelectorAll(`tr.cg-${idNumber}`); + if (el.src.substr(-9) === "minus.png") { + el.src = `${el.src.substr(0, el.src.length - 9)}plus.png`; + toggledRows.forEach((el) => (el.style.display = "none")); + } else { + el.src = `${el.src.substr(0, el.src.length - 8)}minus.png`; + toggledRows.forEach((el) => (el.style.display = "")); + } + }; + + const togglerElements = document.querySelectorAll("img.toggler"); + togglerElements.forEach((el) => + el.addEventListener("click", (event) => toggler(event.currentTarget)) + ); + togglerElements.forEach((el) => (el.style.display = "")); + if (DOCUMENTATION_OPTIONS.COLLAPSE_INDEX) togglerElements.forEach(toggler); + }, + + initOnKeyListeners: () => { + // only install a listener if it is really needed + if ( + !DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS && + !DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS + ) + return; + + document.addEventListener("keydown", (event) => { + // bail for input elements + if (BLACKLISTED_KEY_CONTROL_ELEMENTS.has(document.activeElement.tagName)) return; + // bail with special keys + if (event.altKey || event.ctrlKey || event.metaKey) return; + + if (!event.shiftKey) { + switch (event.key) { + case "ArrowLeft": + if (!DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS) break; + + const prevLink = document.querySelector('link[rel="prev"]'); + if (prevLink && prevLink.href) { + window.location.href = prevLink.href; + event.preventDefault(); + } + break; + case "ArrowRight": + if (!DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS) break; + + const nextLink = document.querySelector('link[rel="next"]'); + if (nextLink && nextLink.href) { + window.location.href = nextLink.href; + event.preventDefault(); + } + break; + } + } + + // some keyboard layouts may need Shift to get / + switch (event.key) { + case "/": + if (!DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS) break; + Documentation.focusSearchBar(); + event.preventDefault(); + } + }); + }, +}; + +// quick alias for translations +const _ = Documentation.gettext; + +_ready(Documentation.init); diff --git a/doc/_static/documentation_options.js b/doc/_static/documentation_options.js new file mode 100644 index 0000000..89435bb --- /dev/null +++ b/doc/_static/documentation_options.js @@ -0,0 +1,13 @@ +const DOCUMENTATION_OPTIONS = { + VERSION: '1.0.0', + LANGUAGE: 'en', + COLLAPSE_INDEX: false, + BUILDER: 'html', + FILE_SUFFIX: '.html', + LINK_SUFFIX: '.html', + HAS_SOURCE: true, + SOURCELINK_SUFFIX: '.txt', + NAVIGATION_WITH_KEYS: false, + SHOW_SEARCH_SUMMARY: true, + ENABLE_SEARCH_SHORTCUTS: true, +}; \ No newline at end of file diff --git a/doc/_static/file.png b/doc/_static/file.png new file mode 100644 index 0000000..a858a41 Binary files /dev/null and b/doc/_static/file.png differ diff --git a/doc/_static/language_data.js b/doc/_static/language_data.js new file mode 100644 index 0000000..367b8ed --- /dev/null +++ b/doc/_static/language_data.js @@ -0,0 +1,199 @@ +/* + * language_data.js + * ~~~~~~~~~~~~~~~~ + * 
+ * This script contains the language-specific data used by searchtools.js, + * namely the list of stopwords, stemmer, scorer and splitter. + * + * :copyright: Copyright 2007-2024 by the Sphinx team, see AUTHORS. + * :license: BSD, see LICENSE for details. + * + */ + +var stopwords = ["a", "and", "are", "as", "at", "be", "but", "by", "for", "if", "in", "into", "is", "it", "near", "no", "not", "of", "on", "or", "such", "that", "the", "their", "then", "there", "these", "they", "this", "to", "was", "will", "with"]; + + +/* Non-minified version is copied as a separate JS file, if available */ + +/** + * Porter Stemmer + */ +var Stemmer = function() { + + var step2list = { + ational: 'ate', + tional: 'tion', + enci: 'ence', + anci: 'ance', + izer: 'ize', + bli: 'ble', + alli: 'al', + entli: 'ent', + eli: 'e', + ousli: 'ous', + ization: 'ize', + ation: 'ate', + ator: 'ate', + alism: 'al', + iveness: 'ive', + fulness: 'ful', + ousness: 'ous', + aliti: 'al', + iviti: 'ive', + biliti: 'ble', + logi: 'log' + }; + + var step3list = { + icate: 'ic', + ative: '', + alize: 'al', + iciti: 'ic', + ical: 'ic', + ful: '', + ness: '' + }; + + var c = "[^aeiou]"; // consonant + var v = "[aeiouy]"; // vowel + var C = c + "[^aeiouy]*"; // consonant sequence + var V = v + "[aeiou]*"; // vowel sequence + + var mgr0 = "^(" + C + ")?" + V + C; // [C]VC... is m>0 + var meq1 = "^(" + C + ")?" + V + C + "(" + V + ")?$"; // [C]VC[V] is m=1 + var mgr1 = "^(" + C + ")?" + V + C + V + C; // [C]VCVC... is m>1 + var s_v = "^(" + C + ")?" + v; // vowel in stem + + this.stemWord = function (w) { + var stem; + var suffix; + var firstch; + var origword = w; + + if (w.length < 3) + return w; + + var re; + var re2; + var re3; + var re4; + + firstch = w.substr(0,1); + if (firstch == "y") + w = firstch.toUpperCase() + w.substr(1); + + // Step 1a + re = /^(.+?)(ss|i)es$/; + re2 = /^(.+?)([^s])s$/; + + if (re.test(w)) + w = w.replace(re,"$1$2"); + else if (re2.test(w)) + w = w.replace(re2,"$1$2"); + + // Step 1b + re = /^(.+?)eed$/; + re2 = /^(.+?)(ed|ing)$/; + if (re.test(w)) { + var fp = re.exec(w); + re = new RegExp(mgr0); + if (re.test(fp[1])) { + re = /.$/; + w = w.replace(re,""); + } + } + else if (re2.test(w)) { + var fp = re2.exec(w); + stem = fp[1]; + re2 = new RegExp(s_v); + if (re2.test(stem)) { + w = stem; + re2 = /(at|bl|iz)$/; + re3 = new RegExp("([^aeiouylsz])\\1$"); + re4 = new RegExp("^" + C + v + "[^aeiouwxy]$"); + if (re2.test(w)) + w = w + "e"; + else if (re3.test(w)) { + re = /.$/; + w = w.replace(re,""); + } + else if (re4.test(w)) + w = w + "e"; + } + } + + // Step 1c + re = /^(.+?)y$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + re = new RegExp(s_v); + if (re.test(stem)) + w = stem + "i"; + } + + // Step 2 + re = /^(.+?)(ational|tional|enci|anci|izer|bli|alli|entli|eli|ousli|ization|ation|ator|alism|iveness|fulness|ousness|aliti|iviti|biliti|logi)$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + suffix = fp[2]; + re = new RegExp(mgr0); + if (re.test(stem)) + w = stem + step2list[suffix]; + } + + // Step 3 + re = /^(.+?)(icate|ative|alize|iciti|ical|ful|ness)$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + suffix = fp[2]; + re = new RegExp(mgr0); + if (re.test(stem)) + w = stem + step3list[suffix]; + } + + // Step 4 + re = /^(.+?)(al|ance|ence|er|ic|able|ible|ant|ement|ment|ent|ou|ism|ate|iti|ous|ive|ize)$/; + re2 = /^(.+?)(s|t)(ion)$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + re = new RegExp(mgr1); + if (re.test(stem)) + w = stem; + } + else 
if (re2.test(w)) { + var fp = re2.exec(w); + stem = fp[1] + fp[2]; + re2 = new RegExp(mgr1); + if (re2.test(stem)) + w = stem; + } + + // Step 5 + re = /^(.+?)e$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + re = new RegExp(mgr1); + re2 = new RegExp(meq1); + re3 = new RegExp("^" + C + v + "[^aeiouwxy]$"); + if (re.test(stem) || (re2.test(stem) && !(re3.test(stem)))) + w = stem; + } + re = /ll$/; + re2 = new RegExp(mgr1); + if (re.test(w) && re2.test(w)) { + re = /.$/; + w = w.replace(re,""); + } + + // and turn initial Y back to y + if (firstch == "y") + w = firstch.toLowerCase() + w.substr(1); + return w; + } +} + diff --git a/doc/_static/minus.png b/doc/_static/minus.png new file mode 100644 index 0000000..d96755f Binary files /dev/null and b/doc/_static/minus.png differ diff --git a/doc/_static/plus.png b/doc/_static/plus.png new file mode 100644 index 0000000..7107cec Binary files /dev/null and b/doc/_static/plus.png differ diff --git a/doc/_static/pygments.css b/doc/_static/pygments.css new file mode 100644 index 0000000..0d49244 --- /dev/null +++ b/doc/_static/pygments.css @@ -0,0 +1,75 @@ +pre { line-height: 125%; } +td.linenos .normal { color: inherit; background-color: transparent; padding-left: 5px; padding-right: 5px; } +span.linenos { color: inherit; background-color: transparent; padding-left: 5px; padding-right: 5px; } +td.linenos .special { color: #000000; background-color: #ffffc0; padding-left: 5px; padding-right: 5px; } +span.linenos.special { color: #000000; background-color: #ffffc0; padding-left: 5px; padding-right: 5px; } +.highlight .hll { background-color: #ffffcc } +.highlight { background: #eeffcc; } +.highlight .c { color: #408090; font-style: italic } /* Comment */ +.highlight .err { border: 1px solid #FF0000 } /* Error */ +.highlight .k { color: #007020; font-weight: bold } /* Keyword */ +.highlight .o { color: #666666 } /* Operator */ +.highlight .ch { color: #408090; font-style: italic } /* Comment.Hashbang */ +.highlight .cm { color: #408090; font-style: italic } /* Comment.Multiline */ +.highlight .cp { color: #007020 } /* Comment.Preproc */ +.highlight .cpf { color: #408090; font-style: italic } /* Comment.PreprocFile */ +.highlight .c1 { color: #408090; font-style: italic } /* Comment.Single */ +.highlight .cs { color: #408090; background-color: #fff0f0 } /* Comment.Special */ +.highlight .gd { color: #A00000 } /* Generic.Deleted */ +.highlight .ge { font-style: italic } /* Generic.Emph */ +.highlight .ges { font-weight: bold; font-style: italic } /* Generic.EmphStrong */ +.highlight .gr { color: #FF0000 } /* Generic.Error */ +.highlight .gh { color: #000080; font-weight: bold } /* Generic.Heading */ +.highlight .gi { color: #00A000 } /* Generic.Inserted */ +.highlight .go { color: #333333 } /* Generic.Output */ +.highlight .gp { color: #c65d09; font-weight: bold } /* Generic.Prompt */ +.highlight .gs { font-weight: bold } /* Generic.Strong */ +.highlight .gu { color: #800080; font-weight: bold } /* Generic.Subheading */ +.highlight .gt { color: #0044DD } /* Generic.Traceback */ +.highlight .kc { color: #007020; font-weight: bold } /* Keyword.Constant */ +.highlight .kd { color: #007020; font-weight: bold } /* Keyword.Declaration */ +.highlight .kn { color: #007020; font-weight: bold } /* Keyword.Namespace */ +.highlight .kp { color: #007020 } /* Keyword.Pseudo */ +.highlight .kr { color: #007020; font-weight: bold } /* Keyword.Reserved */ +.highlight .kt { color: #902000 } /* Keyword.Type */ +.highlight .m { color: #208050 } /* 
Literal.Number */ +.highlight .s { color: #4070a0 } /* Literal.String */ +.highlight .na { color: #4070a0 } /* Name.Attribute */ +.highlight .nb { color: #007020 } /* Name.Builtin */ +.highlight .nc { color: #0e84b5; font-weight: bold } /* Name.Class */ +.highlight .no { color: #60add5 } /* Name.Constant */ +.highlight .nd { color: #555555; font-weight: bold } /* Name.Decorator */ +.highlight .ni { color: #d55537; font-weight: bold } /* Name.Entity */ +.highlight .ne { color: #007020 } /* Name.Exception */ +.highlight .nf { color: #06287e } /* Name.Function */ +.highlight .nl { color: #002070; font-weight: bold } /* Name.Label */ +.highlight .nn { color: #0e84b5; font-weight: bold } /* Name.Namespace */ +.highlight .nt { color: #062873; font-weight: bold } /* Name.Tag */ +.highlight .nv { color: #bb60d5 } /* Name.Variable */ +.highlight .ow { color: #007020; font-weight: bold } /* Operator.Word */ +.highlight .w { color: #bbbbbb } /* Text.Whitespace */ +.highlight .mb { color: #208050 } /* Literal.Number.Bin */ +.highlight .mf { color: #208050 } /* Literal.Number.Float */ +.highlight .mh { color: #208050 } /* Literal.Number.Hex */ +.highlight .mi { color: #208050 } /* Literal.Number.Integer */ +.highlight .mo { color: #208050 } /* Literal.Number.Oct */ +.highlight .sa { color: #4070a0 } /* Literal.String.Affix */ +.highlight .sb { color: #4070a0 } /* Literal.String.Backtick */ +.highlight .sc { color: #4070a0 } /* Literal.String.Char */ +.highlight .dl { color: #4070a0 } /* Literal.String.Delimiter */ +.highlight .sd { color: #4070a0; font-style: italic } /* Literal.String.Doc */ +.highlight .s2 { color: #4070a0 } /* Literal.String.Double */ +.highlight .se { color: #4070a0; font-weight: bold } /* Literal.String.Escape */ +.highlight .sh { color: #4070a0 } /* Literal.String.Heredoc */ +.highlight .si { color: #70a0d0; font-style: italic } /* Literal.String.Interpol */ +.highlight .sx { color: #c65d09 } /* Literal.String.Other */ +.highlight .sr { color: #235388 } /* Literal.String.Regex */ +.highlight .s1 { color: #4070a0 } /* Literal.String.Single */ +.highlight .ss { color: #517918 } /* Literal.String.Symbol */ +.highlight .bp { color: #007020 } /* Name.Builtin.Pseudo */ +.highlight .fm { color: #06287e } /* Name.Function.Magic */ +.highlight .vc { color: #bb60d5 } /* Name.Variable.Class */ +.highlight .vg { color: #bb60d5 } /* Name.Variable.Global */ +.highlight .vi { color: #bb60d5 } /* Name.Variable.Instance */ +.highlight .vm { color: #bb60d5 } /* Name.Variable.Magic */ +.highlight .il { color: #208050 } /* Literal.Number.Integer.Long */ \ No newline at end of file diff --git a/doc/_static/searchtools.js b/doc/_static/searchtools.js new file mode 100644 index 0000000..b08d58c --- /dev/null +++ b/doc/_static/searchtools.js @@ -0,0 +1,620 @@ +/* + * searchtools.js + * ~~~~~~~~~~~~~~~~ + * + * Sphinx JavaScript utilities for the full-text search. + * + * :copyright: Copyright 2007-2024 by the Sphinx team, see AUTHORS. + * :license: BSD, see LICENSE for details. + * + */ +"use strict"; + +/** + * Simple result scoring code. + */ +if (typeof Scorer === "undefined") { + var Scorer = { + // Implement the following function to further tweak the score for each result + // The function takes a result array [docname, title, anchor, descr, score, filename] + // and returns the new score. 
+ /* + score: result => { + const [docname, title, anchor, descr, score, filename] = result + return score + }, + */ + + // query matches the full name of an object + objNameMatch: 11, + // or matches in the last dotted part of the object name + objPartialMatch: 6, + // Additive scores depending on the priority of the object + objPrio: { + 0: 15, // used to be importantResults + 1: 5, // used to be objectResults + 2: -5, // used to be unimportantResults + }, + // Used when the priority is not in the mapping. + objPrioDefault: 0, + + // query found in title + title: 15, + partialTitle: 7, + // query found in terms + term: 5, + partialTerm: 2, + }; +} + +const _removeChildren = (element) => { + while (element && element.lastChild) element.removeChild(element.lastChild); +}; + +/** + * See https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions#escaping + */ +const _escapeRegExp = (string) => + string.replace(/[.*+\-?^${}()|[\]\\]/g, "\\$&"); // $& means the whole matched string + +const _displayItem = (item, searchTerms, highlightTerms) => { + const docBuilder = DOCUMENTATION_OPTIONS.BUILDER; + const docFileSuffix = DOCUMENTATION_OPTIONS.FILE_SUFFIX; + const docLinkSuffix = DOCUMENTATION_OPTIONS.LINK_SUFFIX; + const showSearchSummary = DOCUMENTATION_OPTIONS.SHOW_SEARCH_SUMMARY; + const contentRoot = document.documentElement.dataset.content_root; + + const [docName, title, anchor, descr, score, _filename] = item; + + let listItem = document.createElement("li"); + let requestUrl; + let linkUrl; + if (docBuilder === "dirhtml") { + // dirhtml builder + let dirname = docName + "/"; + if (dirname.match(/\/index\/$/)) + dirname = dirname.substring(0, dirname.length - 6); + else if (dirname === "index/") dirname = ""; + requestUrl = contentRoot + dirname; + linkUrl = requestUrl; + } else { + // normal html builders + requestUrl = contentRoot + docName + docFileSuffix; + linkUrl = docName + docLinkSuffix; + } + let linkEl = listItem.appendChild(document.createElement("a")); + linkEl.href = linkUrl + anchor; + linkEl.dataset.score = score; + linkEl.innerHTML = title; + if (descr) { + listItem.appendChild(document.createElement("span")).innerHTML = + " (" + descr + ")"; + // highlight search terms in the description + if (SPHINX_HIGHLIGHT_ENABLED) // set in sphinx_highlight.js + highlightTerms.forEach((term) => _highlightText(listItem, term, "highlighted")); + } + else if (showSearchSummary) + fetch(requestUrl) + .then((responseData) => responseData.text()) + .then((data) => { + if (data) + listItem.appendChild( + Search.makeSearchSummary(data, searchTerms, anchor) + ); + // highlight search terms in the summary + if (SPHINX_HIGHLIGHT_ENABLED) // set in sphinx_highlight.js + highlightTerms.forEach((term) => _highlightText(listItem, term, "highlighted")); + }); + Search.output.appendChild(listItem); +}; +const _finishSearch = (resultCount) => { + Search.stopPulse(); + Search.title.innerText = _("Search Results"); + if (!resultCount) + Search.status.innerText = Documentation.gettext( + "Your search did not match any documents. Please make sure that all words are spelled correctly and that you've selected enough categories." + ); + else + Search.status.innerText = _( + "Search finished, found ${resultCount} page(s) matching the search query." 
+ ).replace('${resultCount}', resultCount); +}; +const _displayNextItem = ( + results, + resultCount, + searchTerms, + highlightTerms, +) => { + // results left, load the summary and display it + // this is intended to be dynamic (don't sub resultsCount) + if (results.length) { + _displayItem(results.pop(), searchTerms, highlightTerms); + setTimeout( + () => _displayNextItem(results, resultCount, searchTerms, highlightTerms), + 5 + ); + } + // search finished, update title and status message + else _finishSearch(resultCount); +}; +// Helper function used by query() to order search results. +// Each input is an array of [docname, title, anchor, descr, score, filename]. +// Order the results by score (in opposite order of appearance, since the +// `_displayNextItem` function uses pop() to retrieve items) and then alphabetically. +const _orderResultsByScoreThenName = (a, b) => { + const leftScore = a[4]; + const rightScore = b[4]; + if (leftScore === rightScore) { + // same score: sort alphabetically + const leftTitle = a[1].toLowerCase(); + const rightTitle = b[1].toLowerCase(); + if (leftTitle === rightTitle) return 0; + return leftTitle > rightTitle ? -1 : 1; // inverted is intentional + } + return leftScore > rightScore ? 1 : -1; +}; + +/** + * Default splitQuery function. Can be overridden in ``sphinx.search`` with a + * custom function per language. + * + * The regular expression works by splitting the string on consecutive characters + * that are not Unicode letters, numbers, underscores, or emoji characters. + * This is the same as ``\W+`` in Python, preserving the surrogate pair area. + */ +if (typeof splitQuery === "undefined") { + var splitQuery = (query) => query + .split(/[^\p{Letter}\p{Number}_\p{Emoji_Presentation}]+/gu) + .filter(term => term) // remove remaining empty strings +} + +/** + * Search Module + */ +const Search = { + _index: null, + _queued_query: null, + _pulse_status: -1, + + htmlToText: (htmlString, anchor) => { + const htmlElement = new DOMParser().parseFromString(htmlString, 'text/html'); + for (const removalQuery of [".headerlink", "script", "style"]) { + htmlElement.querySelectorAll(removalQuery).forEach((el) => { el.remove() }); + } + if (anchor) { + const anchorContent = htmlElement.querySelector(`[role="main"] ${anchor}`); + if (anchorContent) return anchorContent.textContent; + + console.warn( + `Anchored content block not found. Sphinx search tries to obtain it via DOM query '[role=main] ${anchor}'. Check your theme or template.` + ); + } + + // if anchor not specified or not found, fall back to main content + const docContent = htmlElement.querySelector('[role="main"]'); + if (docContent) return docContent.textContent; + + console.warn( + "Content block not found. Sphinx search tries to obtain it via DOM query '[role=main]'. Check your theme or template." 
+ ); + return ""; + }, + + init: () => { + const query = new URLSearchParams(window.location.search).get("q"); + document + .querySelectorAll('input[name="q"]') + .forEach((el) => (el.value = query)); + if (query) Search.performSearch(query); + }, + + loadIndex: (url) => + (document.body.appendChild(document.createElement("script")).src = url), + + setIndex: (index) => { + Search._index = index; + if (Search._queued_query !== null) { + const query = Search._queued_query; + Search._queued_query = null; + Search.query(query); + } + }, + + hasIndex: () => Search._index !== null, + + deferQuery: (query) => (Search._queued_query = query), + + stopPulse: () => (Search._pulse_status = -1), + + startPulse: () => { + if (Search._pulse_status >= 0) return; + + const pulse = () => { + Search._pulse_status = (Search._pulse_status + 1) % 4; + Search.dots.innerText = ".".repeat(Search._pulse_status); + if (Search._pulse_status >= 0) window.setTimeout(pulse, 500); + }; + pulse(); + }, + + /** + * perform a search for something (or wait until index is loaded) + */ + performSearch: (query) => { + // create the required interface elements + const searchText = document.createElement("h2"); + searchText.textContent = _("Searching"); + const searchSummary = document.createElement("p"); + searchSummary.classList.add("search-summary"); + searchSummary.innerText = ""; + const searchList = document.createElement("ul"); + searchList.classList.add("search"); + + const out = document.getElementById("search-results"); + Search.title = out.appendChild(searchText); + Search.dots = Search.title.appendChild(document.createElement("span")); + Search.status = out.appendChild(searchSummary); + Search.output = out.appendChild(searchList); + + const searchProgress = document.getElementById("search-progress"); + // Some themes don't use the search progress node + if (searchProgress) { + searchProgress.innerText = _("Preparing search..."); + } + Search.startPulse(); + + // index already loaded, the browser was quick! 
+ if (Search.hasIndex()) Search.query(query); + else Search.deferQuery(query); + }, + + _parseQuery: (query) => { + // stem the search terms and add them to the correct list + const stemmer = new Stemmer(); + const searchTerms = new Set(); + const excludedTerms = new Set(); + const highlightTerms = new Set(); + const objectTerms = new Set(splitQuery(query.toLowerCase().trim())); + splitQuery(query.trim()).forEach((queryTerm) => { + const queryTermLower = queryTerm.toLowerCase(); + + // maybe skip this "word" + // stopwords array is from language_data.js + if ( + stopwords.indexOf(queryTermLower) !== -1 || + queryTerm.match(/^\d+$/) + ) + return; + + // stem the word + let word = stemmer.stemWord(queryTermLower); + // select the correct list + if (word[0] === "-") excludedTerms.add(word.substr(1)); + else { + searchTerms.add(word); + highlightTerms.add(queryTermLower); + } + }); + + if (SPHINX_HIGHLIGHT_ENABLED) { // set in sphinx_highlight.js + localStorage.setItem("sphinx_highlight_terms", [...highlightTerms].join(" ")) + } + + // console.debug("SEARCH: searching for:"); + // console.info("required: ", [...searchTerms]); + // console.info("excluded: ", [...excludedTerms]); + + return [query, searchTerms, excludedTerms, highlightTerms, objectTerms]; + }, + + /** + * execute search (requires search index to be loaded) + */ + _performSearch: (query, searchTerms, excludedTerms, highlightTerms, objectTerms) => { + const filenames = Search._index.filenames; + const docNames = Search._index.docnames; + const titles = Search._index.titles; + const allTitles = Search._index.alltitles; + const indexEntries = Search._index.indexentries; + + // Collect multiple result groups to be sorted separately and then ordered. + // Each is an array of [docname, title, anchor, descr, score, filename]. + const normalResults = []; + const nonMainIndexResults = []; + + _removeChildren(document.getElementById("search-progress")); + + const queryLower = query.toLowerCase().trim(); + for (const [title, foundTitles] of Object.entries(allTitles)) { + if (title.toLowerCase().trim().includes(queryLower) && (queryLower.length >= title.length/2)) { + for (const [file, id] of foundTitles) { + const score = Math.round(Scorer.title * queryLower.length / title.length); + const boost = titles[file] === title ? 1 : 0; // add a boost for document titles + normalResults.push([ + docNames[file], + titles[file] !== title ? `${titles[file]} > ${title}` : title, + id !== null ? "#" + id : "", + null, + score + boost, + filenames[file], + ]); + } + } + } + + // search for explicit entries in index directives + for (const [entry, foundEntries] of Object.entries(indexEntries)) { + if (entry.includes(queryLower) && (queryLower.length >= entry.length/2)) { + for (const [file, id, isMain] of foundEntries) { + const score = Math.round(100 * queryLower.length / entry.length); + const result = [ + docNames[file], + titles[file], + id ? 
"#" + id : "", + null, + score, + filenames[file], + ]; + if (isMain) { + normalResults.push(result); + } else { + nonMainIndexResults.push(result); + } + } + } + } + + // lookup as object + objectTerms.forEach((term) => + normalResults.push(...Search.performObjectSearch(term, objectTerms)) + ); + + // lookup as search terms in fulltext + normalResults.push(...Search.performTermsSearch(searchTerms, excludedTerms)); + + // let the scorer override scores with a custom scoring function + if (Scorer.score) { + normalResults.forEach((item) => (item[4] = Scorer.score(item))); + nonMainIndexResults.forEach((item) => (item[4] = Scorer.score(item))); + } + + // Sort each group of results by score and then alphabetically by name. + normalResults.sort(_orderResultsByScoreThenName); + nonMainIndexResults.sort(_orderResultsByScoreThenName); + + // Combine the result groups in (reverse) order. + // Non-main index entries are typically arbitrary cross-references, + // so display them after other results. + let results = [...nonMainIndexResults, ...normalResults]; + + // remove duplicate search results + // note the reversing of results, so that in the case of duplicates, the highest-scoring entry is kept + let seen = new Set(); + results = results.reverse().reduce((acc, result) => { + let resultStr = result.slice(0, 4).concat([result[5]]).map(v => String(v)).join(','); + if (!seen.has(resultStr)) { + acc.push(result); + seen.add(resultStr); + } + return acc; + }, []); + + return results.reverse(); + }, + + query: (query) => { + const [searchQuery, searchTerms, excludedTerms, highlightTerms, objectTerms] = Search._parseQuery(query); + const results = Search._performSearch(searchQuery, searchTerms, excludedTerms, highlightTerms, objectTerms); + + // for debugging + //Search.lastresults = results.slice(); // a copy + // console.info("search results:", Search.lastresults); + + // print the results + _displayNextItem(results, results.length, searchTerms, highlightTerms); + }, + + /** + * search for object names + */ + performObjectSearch: (object, objectTerms) => { + const filenames = Search._index.filenames; + const docNames = Search._index.docnames; + const objects = Search._index.objects; + const objNames = Search._index.objnames; + const titles = Search._index.titles; + + const results = []; + + const objectSearchCallback = (prefix, match) => { + const name = match[4] + const fullname = (prefix ? prefix + "." : "") + name; + const fullnameLower = fullname.toLowerCase(); + if (fullnameLower.indexOf(object) < 0) return; + + let score = 0; + const parts = fullnameLower.split("."); + + // check for different match types: exact matches of full name or + // "last name" (i.e. 
last dotted part) + if (fullnameLower === object || parts.slice(-1)[0] === object) + score += Scorer.objNameMatch; + else if (parts.slice(-1)[0].indexOf(object) > -1) + score += Scorer.objPartialMatch; // matches in last name + + const objName = objNames[match[1]][2]; + const title = titles[match[0]]; + + // If more than one term searched for, we require other words to be + // found in the name/title/description + const otherTerms = new Set(objectTerms); + otherTerms.delete(object); + if (otherTerms.size > 0) { + const haystack = `${prefix} ${name} ${objName} ${title}`.toLowerCase(); + if ( + [...otherTerms].some((otherTerm) => haystack.indexOf(otherTerm) < 0) + ) + return; + } + + let anchor = match[3]; + if (anchor === "") anchor = fullname; + else if (anchor === "-") anchor = objNames[match[1]][1] + "-" + fullname; + + const descr = objName + _(", in ") + title; + + // add custom score for some objects according to scorer + if (Scorer.objPrio.hasOwnProperty(match[2])) + score += Scorer.objPrio[match[2]]; + else score += Scorer.objPrioDefault; + + results.push([ + docNames[match[0]], + fullname, + "#" + anchor, + descr, + score, + filenames[match[0]], + ]); + }; + Object.keys(objects).forEach((prefix) => + objects[prefix].forEach((array) => + objectSearchCallback(prefix, array) + ) + ); + return results; + }, + + /** + * search for full-text terms in the index + */ + performTermsSearch: (searchTerms, excludedTerms) => { + // prepare search + const terms = Search._index.terms; + const titleTerms = Search._index.titleterms; + const filenames = Search._index.filenames; + const docNames = Search._index.docnames; + const titles = Search._index.titles; + + const scoreMap = new Map(); + const fileMap = new Map(); + + // perform the search on the required terms + searchTerms.forEach((word) => { + const files = []; + const arr = [ + { files: terms[word], score: Scorer.term }, + { files: titleTerms[word], score: Scorer.title }, + ]; + // add support for partial matches + if (word.length > 2) { + const escapedWord = _escapeRegExp(word); + if (!terms.hasOwnProperty(word)) { + Object.keys(terms).forEach((term) => { + if (term.match(escapedWord)) + arr.push({ files: terms[term], score: Scorer.partialTerm }); + }); + } + if (!titleTerms.hasOwnProperty(word)) { + Object.keys(titleTerms).forEach((term) => { + if (term.match(escapedWord)) + arr.push({ files: titleTerms[term], score: Scorer.partialTitle }); + }); + } + } + + // no match but word was a required one + if (arr.every((record) => record.files === undefined)) return; + + // found search word in contents + arr.forEach((record) => { + if (record.files === undefined) return; + + let recordFiles = record.files; + if (recordFiles.length === undefined) recordFiles = [recordFiles]; + files.push(...recordFiles); + + // set score for the word in each file + recordFiles.forEach((file) => { + if (!scoreMap.has(file)) scoreMap.set(file, {}); + scoreMap.get(file)[word] = record.score; + }); + }); + + // create the mapping + files.forEach((file) => { + if (!fileMap.has(file)) fileMap.set(file, [word]); + else if (fileMap.get(file).indexOf(word) === -1) fileMap.get(file).push(word); + }); + }); + + // now check if the files don't contain excluded terms + const results = []; + for (const [file, wordList] of fileMap) { + // check if all requirements are matched + + // as search terms with length < 3 are discarded + const filteredTermCount = [...searchTerms].filter( + (term) => term.length > 2 + ).length; + if ( + wordList.length !== searchTerms.size && + 
wordList.length !== filteredTermCount + ) + continue; + + // ensure that none of the excluded terms is in the search result + if ( + [...excludedTerms].some( + (term) => + terms[term] === file || + titleTerms[term] === file || + (terms[term] || []).includes(file) || + (titleTerms[term] || []).includes(file) + ) + ) + break; + + // select one (max) score for the file. + const score = Math.max(...wordList.map((w) => scoreMap.get(file)[w])); + // add result to the result list + results.push([ + docNames[file], + titles[file], + "", + null, + score, + filenames[file], + ]); + } + return results; + }, + + /** + * helper function to return a node containing the + * search summary for a given text. keywords is a list + * of stemmed words. + */ + makeSearchSummary: (htmlText, keywords, anchor) => { + const text = Search.htmlToText(htmlText, anchor); + if (text === "") return null; + + const textLower = text.toLowerCase(); + const actualStartPosition = [...keywords] + .map((k) => textLower.indexOf(k.toLowerCase())) + .filter((i) => i > -1) + .slice(-1)[0]; + const startWithContext = Math.max(actualStartPosition - 120, 0); + + const top = startWithContext === 0 ? "" : "..."; + const tail = startWithContext + 240 < text.length ? "..." : ""; + + let summary = document.createElement("p"); + summary.classList.add("context"); + summary.textContent = top + text.substr(startWithContext, 240).trim() + tail; + + return summary; + }, +}; + +_ready(Search.init); diff --git a/doc/_static/sidebar.js b/doc/_static/sidebar.js new file mode 100644 index 0000000..f28c206 --- /dev/null +++ b/doc/_static/sidebar.js @@ -0,0 +1,70 @@ +/* + * sidebar.js + * ~~~~~~~~~~ + * + * This script makes the Sphinx sidebar collapsible. + * + * .sphinxsidebar contains .sphinxsidebarwrapper. This script adds + * in .sphixsidebar, after .sphinxsidebarwrapper, the #sidebarbutton + * used to collapse and expand the sidebar. + * + * When the sidebar is collapsed the .sphinxsidebarwrapper is hidden + * and the width of the sidebar and the margin-left of the document + * are decreased. When the sidebar is expanded the opposite happens. + * This script saves a per-browser/per-session cookie used to + * remember the position of the sidebar among the pages. + * Once the browser is closed the cookie is deleted and the position + * reset to the default (expanded). + * + * :copyright: Copyright 2007-2024 by the Sphinx team, see AUTHORS. + * :license: BSD, see LICENSE for details. + * + */ + +const initialiseSidebar = () => { + + + + + // global elements used by the functions. + const bodyWrapper = document.getElementsByClassName("bodywrapper")[0] + const sidebar = document.getElementsByClassName("sphinxsidebar")[0] + const sidebarWrapper = document.getElementsByClassName('sphinxsidebarwrapper')[0] + const sidebarButton = document.getElementById("sidebarbutton") + const sidebarArrow = sidebarButton.querySelector('span') + + // for some reason, the document has no sidebar; do not run into errors + if (typeof sidebar === "undefined") return; + + const flipArrow = element => element.innerText = (element.innerText === "»") ? 
"«" : "»" + + const collapse_sidebar = () => { + bodyWrapper.style.marginLeft = ".8em"; + sidebar.style.width = ".8em" + sidebarWrapper.style.display = "none" + flipArrow(sidebarArrow) + sidebarButton.title = _('Expand sidebar') + window.localStorage.setItem("sidebar", "collapsed") + } + + const expand_sidebar = () => { + bodyWrapper.style.marginLeft = "" + sidebar.style.removeProperty("width") + sidebarWrapper.style.display = "" + flipArrow(sidebarArrow) + sidebarButton.title = _('Collapse sidebar') + window.localStorage.setItem("sidebar", "expanded") + } + + sidebarButton.addEventListener("click", () => { + (sidebarWrapper.style.display === "none") ? expand_sidebar() : collapse_sidebar() + }) + + if (!window.localStorage.getItem("sidebar")) return + const value = window.localStorage.getItem("sidebar") + if (value === "collapsed") collapse_sidebar(); + else if (value === "expanded") expand_sidebar(); +} + +if (document.readyState !== "loading") initialiseSidebar() +else document.addEventListener("DOMContentLoaded", initialiseSidebar) \ No newline at end of file diff --git a/doc/_static/sphinx_highlight.js b/doc/_static/sphinx_highlight.js new file mode 100644 index 0000000..8a96c69 --- /dev/null +++ b/doc/_static/sphinx_highlight.js @@ -0,0 +1,154 @@ +/* Highlighting utilities for Sphinx HTML documentation. */ +"use strict"; + +const SPHINX_HIGHLIGHT_ENABLED = true + +/** + * highlight a given string on a node by wrapping it in + * span elements with the given class name. + */ +const _highlight = (node, addItems, text, className) => { + if (node.nodeType === Node.TEXT_NODE) { + const val = node.nodeValue; + const parent = node.parentNode; + const pos = val.toLowerCase().indexOf(text); + if ( + pos >= 0 && + !parent.classList.contains(className) && + !parent.classList.contains("nohighlight") + ) { + let span; + + const closestNode = parent.closest("body, svg, foreignObject"); + const isInSVG = closestNode && closestNode.matches("svg"); + if (isInSVG) { + span = document.createElementNS("http://www.w3.org/2000/svg", "tspan"); + } else { + span = document.createElement("span"); + span.classList.add(className); + } + + span.appendChild(document.createTextNode(val.substr(pos, text.length))); + const rest = document.createTextNode(val.substr(pos + text.length)); + parent.insertBefore( + span, + parent.insertBefore( + rest, + node.nextSibling + ) + ); + node.nodeValue = val.substr(0, pos); + /* There may be more occurrences of search term in this node. So call this + * function recursively on the remaining fragment. + */ + _highlight(rest, addItems, text, className); + + if (isInSVG) { + const rect = document.createElementNS( + "http://www.w3.org/2000/svg", + "rect" + ); + const bbox = parent.getBBox(); + rect.x.baseVal.value = bbox.x; + rect.y.baseVal.value = bbox.y; + rect.width.baseVal.value = bbox.width; + rect.height.baseVal.value = bbox.height; + rect.setAttribute("class", className); + addItems.push({ parent: parent, target: rect }); + } + } + } else if (node.matches && !node.matches("button, select, textarea")) { + node.childNodes.forEach((el) => _highlight(el, addItems, text, className)); + } +}; +const _highlightText = (thisNode, text, className) => { + let addItems = []; + _highlight(thisNode, addItems, text, className); + addItems.forEach((obj) => + obj.parent.insertAdjacentElement("beforebegin", obj.target) + ); +}; + +/** + * Small JavaScript module for the documentation. 
+ */ +const SphinxHighlight = { + + /** + * highlight the search words provided in localstorage in the text + */ + highlightSearchWords: () => { + if (!SPHINX_HIGHLIGHT_ENABLED) return; // bail if no highlight + + // get and clear terms from localstorage + const url = new URL(window.location); + const highlight = + localStorage.getItem("sphinx_highlight_terms") + || url.searchParams.get("highlight") + || ""; + localStorage.removeItem("sphinx_highlight_terms") + url.searchParams.delete("highlight"); + window.history.replaceState({}, "", url); + + // get individual terms from highlight string + const terms = highlight.toLowerCase().split(/\s+/).filter(x => x); + if (terms.length === 0) return; // nothing to do + + // There should never be more than one element matching "div.body" + const divBody = document.querySelectorAll("div.body"); + const body = divBody.length ? divBody[0] : document.querySelector("body"); + window.setTimeout(() => { + terms.forEach((term) => _highlightText(body, term, "highlighted")); + }, 10); + + const searchBox = document.getElementById("searchbox"); + if (searchBox === null) return; + searchBox.appendChild( + document + .createRange() + .createContextualFragment( + '" + ) + ); + }, + + /** + * helper function to hide the search marks again + */ + hideSearchWords: () => { + document + .querySelectorAll("#searchbox .highlight-link") + .forEach((el) => el.remove()); + document + .querySelectorAll("span.highlighted") + .forEach((el) => el.classList.remove("highlighted")); + localStorage.removeItem("sphinx_highlight_terms") + }, + + initEscapeListener: () => { + // only install a listener if it is really needed + if (!DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS) return; + + document.addEventListener("keydown", (event) => { + // bail for input elements + if (BLACKLISTED_KEY_CONTROL_ELEMENTS.has(document.activeElement.tagName)) return; + // bail with special keys + if (event.shiftKey || event.altKey || event.ctrlKey || event.metaKey) return; + if (DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS && (event.key === "Escape")) { + SphinxHighlight.hideSearchWords(); + event.preventDefault(); + } + }); + }, +}; + +_ready(() => { + /* Do not call highlightSearchWords() when we are on the search page. + * It will highlight words from the *previous* search query. + */ + if (typeof Search === "undefined") SphinxHighlight.highlightSearchWords(); + SphinxHighlight.initEscapeListener(); +}); diff --git a/doc/authors.html b/doc/authors.html new file mode 100644 index 0000000..e1807fb --- /dev/null +++ b/doc/authors.html @@ -0,0 +1,137 @@ + + + + + + + + Credits — SARvey 1.0.0 documentation + + + + + + + + + + + + + + + +
+
+
+
+ +
+

Credits

+
+

Development Lead

+ +
+
+

Contributors

+ +
+
+ + +
+
+
+
+ +
+
+ + + + \ No newline at end of file diff --git a/doc/contributing.html b/doc/contributing.html new file mode 100644 index 0000000..9e931ec --- /dev/null +++ b/doc/contributing.html @@ -0,0 +1,285 @@ + + + + + + + + Contributing — SARvey 1.0.0 documentation + + + + + + + + + + + + + + + +
+
+
+
+ +
+

Contributing

+

Contributions are welcome, and they are greatly appreciated! Every little bit +helps, and credit will always be given.

+

You can contribute in many ways:

+
+

Types of Contributions

+
+

Report Bugs

+

Report bugs at https://git.gfz-potsdam.de/fernlab/timeseries/issues.

+

If you are reporting a bug, please include:

+
    +
  • Your operating system name and version.

  • +
  • Any details about your local setup that might be helpful in troubleshooting.

  • +
  • Detailed steps to reproduce the bug.

  • +
+
+
+

Fix Bugs

+

Look through the GitLab issues for bugs. Anything tagged with “bug” and “help +wanted” is open to whoever wants to implement it.

+
+
+

Implement Features

+

Look through the GitLab issues for features. Anything tagged with “enhancement” +and “help wanted” is open to whoever wants to implement it.

+
+
+

Write Documentation

+

SARvey could always use more documentation, whether as part of the +official SARvey docs, in docstrings, or even on the web in blog posts, +articles, and such.

+
+
+

Submit Feedback

+

The best way to send feedback is to file an issue at https://git.gfz-potsdam.de/fernlab/timeseries/issues.

+

If you are proposing a feature:

+
    +
  • Explain in detail how it would work.

  • +
  • Keep the scope as narrow as possible, to make it easier to implement.

  • +
  • Remember that this is a volunteer-driven project, and that contributions +are welcome :)

  • +
+
+
+
+

Commit Changes

+
+

How to

+
    +
  1. Fork the sarvey repo on GitLab.

  2. +
  3. Clone your fork locally:

    +
    $ git clone git@git.gfz-potsdam.de:fernlab/timeseries.git
    +
    +
    +
  4. +
  5. Install your local copy into a virtualenv. Assuming you have virtualenvwrapper installed, this is how you set up your fork for local development:

    +
    $ mkvirtualenv sarvey
    +$ cd sarvey/
    +$ python setup.py develop
    +
    +
    +
  6. +
  7. Create a branch for local development:

    +
    $ git checkout -b name-of-your-bugfix-or-feature
    +
    +
    +

    Now you can make your changes locally.

    +
  8. +
  9. When you’re done making changes, check that your changes pass flake8 and the +tests, including testing other Python versions with tox:

    +
    $ make pytest
    +$ make lint
    +$ make urlcheck
    +$ tox
    +
    +
    +

    To get flake8 and tox, just pip install them into your virtualenv.

    +
  10. +
  11. Commit your changes and push your branch to GitLab:

    +
    $ git add .
    +$ git commit -m "Your detailed description of your changes."
    +$ git push origin name-of-your-bugfix-or-feature
    +
    +
    +
  12. +
  13. Submit a merge request through the GitLab website.

  14. +
+
+
+

Sign your commits

+

Please note that our license terms only allow signed commits. A guideline on how to sign your work can be found here: https://git-scm.com/book/en/v2/Git-Tools-Signing-Your-Work

+

If you are using the PyCharm IDE, the Commit changes dialog has an option called Sign-off commit to +automatically sign your work.

+
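If you work on the command line instead, the sign-off line can be added with git's standard -s flag; a minimal generic git example (not a SARvey-specific command):
+
$ git commit -s -m "Your detailed description of your changes."
+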
+
+

License header

+

If you commit new Python files, please note that they have to contain the following license header:

+
# SARvey - A multitemporal InSAR time series tool for the derivation of displacements.
+#
+# Copyright (C) 2021-2024 Andreas Piter (IPI Hannover, piter@ipi.uni-hannover.de)
+#
+# This software was developed together with FERN.Lab (fernlab@gfz-potsdam.de) in the context
+# of the SAR4Infra project with funds of the German Federal Ministry for Digital and
+# Transport and contributions from Landesamt fuer Vermessung und Geoinformation
+# Schleswig-Holstein and Landesbetrieb Strassenbau und Verkehr Schleswig-Holstein.
+#
+# This program is free software: you can redistribute it and/or modify it under
+# the terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# Important: This package uses PyMaxFlow. The core of PyMaxflows library is the C++
+# implementation by Vladimir Kolmogorov. It is also licensed under the GPL, but it REQUIRES that you
+# cite [BOYKOV04] (see LICENSE) in any resulting publication if you use this code for research purposes.
+# This requirement extends to SARvey.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with this program. If not, see <https://www.gnu.org/licenses/>.
+
+
+
+
+
+

Merge Request Guidelines

+

Before you submit a merge request, check that it meets these guidelines:

+
    +
  1. The merge request should include tests.

  2. +
  3. If the merge request adds functionality, the docs should be updated. Put +your new functionality into a function with a docstring, and add the +feature to the list in README.rst.

  4. +
  5. The merge request should work for Python 3.6, 3.7, 3.8, and 3.9. Check https://gitlab.projekt.uni-hannover.de/ipi-sar4infra/sarvey/-/merge_requests and make sure that the tests pass for all supported Python versions.

  6. +
+
+
+

Tips

+

To run a subset of tests:

+
$ pytest tests/test_processing.py
+
+
+
+
+ + +
+
+
+
+ +
+
+ + + + \ No newline at end of file diff --git a/doc/demo/demo_masjed_dam.html b/doc/demo/demo_masjed_dam.html new file mode 100644 index 0000000..8c8ca9e --- /dev/null +++ b/doc/demo/demo_masjed_dam.html @@ -0,0 +1,167 @@ + + + + + + + + Masjed Soleyman dam — SARvey 1.0.0 documentation + + + + + + + + + + + + + + + +
+
+
+
+ +
+

Masjed Soleyman dam

+

This tutorial focuses on measuring the post-construction settlement of the Masjed Soleyman Dam, a rock-fill dam on the Karun river, opened in 2002. Previous investigations using GNSS and high-resolution TerraSAR-X data, as detailed in Emadali et al., 2017, have shown post-construction settlement of the dam. TerraSAR-X data indicates that the dam undergoes a maximum deformation rate of 13 cm/year in the radar line-of-sight.

+
+

Dataset

+

The dataset used in this tutorial is a Sentinel-1 stack of 100 images. The details are provided in the table below.

+ + + + + + + + + + + + + + + + + + + + + +

Number of SLC images

100

Start date

2015-01-05

End date

2018-09-04

Sensor

Sentinel-1

Orbit direction

Descending

InSAR processor

GAMMA

+

There are two tutorials for this demo dataset: one with a comprehensive description for beginners, and one with minimal description for advanced users.

+ +
+
+

Literature

+
    +
  • Emadali L, Motagh M, Haghighi MH (2017). Characterizing post-construction settlement of the Masjed-Soleyman embankment dam, Southwest Iran, using TerraSAR-X SpotLight radar imagery. Engineering Structures 143:261-273, DOI 10.1016/j.engstruct.2017.04.009.

  • +
+
+
+ + +
+
+
+
+ +
+
+ + + + \ No newline at end of file diff --git a/doc/demo/demo_masjed_dam_detailed_guide.html b/doc/demo/demo_masjed_dam_detailed_guide.html new file mode 100644 index 0000000..e640418 --- /dev/null +++ b/doc/demo/demo_masjed_dam_detailed_guide.html @@ -0,0 +1,402 @@ + + + + + + + + Detailed Guide for Masjed Soleyman Dam — SARvey 1.0.0 documentation + + + + + + + + + + + + + + + +
+
+
+
+ +
+

Detailed Guide for Masjed Soleyman Dam

+

This tutorial provides a comprehensive guide to SARvey processing. If you are an advanced user, you can proceed directly to the fast track guide for advanced users.

+
+

Note

+

These instructions are based on SARvey version 1.0.0 (Strawberry Pie). Newer versions may differ slightly.

+
+
+

Step 1: Before Running SARvey

+
+
+

Step 1.1: Download the Data

+

Download the data by running the following command in the console:

+
wget https://zenodo.org/records/12189041/files/SARvey_input_data_Masjed_Soleyman_dam_S1_dsc_2015_2018.zip
+
+
+

Unzip the downloaded file and change into the extracted directory:

+
unzip SARvey_input_data_Masjed_Soleyman_dam_S1_dsc_2015_2018.zip
+cd SARvey_input_data_Masjed_Soleyman_dam_S1_dsc_2015_2018
+
+
+

Check the downloaded data using info.py and view.py. For example:

+
info.py inputs/slcStack.h5
+
+
+
view.py inputs/geometryRadar.h5
+
+
+
+
+

Step 1.2: Activate SARvey and Change Directory

+

If you have not installed SARvey, refer to the installation instructions. Activate the SARvey environment:

+
conda activate sarvey
+
+
+

Ensure SARvey can be called from the console:

+
sarvey -h
+
+
+

If you see the following error message, it indicates that SARvey cannot be called. Ensure it is installed correctly and that the conda environment is activated.

+
command not found: sarvey
+
+
+
+
+

Step 1.3: Create a Config File

+

Create a config file, which is a JSON file containing the parameters for sarvey. The config file can be created using the following command:

+
sarvey -f config.json 0 0 -g
+
+
+

Note: The above command only generates a configuration file. Although step 0 is specified, it will not be executed.

+
+
+

Step 1.4: Modify the config.json File

+

1.4.1. Open the config.json file and check the parameters. The first parameters to specify in the config file are input_path and output_path. For this example dataset, the slcStack.h5 and geometryRadar.h5 files are in the inputs/ directory, which is the default value in the config file. Therefore, you do not need to change it. The output_path should be outputs/ for this example.

+
{
+    "general": {
+        "input_path": "inputs/",
+        "output_path": "outputs/"
+    }
+    // other parameters
+}
+
+
+

1.4.2. Specify the num_cores. You can check the number of cores on your computer using the following commands.

+

On Linux, run:

+
nproc --all
+
+
+

On macOS, run:

+
sysctl -n hw.ncpu
+
+
+

It is good practice to specify a number lower than the number of available cores in the config file.

+
{
+// other parameters
+    "general": {
+    "num_cores": 5,
+    // other parameters
+    },
+//other parameters
+}
+
+
+
+
+

Step 2: Running SARvey

+

SARvey consists of five steps as detailed in Multitemporal InSAR processing workflow. You can run all steps by specifying starting step 0 and ending step 4. In this tutorial, however, we will run the steps separately as follows.

+
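For reference, all five steps can be run in a single call by specifying start step 0 and end step 4, using the same syntax as the individual step commands below:
+
sarvey -f config.json 0 4
+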

When running sarvey, if it finishes normally, you will see a message like the following in the command line:

+
2024-06-19 11:05:10,305 - INFO - MTI finished normally.
+
+
+
+

Note

+

If you encounter an error, first read all the output in the console and carefully track all error and warning messages. If the issue is not clear from the console messages, check the log files stored in the directory specified in the config file. If the error persists and you need assistance, sharing the corresponding log file will help.

+
+
+

Step 2.0: Run Step 0 of SARvey: Preparation

+

The first step creates an interferogram network and calculates the temporal coherence for all pixels. Run the following command:

+
sarvey -f config.json 0 0
+
+
+

In the command line, you will see a list of parameters used by SARvey to run step 0. All parameters that have been changed from the default are indicated:

+
...
+2024-06-19 11:04:28,137 - INFO - Parameter value default
+2024-06-19 11:04:28,137 - INFO - _________ _____ _______
+2024-06-19 11:04:28,138 - INFO - num_cores 5 <--- 50
+2024-06-19 11:04:28,138 - INFO - num_patches 1 1
+2024-06-19 11:04:28,138 - INFO - apply_temporal_unwrapping True True
+2024-06-19 11:04:28,138 - INFO - spatial_unwrapping_method puma puma
+2024-06-19 11:04:28,138 - INFO -
+2024-06-19 11:04:28,138 - INFO - ---------------------------------------------------------------------------------
+2024-06-19 11:04:28,138 - INFO - STEP 0: PREPARATION
+2024-06-19 11:04:28,138 - INFO - ---------------------------------------------------------------------------------
+2024-06-19 11:04:28,138 - INFO - Parameter value default
+2024-06-19 11:04:28,139 - INFO - _________ _____ _______
+2024-06-19 11:04:28,139 - INFO - start_date None None
+2024-06-19 11:04:28,139 - INFO - end_date None None
+2024-06-19 11:04:28,139 - INFO - ifg_network_type sb <--- delaunay
+2024-06-19 11:04:28,139 - INFO - num_ifgs 3 3
+2024-06-19 11:04:28,139 - INFO - max_tbase 100 100
+2024-06-19 11:04:28,139 - INFO - filter_window_size 9 9
+...
+
+
+

After running this step, the output directory (outputs/ in this example) is created. Inside this directory, you can find the following files:

+
outputs/
+├── temporal_coherence.h5
+├── ifg_stack.h5
+├── ifg_network.h5
+├── coordinates_utm.h5
+├── config.json
+├── background_map.h5
+└── pic/
+    ├── step_0_temporal_phase_coherence.png
+    ├── step_0_interferogram_network.png
+    └── step_0_amplitude_image.png
+
+
+

Check the PNG files inside the outputs/pic directory and ensure the amplitude image, interferogram network, and temporal coherence look fine. If you are not satisfied with the interferogram network, you can modify the corresponding parameters in the config.json file and run step 0 again.

+
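For example, the step-0 parameters shown in the log above could be adjusted as sketched below. This is a minimal sketch assuming the parameters live in a config section like the following; the exact section name may differ in your SARvey version:
+
{
+    // other parameters
+    "preparation": {
+        "ifg_network_type": "sb",
+        "num_ifgs": 3,
+        "max_tbase": 100
+        // other parameters
+    }
+    // other parameters
+}
+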

Use the following command to plot the interferograms:

+
sarvey_plot outputs/ifg_stack.h5 -i
+
+
+

This command creates the interferograms as PNG files in the following directory:

+
outputs/
+└── pic/
+    └── ifgs/
+        ├── 0_ifg.png
+        ├── 1_ifg.png
+        └── ...
+
+
+

Check the interferograms one by one and ensure they look reasonable. In several interferograms, fringes associated with deformation are visible at approximately range 100-200 and azimuth 40-60.

+
+
+

Step 2.1: Run Step 1 of SARvey

+
sarvey -f config.json 1 1
+
+
+

Outputs of this step are:

+
outputs/
+├── point_network.h5
+├── p1_ifg_wr.h5
+├── point_network_parameter.h5
+└── pic/
+    ├── selected_pixels_temp_coh_0.8.png
+    ├── step_1_mask_p1.png
+    ├── step_1_arc_coherence.png
+    ├── step_1_arc_coherence_reduced.png
+    ├── step_1_rmse_vel_0th_iter.png
+    └── step_1_rmse_dem_error_0th_iter.png
+
+
+
+
+

Step 2.2: Run Step 2 of SARvey

+
sarvey -f config.json 2 2
+
+
+

Outputs of this step are:

+
outputs/
+├── p1_ifg_unw.h5
+├── p1_ts.h5
+└── pic/
+    ├── step_2_estimation_dem_error.png
+    └── step_2_estimation_velocity.png
+
+
+
+
+

Step 2.3: Run Step 3 of SARvey

+
sarvey -f config.json 3 3
+
+
+

Outputs of this step are:

+
outputs/
+├── p2_coh80_ifg_wr.h5
+├── p2_coh80_aps.h5
+├── p1_aps.h5
+├── p1_ts_filt.h5
+└── pic/
+    ├── step_3_temporal_autocorrelation.png
+    ├── step_3_stable_points.png
+    ├── selected_pixels_temp_coh_0.8.png
+    └── step_3_mask_p2_coh80.png
+
+
+
+
+

Step 2.4: Run Step 4 of SARvey

+
sarvey -f config.json 4 4
+
+
+

The results of step 4 of SARvey, including the time series, are stored in the p2_coh80_ts.h5 file. The file is named based on the coherence_p2 parameter in the config.json file.

+
+
+
+

Step 3: Plot Time Series Results

+

Check the instructions on how to use sarvey_plot:

+
sarvey_plot -h
+
+
+

Plot the time series using the following command. The -t flag indicates that you want to plot the time series.

+
sarvey_plot outputs/p2_coh80_ts.h5 -t
+
+
+

You can visualize velocity and DEM error estimation of second-order points. You can also visualize amplitude, DEM, or temporal coherence as the background. Right-click on any point to see its time series. As you will see in the plot, the density of measurement points on the dam is relatively low. In the next section, you will learn how to modify the config file to increase the density of points.

+
+
+

Step 4: Modify Config File and Rerun SARvey

+

Modify the config.json file and change coherence_p2 from 0.8 to 0.7.

+
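If you prefer the command line, this one-line edit is a sketch of the change; it assumes the default formatting of the generated config.json:

sed -i 's/"coherence_p2": 0.8/"coherence_p2": 0.7/' config.json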

Run steps 3 and 4 using the following command:

+
sarvey -f config.json 3 4
+
+
+

A new file p2_coh70_ts.h5 is created. You can now visualize this file, which has a higher point density.

+
sarvey_plot outputs/p2_coh70_ts.h5 -t
+
+
+
+

Note

+

Be cautious: reducing the value of coherence_p2 too much may include noisy, low-quality points in the analysis, potentially leading to poor final results.

+

You should carefully read the Multitemporal InSAR processing workflow documentation to understand the meaning of each parameter and choose reasonable values. You should also check the details of all parameters using the -p flag of sarvey and decide how to tune them.

+
+
sarvey -f config.json 0 0 -p
+
+
+
+
+

Step 5: Export to GIS Format

+

Export the data to Shapefiles using the following command:

+
sarvey_export outputs/p2_coh70_ts.h5 -o outputs/shp/p2_coh70_ts.shp
+
+
+

You can open the exported data in any GIS software. If you use QGIS, you can use the PS Time Series Viewer plugin to draw the time series.

+
+
+

Step 6: Validate Your Results

+

You can download a copy of the final SARvey products from this link. Use these files to compare your results and ensure everything worked correctly.

+
+
+ + +
+
+
+
+ +
+
+ + + + \ No newline at end of file diff --git a/doc/demo/demo_masjed_dam_fast_track.html b/doc/demo/demo_masjed_dam_fast_track.html new file mode 100644 index 0000000..ced1a5a --- /dev/null +++ b/doc/demo/demo_masjed_dam_fast_track.html @@ -0,0 +1,192 @@ + + + + + + + + Fast Track Guide for Masjed Soleyman Dam — SARvey 1.0.0 documentation + + + + + + + + + + + + + + + +
+
+
+
+ +
+

Fast Track Guide for Masjed Soleyman Dam

+

If you are an advanced user, you can proceed with this fast track tutorial. If you prefer a more detailed, step-by-step guide, please refer to the detailed guide for this example.

+
+

Note

+

These instructions are based on SARvey version 1.0.0 (Strawberry Pie). Newer versions may differ slightly.

+
+
+

Download the Data

+

In this tutorial, a processed stack of data is provided. If you wish to generate data for other areas, please refer to the Preparation section.

+
wget https://zenodo.org/records/12189041/files/SARvey_input_data_Masjed_Soleyman_dam_S1_dsc_2015_2018.zip
+unzip SARvey_input_data_Masjed_Soleyman_dam_S1_dsc_2015_2018.zip
+cd SARvey_input_data_Masjed_Soleyman_dam_S1_dsc_2015_2018
+
+
+
+
+

Activate SARvey environment

+
conda activate sarvey
+
+
+
+
+

Create a Config File

+
sarvey -f config.json 0 0 -g
+
+
+

Specify parameters in the config file. Set a reasonable value for num_cores.

+
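For example, to lower num_cores from its default of 50 to 5, you can edit the config file manually or use a one-liner like the following (a sketch; it assumes the default formatting of the generated config.json):

sed -i 's/"num_cores": 50/"num_cores": 5/' config.json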
+
+

Run SARvey

+

You can run each step individually or a range of steps by specifying the first and last step.

+
sarvey -f config.json 0 4
+
+
+
+
+

Check Outputs

+

First, check the output snapshots in the outputs/pic directory. You can also use sarvey_plot to plot various products, assess the quality of the results, and decide how to adjust parameters. Modify the parameters in the config file and rerun the corresponding steps of sarvey to improve the results. For instance, changing coherence_p2 from 0.8 to 0.7 and rerunning steps 3 and 4 increases the density of the final set of points. However, be cautious: reducing the value too much may include noisy, low-quality points in the analysis, potentially leading to poor final results. You can check the details of all parameters using the -p flag of sarvey and decide how to tune them. For more explanation, please refer to the Multitemporal InSAR processing workflow.

+
+
+

Plot Time Series Results

+

The final products, including the time series, are stored in the p2_coh**_ts.h5 file. The file is named based on the coherence_p2 parameter you used. Plot the time series using the following command:

+
sarvey_plot outputs/p2_coh80_ts.h5 -t
+
+
+

You can visualize velocity and DEM error estimation of second-order points. You can also visualize amplitude, DEM, or temporal coherence as the background. Right-click on any point to see its time series.

+
+
+

Export to GIS Format

+

Export the data to Shapefiles using the following command.

+
sarvey_export outputs/p2_coh80_ts.h5 -o outputs/shp/p2_coh80_ts.shp
+
+
+

You can visualize the data in any GIS software. If you use QGIS, you can use the PS Time Series Viewer plugin to draw the time series.

+
+
+

Validate Your Results

+

You can download a copy of the final SARvey products from this link. Use these files to compare your results and ensure everything worked correctly.

+
+
+ + +
+
+
+
+ +
+
+ + + + \ No newline at end of file diff --git a/doc/demo_datasets.html b/doc/demo_datasets.html new file mode 100644 index 0000000..42b1754 --- /dev/null +++ b/doc/demo_datasets.html @@ -0,0 +1,124 @@ + + + + + + + + Demo Datasets — SARvey 1.0.0 documentation + + + + + + + + + + + + + + + +
+
+
+
+ +
+

Demo Datasets

+

Several demo datasets are available to help you learn how to perform SARvey processing effectively.

+
+

Note

+

The demo datasets and instructions provided serve as a practical guide for using SARvey. They do not cover all the software details or offer the best processing strategies for every specific dataset.

+
+
+

Demo Datasets:

+ +
+
+ + +
+
+
+
+ +
+
+ + + + \ No newline at end of file diff --git a/doc/genindex.html b/doc/genindex.html new file mode 100644 index 0000000..1403854 --- /dev/null +++ b/doc/genindex.html @@ -0,0 +1,714 @@ + + + + + + + Index — SARvey 1.0.0 documentation + + + + + + + + + + + + + +
+
+
+
+ + +

+
+
+
+ +
+
+ + + + \ No newline at end of file diff --git a/doc/history.html b/doc/history.html new file mode 100644 index 0000000..b5f7510 --- /dev/null +++ b/doc/history.html @@ -0,0 +1,124 @@ + + + + + + + + History — SARvey 1.0.0 documentation + + + + + + + + + + + + + + +
+
+
+
+ +
+

History

+
+

1.0.0 (2024-08-12) Strawberry Pie

+
    +
  • First release version on GitHub.

  • +
  • Change name of files for second-order points from coh* to p2_coh*.

  • +
  • Check existence of intermediate results before continuing processing.

  • +
  • Improve parameter names in config.

  • +
  • Combine all general settings into one section in config.

  • +
  • Allow adding user comments in config.json file.

  • +
  • Improve documentation.

  • +
  • Adapt CI from GitLab to GitHub.

  • +
+
+
+ + +
+
+
+
+ +
+
+ + + + \ No newline at end of file diff --git a/doc/index.html b/doc/index.html new file mode 100644 index 0000000..c6e604f --- /dev/null +++ b/doc/index.html @@ -0,0 +1,185 @@ + + + + + + + + SARvey documentation — SARvey 1.0.0 documentation + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/doc/installation.html b/doc/installation.html new file mode 100644 index 0000000..4a80b2b --- /dev/null +++ b/doc/installation.html @@ -0,0 +1,288 @@ + + + + + + + + Installation — SARvey 1.0.0 documentation + + + + + + + + + + + + + + + +
+
+
+
+ +
+

Installation

+
+
+SARvey is a cross-platform, Python-based software and can be installed on the following platforms:
+
+
+
+

Linux

+

On Linux, SARvey can be installed using Mamba (recommended) or using Anaconda or Miniconda.

+ +
+

Using Anaconda or Miniconda

+

Using conda (latest version recommended), SARvey is installed as follows:

+
    +
  1. Clone the SARvey source code:

    +
    git clone git@gitlab.projekt.uni-hannover.de:ipi-sar4infra/timeseries.git
    +cd timeseries
    +
    +
    +
+
    +
  2. Create a virtual environment for SARvey and install it together with all dependencies from the environment_sarvey.yml file (optional but recommended):

    +
    pip install conda-merge
    +wget https://raw.githubusercontent.com/insarlab/MiaplPy/main/conda-env.yml
    +conda-merge conda-env.yml tests/CI_docker/context/environment_sarvey.yml > env.yml
    +conda env create -n sarvey -f env.yml
    +rm env.yml conda-env.yml
    +conda activate sarvey
    +pip install git+https://github.com/insarlab/MiaplPy.git
    +pip install .
    +
    +
    +
+
+
+
+

MacOS ARM (Apple Silicon M2)

+

This guide provides instructions for installing SARvey on MacOS ARM M2 using conda. +If you do not have Conda, install Conda for Mac. +Using conda (latest version recommended), SARvey is installed as follows:

+
    +
  1. Create a directory for the SARvey package and navigate to it in the terminal. You can choose any other directory if you prefer.

    +
    +
    mkdir -p ~/software/sarvey
    +
    +
    +
    +
  2. Install MiaplPy before SARvey, in the same environment where you will later install SARvey.

    +
    +
    cd ~/software/sarvey
    +git clone https://github.com/insarlab/MiaplPy.git
    +cd MiaplPy
    +
    +
    +

    2.1 Open conda-env.yml in an editor of your choice and comment out the line isce2. Alternatively, you can run the following command:

    +
    sed -i '' '/isce2/s/^/# /' conda-env.yml
    +
    +
    +

    2.2 Install the package using Conda:

    +
    conda env update --name sarvey --file conda-env.yml
    +conda activate sarvey
    +python -m pip install .
    +
    +
    +
    +
  3. Install SARvey

    +

    3.1 Download the source code of the SARvey package:

    +
    +
    cd ~/software/sarvey
    +git clone git@gitlab.projekt.uni-hannover.de:ipi-sar4infra/timeseries.git
    +cd timeseries
    +
    +
    +
    +

    3.2 Open tests/CI_docker/context/environment_sarvey.yml in an editor of your choice and comment out the lines isce2 and gcc_linux-64. Alternatively, you can run the following commands:

    +
    +
    sed -i '' '/isce2/s/^/# /' tests/CI_docker/context/environment_sarvey.yml
    +sed -i '' '/gcc_linux-64/s/^/# /' tests/CI_docker/context/environment_sarvey.yml
    +
    +
    +

    Note: As of the time of creation of this document, isce2 for MacOS ARM64 is not available in Conda repositories. Therefore, it is skipped, but it should not cause any problems for running SARvey. Also, gcc_linux-64 is not required on ARM64.

    +
    +

    3.3 Install SARvey (the timeseries package) using the same environment that you used to install MiaplPy:

    +
    +
    conda env update --name sarvey -f tests/CI_docker/context/environment_sarvey.yml
    +conda activate sarvey
    +pip install .
    +
    +
    +
    +
  4. Set up the PATH for MiaplPy and SARvey.

    +

    4.1 Run the following commands to set up the paths in ~/source_sarvey.sh:

    +
    +
    echo 'export miaplpy_path=~/software/sarvey/MiaplPy/src/' > ~/source_sarvey.sh
    +echo 'export PYTHONPATH=${PYTHONPATH:+$PYTHONPATH:}$miaplpy_path' >> ~/source_sarvey.sh
    +echo 'export timeseries_path=~/software/sarvey/timeseries' >> ~/source_sarvey.sh
    +echo 'export PATH=${PATH}:$timeseries_path:$timeseries_path/sarvey' >> ~/source_sarvey.sh
    +echo 'export PYTHONPATH=${PYTHONPATH:+$PYTHONPATH:}:$timeseries_path' >> ~/source_sarvey.sh
    +
    +
    +
    +
  5. Test the installation

    +

    5.1 Open a new terminal and activate the software:

    +
    +
    conda activate sarvey
    +source ~/source_sarvey.sh
    +
    +
    +
    +

    5.2 Run the following command. If the help message of SARvey is shown, the installation was done correctly:

    +
    +
    sarvey -h
    +
    +
    +
    +
+
+
+

Windows using WSL

+

On Windows, SARvey is tested on Windows Subsystem for Linux (WSL) version 2. Please follow the Linux installation.

+
+

Note

+

SARvey has been tested with Python 3.6+, i.e., it should be fully compatible with all Python versions from 3.6 onwards.

+
+
+
+ + +
+
+
+
+ +
+
+ + + + \ No newline at end of file diff --git a/doc/modules.html b/doc/modules.html new file mode 100644 index 0000000..2a3babb --- /dev/null +++ b/doc/modules.html @@ -0,0 +1,351 @@ + + + + + + + + Python API reference — SARvey 1.0.0 documentation + + + + + + + + + + + + + + + +
+
+
+
+ +
+

Python API reference

+
+ +
+
+ + +
+
+
+
+ +
+
+ + + + \ No newline at end of file diff --git a/doc/objects.inv b/doc/objects.inv new file mode 100644 index 0000000..5c72fdf Binary files /dev/null and b/doc/objects.inv differ diff --git a/doc/preparation.html b/doc/preparation.html new file mode 100644 index 0000000..3cc3ac1 --- /dev/null +++ b/doc/preparation.html @@ -0,0 +1,275 @@ + + + + + + + + Preparation — SARvey 1.0.0 documentation + + + + + + + + + + + + + + + +
+
+
+
+ +
+

Preparation

+

SARvey requires a coregistered stack of SLC and the related geometry information in the MiaplPy data format. The coregistered stack of SLC can be created using an InSAR processor, such as ISCE, GAMMA, or SNAP. Currently, MiaplPy only supports ISCE; support for GAMMA and SNAP is planned for the future. After creating the coregistered stack of SLC, run the "load_data" step from MiaplPy to create the "inputs" directory, which contains "slcStack.h5" and "geometryRadar.h5".

+
+

Preprocessing

+
+

ISCE

+

… ISCE brief processing to be added

+

The ISCE products should have the following directory structure, which is later used in the Loading Data into MiaplPy step:

+
ISCE_processed_data
+├─ reference
+│   ├─ IW*.xml
+│   └─ ...
+├─ merged
+│   ├─ SLC
+│   │   ├─ YYYYMMDD
+│   │   │   ├─ YYYYMMDD.slc.full
+│   │   │   └─ ...
+│   │   ├─ YYYYMMDD
+│   │   ├─ YYYYMMDD
+│   ├─ geom_reference
+│   │   ├─ hgt.rdr.full
+│   │   ├─ lat.rdr.full
+│   │   ├─ lon.rdr.full
+│   │   ├─ los.rdr.full
+│   │   └─ ...
+└─ baselines
+    └─ YYYYMMDD_YYYYMMDD
+        └─ YYYYMMDD_YYYYMMDD.txt
+
+
+
+
+

GAMMA

+

Support is in progress.

+
+
+

SNAP

+

Support is planned for the future.

+
+
+
+

Loading Data to MiaplPy Format

+
+

Loading Data into MiaplPy

+

Run the load_data step of MiaplPy to convert the preprocessed stack of SLC to slcStack.h5 and geometryRadar.h5. Refer to the MiaplPy instructions on how to prepare the stack of coregistered SLC and modify the template file.

+
miaplpyApp miaplpy_template_file.txt --dostep load_data
+
+
+

The output includes the following directory structure that is later used as input in SARvey processing:

+
inputs
+  ├── slcStack.h5
+  └── geometryRadar.h5
+
+
+
+
+

Check the data

+

Use info.py from MintPy to check the files’ information.

+
info.py inputs/slcStack.h5
+info.py inputs/geometryRadar.h5
+
+
+

Use view.py from MintPy to visualize the files and make sure they look fine.

+
view.py inputs/slcStack.h5
+view.py inputs/geometryRadar.h5
+
+
+
+
+
+

Optional Steps

+
+

Phase Linking

+

This step is optional. You can run it if you wish to perform distributed scatterers (DS) analysis. +Caution: This step is computationally heavy and might be time-consuming for large datasets.

+
miaplpyApp miaplpy_template_file.txt --dostep phase_linking
+ miaplpyApp miaplpy_template_file.txt --dostep concatenate_patches
+
+
+

The output includes the following directory structure, which is later used as additional input in SARvey processing if the config file is modified to include DS analysis:

+
MiaplPy working directory
+├─ inverted
+│   ├── phase_series.h5
+│   ├── ...
+├── maskPS.h5
+└── ...
+
+
+
+
+

Subset Data

+

Data loaded into MiaplPy can be subset using MintPy's subset function. This is particularly useful if you have a dataset in MiaplPy format and want to crop a small area of it. Both slcStack.h5 and geometryRadar.h5 should be subset with the same range and azimuth coordinate ranges. The Phase Linking results (phase_series.h5 and maskPS.h5) should also be subset if they have been created. Please refer to MintPy for more instructions on subsetting. Run subset.py -h for information about the parameters. The following example crops the data between 500 and 800 in range and 100 and 1000 in azimuth coordinates:

+
subset.py -h
+
+subset.py inputs/slcStack.h5 -x 500 800 -y 100 1000 -o inputs_crop/slcStack.h5
+subset.py inputs/geometryRadar.h5 -x 500 800 -y 100 1000 -o inputs_crop/geometryRadar.h5
+
+subset.py inverted/phase_series.h5 -x 500 800 -y 100 1000 -o inverted_crop/phase_series.h5
+subset.py maskPS.h5 -x 500 800 -y 100 1000 -o inverted_crop/maskPS.h5
+
+
+

Check the data after subsetting it and make sure all products look correct.

+
+
+

Create Manual Mask

+

A mask can be created manually using MintPy's generate_mask.py tool. This is particularly useful if you want to limit the MTInSAR processing to certain areas. Run generate_mask.py -h for information about the parameters. The following example lets you draw a polygon on top of the DEM to create a mask:

+
generate_mask.py -h
+
+generate_mask.py inputs/geometryRadar.h5 height -o mask.h5 --roipoly         # draw polygon on top of the DEM
+
+
+

Alternatively, a mask can be drawn on top of the temporal coherence map, in case step 0 (preparation) of sarvey has already been executed:

+
generate_mask.py results_dir/temporal_coherence.h5 -o mask.h5 --roipoly         # draw polygon on top of the temporal coherence image
+
+
+

Follow the instructions in the terminal:

+
+

Select points in the figure by enclosing them within a polygon. Press the 'esc' key to start a new polygon. Hold the left mouse button to move a single vertex. After completing the selection, close the figure/window to continue.

+
+
+
+
+ + +
+
+
+
+ +
+
+ + + + \ No newline at end of file diff --git a/doc/processing.html b/doc/processing.html new file mode 100644 index 0000000..5fb42b5 --- /dev/null +++ b/doc/processing.html @@ -0,0 +1,549 @@ + + + + + + + + Multitemporal InSAR processing workflow — SARvey 1.0.0 documentation + + + + + + + + + + + + + + + +
+
+
+
+ +
+

Multitemporal InSAR processing workflow

+

The sarvey command line interface executes the multitemporal InSAR processing workflow. +The workflow is described in the paper

+
+

Piter, A., Haghshenas Haghighi, M., Motagh, M.(2024). An in-depth study on Sentinel-1 InSAR for transport infrastructure monitoring. PFG - Journal of Photogrammetry, Remote Sensing and Geoinformation Science. (paper currently under review).

+
+

All processing steps are described in detail in the following sections. Two processing strategies are provided, with either one- or two-step unwrapping. The workflow should be chosen based on the characteristics of the displacement (spatial extent, magnitude, temporal behaviour). The parameters of each step are handled via the configuration file; the relevant parameters are named within the description of each step.

+
+

Configuration file

+

The configuration file is a JSON file containing all the parameters required to run sarvey. +This file can be generated using the sarvey command with the “-g” flag, where you can specify your desired filename.

+
sarvey -f config.json 0 0 -g
+
+
+

Note: The above command only generates a configuration file. Although step 0 is specified, it will not be executed.

+

The configuration file has various sections, as detailed below; a minimal skeleton of the file is sketched after this list:

+
    +
  • General

  • +
+
+

This section includes top-level parameters such as the number of cores and the unwrapping method. +It specifies the paths to the input and output data. The paths can be either absolute or relative. +Further, it defines the logging level displayed in the command line and the directory path where log files will be stored.

+
+
    +
  • phase_linking

  • +
+
+

This section specifies the Phase Linking parameters. By default, “use_phase_linking_results”: false. +If you wish to perform DS analysis, change it to true. Note: If “use_phase_linking_results”: true, you must complete the corresponding step of MiaplPy as described in preparation. In the configuration file, set inverted_path to the path of the inverted directory of MiaplPy data.

+
+
    +
  • preparation

  • +
+
+

This section includes network parameters, such as the start and end dates, network type, and filter_window_size, which specifies the window size used to estimate the temporal coherence for each pixel.

+
+
    +
  • consistency_check

  • +
+
+

This section contains parameters related to the first order points.

+
+
    +
  • unwrapping

  • +
+
+

This section specifies parameters related to the unwrapping process.

+
+
    +
  • filtering

  • +
+
+

This section defines the parameters for atmospheric estimation and filtering. Atmospheric filtering is enabled by default. To skip it, set “apply_aps_filtering”: false.

+
+
    +
  • densification

  • +
+
+

This section includes the settings for second order points.

+
+
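A minimal skeleton of the configuration file is sketched below. The section and parameter names are the ones referenced throughout this documentation; the values are only illustrative, and the real file generated with the -g flag contains more parameters. Note that SARvey allows // comments in config.json:

{
    "general": {
        "num_cores": 5,
        "num_patches": 1,
        "apply_temporal_unwrapping": true,
        "spatial_unwrapping_method": "puma"
        // paths to input/output data, logging settings, ...
    },
    "phase_linking": {
        "use_phase_linking_results": false
    },
    "preparation": {
        "start_date": null,
        "end_date": null,
        "ifg_network_type": "sb",
        "filter_window_size": 9
    },
    "consistency_check": {
        "coherence_p1": 0.8,  // illustrative value
        "grid_size": 200      // illustrative value, in [m]
    },
    "unwrapping": {
        "use_arcs_from_temporal_unwrapping": true  // illustrative value
    },
    "filtering": {
        "apply_aps_filtering": true,
        "coherence_p2": 0.8
    },
    "densification": {
        "num_connections_to_p1": 5  // illustrative value
    }
}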
+
+

Processing steps for two-step unwrapping workflow

+
+

Step 0: Preparation

+
    +
  • +
    Loading the resampled SLC data:

The resampled SLC (Single Look Complex) data is read from the inputs/slcStack.h5 file. This data is complex-valued and contains both amplitude and phase information. The data is subsetted to the specified time span (via preparation:start_date and preparation:end_date in the config file). How to prepare the data and make a spatial subset of it is described in data preparation in MiaplPy.

    +
    +
    +
  • +
  • +
    Designing the interferogram network:

From the stack of SLC images, the interferogram network is designed based on the temporal and perpendicular baselines of the images. Different networks can be created (via preparation:ifg_network_type in the config file) and should be chosen based on the characteristics of the displacement (spatial extent, magnitude, temporal behaviour). Currently, five types of networks are supported:

    +
      +
    1. small baseline network (‘sb’) (Berardino et al. 2002),

    2. +
    3. small temporal baseline network (‘stb’) (only consecutive images are used to form interferograms)

    4. +
    5. small temporal baselines + yearly interferograms (‘stb_yearly’)

    6. +
    7. delaunay network (‘delaunay’)

    8. +
    9. star network (‘star’, single-reference network) (Ferretti et al. 2001)

    10. +
    +
    +
    +
  • +
  • +
    Generating a stack of interferograms:

    The stack of interferograms is generated based on the specified interferogram network.

    +
    +
    +
  • +
  • +
    Estimating the temporal coherence:

The phase noise of each pixel is approximated by estimating the temporal phase coherence (Zhao and Mallorqui 2019). For this, a low-pass filter with a certain window size is used (preparation:filter_window_size). The temporal coherence is used to select the first- and second-order points in the later steps (consistency_check:coherence_p1 and filtering:coherence_p2).

    +
    +
    +
  • +
  • +
    Output of this step
      +
    • background_map.h5

    • +
    • ifg_stack.h5

    • +
    • coordinates_utm.h5

    • +
    • ifg_network.h5

    • +
    • temporal_coherence.h5

    • +
    +
    +
    +
  • +
+
+
+

Step 1: Consistency Check

+
    +
  • +
    Selecting candidates for first order points:

    Candidates for the first-order points are selected based on the temporal coherence threshold (consistency_check:coherence_p1). +However, not all points with a coherence above the threshold are selected, but only those which have the highest coherence within a grid cell of size consistency_check:grid_size (in [m]). +A mask file can be specified (consistency_check:mask_p1_file) to limit the first-order points to the given area of interest.

    +
    +
    +
  • +
  • +
    Creating a spatial network:

    After selecting the candidates for first order points, the method creates a spatial network to connect the first-order points. +For each arc in the network, the double difference phase time series is calculated. +A delaunay network ensures the connectivity in the spatial network and k-nearest neighbors (consistency_check:num_nearest_neighbours) can be used to increase the redundancy in the network. +Arcs with a distance above a threshold (consistency_check:max_arc_length) are removed from the network to reduce the impact of the atmospheric effects.

    +
    +
    +
  • +
  • +
    Temporal unwrapping:

All arcs in the spatial network are temporally unwrapped based on a phase model consisting of the DEM error difference and the velocity difference between the two points of the arc (see the model sketch after this list). The temporal coherence derived from the model fit is maximized by searching within a search space of given bounds (consistency_check:velocity_bound and consistency_check:dem_error_bound). Within the bounds, the search space is discretized (consistency_check:num_optimization_samples). The final parameters for each arc are derived from a gradient descent refinement of the discrete search space result.

    +
    +
    +
  • +
  • +
    Performing a consistency check on the data:

During the atmospheric filtering in step 3, only high-quality first-order points are supposed to be used. Therefore, outliers among the candidates are removed with a consistency check. The consistency check is based on the estimated temporal coherence of the temporal unwrapping of each arc. A point is assumed to be an outlier if it is connected by many arcs having a low temporal coherence from temporal unwrapping. Arcs with a temporal coherence below a threshold are removed (consistency_check:arc_unwrapping_coherence). Similarly, points whose mean coherence over all connected arcs falls below the threshold are removed (specified by the same parameter consistency_check:arc_unwrapping_coherence). Moreover, points which are connected by fewer arcs than a threshold (consistency_check:min_num_arc) are removed. Afterwards, the consistency within the spatial network is checked. For this purpose, the parameters (DEM error difference and velocity difference) of all arcs are integrated in the spatial network relative to an arbitrary reference point using least squares. The residuals of the integration are used to identify outliers.

    +
    +
    +
  • +
  • +
    Output of this step
      +
    • point_network.h5

    • +
    • point_network_parameter.h5

    • +
    • p1_ifg_wr.h5

    • +
    +
    +
    +
  • +
+
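As a sketch, the functional model behind this temporal unwrapping follows the standard persistent scatterer phase model (cf. Ferretti et al. 2001); the exact implementation in SARvey may differ in details. For an arc, the model phase of interferogram j is

\phi_{\mathrm{model}}^{(j)} = \frac{4\pi}{\lambda} \left( \Delta v \, t_j + \frac{b_{\perp,j}}{R \sin\theta} \, \Delta h \right)

where \Delta v and \Delta h are the velocity and DEM error differences along the arc, t_j and b_{\perp,j} are the temporal and perpendicular baselines of interferogram j, \lambda is the radar wavelength, R the slant range, and \theta the incidence angle. The parameters are found by maximizing the temporal coherence of the model residuals over the J interferograms:

\hat{\gamma} = \max_{\Delta v,\, \Delta h} \left| \frac{1}{J} \sum_{j=1}^{J} \exp\!\left( i \left( \phi_{\mathrm{obs}}^{(j)} - \phi_{\mathrm{model}}^{(j)} \right) \right) \right|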
+
+

Step 2: Unwrapping

+

Two unwrapping options are implemented (general:apply_temporal_unwrapping, which also applies to step 4) and should be chosen based on the characteristics of the displacement (spatial extent, magnitude, temporal behaviour).

+
    +
  • +
    Output of this step
      +
    • p1_ifg_unw.h5

    • +
    • p1_ifg_ts.h5

    • +
    +
    +
    +
  • +
+
+

Option 1) Unwrapping in time and space

+
    +
  • +
    Integrating parameters from arcs to points:

    The temporal unwrapping results of the spatial network from consistency check in step 1 are used in this step. +The parameters of the arcs are integrated relative to an arbitrary reference point from the arcs to the points using least squares.

    +
    +
    +
  • +
  • +
    Removing phase contributions (mean velocity and DEM error):

    After integrating the parameters, the phase contributions are removed from the wrapped interferometric phase of the first-order points.

    +
    +
    +
  • +
  • +
    Spatial unwrapping of the residuals:

The residuals in each interferogram are unwrapped in space using a sparse point network unwrapping method (general:spatial_unwrapping_method) (Bioucas-Dias and Valadao 2007, Boykov and Kolmogorov 2004). The spatial neighbourhood for unwrapping is defined by the arcs of the spatial network. There are two options (unwrapping:use_arcs_from_temporal_unwrapping). Either the spatial network from the consistency check (step 1) can be used for unwrapping, i.e. the spatial network after removing arcs with a low temporal coherence from temporal unwrapping. Or, the spatial network is re-created with a delaunay network.

    +
    +
    +
  • +
  • +
    Restore phase contributions to the spatially unwrapped residual phase:

    Finally, the phase contributions are added back to the spatially unwrapped residual phase of each point.

    +
    +
    +
  • +
  • +
    Adjust reference:

All restored unwrapped interferograms are referenced to the peak of the velocity histogram derived from all points.

    +
    +
    +
  • +
  • +
    Inverting the interferogram network:

    The interferogram network is inverted for each point to retrieve the displacement time series relative to the first acquisition.

    +
    +
    +
  • +
+
+
+

Option 2) Unwrapping in space

+
    +
  • +
    Spatial unwrapping:

The interferograms are unwrapped independently in space with a sparse point network unwrapping method (general:spatial_unwrapping_method) (Bioucas-Dias and Valadao 2007, Boykov and Kolmogorov 2004). The spatial neighbourhood for unwrapping is defined by the arcs of the spatial network. There are two options (unwrapping:use_arcs_from_temporal_unwrapping). Either the spatial network from the consistency check (step 1) can be used for unwrapping, i.e. the spatial network after removing arcs with a low temporal coherence from temporal unwrapping. Or, the spatial network is re-created with a delaunay network.

    +
    +
    +
  • +
  • +
    Adjust reference:

All unwrapped interferograms are referenced to the peak of the velocity histogram derived from all points.

    +
    +
    +
  • +
  • +
    Inverting the interferogram network:

    The interferogram network is inverted for each point to retrieve the displacement time series relative to the first acquisition.

    +
    +
    +
  • +
+
+
+
+

Step 3: Filtering

+

In this step, the atmospheric phase screen (APS) is estimated from the displacement time series of the first-order points. Afterwards, the APS is interpolated to the location of the second-order points. The filtering can be skipped by setting filtering:apply_aps_filtering to false. However, step 3 still has to be executed, as the second-order points are selected during this step.

+
    +
  • +
    Selecting pixels with no or linear displacement:

    Among the first-order points, the points with no or merely linear displacement are selected (filtering:use_moving_points). +It is assumed that for these points, the phase consists only of atmospheric effect and noise after removing the mean velocity and DEM error. +Points with a non-linear displacement behaviour are removed by a threshold on the temporal autocorrelation of the displacement time series (filtering:max_temporal_autocorrelation) (Crosetto et al. 2018). +A regular grid (filtering:grid_size in [m]) is applied to select the first-order points with the lowest temporal autocorrelation to reduce the computational complexity during filtering.

    +
    +
    +
  • +
  • +
    Selecting second-order points:

Second-order points are selected based on a temporal coherence threshold (filtering:coherence_p2) applied to the temporal phase coherence computed during step 0. A mask file can be specified (filtering:mask_p2_file) to limit the second-order points to the given area of interest. Second-order points can also be selected based on the results of phase-linking (set phase_linking:use_phase_linking_results to True) implemented in MiaplPy (Mirzaee et al. 2023); see the configuration sketch after this section. More information on MiaplPy and phase-linking can be found here. The number of siblings (phase_linking:num_siblings) used during phase-linking within MiaplPy processing needs to be specified to identify the distributed scatterers (DS) among the pixels selected by MiaplPy. A mask file can be specified (phase_linking:mask_phase_linking_file) to limit the phase-linking to the given area of interest. MiaplPy also provides a selection of persistent scatterers (PS), which can be included as second-order points (set phase_linking:use_ps to True); in this case, also specify the path to the maskPS.h5 file (phase_linking:mask_ps_file), which is also an output of MiaplPy. In case the second-order points are selected among the results from MiaplPy, the filtered interferometric phase (the MiaplPy result) is used for the respective points. The DS pixels from MiaplPy and the pixels selected with the temporal phase coherence from step 0 are both selected with the same coherence threshold (filtering:coherence_p2).

    +
    +
    +
  • +
  • +
    Estimating the atmospheric phase screen (APS):

    The estimation of the APS takes place in time-domain and not interferogram-domain to reduce the computational time. +The phase contributions are removed from the first-order points which were selected for atmospheric filtering. +Their residual time series contains atmospheric phase contributions and noise. +As the APS is assumed to be spatially correlated, the residuals of all points are spatially filtered (filtering:interpolation_method) independently for each time step. +After filtering, the estimated APS is interpolated to the location of the second-order points.

    +
    +
    +
  • +
  • +
    Output of this step
      +
    • p1_ts_filt.h5

    • +
    • p1_aps.h5

    • +
    • p2_cohXX_aps.h5

    • +
    • p2_cohXX_ifg_wr.h5

    • +
    +
    +
    +
  • +
+

The placeholder XX depends on the threshold for the temporal coherence used for selecting the second-order points. +For example, a threshold of 0.8 would result in p2_coh80_aps.h5 and p2_coh80_ifg_wr.h5.

+
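As a sketch, enabling the phase-linking results for the selection of second-order points could look as follows in config.json. The paths are hypothetical, and the value of num_siblings is illustrative; it must match the value used in your MiaplPy processing:

"phase_linking": {
    "use_phase_linking_results": true,
    "inverted_path": "path/to/miaplpy_working_dir/inverted/",
    "num_siblings": 20,  // illustrative value
    "use_ps": true,
    "mask_ps_file": "path/to/miaplpy_working_dir/maskPS.h5"
}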
+
+

Step 4: Densification

+

Two unwrapping options are implemented (general:apply_temporal_unwrapping, which also applies to step 2) and should be chosen based on the characteristics of the displacement (spatial extent, magnitude, temporal behaviour).

+
    +
  • +
    Output of this step
      +
    • p2_cohXX_ifg_unw.h5

    • +
    • p2_cohXX_ts.h5

    • +
    +
    +
    +
  • +
+

The placeholder XX depends on the threshold for the temporal coherence used for selecting the second-order points during filtering in step 3. +For example, a threshold of 0.8 would result in p2_coh80_ifg_unw.h5 and p2_coh80_ts.h5.

+
+

Option 1: Unwrapping in time and space

+
    +
  • +
    Removing APS from interferograms

    The wrapped interferograms are corrected for the interpolated APS for both the first and second order points.

    +
    +
    +
  • +
  • +
    Densify network:

    The parameters (DEM error and velocity) of each second-order point are estimated independently from the other second-order points. +The parameters are estimated by temporal unwrapping with respect to the closest first-order points (densification:num_connections_to_p1, densification:max_distance_to_p1) with a phase model consisting of DEM error and velocity (densification:velocity_bound and densification:dem_error_bound, densification:num_optimization_samples). +The densification is similar to the approach described by Van Leijen (2014), but jointly maximizes the temporal coherence to find the parameters that fit best to all arcs connecting the second-order point to the first-order points.

    +
    +
    +
  • +
  • +
    Remove outliers:

    Second-order points which could not be temporally unwrapped with respect to the closest first-order points are removed. +For this purpose, a threshold on the joint temporal coherence considering the residuals of all arcs connecting the respective second-order point to the closest first-order points is applied (densification:arc_unwrapping_coherence). +First-order points receive a joint temporal coherence value of 1.0 to avoid them being removed from the final set of points.

    +
    +
    +
  • +
  • +
    Removing phase contributions (mean velocity and DEM error):

    After estimating the parameters of the second-order points, the phase contributions are removed from the wrapped interferometric phase of the first-order points.

    +
    +
    +
  • +
  • +
    Spatial unwrapping of the residuals:

The residuals in each interferogram are unwrapped in space using a sparse point network unwrapping method (general:spatial_unwrapping_method) (Bioucas-Dias and Valadao 2007, Boykov and Kolmogorov 2004). The spatial neighbourhood for unwrapping is defined by a spatial network including both first- and second-order points. It is created with a delaunay network.

    +
    +
    +
  • +
  • +
    Restore phase contributions to the spatially unwrapped residual phase:

    Finally, the phase contributions are added back to the spatially unwrapped residual phase of each point.

    +
    +
    +
  • +
  • +
    Adjust reference:

All restored unwrapped interferograms are referenced to the peak of the velocity histogram derived from all points.

    +
    +
    +
  • +
  • +
    Inverting the interferogram network:

    The interferogram network is inverted for each point to retrieve the displacement time series relative to the first acquisition.

    +
    +
    +
  • +
+
+
+

Option 2: Unwrapping in space

+
    +
  • +
    Removing APS from interferograms

    The wrapped interferograms are corrected for the interpolated APS for both the first and second order points.

    +
    +
    +
  • +
+

Afterwards, the processing is the same as in the spatial unwrapping during step 2.

+
+
+
+
+

Handling big datasets

+

The processing of large datasets can be computationally expensive and time-consuming. Especially the estimation of the temporal phase coherence in step 0 is a bottleneck, also in terms of memory consumption. Therefore, it is recommended to set general:num_cores for parallel processing. By setting general:num_patches, the data is split into spatial patches and processed sequentially to fit into memory. A configuration sketch follows below.

+
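As a sketch, the relevant entries in the general section of config.json could look like this; the values are illustrative and should be adapted to your machine and dataset:

"general": {
    "num_cores": 10,  // number of cores for parallel processing
    "num_patches": 4  // split the data into 4 spatial patches
}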
+
+

Processing steps for one-step unwrapping workflow

+

The one-step unwrapping workflow is an alternative to the two-step unwrapping workflow. The steps are similar to the workflow described above, but it is only executed until step 2. This workflow is meant for processing small areas where atmospheric filtering is not required, as the reference point will be selected close to the area of interest. The idea behind the one-step unwrapping workflow is to apply the consistency check based on temporal unwrapping (step 1) to all pixels, without differentiating between first- and second-order points. This can yield better unwrapping results compared to the two-step unwrapping in case the DEM error and/or velocity vary highly in space. For this purpose, the pixels are selected without gridding (set consistency_check:grid_size to zero), i.e. all pixels above the specified coherence threshold are selected as final points. Since the densification step is not performed, you should reduce the coherence threshold (consistency_check:coherence_p1) to select the desired number of points. A configuration sketch follows below.

+
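As a sketch, the corresponding settings in config.json could look like this; the reduced coherence threshold of 0.7 is illustrative:

"consistency_check": {
    "grid_size": 0,      // no gridding: all pixels above the threshold are kept
    "coherence_p1": 0.7  // reduced threshold to select more points
}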
+
+

Literature

+
    +
  • Piter, A., Haghshenas Haghighi, M., Motagh, M.(2024). An in-depth study on Sentinel-1 InSAR for transport infrastructure monitoring. PFG - Journal of Photogrammetry, Remote Sensing and Geoinformation Science. (paper currently under review).

  • +
  • Zhao F, Mallorqui JJ (2019). A Temporal Phase Coherence Estimation Algorithm and Its Application on DInSAR Pixel Selection. IEEE Transactions on Geoscience and Remote Sensing 57(11):8350–8361, DOI 10.1109/TGRS.2019.2920536

  • +
  • Ferretti A, Prati C, Rocca F (2001). Permanent scatterers in SAR interferometry. IEEE Transactions on Geoscience and Remote Sensing 39(1):8–20

  • +
  • Berardino P, Fornaro G, Lanari R, Sansosti E (2002). A new algorithm for surface deformation monitoring based on small baseline differential SAR interferograms. IEEE Transactions on Geoscience and Remote Sensing 40(11):2375–2383

  • +
  • Bioucas-Dias JM, Valadao G (2007). Phase Unwrapping via Graph Cuts. IEEE Transactions on Image Processing 16(3):698–709, DOI 10.1109/TIP.2006.888351

  • +
  • Mirzaee S, Amelung F, Fattahi H (2023). Non-linear phase linking using joined distributed and persistent scatterers. Computers & Geosciences 171:105291, DOI 10.1016/j.cageo.2022.105291

  • +
  • Crosetto M, Devanthéry N, Monserrat O, Barra A, Cuevas-González M, Mróz M, Botey-Bassols J, Vázquez-Suné E, Crippa B (2018). A persistent scatterer interferometry procedure based on stable areas to filter the atmospheric component. Remote Sensing 10(11):1780

  • +
  • Van Leijen FJ (2014). Persistent scatterer interferometry based on geodetic estimation theory. PhD thesis

  • +
  • Boykov Y, Kolmogorov V (2004). An experimental comparison of min-cut/max-flow algorithms for energy minimization in vision. IEEE Transactions on Pattern Analysis and Machine Intelligence 26(9):1124–1137, DOI 10.1109/TPAMI.2004.60

  • +
+
+
+ + +
+
+
+
+ +
+
+ + + + \ No newline at end of file diff --git a/doc/py-modindex.html b/doc/py-modindex.html new file mode 100644 index 0000000..718d2b1 --- /dev/null +++ b/doc/py-modindex.html @@ -0,0 +1,179 @@ + + + + + + + Python Module Index — SARvey 1.0.0 documentation + + + + + + + + + + + + + + + + +
+
+
+
+ + +

Python Module Index

+ +
+
+  • sarvey
+  • sarvey.coherence
+  • sarvey.console
+  • sarvey.densification
+  • sarvey.filtering
+  • sarvey.geolocation
+  • sarvey.ifg_network
+  • sarvey.objects
+  • sarvey.osm_utils
+  • sarvey.preparation
+  • sarvey.sarvey_mask
+  • sarvey.sarvey_osm
+  • sarvey.triangulation
+  • sarvey.unwrapping
+  • sarvey.utils
+  • sarvey.version
+  • sarvey.viewer
+ + +
+
+
+
+ +
+
+ + + + \ No newline at end of file diff --git a/doc/readme.html b/doc/readme.html new file mode 100644 index 0000000..5abbc24 --- /dev/null +++ b/doc/readme.html @@ -0,0 +1,268 @@ + + + + + + + + SARvey - survey with SAR — SARvey 1.0.0 documentation + + + + + + + + + + + + + + + +
+
+
+
+ +
+

SARvey - survey with SAR

+

Open-source InSAR time series analysis software developed within the project SAR4Infra. +SARvey aims to analyze InSAR displacement time series for engineering applications.

+
+

Documentation

+

The documentation with installation instructions, processing steps, and examples with a demo dataset can be found at: +https://ipi-sar4infra.projektpages.uni-h.de/timeseries/doc/

+
+
+

Status

+Badges: Pipelines | Coverage | Documentation | DOI

See also the latest coverage report and the pytest HTML report.

+
+
+

License

+

SARvey is distributed under the GNU General Public License, version 3 (GPLv3).

+

The following exception applies:

+

This package uses PyMaxFlow. The core of PyMaxflows library is the C++ implementation by Vladimir Kolmogorov. It is also licensed under the GPL, but it REQUIRES that you cite [BOYKOV04] in any resulting publication if you use this code for research purposes. +This requirement extends to SARvey.

+

Please check out the details of the license here.

+
+
+

How to cite

+

If you use SARvey in your research, please cite the following.

+
    +
  1. The paper describing the methodology:

    +

    Piter, A., Haghshenas Haghighi, M., Motagh, M.(2024). An in-depth study on Sentinel-1 InSAR for transport infrastructure monitoring. PFG - Journal of Photogrammetry, Remote Sensing and Geoinformation Science. (paper currently under review).

    +
  2. +
  3. The software itself. Please specify the version you use:

    +

    Piter, A., Haghshenas Haghighi, M., FERN.Lab, & Motagh, M. (2024). SARvey - survey with SAR [version]. Zenodo. https://doi.org/10.5281/zenodo.12544131

    +
  4. +
  5. If you use the PUMA method for unwrapping in your research, please cite the following publication as indicated in the license:

    +

    An Experimental Comparison of Min-Cut/Max-Flow Algorithms for Energy Minimization in Vision. Yuri Boykov and Vladimir Kolmogorov. In IEEE Transactions on Pattern Analysis and Machine Intelligence (PAMI), September 2004. Link to paper.

    +
  6. +
+
+
+

Processing overview

+[Figure: SARvey workflow]

Processing workflow for using the SARvey software to derive displacement time series.

+

SARvey is a command-line-based software. The major steps for running SARvey are the following:

+
    +
  • Installation

    +

SARvey is a cross-platform, Python-based software and can be installed on Linux and MacOS. On Windows, SARvey is tested on Windows Subsystem for Linux (WSL) version 2. Details of the installation can be found in the installation instructions.

    +
  • +
  • Preprocessing

    +

The software requires a coregistered stack of SLC and the related geometry information in the MiaplPy data format. The coregistered stack of SLC can be created using an InSAR processor. Currently, MiaplPy only supports ISCE; support for GAMMA and SNAP is planned for the future. After creating the coregistered stack of SLC, run the "load_data" step from MiaplPy to create the "inputs" directory, which contains "slcStack.h5" and "geometryRadar.h5". Details are explained in the Preparation section.

    +
  • +
  • Time series analysis

    +

Time series analysis is performed using sarvey. It consists of 5 steps (steps 0 to 4). The details of each step are explained in processing steps. The processing parameters are handled in a JSON config file. Visualization and export are handled by the sarvey_plot and sarvey_export components. Below are the major steps:

    +
      +
    • Go to your working directory:

      +
      cd path/to/working_dir/
      +
      +
      +
    • +
    • Create a default config file using “-g” flag:

      +
      sarvey -f config.json 0 4 -g
      +
      +
      +
    • +
    • Modify config.json to change the path to the "inputs" directory. Modify other parameters as desired.

    • +
    • Run all processing steps (steps 0 to 4):

      +
      sarvey -f config.json 0 4
      +
      +
      +

      Different processing steps are explained here.

      +
    • +
    • Plot the resulting displacement time series:

      +
      sarvey_plot outputs/p2_coh80_ts.h5 -t
      +
      +
      +
    • +
    • Export the results as Shapefiles:

      +
      sarvey_export outputs/p2_coh80_ts.h5 -o outputs/shp/p2_coh80.shp
      +
      +
      +
    • +
    +
  • +
+
+
+

Feature overview

+

SARvey has three main components for processing, visualization, and exporting data.

+
    +
  • sarvey performs time series analysis.

  • +
  • sarvey_plot plots the outputs.

  • +
  • sarvey_export exports InSAR time series results to GIS data formats. The GIS data format can be visualized, for example, in QGIS.

  • +
+

It also has two components that facilitate transport infrastructure monitoring.

+
    +
  • sarvey_mask creates masks from Shapefiles, e.g. for transport infrastructures.

  • +
  • sarvey_osm downloads transport infrastructure information from OSM and stores it as Shapefiles.

  • +
+

You can run each component in the command line with “-h” argument for more information about the usage. For example:

+
+
sarvey -h
+
+
+
+

SARvey supports two processing schemes:

+
+  • Two-step unwrapping workflow
+  • One-step unwrapping workflow
+
+

History / Changelog

+

You can find the protocol of recent changes in the SARvey package +here.

+

We follow the principle of semantic versioning. +The version number is structured as follows: MAJOR.MINOR.PATCH. +You can find a description of the versioning scheme here.

+
+
+

Credits

+

This software was developed within the project SAR4Infra (2020-2024) with funds of the German Federal Ministry for Digital and Transport. +The project consortium consists of +the Institute of Photogrammetry and GeoInformation at Leibniz University Hannover, +FERN.Lab (innovation and technology transfer lab of the GFZ German Research Centre for Geosciences, Potsdam), +Landesamt fuer Vermessung und Geoinformation Schleswig-Holstein, +and Landesbetrieb Strassenbau und Verkehr Schleswig-Holstein. +The scientific and methodological development was carried out by Andreas Piter (piter@ipi.uni-hannover.de), supervised by Mahmud H. Haghighi (mahmud@ipi.uni-hannover.de) and Mahdi Motagh (motagh@gfz-potsdam.de). +The FERN.Lab (fernlab@gfz-potsdam.de) contributed to the development, documentation, continuous integration, and testing of the package.

+

This package was created with Cookiecutter and the fernlab/cookiecutter-pypackage project template.

+
+
+ + +
+
+
+
+ +
+
+ + + + \ No newline at end of file diff --git a/doc/sarvey.html b/doc/sarvey.html new file mode 100644 index 0000000..ec46013 --- /dev/null +++ b/doc/sarvey.html @@ -0,0 +1,3278 @@ + + + + + + + + sarvey package — SARvey 1.0.0 documentation + + + + + + + + + + + + + + + +
+
+
+
+ +
+

sarvey package

+
+

Submodules

+
+
+

sarvey.coherence module

+

Coherence module for SARvey.

+
+
+sarvey.coherence.computeIfgs(*, slc, ifg_array)[source]
+

ComputeIfgs.

+
+

Parameters

+
+
slcnp.ndarray

SLC stack.

+
+
ifg_arraynp.ndarray

Array containing the indices of the reference and secondary images which are used to compute the interferograms.

+
+
+
+
+

Returns

+
+
ifgsnp.ndarray

Interferograms.

+
+
+
+
+ +
+
+sarvey.coherence.computeIfgsAndTemporalCoherence(*, path_temp_coh, path_ifgs, path_slc, ifg_array, time_mask, wdw_size, num_boxes, box_list, num_cores, logger)[source]
+

ComputeIfgsAndTemporalCoherence.

+

Compute the interferograms and temporal coherence from the SLC stack for a given set of (spatial) patches.

+
+

Parameters

+
+
path_temp_cohstr

Path to the temporary coherence stack. The data will be stored in this file during processing.

+
+
path_ifgsstr

Path to the interferograms stack. The data will be stored in this file during processing.

+
+
path_slcstr

Path to the SLC stack. The data will be read from this file.

+
+
ifg_arraynp.ndarray

Array containing the indices of the reference and secondary images which are used to compute the interferograms.

+
+
time_masknp.ndarray

Binary mask indicating the selected images from the SLC stack.

+
+
wdw_sizeint

Size of the filter window. Has to be odd.

+
+
num_boxesint

Number of patches to enable reading and processing of larger SLC stacks.

+
+
box_listlist

List containing the indices of each patch.

+
+
num_coresint

Number of cores for parallel processing.

+
+
loggerLogger

Logger object.

+
+
+
+
+

Returns

+
+
mean_amp_imgnp.ndarray

Mean amplitude image.

+
+
+
+
+ +
+
+sarvey.coherence.launchConvolve2d(args)[source]
+

LaunchConvolve2d.

+
+

Parameters

+
+
argstuple

Tuple containing the arguments for the convolution. +Tuple contains:

+
+
idxint

Index of the processed interferogram.

+
+
ifgnp.ndarray

Interferogram.

+
+
filter_kernelnp.ndarray

Filter kernel.

+
+
+
+
+
+
+

Returns

+
+
idxint

Index of the processed interferogram.

+
+
avg_neighboursnp.ndarray

Low-pass filtered phase derived as average of neighbours.

+
+
+
+
+ +
+
+

sarvey.config module

+
+
+

sarvey.console module

+

Console module for SARvey.

+
+
+sarvey.console.printCurrentConfig(*, config_section, config_section_default, logger)[source]
+

Print the current parameters and their default values from the config file to console.

+
+

Parameters

+
+
config_section: dict

Section of the configuration class which contains the selected parameters.

+
+
config_section_default: dict

Config section with default values.

+
+
logger: Logger

Logging handler.

+
+
+
+
+ +
+
+sarvey.console.printStep(*, step, step_dict, logger)[source]
+

Print the current step to console.

+
+

Parameters

+
+
step: int

current step number

+
+
step_dict: dict

dictionary with step numbers and names

+
+
logger: Logger

Logging handler

+
+
+
+
+ +
+
+sarvey.console.showLogoSARvey(*, logger, step)[source]
+

ShowLogoSARvey.

+
+

Parameters

+
+
logger: Logger

logging handler

+
+
step: str

Name of the step or script which is shown on the logo.

+
+
+
+
+ +
+
+

sarvey.densification module

+

Densification module for SARvey.

+
+
+sarvey.densification.densificationInitializer(tree_p1, point2_obj, demod_phase1)[source]
+

DensificationInitializer.

+

Sets values to global variables for parallel processing.

+
+

Parameters

+
+
tree_p1KDTree

KDTree of the first-order network

+
+
point2_objPoints

Points object with second-order points

+
+
demod_phase1np.ndarray

demodulated phase of the first-order network

+
+
+
+
+ +
+
+sarvey.densification.densifyNetwork(*, point1_obj, vel_p1, demerr_p1, point2_obj, num_conn_p1, max_dist_p1, velocity_bound, demerr_bound, num_samples, num_cores=1, logger)[source]
+

DensifyNetwork.

+

Densifies the network with second-order points by connecting the second-order points to the closest points in the +first-order network.

+
+

Parameters

+
+
point1_objPoints

Points object with first-order points

+
+
vel_p1np.ndarray

Velocity array of the first-order points

+
+
demerr_p1np.ndarray

DEM error array of the first-order points

+
+
point2_objPoints

Points object with second-order points

+
+
num_conn_p1int

Number of nearest points in the first-order network

+
+
max_dist_p1float

Maximum allowed distance to the nearest points in the first-order network

+
+
velocity_boundfloat

Bound for the velocity estimate in temporal unwrapping

+
+
demerr_boundfloat

Bound for the DEM error estimate in temporal unwrapping

+
+
num_samplesint

Number of samples for the search of the optimal parameters

+
+
num_coresint

Number of cores for parallel processing (default: 1)

+
+
loggerLogger

Logger object

+
+
+
+
+

Returns

+
+
demerr_p2np.ndarray

DEM error array of the second-order points

+
+
vel_p2np.ndarray

Velocity array of the second-order points

+
+
gamma_p2np.ndarray

Estimated temporal coherence array of the second-order points resulting from temporal unwrapping

+
+
+
+
+ +
+
+sarvey.densification.launchDensifyNetworkConsistencyCheck(args)[source]
+

LaunchDensifyNetworkConsistencyCheck.

+

Launches the densification of the network with second-order points inside parallel processing.

+
+

Parameters

+
+
argstuple

Tuple with the following parameters:

+
+
idx_rangenp.ndarray

Array with the indices of the second-order points

+
+
num_pointsint

Number of second-order points

+
+
num_conn_p1int

Number of nearest points in the first-order network

+
+
max_dist_p1float

Maximum allowed distance to the nearest points in the first-order network

+
+
velocity_boundfloat

Bound for the velocity estimate in temporal unwrapping

+
+
demerr_boundfloat

Bound for the DEM error estimate in temporal unwrapping

+
+
num_samplesint

Number of samples for the search of the optimal parameters

+
+
+
+
+
+
+

Returns

+
+
idx_range: np.ndarray

Array with the indices of the second-order points

+
+
demerr_p2: np.ndarray

DEM error array of the second-order points

+
+
vel_p2: np.ndarray

Velocity array of the second-order points

+
+
gamma_p2: np.ndarray

Estimated temporal coherence array of the second-order points resulting from temporal unwrapping

+
+
+
+
+ +
+
+

sarvey.filtering module

+

Filtering module for SARvey.

+
+
+sarvey.filtering.estimateAtmosphericPhaseScreen(*, residuals, coord_utm1, coord_utm2, num_cores=1, bool_plot=False, logger)[source]
+

Estimate_atmospheric_phase_screen.

+

Estimates the atmospheric phase screen from a stack of phase time series for a sparse set of points. Kriging is used to estimate the spatial dependence and to interpolate the phase screen over a set of new points.

+
+
Return type:
+

tuple[ndarray, ndarray]

+
+
+
+

Parameters

+
+
residuals: np.ndarray

residual phase (size: num_points1 x num_images)

+
+
coord_utm1: np.ndarray

coordinates in UTM of the points for which the residuals are given (size: num_points1 x 2)

+
+
coord_utm2: np.ndarray

coordinates in UTM of the new points which shall be interpolated (size: num_points2 x 2)

+
+
num_cores: int

Number of cores

+
+
bool_plot: bool

boolean flag to plot intermediate results (default: False)

+
+
logger: Logger

Logging handler

+
+
+
+
+

Returns

+
+
aps1: np.ndarray

atmospheric phase screen for the known points (size: num_points1 x num_images)

+
+
aps2: np.ndarray

atmospheric phase screen for the new points (size: num_points2 x num_images)

+
+
+
+
+ +
+
+sarvey.filtering.launchSpatialFiltering(parameters)[source]
+

Launch_spatial_filtering.

+

Launches the spatial filtering to estimate the atmospheric phase screen with low-pass filtering.

+
+

Parameters

+
+
parameters: tuple

Tuple containing the following parameters:

+
+
idx_range: np.ndarray

range of indices for the time series

+
+
num_time: int

number of time steps

+
+
residuals: np.ndarray

residual phase (size: num_points x num_ifgs)

+
+
coord_utm1: np.ndarray

coordinates in UTM of the first-order points for which the residuals are given (size: num_points_p1 x 2)

+
+
coord_utm2: np.ndarray

coordinates in UTM of the new points which shall be interpolated (size: num_points_p2 x 2)

+
+
bins: np.ndarray

bin edges for the variogram

+
+
bool_plot: bool

boolean flag to plot intermediate results

+
+
logger: Logger

Logging handler

+
+
+
+
+
+
+

Returns

+
+
idx_range: np.ndarray

range of indices for the time series

+
+
aps1: np.ndarray

atmospheric phase screen for the known points (size: num_points_p1 x num_ifgs)

+
+
aps2: np.ndarray

atmospheric phase screen for the new points (size: num_points_p2 x num_ifgs)

+
+
+
+
+ +
+
+sarvey.filtering.simpleInterpolation(*, residuals, coord_utm1, coord_utm2, interp_method='linear')[source]
+

SimpleInterpolation.

+

Simple interpolation of the atmospheric phase screen using scipy’s griddata function with the options “linear” or “cubic”. For pixels outside the convex hull of the input points, the nearest neighbor is used.

+
+

Parameters

+
+
residuals: np.ndarray

residual phase (size: num_points x num_ifgs)

+
+
coord_utm1: np.ndarray

coordinates in UTM of the points for which the residuals are given (size: num_points_p1 x 2)

+
+
coord_utm2: np.ndarray

coordinates in UTM of the new points which shall be interpolated (size: num_points_p2 x 2)

+
+
interp_method: str

interpolation method (default: “linear”; options: “linear”, “cubic”)

+
+
+
+
+

Returns

+
+
aps1: np.ndarray

atmospheric phase screen for the known points (size: num_points_p1 x num_images)

+
+
aps2: np.ndarray

atmospheric phase screen for the new points (size: num_points_p2 x num_images)

+
+
+
+
+ +
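A self-contained sketch of simpleInterpolation with synthetic data; the array shapes follow the parameter descriptions above (num_points x num_ifgs residuals, 2D UTM coordinates).

import numpy as np

from sarvey.filtering import simpleInterpolation

rng = np.random.default_rng(0)
residuals = rng.standard_normal((50, 30))    # 50 known points, 30 interferograms
coord_utm1 = rng.random((50, 2)) * 1000.0    # coordinates of the known points [m]
coord_utm2 = rng.random((200, 2)) * 1000.0   # new points to interpolate

aps1, aps2 = simpleInterpolation(
    residuals=residuals,
    coord_utm1=coord_utm1,
    coord_utm2=coord_utm2,
    interp_method="linear",
)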
+
+

sarvey.geolocation module

+

Module for correcting the geolocation of the scatterers.

+
+
+sarvey.geolocation.calculateGeolocationCorrection(*, path_geom, point_obj, demerr, logger)[source]
+

Calculate geolocation correction.

+
+

Parameters

+
+
path_geom: str

Path to directory containing ‘slcStack.h5’ or ‘geometryRadar.h5’.

+
+
point_obj: Points

Point object with incidence angle for points

+
+
demerr: np.array

Array of dem error per pixel

+
+
logger: Logger

Logger handle

+
+
+
+
+

Returns

+
+
coord_correction: np.array

array of geolocation corrections, two columns [x_correction, y_correction] per point.

+
+
+
+
+ +
+
+sarvey.geolocation.getHeading(input_path, logger)[source]
+

Read heading angle from slcStack.h5.

+
+

Parameters

+
+
input_path: str

Path to directory containing ‘slcStack.h5’ and ‘geometryRadar.h5’.

+
+
logger: Logger

Logger handle

+
+
+
+
+

Returns

+
+
heading_angle: float

heading angle of the satellite in radians (for ascending ~ -12*pi/180, for descending ~ 190*pi/180)

+
+
+
+
+ +
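A short call sketch for getHeading; the directory name 'inputs/' is an assumption and must contain slcStack.h5.

import numpy as np
from logging import getLogger

from sarvey.geolocation import getHeading

logger = getLogger(__name__)
heading_angle = getHeading("inputs/", logger)   # 'inputs/' is an assumed directory
print(f"heading: {np.rad2deg(heading_angle):.1f} deg")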
+
+

sarvey.ifg_network module

+

IfgNetwork module for SARvey.

+
+
+class sarvey.ifg_network.DelaunayNetwork[source]
+

Bases: IfgNetwork

+

Delaunay network of interferograms which restricts both the temporal and perpendicular baselines.

+

Init.

+
+
+configure(*, pbase, tbase, dates)[source]
+

Create the list of interferograms containing the indices of the images and compute the baselines.

+
+

Parameters

+
+
pbase: np.ndarray

perpendicular baselines of the SAR acquisitions, array

+
+
tbase: np.ndarray

temporal baselines of the SAR acquisitions, array

+
+
dates: list

Dates of the acquisitions, list.

+
+
+
+
+ +
+ +
+
+class sarvey.ifg_network.IfgNetwork[source]
+

Bases: object

+

Abstract class/interface for different types of interferogram networks.

+

Init.

+
+
+getDesignMatrix()[source]
+

Compute the design matrix for the small baseline network.

+
+ +
+
+ifg_list: Union[list, ndarray] = None
+
+ +
+
+open(*, path)[source]
+

Read stored information from an already existing .h5 file.

+
+

Parameters

+
+
path: str

path to existing file to read from.

+
+
+
+
+ +
+
+plot()[source]
+

Plot the network of interferograms.

+
+ +
+
+writeToFile(*, path, logger)[source]
+

Write all existing data to .h5 file.

+
+

Parameters

+
+
path: str

path to filename

+
+
logger: Logger

Logging handler.

+
+
+
+
+ +
+ +
+
+class sarvey.ifg_network.SmallBaselineNetwork[source]
+

Bases: IfgNetwork

+

Small baseline network of interferograms restricting both temporal and spatial baselines.

+

Init.

+
+
+configure(*, pbase, tbase, num_link, max_tbase, dates)[source]
+

Create the list of interferograms containing the indices of the images and compute the baselines.

+
+

Parameters

+
+
pbase: np.ndarray

perpendicular baselines of the SAR acquisitions.

+
+
tbase: np.ndarray

temporal baselines of the SAR acquisitions.

+
+
max_tbase: int

maximum temporal baseline in [days] (default: None).

+
+
num_link: int

number of links within the range of maximum temporal baseline.

+
+
dates: list

Dates of the acquisitions.

+
+
+
+
+ +
+ +
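An illustrative configuration of a SmallBaselineNetwork with synthetic baselines. The unit of tbase (years) and the type of the entries in dates are assumptions; check the slcStack.h5 metadata for the actual conventions.

import numpy as np
from datetime import date, timedelta
from logging import getLogger

from sarvey.ifg_network import SmallBaselineNetwork

logger = getLogger(__name__)

num_images = 10
pbase = np.random.default_rng(0).normal(0.0, 50.0, num_images)   # perpendicular baselines [m]
tbase = np.arange(num_images) * 12.0 / 365.25                    # temporal baselines (assumed unit: years)
dates = [date(2020, 1, 1) + timedelta(days=12 * i) for i in range(num_images)]  # assumed date type

ifg_net = SmallBaselineNetwork()
ifg_net.configure(pbase=pbase, tbase=tbase, num_link=3, max_tbase=60, dates=dates)
ifg_net.writeToFile(path="ifg_network.h5", logger=logger)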
+
+class sarvey.ifg_network.SmallBaselineYearlyNetwork[source]
+

Bases: IfgNetwork

+

Small baseline network of interferograms with yearly connections.

+

Init.

+
+
+configure(*, pbase, tbase, num_link=None, dates)[source]
+

Create the list of interferograms containing the indices of the images and compute the baselines.

+
+

Parameters

+
+
pbase: np.ndarray

perpendicular baselines of the SAR acquisitions, array

+
+
tbase: np.ndarray

temporal baselines of the SAR acquisitions, array

+
+
num_link: int

Number of consecutive links in time connecting acquisitions.

+
+
dates: list

Dates of the acquisitions, list.

+
+
+
+
+ +
+ +
+
+class sarvey.ifg_network.SmallTemporalBaselinesNetwork[source]
+

Bases: IfgNetwork

+

Small temporal baselines network of interferograms without restrictions on the perpendicular baselines.

+

Init.

+
+
+configure(*, pbase, tbase, num_link=None, dates)[source]
+

Create the list of interferograms containing the indices of the images and compute the baselines.

+
+

Parameters

+
+
pbase: np.ndarray

Perpendicular baselines of the SAR acquisitions.

+
+
tbase: np.ndarray

Temporal baselines of the SAR acquisitions.

+
+
num_link: int

Number of consecutive links in time connecting acquisitions.

+
+
dates: list

Dates of the acquisitions.

+
+
+
+
+ +
+ +
+
+class sarvey.ifg_network.StarNetwork[source]
+

Bases: IfgNetwork

+

Star network of interferograms (single-reference).

+

Init.

+
+
+configure(*, pbase, tbase, ref_idx, dates)[source]
+

Create the list of interferograms containing the indices of the images and compute the baselines.

+
+

Parameters

+
+
pbase: np.ndarray

Perpendicular baselines of the SAR acquisitions.

+
+
tbase: np.ndarray

Temporal baselines of the SAR acquisitions.

+
+
ref_idx: int

Index of the reference image.

+
+
dates: list

Dates of the acquisitions.

+
+
+
+
+ +
+ +
+
+

sarvey.objects module

+

Objects module for SARvey.

+
+
+class sarvey.objects.AmplitudeImage(*, file_path)[source]
+

Bases: object

+

AmplitudeImage.

+

Init.

+
+

Parameters

+
+
file_path: str

path to filename

+
+
+
+
+open()[source]
+

Open.

+
+ +
+
+plot(*, ax=None, logger)[source]
+

Plot the mean amplitude image as a background map.

+
+

Parameters

+
+
ax: plt.Axes

axes for plotting (default: None, a new figure will be created).

+
+
logger: Logger

Logging handler.

+
+
+
+
+

Returns

+
+
ax: plt.Axes

axes object.

+
+
+
+
+ +
+
+prepare(*, slc_stack_obj, img, logger)[source]
+

Read the SLC stack, compute the mean amplitude image and store it into a file.

+
+

Parameters

+
+
slc_stack_obj: slcStack

object of class slcStack from MiaplPy

+
+
img: np.ndarray

amplitude image, e.g. the mean over time

+
+
logger: Logger

Logging handler

+
+
+
+
+ +
+
+ +
+
+class sarvey.objects.BaseStack(*, file=None, logger)[source]
+

Bases: object

+

Class for 3D image-like data stacks.

+

Init.

+
+

Parameters

+
+
file: str

path to filename

+
+
logger: Logger

Logging handler.

+
+
+
+
+close(*, print_msg=True)[source]
+

Close.

+
+ +
+
+getShape(*, dataset_name)[source]
+

Open file and read shape of dataset.

+
+ +
+
+prepareDataset(dataset_name, dshape, dtype, metadata, mode='w', chunks=True)[source]
+

PrepareDataset. Create a dataset in the file with the specified size without writing any data.

+
+

Parameters

+
+
dataset_name: str

name of dataset.

+
+
dshape: tuple

shape of dataset.

+
+
dtype: object

data type of dataset.

+
+
metadata: dict

metadata of dataset (e.g. WAVELENGTH, ORBIT_DIRECTION, etc.). Usually the same as in slcStack.h5.

+
+
mode: str

open mode (‘w’ for writing new file or ‘a’ for appending to existing file).

+
+
chunks: tuple

chunk size (‘True’/’False’ or tuple specifying the dimension of the chunks)

+
+
+
+
+ +
+
+read(*, dataset_name, box=None, print_msg=True)[source]
+

Read dataset from slc file.

+
+

Parameters

+
+
dataset_name: str

name of dataset

+
+
box: tuple

tuple of 4 int, indicating x0, y0, x1, y1 of the range, or tuple of 6 int, indicating x0, y0, z0, x1, y1, z1 of the range

+
+
print_msg: bool

print message.

+
+
+
+
+

Returns

+
+
data: np.ndarray

2D or 3D dataset

+
+
+
+
+ +
+
+writeToFile(*, data, dataset_name, metadata=None, mode='a', chunks=True)[source]
+

Write the whole dataset to the file (not block-by-block).

+
+

Parameters

+
+
data: np.ndarray

3D data array.

+
+
dataset_name: str

name of dataset.

+
+
metadata: dict

metadata of dataset (e.g. WAVELENGTH, ORBIT_DIRECTION, etc.). Usually the same as in slcStack.h5.

+
+
mode: str

mode for opening the h5 file (e.g. write: ‘w’ or append: ‘a’)

+
+
chunks: tuple

chunk size (‘True’/’False’ or tuple specifying the dimension of the chunks)

+
+
+
+
+ +
+
+writeToFileBlock(*, data, dataset_name, block=None, mode='a', print_msg=True)[source]
+

Write data to existing HDF5 dataset in disk block by block.

+
+

Parameters

+
+
data: np.ndarray

1/2/3D matrix.

+
+
dataset_name: str

dataset name.

+
+
block: list

the list can contain 2, 4 or 6 integers indicating: [zStart, zEnd, yStart, yEnd, xStart, xEnd].

+
+
mode: str

open mode (‘w’ for writing new file or ‘a’ for appending to existing file).

+
+
print_msg: bool

print message.

+
+
+
+
+

Returns

+
+
file: str

path to file

+
+
+
+
+ +
+
+ +
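A small sketch of reading from a BaseStack. The file name and the dataset name 'temp_coh' are assumptions based on the coherence processing described elsewhere in this documentation.

from logging import getLogger

from sarvey.objects import BaseStack

logger = getLogger(__name__)
stack = BaseStack(file="temporal_coherence.h5", logger=logger)  # assumed file name

print(stack.getShape(dataset_name="temp_coh"))                  # assumed dataset name

# a 4-int box is interpreted as (x0, y0, x1, y1)
temp_coh = stack.read(dataset_name="temp_coh", box=(0, 0, 100, 100))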
+
+class sarvey.objects.CoordinatesUTM(*, file_path, logger)[source]
+

Bases: object

+

Coordinates in UTM for all pixels in the radar image.

+

Init.

+
+

Parameters

+
+
file_path: str

path to filename

+
+
logger: Logger

Logging handler.

+
+
+
+
+open()[source]
+

Open.

+
+ +
+
+prepare(*, input_path)[source]
+

Read the SLC stack, compute the mean amplitude image and store it into a file.

+
+

Parameters

+
+
input_path: str

path to slcStack.h5 file.

+
+
+
+
+ +
+
+ +
+
+class sarvey.objects.Network(*, file_path, logger)[source]
+

Bases: object

+

Spatial network of PS candidates.

+

Init.

+
+

Parameters

+
+
file_path: str

absolute path to working directory for creating/loading ‘psNetwork.h5’

+
+
logger: Logger

Logging handler.

+
+
+
+
+computeArcObservations(*, point_obj, arcs)[source]
+

Compute the phase observations for each arc.

+

Compute double-difference phase observations, i.e. the phase differences for each arc in the network from the phase of the two scatterers connected by the arc.

+
+

Parameters

+
+
point_obj: Points

object of class Points.

+
+
arcs: np.ndarray

Array with the indices of the points connected by an arc.

+
+
+
+
+ +
+
+open(*, input_path)[source]
+

Read stored information from existing .h5 file.

+
+ +
+
+openExternalData(*, input_path)[source]
+

Read data from slcStack.h5 and IfgNetwork.h5 files.

+
+ +
+
+removeArcs(*, mask)[source]
+

Remove arcs from the list of arcs in the network.

+
+

Parameters

+
+
mask: np.ndarray

mask to select arcs to be kept, rest will be removed.

+
+
+
+
+ +
+
+writeToFile()[source]
+

Write all existing data to psNetwork.h5 file.

+
+ +
+
+ +
+
+class sarvey.objects.NetworkParameter(*, file_path, logger)[source]
+

Bases: Network

+

Spatial Network with the estimated parameters of each arc in the network.

+

Init.

+
+
+open(*, input_path)[source]
+

Read data from file.

+
+ +
+
+prepare(*, net_obj, demerr, vel, gamma)[source]
+

Prepare.

+
+

Parameters

+
+
net_obj: Network

object of class Network.

+
+
demerr: np.ndarray

estimated DEM error for each arc in the network.

+
+
vel: np.ndarray

estimated velocity for each arc in the network.

+
+
gamma: np.ndarray

estimated temporal coherence for each arc in the network.

+
+
+
+
+ +
+
+writeToFile()[source]
+

Write DEM error, velocity and temporal coherence to file.

+
+ +
+ +
+
+class sarvey.objects.Points(*, file_path, logger)[source]
+

Bases: object

+

Points class for storing information about the selected scatterers.

+

Init.

+
+

Parameters

+
+
file_path: str

path to filename

+
+
logger: Logger

Logging handler.

+
+
+
+
+addPointsFromObj(*, new_point_id, new_coord_xy, new_phase, new_num_points, input_path)[source]
+

Add new points and their attributes to the existing data.

+
+

Parameters

+
+
new_point_id: np.ndarray

point_id of the new scatterers.

+
+
new_coord_xy: np.ndarray

radar coordinates of the new scatterers.

+
+
new_phase: np.ndarray

phase of the new scatterers.

+
+
new_num_points: int

number of new points.

+
+
input_path: str

path to input files (slcStack.h5, geometryRadar.h5).

+
+
+
+
+ +
+
+coord_xy: array
+
+ +
+
+createMask()[source]
+

Create a mask.

+

Create a mask in the size of the radar image which is used to read the geometry and SLC data for the selected scatterers.

+
+ +
+
+file_path: str
+
+ +
+
+length: int
+
+ +
+
+num_points: int
+
+ +
+
+open(input_path, other_file_path=None)[source]
+

Read data from file.

+

Read stored information from an already existing .h5 file. This can be the file of the object itself. If the data should be read from another file, the path to this file can be given as ‘other_file_path’. In this way, a new Points object can be created from the data of another Points object.

+
+

Parameters

+
+
input_path: str

path to input files (slcStack.h5, geometryRadar.h5).

+
+
other_file_path: str

path to other .h5 file (default: None).

+
+
+
+
+ +
+
+openExternalData(*, input_path)[source]
+

Load data which is stored in slcStack.h5, geometryRadar.h5, ifg_network.h5 and coordinates_utm.h5.

+
+ +
+
+phase: array
+
+ +
+
+point_id: array
+
+ +
+
+prepare(*, point_id, coord_xy, input_path)[source]
+

Assign point_id and radar coordinates to the object.

+

Store the point_id and radar coordinates of the scatterers in the object (not in the file) and read further attributes from external files (ifg_network.h5, slcStack.h5, geometryRadar.h5, coordinates_utm.h5).

+
+

Parameters

+
+
point_id: np.ndarray

point_id of the scatterers.

+
+
coord_xy: np.ndarray

radar coordinates of the scatterers.

+
+
input_path: str

path to input files (slcStack.h5, geometryRadar.h5).

+
+
+
+
+ +
+
+removePoints(mask=None, *, keep_id, input_path)[source]
+

Remove all entries from specified points.

+

Two options exist for removing points: a) keep all points which are set to True in ‘mask’ with size (num_points x 1), or b) keep all points whose ID is listed in ‘keep_id’. All other points are removed.

+
+

Parameters

+
+
mask: np.ndarray

mask to select points to be kept, rest will be removed (default: None).

+
+
keep_id: np.ndarray

list of point_id to keep.

+
+
input_path: str

path to input files (slcStack.h5, geometryRadar.h5).

+
+
+
+
+ +
+
+times: None
+
+ +
+
+wavelength: float
+
+ +
+
+width: int
+
+ +
+
+writeToFile()[source]
+

Write data to .h5 file (num_points, coord_xy, point_id, phase).

+
+ +
+
+ +
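A hypothetical workflow with the Points class. File names, the input directory and the quality array mean_gamma are assumptions, and removePoints is assumed to accept keep_id=None when the selection is done via the mask (option a).

from logging import getLogger

from sarvey.objects import Points

logger = getLogger(__name__)
point_obj = Points(file_path="p1_points.h5", logger=logger)  # assumed file name
point_obj.open(input_path="inputs/")                         # assumed input directory

# keep only points whose quality exceeds a threshold (selection by mask)
mask = mean_gamma > 0.8                                      # mean_gamma assumed available
point_obj.removePoints(mask=mask, keep_id=None, input_path="inputs/")
point_obj.writeToFile()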
+
+

sarvey.osm_utils module

+

Osm utils module for SARvey.

+
+
+sarvey.osm_utils.getSpatialExtend(*, geom_file, logger)[source]
+

Get the spatial extent of the radar image.

+
+

Parameters

+
+
geom_file: str

path of geometryRadar.h5 file

+
+
logger: Logger

Logging handler.

+
+
+
+
+

Returns

+
+
ll_corner_wgs: list

list of coordinates of the lower-left corner of the radar image in WGS84 coordinates.

+
+
ur_corner_wgs: list

list of coordinates of the upper-right corner of the radar image in WGS84 coordinates.

+
+
coord: np.ndarray

coordinates of all pixels in the radar image in WGS84.

+
+
atr: dict

metadata dictionary from geometryRadar.h5.

+
+
+
+
+ +
+
+sarvey.osm_utils.runOsmQuery(*, ll_corner_wgs, ur_corner_wgs, type_list, logger)[source]
+

Query OSM database for transport infrastructure within the spatial extent of the radar image.

+
+
Return type:
+

Result

+
+
+
+

Parameters

+
+
ll_corner_wgs: np.ndarray

coordinates of the lower-left corner of the radar image in WGS84 coordinates.

+
+
ur_corner_wgs: np.ndarray

coordinates of the upper-right corner of the radar image in WGS84 coordinates.

+
+
type_list: list

List of street types that shall be queried at the OSM database.

+
+
logger: Logger

Logging handler.

+
+
+
+
+

Returns

+
+
+
result: overpy.Result

results of the overpy query to OSM database.

+
+
+
+
+
+ +
+
+sarvey.osm_utils.runOsmQueryBridge(*, ll_corner_wgs, ur_corner_wgs, bridge_highway, bridge_railway, logger)[source]
+

Query OSM database for bridges of transport infrastructure within the spatial extent of the radar image.

+
+
Return type:
+

Result

+
+
+
+

Parameters

+
+
ll_corner_wgs: np.ndarray

coordinates of the lower-left corner of the radar image in WGS84 coordinates.

+
+
ur_corner_wgs: np.ndarray

coordinates of the upper-right corner of the radar image in WGS84 coordinates.

+
+
bridge_highway: bool

Set true to query highway bridges.

+
+
bridge_railway: bool

Set true to query railway bridges.

+
+
logger: Logger

Logging handler.

+
+
+
+
+

Returns

+
+
+
result: overpy.Result

results of the overpy query to OSM database.

+
+
+
+
+
+ +
+
+

sarvey.preparation module

+

Preparation module for SARvey.

+
+
+sarvey.preparation.createArcsBetweenPoints(*, point_obj, knn=None, max_arc_length=inf, logger)[source]
+

Create a spatial network of arcs to triangulate the points.

+

All points are triangulated with a Delaunay triangulation. If knn is given, the triangulation is done with the k nearest neighbors. Arcs that are too long are removed from the network. If, afterwards, the network is not connected, a Delaunay triangulation is performed again to ensure connectivity in the network.

+
+
Return type:
+

ndarray

+
+
+
+

Parameters

+
+
point_obj: Points

Point object.

+
+
knn: int

Number of nearest neighbors to consider (default: None).

+
+
max_arc_length: float

Maximum length of an arc. Longer arcs will be removed. Default: np.inf.

+
+
logger: Logger

Logging handler.

+
+
+
+
+

Returns

+
+
arcs: np.ndarray

Arcs of the triangulation containing the indices of the points for each arc.

+
+
+
+
+ +
+
+sarvey.preparation.createTimeMaskFromDates(*, start_date, stop_date, date_list, logger)[source]
+

Create a mask with selected dates within given time frame.

+
+

Parameters

+
+
start_date: str

Start date.

+
+
stop_date: str

Stop date.

+
+
date_list: list

all available dates in the slcStack.h5 file.

+
+
logger: Logger

Logging handler.

+
+
+
+
+

Returns

+
+
time_mask: np.ndarray

mask with True for selected dates.

+
+
num_slc: int

number of selected images.

+
+
result_date_list: list

list of selected dates.

+
+
+
+
+ +
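A minimal call sketch for createTimeMaskFromDates; the date string format is an assumption, and date_list is assumed to hold all acquisition dates from slcStack.h5.

from logging import getLogger

from sarvey.preparation import createTimeMaskFromDates

logger = getLogger(__name__)
time_mask, num_slc, result_date_list = createTimeMaskFromDates(
    start_date="2020-01-01",   # assumed date format
    stop_date="2021-12-31",
    date_list=date_list,       # assumed available from the SLC stack
    logger=logger,
)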
+
+sarvey.preparation.readCoherenceFromMiaplpy(*, path, box=None, logger)[source]
+

Read the coherence image from phase-linking of MiaplPy.

+
+
Return type:
+

tuple[ndarray, dict]

+
+
+
+

Parameters

+
+
path: str

Path to phase_series.h5 file.

+
+
box: tuple

Bounding Box to read from.

+
+
logger: Logger

Logging handler.

+
+
+
+
+

Returns

+
+
temp_coh: np.ndarray

temporal coherence image from phase-linking results of MiaplPy.

+
+
+
+
+ +
+
+sarvey.preparation.readSlcFromMiaplpy(*, path, box=None, logger)[source]
+

Read SLC data from the phase-linking results of MiaplPy.

+
+
Return type:
+

ndarray

+
+
+
+

Parameters

+
+
path: str

Path to the phase_series.h5 file.

+
+
box: tuple

Bounding Box to read from.

+
+
logger: Logger

Logging handler.

+
+
+
+
+

Returns

+
+
slc: np.ndarray

slc stack created from phase-linking results.

+
+
+
+
+ +
+
+sarvey.preparation.selectPixels(*, path, selection_method, thrsh, grid_size=None, bool_plot=False, logger)[source]
+

Select pixels based on temporal coherence.

+
+

Parameters

+
+
path: str

Path to the directory with the temporal_coherence.h5 file.

+
+
selection_method: str

Pixel selection method. Currently, only “temp_coh” is implemented.

+
+
thrsh: float

Threshold for pixel selection.

+
+
grid_size: int

Grid size for sparse pixel selection.

+
+
bool_plot: bool

Plot the selected pixels.

+
+
logger: Logger

Logging handler.

+
+
+
+
+

Returns

+
+
cand_mask: np.ndarray

Mask with selected pixels.

+
+
+
+
+ +
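A hedged usage sketch for selectPixels; the directory name, threshold and grid size are illustrative values only.

from logging import getLogger

from sarvey.preparation import selectPixels

logger = getLogger(__name__)
cand_mask = selectPixels(
    path="outputs/",              # assumed directory containing temporal_coherence.h5
    selection_method="temp_coh",  # currently the only implemented method
    thrsh=0.8,
    grid_size=200,                # sparse selection, one point per grid cell (assumed unit: m)
    bool_plot=False,
    logger=logger,
)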
+
+

sarvey.processing module

+
+
+

sarvey.sarvey_export module

+
+
+

sarvey.sarvey_mask module

+

Generate mask from shape file.

+
+
+class sarvey.sarvey_mask.CoordinateSearch[source]
+

Bases: object

+

CoordinateSearch.

+

Init.

+
+
+createSearchTree(*, coord, logger)[source]
+

Create search tree.

+
+

Parameters

+
+
coord: utils.coordinate

Coordinates

+
+
logger: Logger

Logging handler.

+
+
+
+
+ +
+
+getMeanDistanceBetweenPixels()[source]
+

Compute mean distance between adjacent pixels.

+
+ +
+
+getNearestNeighbour(*, node)[source]
+

Query the kd-tree for the nearest neighbour.

+
+
Parameters:
+

node (Node) – Node object

+
+
+
+ +
+ +
+
+class sarvey.sarvey_mask.Node(*, lat=None, lon=None)[source]
+

Bases: object

+

Define a simple class for a node at a road (similar to overpy.Node).

+

Init.

+
+ +
+
+sarvey.sarvey_mask.computeLastRoadPixel(*, cur_node, prev_node, found_node)[source]
+

Compute the location of the pixel at the border of the radar image that is part of the road.

+
+

Parameters

+
+
cur_node: Node

Current node of the road.

+
+
prev_node: Node

Previous node of the road.

+
+
found_node: Node

Found node of the road.

+
+
+
+
+

Returns

+
+
new_lon: float

Longitude of the pixel at the border of the radar image that is part of the road.

+
+
new_lat: float

Latitude of the pixel at the border of the radar image that is part of the road.

+
+
+
+
+ +
+
+sarvey.sarvey_mask.convertToRadarCoord(*, gdf_infra, csearch, width, logger)[source]
+

Convert polylines to a mask in the shape of the radar image. Apply a buffer of size ‘width’ in pixels.

+
+

Parameters

+
+
gdf_infra: gpd.geodataframe

The queried infrastructures containing polylines.

+
+
csearch: CoordinateSearch

The coordinate search object.

+
+
width: int

Width of the mask in pixel.

+
+
logger: Logger

Logging handler.

+
+
+
+
+

Returns

+
+
img_np: np.ndarray

Mask image.

+
+
+
+
+ +
+
+sarvey.sarvey_mask.convertToRadarCoordPolygon(*, gdf_infra, csearch, logger)[source]
+

Convert polygons to a mask in the shape of the radar image.

+
+

Parameters

+
+
gdf_infra: gpd.geodataframe

The queried infrastructures containing polygons.

+
+
csearch: CoordinateSearch

The coordinate search object.

+
+
logger: Logger

Logging handler.

+
+
+
+
+

Returns

+
+
img_np: np.ndarray

Mask image.

+
+
+
+
+ +
+
+sarvey.sarvey_mask.createMask(*, input_file, width, work_dir, out_file_name, geom_file, logger)[source]
+

Create a mask for the radar image from a shapefile containing lines or polygons.

+
+

Parameters

+
+
input_file: str

Path to input file.

+
+
width: int

Width of the mask in pixel. Applied to the lines only.

+
+
work_dir: str

Working directory.

+
+
out_file_name: str

Output file name.

+
+
geom_file: str

Path to geometryRadar.h5 file.

+
+
logger: logging.Logger

Logging handler.

+
+
+
+
+ +
+
+sarvey.sarvey_mask.create_parser()[source]
+

Create_parser.

+
+ +
+
+sarvey.sarvey_mask.euclDist(*, node1, node2)[source]
+

Compute the euclidean distance between two nodes.

+
+ +
+
+sarvey.sarvey_mask.findLastRoadPixel(*, csearch, cur_node, prev_node, dist_thrsh)[source]
+

Find the index of the last road pixel that is within the image extent.

+

Idea: the pixel with the shortest distance to the current node of a road is not necessarily on the road if the current node is outside the image extent. Split the road into further linear parts and recursively find the last road pixel that is still inside the image. Hint: all nodes are instances of class Node.

+
+

Parameters

+
+
csearch: CoordinateSearch

Search tree for efficient spatial search of the coordinate of a pixel in the radar image.

+
+
cur_node: Node

Current node of the road that is outside the image extent.

+
+
prev_node: Node

Previous node of the road that is inside the image extent.

+
+
dist_thrsh: float

Distance threshold for stop criterion (derived from average distance between two pixels in the image).

+
+
+
+
+

Returns

+
+
node_idx: int

Node of the pixel which is the last pixel on the road inside the image.

+
+
+
+
+ +
+
+sarvey.sarvey_mask.main(iargs=None)[source]
+

Create mask from lines or polygons given in geographic coordinates (EPSG:4326). Input as shp or gpkg.

+
+ +
+
+sarvey.sarvey_mask.saveMask(*, work_dir, mask, atr, out_file_name)[source]
+

Save the mask to ‘maskRoads.h5’.

+
+

Parameters

+
+
work_dir: str

Working directory.

+
+
mask: np.ndarray

Mask image.

+
+
atr: dict

Metadata, e.g. from the geometryRadar.h5 file.

+
+
out_file_name: str

Output file name.

+
+
+
+
+ +
+
+

sarvey.sarvey_mti module

+
+
+

sarvey.sarvey_osm module

+

Download openstreetmap data for area of interest.

+
+
+sarvey.sarvey_osm.create_parser()[source]
+

Create_parser.

+
+ +
+
+sarvey.sarvey_osm.downloadOSM(*, railway, highway, bridge, work_dir, out_file_name, logger, geom_file)[source]
+

Download openstreetmap data and store to file.

+
+

Parameters

+
+
railway: bool

download railway data.

+
+
highway: bool

download highway data.

+
+
bridge: bool

download bridge data.

+
+
work_dir: str

working directory.

+
+
out_file_name: str

output file name.

+
+
logger: logging.Logger

logger.

+
+
geom_file: str

path to geometryRadar.h5 file.

+
+
+
+
+ +
+
+sarvey.sarvey_osm.main(iargs=None)[source]
+

Download openstreetmap data and store to file.

+
+ +
+
+

sarvey.sarvey_plot module

+
+
+

sarvey.triangulation module

+

Triangulation module for SARvey.

+
+
+class sarvey.triangulation.PointNetworkTriangulation(*, coord_xy, coord_utmxy, logger)[source]
+

Bases: object

+

PointNetworkTriangulation.

+

Triangulate points in space based on distance.

+
+

Parameters

+
+
coord_xy: np.ndarray

Radar coordinates of the points.

+
+
coord_utmxy: np.ndarray

UTM coordinates of the points.

+
+
logger: Logger

Logging handler.

+
+
+
+
+getArcsFromAdjMat()[source]
+

Convert the adjacency matrix into a list of arcs.

+
+

Returns

+
+
arcs: np.ndarray

List of arcs with indices of the start and end point.

+
+
+
+
+ +
+
+isConnected()[source]
+

Check if the network is connected.

+
+ +
+
+removeLongArcs(*, max_dist)[source]
+

Remove arcs from network which are longer than given threshold.

+
+

Parameters

+
+
max_dist: float

distance threshold on arc length in [m]

+
+
+
+
+ +
+
+triangulateGlobal()[source]
+

Connect the points with a global Delaunay triangulation.

+
+ +
+
+triangulateKnn(*, k)[source]
+

Connect points to the k-nearest neighbours.

+
+ +
+
+ +
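An illustrative round trip through PointNetworkTriangulation with synthetic coordinates, following the re-triangulation logic described for createArcsBetweenPoints above.

import numpy as np
from logging import getLogger

from sarvey.triangulation import PointNetworkTriangulation

logger = getLogger(__name__)
rng = np.random.default_rng(0)
coord_xy = rng.integers(0, 500, size=(100, 2))   # radar coordinates
coord_utmxy = rng.random((100, 2)) * 5000.0      # UTM coordinates [m]

tri = PointNetworkTriangulation(coord_xy=coord_xy, coord_utmxy=coord_utmxy, logger=logger)
tri.triangulateGlobal()
tri.removeLongArcs(max_dist=1000.0)              # drop arcs longer than 1 km
if not tri.isConnected():
    tri.triangulateGlobal()                      # restore connectivity
arcs = tri.getArcsFromAdjMat()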
+
+

sarvey.unwrapping module

+

Unwrapping module for SARvey.

+
+
+sarvey.unwrapping.computeAvgCoherencePerPoint(*, net_obj, point_id, logger)[source]
+

Compute the average coherence from all arcs that a point is connected with. Used to remove incoherent points.

+
+
Return type:
+

ndarray

+
+
+
+

Parameters

+
+
net_obj: Network

The Network object.

+
+
point_id: np.ndarray

ID of the points in the network.

+
+
logger: Logger

Logging handler.

+
+
+
+
+

Returns

+
+
mean_gamma_point: np.ndarray

Average coherence per point

+
+
+
+
+ +
+
+sarvey.unwrapping.computeNumArcsPerPoints(*, net_obj, point_id, logger)[source]
+

Compute the number of arcs that each point is connected with. Used to remove points with less than a specified number of arcs.

+
+
Return type:
+

tuple[ndarray, ndarray]

+
+
+
+

Parameters

+
+
net_obj: Network

The spatial Network object.

+
+
point_id: np.ndarray

ID of the points in the network.

+
+
logger: Logger

Logging handler.

+
+
+
+
+

Returns

+
+
+
design_mat: np.ndarray

Design matrix of the spatial network

+
+
arcs_per_point: np.ndarray

Number of arcs that each point is connected with.

+
+
+
+
+
+ +
+
+sarvey.unwrapping.findOptimum(*, obs_phase, design_mat, val_range)[source]
+

Find the optimal value within a one-dimensional search space that fits the observed phase.

+
+

Parameters

+
+
obs_phase: np.ndarray

Observed phase of the arc.

+
+
design_mat: np.ndarray

Design matrix for estimating parameters from arc phase.

+
+
val_range: np.ndarray

Range of possible values for the solution. Can be either for DEM error or velocity.

+
+
+
+
+

Returns

+

opt_val: scipy.optimize.minimize return value
gamma: float
pred_phase: np.ndarray

+
+
+ +
+
+sarvey.unwrapping.gradientSearchTemporalCoherence(*, scale_vel, scale_demerr, obs_phase, design_mat, x0)[source]
+

GradientSearchTemporalCoherence.

+
+

Parameters

+
+
scale_demerr: float

Scaling factor for DEM error to equalize the axis of the search space.

+
+
scale_vel: float

Scaling factor for velocity to equalize the axis of the search space.

+
+
design_mat: np.ndarray

Design matrix for estimating parameters from arc phase.

+
+
obs_phase: np.ndarray

Observed phase of the arc.

+
+
x0: np.ndarray

Initial values for optimization.

+
+
+
+
+

Returns

+

demerr: float
vel: float
gamma: float

+
+
+ +
+
+sarvey.unwrapping.gridSearchTemporalCoherence(*, demerr_grid, vel_grid, design_mat, obs_phase)[source]
+

Grid search which maximizes the temporal coherence as the objective function.

+
+

Parameters

+
+
demerr_grid: np.ndarray

Search space for the DEM error in a 2D grid.

+
+
vel_grid: np.ndarray

Search space for the velocity in a 2D grid.

+
+
design_mat: np.ndarray

Design matrix for estimating parameters from arc phase.

+
+
obs_phase: np.ndarray

Observed phase of the arc.

+
+
+
+
+

Returns

+
+
demerr: float

estimated DEM error.

+
+
vel: float

estimated velocity.

+
+
gamma: float

estimated temporal coherence.

+
+
+
+
+ +
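The following self-contained numpy sketch illustrates the idea of a grid search that maximizes temporal coherence, here taken as the absolute mean of the complex residual phase. It is a simplified stand-in, not the sarvey.unwrapping implementation.

import numpy as np

rng = np.random.default_rng(0)
num_ifgs = 30
design_mat = rng.standard_normal((num_ifgs, 2))   # columns: DEM error, velocity (illustrative)
x_true = np.array([20.0, 0.01])
obs_phase = design_mat @ x_true + 0.3 * rng.standard_normal(num_ifgs)

def temporalCoherence(x):
    """Absolute mean of the complex residual phase (illustrative objective)."""
    residual = obs_phase - design_mat @ x
    return np.abs(np.mean(np.exp(1j * residual)))

demerr_range = np.linspace(-100.0, 100.0, 201)
vel_range = np.linspace(-0.1, 0.1, 201)
gamma = np.array([[temporalCoherence(np.array([d, v])) for d in demerr_range]
                  for v in vel_range])
i, j = np.unravel_index(np.argmax(gamma), gamma.shape)
print(f"demerr = {demerr_range[j]:.1f}, vel = {vel_range[i]:.4f}, gamma = {gamma[i, j]:.2f}")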
+
+sarvey.unwrapping.launchAmbiguityFunctionSearch(parameters)[source]
+

Wrapper for launching the ambiguity function search for temporal unwrapping in parallel.

+
+

Parameters

+
+
parameters: tuple

Arguments for temporal unwrapping in parallel.

+
+
+
+
+

Returns

+

arc_idx_range: np.ndarray
demerr: np.ndarray
vel: np.ndarray
gamma: np.ndarray

+
+
+ +
+
+sarvey.unwrapping.launchSpatialUnwrapping(parameters)[source]
+

LaunchSpatialUnwrapping.

+
+
Return type:
+

tuple[ndarray, ndarray]

+
+
+
+

Parameters

+
+
parameters: tuple

idx_range, num_ifgs, num_points, edges, phase

+
+
+
+
+

Returns

+

idx_range: np.ndarray
unw_phase: np.ndarray

+
+
+ +
+
+sarvey.unwrapping.objFuncTemporalCoherence(x, *args)[source]
+

Compute temporal coherence from parameters and phase. To be used as objective function for optimization.

+
+

Parameters

+
+
x: np.ndarray

Current parameter vector of the optimization, i.e. the scaled DEM error and velocity.

+
+
args: tuple

Additional arguments: (design_mat, obs_phase, scale_vel, scale_demerr).

+
+
+
+
+

Returns

+

1 - gamma: float

+
+
+ +
+
+sarvey.unwrapping.oneDimSearchTemporalCoherence(*, demerr_range, vel_range, obs_phase, design_mat)[source]
+

One dimensional search for maximum temporal coherence that fits the observed arc phase.

+
+

Parameters

+
+
demerr_range: np.ndarray

Search space for the DEM error in a 1D grid.

+
+
vel_range: np.ndarray

Search space for the velocity in a 1D grid.

+
+
design_mat: np.ndarray

Design matrix for estimating parameters from arc phase.

+
+
obs_phase: np.ndarray

Observed phase of the arc.

+
+
+
+
+

Returns

+

demerr: float
vel: float
gamma: float

+
+
+ +
+
+sarvey.unwrapping.parameterBasedNoisyPointRemoval(*, net_par_obj, point_id, coord_xy, design_mat, rmse_thrsh=0.02, num_points_remove=1, bmap_obj=None, bool_plot=False, logger)[source]
+

Remove points during the spatial integration step if the residuals at many connected arcs are high.

+

The idea is similar to outlier removal in DePSI, but without hypothesis testing. It can be used as a preprocessing step to spatial integration. The points are removed based on the RMSE computed from the residuals of the parameters (DEM error, velocity) per arc. The point with the highest RMSE is removed in each iteration. The process stops when the maximum RMSE is below a threshold.

+
+

Parameters

+
+
net_par_obj: NetworkParameter

The spatial NetworkParameter object containing the parameters estimates at each arc.

+
+
point_id: np.ndarray

ID of the points in the network.

+
+
coord_xy: np.ndarray

Radar coordinates of the points in the spatial network.

+
+
design_mat: np.ndarray

Design matrix describing the relation between arcs and points.

+
+
rmse_thrsh: float

Threshold for the RMSE of the residuals per point. Default = 0.02.

+
+
num_points_remove: int

Number of points to remove in each iteration. Default = 1.

+
+
bmap_obj: AmplitudeImage

Basemap object for plotting. Default = None.

+
+
bool_plot: bool

Plot the RMSE per point. Default = False.

+
+
logger: Logger

Logging handler.

+
+
+
+
+

Returns

+
+
spatial_ref_id: int

ID of the spatial reference point.

+
+
point_id: np.ndarray

ID of the points in the network without the removed points.

+
+
net_par_obj: NetworkParameter

The NetworkParameter object without the removed points.

+
+
+
+
+ +
+
+sarvey.unwrapping.removeArcsByPointMask(*, net_obj, point_id, coord_xy, p_mask, design_mat, logger)[source]
+

Remove all entries related to the arc observations connected to the points which have a False value in p_mask.

+
+
Return type:
+

tuple[Network, ndarray, ndarray, ndarray]

+
+
+
+

Parameters

+
+
net_obj: Network

The Network object.

+
+
point_id: np.ndarray

ID of the points in the network.

+
+
coord_xy: np.ndarray

Radar coordinates of the points in the spatial network.

+
+
p_mask: np.ndarray

Boolean mask with True for points to keep, and False for points to remove.

+
+
design_mat: np.ndarray

Design matrix describing the relation between arcs and points.

+
+
logger: Logger

Logging handler.

+
+
+
+
+

Returns

+
+
net_obj: Network

Network object without the removed arcs and points.

+
+
point_id: np.ndarray

ID of the points in the network without the removed points.

+
+
coord_xy: np.ndarray

Radar coordinates of the points in the spatial network without the removed points.

+
+
design_mat: np.ndarray

Design matrix describing the relation between arcs and points without the removed points and arcs.

+
+
+
+
+ +
+
+sarvey.unwrapping.removeGrossOutliers(*, net_obj, point_id, coord_xy, min_num_arc=3, quality_thrsh=0.0, logger)[source]
+

Remove both gross outliers which have many low quality arcs and points which are not well connected.

+
+
Return type:
+

tuple[Network, ndarray, ndarray, ndarray]

+
+
+
+

Parameters

+
+
net_obj: Network

The spatial Network object.

+
+
point_id: np.ndarray

ID of the points in the network.

+
+
coord_xy: np.ndarray

Radar coordinates of the points in the spatial network.

+
+
min_num_arc: int

Threshold on the minimal number of arcs per point. Default = 3.

+
+
quality_thrsh: float

Threshold on the temporal coherence of the arcs. Default = 0.0.

+
+
logger: Logger

Logging handler.

+
+
+
+
+

Returns

+
+
net_obj: Network

Network object without the removed arcs and points.

+
+
point_id: np.ndarray

ID of the points in the network without the removed points.

+
+
coord_xy: np.ndarray

Radar coordinates of the points in the spatial network without the removed points.

+
+
a: np.ndarray

Design matrix describing the relation between arcs and points without the removed points and arcs.

+
+
+
+
+ +
+
+sarvey.unwrapping.spatialParameterIntegration(*, val_arcs, arcs, coord_xy, weights, spatial_ref_idx=0, logger)[source]
+

Unwrapping double-difference arc parameters spatially.

+

The parameters at the arcs are integrated spatially to the points. The integration is done using least-squares.

+
+

Parameters

+
+
val_arcs: np.ndarray

Value at the arcs (e.g. DEM error, velocity).

+
+
arcs: np.ndarray

Arcs of the spatial network.

+
+
coord_xy: np.ndarray

Radar coordinates of the points in the spatial network.

+
+
weights: np.ndarray

Weights of the arcs (e.g. temporal coherence from temporal unwrapping)

+
+
spatial_ref_idx: int

Index of the spatial reference point (default = 0). Can be arbitrary.

+
+
logger: Logger

Logging handler

+
+
+
+
+

Returns

+
+
val_points: np.ndarray

Estimated parameters at the points resulting from the integration of the parameters at the arcs.

+
+
+
+
+ +
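A minimal numpy sketch of the weighted least-squares integration described above: each arc observes the difference between its two end points, the reference point is fixed to zero, and the system is solved for the point values. It uses synthetic data and is not the SARvey implementation.

import numpy as np

rng = np.random.default_rng(0)
num_points = 20
arcs = rng.integers(0, num_points, size=(60, 2))
arcs = arcs[arcs[:, 0] != arcs[:, 1]]          # drop degenerate arcs
num_arcs = arcs.shape[0]
weights = rng.random(num_arcs)

val_true = rng.standard_normal(num_points)
val_arcs = val_true[arcs[:, 1]] - val_true[arcs[:, 0]]

# design matrix: +1 at the arc end point, -1 at the arc start point
A = np.zeros((num_arcs, num_points))
A[np.arange(num_arcs), arcs[:, 1]] = 1.0
A[np.arange(num_arcs), arcs[:, 0]] = -1.0

spatial_ref_idx = 0
A_red = np.delete(A, spatial_ref_idx, axis=1)  # fix the reference point to zero
w = np.sqrt(weights)
val_points = np.linalg.lstsq(A_red * w[:, None], val_arcs * w, rcond=None)[0]
val_points = np.insert(val_points, spatial_ref_idx, 0.0)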
+
+sarvey.unwrapping.spatialParameterIntegrationIterative(*, val_arcs, all_arcs, coord_xy, all_weights, spatial_ref_idx=0, res_tol=0.001, max_rm_fraction=0.001, logger)[source]
+

Unwrapping double-difference arc parameters spatially.

+

The parameters at the arcs are integrated spatially to the points. The integration is done iteratively using least-squares, removing the arcs with the highest residuals in each iteration. The integration stops when the sum of the residuals is below a threshold. The function is adapted from the StaMPS software (Hooper et al., 2007).

+
+

Parameters

+
+
val_arcs: np.ndarray

Value at the arcs (e.g. DEM error, velocity).

+
+
all_arcs: np.ndarray

Arcs of the spatial network.

+
+
coord_xy: np.ndarray

Radar coordinates of the points in the spatial network.

+
+
all_weights: np.ndarray

Weights of the arcs (e.g. temporal coherence from temporal unwrapping)

+
+
spatial_ref_idx: int

Index of the spatial reference point (default = 0). Can be arbitrary.

+
+
res_tol: float

Threshold on the sum of the residual phase (default = 1e-3). Convergence criterion.

+
+
max_rm_fraction: float

Fraction of the arcs that are removed in each iteration (default = 0.001).

+
+
logger: Logger

Logging handler

+
+
+
+
+

Returns

+
+
val_points: np.ndarray

Estimated parameters at the points resulting from the integration of the parameters at the arcs.

+
+
+
+
+ +
+
+sarvey.unwrapping.spatialUnwrapping(*, num_ifgs, num_points, phase, edges, method, num_cores, logger)[source]
+

Spatial unwrapping of interferograms for a set of points.

+
+

Parameters

+
+
num_ifgs: int

Number of interferograms.

+
+
num_points: int

Number of points.

+
+
phase: np.ndarray

Phase of the interferograms at the points.

+
+
edges: np.ndarray

Edges/arcs of the graph.

+
+
method: str

Method for spatial unwrapping (puma or ilp).

+
+
num_cores: int

Number of cores to be used in multiprocessing.

+
+
logger: Logger

Logging handler.

+
+
+
+
+

Returns

+
+
unw_phase: np.ndarray

Unwrapped phase of the interferograms at the points.

+
+
+
+
+ +
+
+sarvey.unwrapping.temporalUnwrapping(*, ifg_net_obj, net_obj, wavelength, velocity_bound, demerr_bound, num_samples, num_cores=1, logger)[source]
+

Solve ambiguities for every arc in spatial Network object.

+
+
Return type:
+

tuple[ndarray, ndarray, ndarray]

+
+
+
+

Parameters

+
+
ifg_net_obj: IfgNetwork

The IfgNetwork object.

+
+
net_obj: Network

The Network object.

+
+
wavelength: float

The wavelength.

+
+
velocity_bound: float

The velocity bound.

+
+
demerr_bound: float

The DEM error bound.

+
+
num_samples: int

The number of samples for the search space.

+
+
num_cores: int

Number of cores to be used. Default is 1.

+
+
logger: Logger

Logging handler.

+
+
+
+
+

Returns

+

demerr: np.ndarray
vel: np.ndarray
gamma: np.ndarray

+
+
+ +
+
+

sarvey.utils module

+

Utils module for SARvey.

+
+
+sarvey.utils.checkIfRequiredFilesExist(*, path_to_files, required_files, logger)[source]
+

Check if all required files exist from previous processing steps.

+
+

Parameters

+
+
path_to_files: str

path to the files

+
+
required_files: list

list of required files which are all checked

+
+
logger: Logger

logging handler

+
+
+
+
+

Raises

+
+
FileNotFoundError

if a required file is missing

+
+
+
+
+ +
+
+sarvey.utils.convertBboxToBlock(*, bbox)[source]
+

ConvertBboxToBlock. The box used by ‘read’ and the block used by ‘write2hdf5_block’ have different definitions.

+
+ +
+
+sarvey.utils.createSpatialGrid(*, coord_utm_img, length, width, grid_size)[source]
+

Create a spatial grid over the image.

+
+

Parameters

+
+
coord_utm_img: np.ndarray

coordinates of all image pixels in UTM

+
+
length: int

number of pixels in length of the image

+
+
width: int

number of pixels in width of the image

+
+
grid_size: int

size of the grid in [m]

+
+
+
+
+

Returns

+
+
box_list: list

of tuples with the radar coordinates of the boxes

+
+
num_box: int

actual number of boxes created by the function

+
+
+
+
+ +
+
+sarvey.utils.detectValidAreas(*, bmap_obj, logger)[source]
+

Detect valid areas based on amplitude image.

+
+

Parameters

+
+
bmap_obj: AmplitudeImage

instance of class AmplitudeImage

+
+
logger: Logger

logging handler

+
+
+
+
+

Returns

+
+
mask_valid_area: np.ndarray

boolean mask of the valid areas

+
+
+
+
+ +
+
+sarvey.utils.estimateParameters(*, obj, estimate_ref_atmo=True, ifg_space=True)[source]
+

Estimate the parameters either per point or per arc.

+

Parameters are velocity and DEM error (or additionally reference APS).

+
+

Parameters

+
+
obj: Union[Points, Network]

object of either network, networkParameter, points or pointsParameters

+
+
estimate_ref_atmo: bool

set to True if the APS of the reference date shall be estimated. Corresponds to the offset of the linear motion model (default: True).

+
+
ifg_space: bool

set to True if the phase shall be predicted in interferogram space. If False, the phase will be predicted in acquisition space (default: True).

+
+
+
+
+

Returns

+
+
vel: np.ndarray

velocity for each point

+
+
demerr: np.ndarray

dem error for each point

+
+
ref_atmo: np.ndarray

reference APS for each point

+
+
omega:

sum of squared residuals

+
+
v_hat:

residuals

+
+
+
+
+ +
+
+sarvey.utils.invertIfgNetwork(*, phase, num_points, ifg_net_obj, num_cores, ref_idx, logger)[source]
+

Wrap the ifg network inversion running in parallel.

+
+

Parameters

+
+
phase: np.ndarray

interferometric phases of the points.

+
+
num_points: int

number of points.

+
+
ifg_net_obj: IfgNetwork

instance of class IfgNetwork.

+
+
num_cores: int

number of cores to use for multiprocessing.

+
+
ref_idx: int

index of temporal reference date for interferogram network inversion.

+
+
logger: Logger

logging handler

+
+
+
+
+

Returns

+
+
+
phase_ts: np.ndarray

inverted phase time series of the points.

+
+
+
+
+
+ +
+
+sarvey.utils.launchInvertIfgNetwork(parameters)[source]
+

Launch the inversion of the interferogram network in parallel.

+
+

Parameters

+
+
parameters: tuple

parameters for inversion

+
+
Tuple contains:
+
idx_range: np.ndarray

range of point indices to be processed

+
+
num_points: int

number of points

+
+
phase: np.ndarray

interferometric phases of the points

+
+
design_mat: np.ndarray

design matrix

+
+
num_images: int

number of images

+
+
ref_idx: int

index of temporal reference date for interferogram network inversion

+
+
+
+
+
+
+
+
+

Returns

+
+
+
idx_range: np.ndarray

range of indices of the points processed

+
+
phase_ts: np.ndarray

inverted phase time series

+
+
+
+
+
+ +
+
+sarvey.utils.predictPhase(*, obj, vel=None, demerr=None, ifg_space=True, logger)[source]
+

Predicts the phase time series based on the estimated parameters DEM error and mean velocity.

+

Can be used for both arc phase or point phase. Wrapper function for ‘predictPhaseCore(…)’

+
+

Parameters

+
+
obj: Union[NetworkParameter, Points]

object of either ‘networkParameter’ or ‘points’. If an instance of ‘points’ is given, ‘vel’ and ‘demerr’ also need to be specified.

+
+
vel: np.ndarray

velocity for each sample (default: None)

+
+
demerr: np.ndarray

dem error for each sample (default: None).

+
+
ifg_space: bool

set to True if the phase shall be predicted in interferogram space. If False, the phase will be predicted in acquisition space (default: True).

+
+
logger: Logger

Logging handler.

+
+
+
+
+

Returns

+
+
+
pred_phase_demerr: np.ndarray

predicted phase from DEM error

+
+
pred_phase_vel: np.ndarray

predicted phase from velocity

+
+
+
+
+
+

Raises

+
+
ValueError

if ‘vel’ or ‘demerr’ is None

+
+
TypeError

if ‘obj’ is of the wrong type

+
+
+
+
+ +
+
+sarvey.utils.predictPhaseCore(*, ifg_net_obj, wavelength, vel, demerr, slant_range, loc_inc, ifg_space=True)[source]
+

Predicts the phase time series based on the estimated parameters DEM error and mean velocity.

+

Can be used for both arc phase or point phase.

+
+

Parameters

+
+
ifg_net_obj: IfgNetwork

instance of class ifgNetwork

+
+
wavelength: float

wavelength in [m]

+
+
vel: np.ndarray

velocity for each sample

+
+
demerr: np.ndarray

dem error for each sample

+
+
slant_range: np.ndarray

slant range distance for each sample

+
+
loc_inc: np.ndarray

local incidence angle for each sample

+
+
ifg_space: bool

set to True if the phase shall be predicted in interferogram space. If False, the phase will be predicted in acquisition space (default: True).

+
+
+
+
+

Returns

+
+
+
pred_phase_demerr: np.ndarray

predicted phase from DEM error

+
+
pred_phase_vel: np.ndarray

predicted phase from velocity

+
+
+
+
+
+ +
+
+sarvey.utils.predictPhaseSingle(*, demerr, vel, slant_range, loc_inc, ifg_net_obj, wavelength, only_vel=False, ifg_space=True)[source]
+

Predict the phase time series for only one point based on the estimated parameters DEM error and mean velocity.

+

Can be used for both arc phase or point phase.

+
+

Parameters

+
+
demerr: float

DEM error (scalar)

+
+
vel: float

mean velocity (scalar)

+
+
slant_range: float

slant range distance in [m] (scalar)

+
+
loc_inc: float

local incidence angle in [rad] (scalar)

+
+
ifg_net_obj: IfgNetwork

object of class IfgNetwork

+
+
wavelength: float

radar wavelength in [m]

+
+
only_vel: bool

set to True if only the mean velocity shall be predicted (default: False)

+
+
ifg_space: bool

set to True if the phase shall be predicted in interferogram space. If False, the phase will be predicted in acquisition space (default: True).

+
+
+
+
+

Returns

+
+
+
pred_phase: np.ndarray

predicted phase

+
+
+
+
+
+ +
+
+sarvey.utils.preparePatches(*, num_patches, width, length, logger)[source]
+

Create patches to subset the image stack for parallel processing to reduce memory usage.

+
+

Parameters

+
+
num_patches: int

number of patches to split the image into

+
+
width: int

width of the image

+
+
length: int

length of the image

+
+
logger: Logger

logging handler

+
+
+
+
+

Returns

+
+
box_list: list

tuples with the radar coordinates of the boxes

+
+
num_patches: int

number of actual patches created by the function

+
+
+
+
+ +
+
+sarvey.utils.readPhasePatchwise(*, stack_obj, dataset_name, num_patches, cand_mask, point_id_img, logger)[source]
+

Read the phase from a file in a patchwise manner to reduce memory usage.

+
+

Parameters

+
+
stack_obj: BaseStack

instance of class BaseStack

+
+
dataset_name: str

name of the dataset to read (e.g. ‘ifgs’ or ‘phase’)

+
+
num_patches: int

number of patches to split the image into

+
+
cand_mask: np.ndarray

boolean mask of the selected pixels

+
+
point_id_img: np.ndarray

image with point IDs for each pixel

+
+
logger: Logger

logging handler

+
+
+
+
+

Returns

+
+
phase_points: np.ndarray

phase time series of the selected pixels

+
+
+
+
+ +
+
+sarvey.utils.selectBestPointsInGrid(*, box_list, quality, sel_min=True)[source]
+

Select the best point inside a grid.

+

If several pixels fulfill the criteria, the first one is selected.

+
+

Parameters

+
+
box_list: list

of tuples with the radar coordinates of the boxes

+
+
quality: np.ndarray

quality of the pixels

+
+
sel_min: bool

set to True if the minimum value shall be selected (default: True)

+
+
+
+
+

Returns

+
+
cand_mask_sparse: np.ndarray

boolean mask of the selected pixels

+
+
+
+
+ +
+
+sarvey.utils.setReferenceToPeakOfHistogram(*, phase, vel, num_bins=100)[source]
+

Set reference phase value to peak of the velocity histogram.

+

It assumes that zero velocity (i.e. stable areas) occurs most frequently.

+
+

Parameters

+
+
phase: np.ndarray

phase time series of the points

+
+
vel: np.ndarray

velocity of the points

+
+
num_bins: int

number of bins for the histogram (default: 100)

+
+
+
+
+

Returns

+
+
phase: np.ndarray

phase time series adjusted by the new reference phase

+
+
+
+
+ +
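A numpy sketch of the histogram-peak referencing idea with synthetic data; it approximates the concept and is not guaranteed to match the SARvey implementation in detail.

import numpy as np

rng = np.random.default_rng(0)
vel = np.concatenate([rng.normal(0.0, 0.001, 400),    # stable majority
                      rng.normal(0.02, 0.005, 100)])  # moving points
phase = rng.standard_normal((500, 40))                # num_points x num_images

hist, bin_edges = np.histogram(vel, bins=100)
peak = np.argmax(hist)
mask = (vel >= bin_edges[peak]) & (vel < bin_edges[peak + 1])
phase -= phase[mask].mean(axis=0)[np.newaxis, :]      # shift to the stable population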
+
+sarvey.utils.spatiotemporalConsistency(*, coord_utm, phase, wavelength, min_dist=15, max_dist=inf, knn=50)[source]
+

Spatiotemporal consistency proposed by Hanssen et al. (2008) and implemented in DePSI (van Leijen, 2014).

+
+

Parameters

+
+
coord_utm: np.ndarray

UTM coordinates of the points

+
+
phase: np.ndarray

phase time series of the points

+
+
wavelength: float

radar wavelength in [m]

+
+
min_dist: int

minimum distance to other points in [m] (default: 15)

+
+
max_dist: float

maximum distance to other points in [m] (default: np.inf)

+
+
knn: int

number of nearest neighbors to consider (default: 50)

+
+
+
+
+

Returns

+
+
stc: np.ndarray

spatiotemporal consistency of the points

+
+
+
+
+ +
+
+sarvey.utils.splitDatasetForParallelProcessing(*, num_samples, num_cores)[source]
+

Split the dataset into chunks of similar size for processing them in parallel.

+
+

Parameters

+
+
num_samples: int

number of samples to be split

+
+
num_cores: int

number of cores to split among

+
+
+
+
+

Returns

+
+
idx: list

list of sample ranges for each core

+
+
+
+
+ +
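A short call sketch:

from sarvey.utils import splitDatasetForParallelProcessing

# expected: four index ranges of roughly 250 samples each, one per core
idx = splitDatasetForParallelProcessing(num_samples=1000, num_cores=4)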
+
+sarvey.utils.splitImageIntoBoxesRngAz(*, length, width, num_box_az, num_box_rng)[source]
+

Split the image into several boxes.

+

(adapted from mintpy.ifgram_inversion.split2boxes)

+
+

Parameters

+
+
num_box_rng: int

Number of boxes in range direction

+
+
num_box_az: int

Number of boxes in azimuth direction

+
+
length: int

length of the image

+
+
width: int

width of the image

+
+
+
+
+

Returns

+
+
box_list: list

of tuple of 4 int (xmin, ymin, xmax, ymax)

+
+
num_box: int

number of boxes

+
+
+
+
+ +
+
+sarvey.utils.temporalAutoCorrelation(*, residuals, lag)[source]
+

Compute the temporal autocorrelation for given time lag from the residuals.

+
+

Parameters

+
+
residuals: np.ndarray

residual phase time series (dim: num_points x num_time_steps)

+
+
lag: int

time lag used for computing the correlation

+
+
+
+
+

Returns

+
+
auto_corr: np.ndarray

auto-correlation of each point (dim: num_points x lag)

+
+
+
+
+ +
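An illustrative numpy computation of lag-1 temporal autocorrelation for residual phase time series: a plain Pearson correlation between each series and its shifted copy, which may differ in detail from the SARvey implementation.

import numpy as np

rng = np.random.default_rng(0)
residuals = rng.standard_normal((100, 50))   # num_points x num_time_steps
lag = 1

x = residuals[:, :-lag]
y = residuals[:, lag:]
x_c = x - x.mean(axis=1, keepdims=True)
y_c = y - y.mean(axis=1, keepdims=True)
auto_corr = (x_c * y_c).sum(axis=1) / np.sqrt((x_c**2).sum(axis=1) * (y_c**2).sum(axis=1))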
+
+

sarvey.version module

+

Version module for SARvey.

+
+
+

sarvey.viewer module

+

Viewer Module for SARvey.

+
+
+class sarvey.viewer.TimeSeriesViewer(*, point_obj, vel_scale='mm', input_path, logger)[source]
+

Bases: object

+

TimeSeriesViewer.

+

Init.

+
+
+initFigureMap()[source]
+

InitFigureMap.

+
+ +
+
+initFigureTimeseries()[source]
+

InitFigureTimeseries.

+
+ +
+
+onClick(event)[source]
+

Event function to get y/x from button press.

+
+ +
+
+plotMap(val)[source]
+

Plot velocity map and time series.

+
+ +
+
+plotPointTimeseries(val)[source]
+

Plot_point_timeseries.

+
+ +
+
+updateButtonStatus(val)[source]
+

Set to true.

+
+ +
+
+updateReference()[source]
+

Change the phase of all points according to the new reference point.

+

Update the plot of the velocity and time series.

+
+ +
+ +
+
+sarvey.viewer.plotColoredPointNetwork(*, x, y, arcs, val, ax=None, linewidth=2, cmap_name='seismic', clim=None)[source]
+

Plot a network of points with colored arcs.

+
+

Parameters

+
+
x: np.ndarray

x-coordinates of the points (dim: no. points x 1)

+
+
y: np.ndarray

y-coordinates of the points (dim: no. points x 1)

+
+
arcs: np.ndarray

indices of the points to be connected (dim: no. arcs x 2)

+
+
val: np.ndarray

values for the color of the arcs (dim: no. arcs x 1)

+
+
ax: plt.Axes

axis for plotting (default: None)

+
+
linewidth: float

line width of the arcs (default: 2)

+
+
cmap_name: str

name of the colormap (default: “seismic”)

+
+
clim: tuple

color limits for the colormap (default: None)

+
+
+
+
+

Returns

+
+
ax: plt.Axes

current axis

+
+
cbar: plt.colorbar

current colorbar

+
+
+
+
+ +
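A self-contained plotting sketch with a random network; the arc values mimic coherence in [0, 1].

import numpy as np
import matplotlib.pyplot as plt

from sarvey.viewer import plotColoredPointNetwork

rng = np.random.default_rng(0)
x = rng.random(10)
y = rng.random(10)
arcs = rng.integers(0, 10, size=(15, 2))
gamma = rng.random(15)

ax, cbar = plotColoredPointNetwork(x=x, y=y, arcs=arcs, val=gamma, clim=(0.0, 1.0))
plt.show()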
+
+sarvey.viewer.plotGridFromBoxList(*, box_list, ax=None, edgecolor='k', linewidth=1)[source]
+

Plot a grid into an axis.

+
+

Parameters

+
+
box_list: list

boxes to be plotted. box_list can be created with ‘splitImageIntoBoxesRngAz’ or ‘splitImageIntoBoxes’

+
+
ax: plt.Axes

axis for plotting (default: None)

+
+
edgecolor: str

edge color for the boxes (default: “k”)

+
+
linewidth: float

line width for the boxes (default: 1)

+
+
+
+
+

Returns

+
+
ax: plt.Axes

current axis

+
+
+
+
+ +
+
+sarvey.viewer.plotIfgs(*, phase, coord, spatial_ref_idx=None, ttl=None, cmap='cmy')[source]
+

Plot one interferogram per subplot.

+
+

Parameters

+
+
phase: np.ndarray

phase per point and ifg, e.g. wrapped or unwrapped phase (dim: no. psPoints x no. ifgs)

+
+
coord: np.ndarray

coordinates of the psPoints, e.g. pixel or lat lon (dim: no. psPoints x 2)

+
+
spatial_ref_idx: int

index of the spatial reference point (default: None)

+
+
ttl: str

title for the figure (default: None)

+
+
cmap: str

colormap, use “cmy” for wrapped phase data (default) or “?” for unwrapped or residual phase

+
+
+
+
+ +
+
+sarvey.viewer.plotScatter(*, value, coord, bmap_obj=None, ttl=None, unit=None, s=5.0, cmap=<matplotlib.colors.LinearSegmentedColormap object>, symmetric=False, logger, **kwargs)[source]
+

Plot a scatter map for given value.

+
+

Parameters

+
+
value: np.ndarray

value to be plotted per point giving the colour of the point (dim: no. points x 1)

+
+
coord: np.ndarray

coordinates of the points, e.g. radar or lat lon (dim: no. points x 2). If bmapObj is given, +the coordinates must be radar coordinates!

+
+
bmap_obj: AmplitudeImage

instance of amplitudeImage for plotting background image (default: None)

+
+
ttl: str

title for the figure (default: None)

+
+
unit: str

unit as title for the colorbar axis (default: None)

+
+
s: float

size of the scatter points (default: 5.0)

+
+
cmap: str

colormap (default: “jet_r”)

+
+
symmetric: bool

plot symmetric colormap extend, i.e. abs(vmin) == abs(vmax) (default: False)

+
+
logger: Logger

logging Handler

+
+
kwargs: Any

additional keyword arguments for scatter plot

+
+
+
+
+

Returns

+
+
fig: plt.Figure

current figure,

+
+
ax: plt.Axes

current axis

+
+
cb: plt.colorbar

current colorbar

+
+
+
+
+ +
+
+

Module contents

+

Top-level package for SARvey.

+
+
+ + +
+
+
+
+ +
+
[13, "id103"], [13, "id105"], [13, "id107"], [13, "id109"], [13, "id111"], [13, "id113"], [13, "id114"], [13, "id116"], [13, "id118"], [13, "id120"], [13, "id122"], [13, "id124"], [13, "id127"], [13, "id129"], [13, "id131"], [13, "id133"], [13, "id135"], [13, "id137"], [13, "id139"], [13, "id141"], [13, "id143"], [13, "id145"], [13, "id147"], [13, "id149"], [13, "id151"], [13, "id152"]], "Phase Linking": [[10, "phase-linking"]], "Plot Time Series Results": [[4, "plot-time-series-results"]], "Preparation": [[10, null]], "Preprocessing": [[10, "preprocessing"]], "Processing overview": [[12, "processing-overview"]], "Processing steps for one-step unwrapping workflow": [[11, "processing-steps-for-one-step-unwrapping-workflow"]], "Processing steps for two-step unwrapping workflow": [[11, "processing-steps-for-two-step-unwrapping-workflow"]], "Python API reference": [[9, null]], "Raises": [[13, "raises"], [13, "id126"]], "Report Bugs": [[1, "report-bugs"]], "Return": [[13, "return"]], "Returns": [[13, "returns"], [13, "id2"], [13, "id4"], [13, "id10"], [13, "id12"], [13, "id14"], [13, "id16"], [13, "id18"], [13, "id20"], [13, "id22"], [13, "id35"], [13, "id38"], [13, "id51"], [13, "id53"], [13, "id55"], [13, "id57"], [13, "id59"], [13, "id61"], [13, "id63"], [13, "id65"], [13, "id68"], [13, "id70"], [13, "id72"], [13, "id75"], [13, "id79"], [13, "id82"], [13, "id84"], [13, "id86"], [13, "id88"], [13, "id90"], [13, "id92"], [13, "id94"], [13, "id96"], [13, "id98"], [13, "id100"], [13, "id102"], [13, "id104"], [13, "id106"], [13, "id108"], [13, "id110"], [13, "id112"], [13, "id115"], [13, "id117"], [13, "id119"], [13, "id121"], [13, "id123"], [13, "id125"], [13, "id128"], [13, "id130"], [13, "id132"], [13, "id134"], [13, "id136"], [13, "id138"], [13, "id140"], [13, "id142"], [13, "id144"], [13, "id146"], [13, "id148"], [13, "id150"], [13, "id153"]], "Run SARvey": [[4, "run-sarvey"]], "SARvey - survey with SAR": [[12, null]], "SARvey documentation": [[7, null]], "SNAP": [[10, "snap"]], "Sign your commits": [[1, "sign-your-commits"]], "Status": [[12, "status"]], "Step 0: Preparation": [[11, "step-0-preparation"]], "Step 1.1: Download the Data": [[3, "step-1-1-download-the-data"]], "Step 1.2: Activate SARvey and Change Directory": [[3, "step-1-2-activate-sarvey-and-change-directory"]], "Step 1.3: Create a Config File": [[3, "step-1-3-create-a-config-file"]], "Step 1.4: Modify the config.json File": [[3, "step-1-4-modify-the-config-json-file"]], "Step 1: Before Running SARvey": [[3, "step-1-before-running-sarvey"]], "Step 1: Consistency Check": [[11, "step-1-consistency-check"]], "Step 2.0: Run Step 0 of SARvey: Preparation": [[3, "step-2-0-run-step-0-of-sarvey-preparation"]], "Step 2.1: Run Step 1 of SARvey": [[3, "step-2-1-run-step-1-of-sarvey"]], "Step 2.2: Run Step 2 of SARvey": [[3, "step-2-2-run-step-2-of-sarvey"]], "Step 2.3: Run Step 3 of SARvey": [[3, "step-2-3-run-step-3-of-sarvey"]], "Step 2.4: Run Step 4 of SARvey": [[3, "step-2-4-run-step-4-of-sarvey"]], "Step 2: Running SARvey": [[3, "step-2-running-sarvey"]], "Step 2: Unwrapping": [[11, "step-2-unwrapping"]], "Step 3: Filtering": [[11, "step-3-filtering"]], "Step 3: Plot Time Series Results": [[3, "step-3-plot-time-series-results"]], "Step 4: Densification": [[11, "step-4-densification"]], "Step 4: Modify Config File and Rerun SARvey": [[3, "step-4-modify-config-file-and-rerun-sarvey"]], "Step 5: Export to GIS Format": [[3, "step-5-export-to-gis-format"]], "Step 6: Validate Your Results": [[3, "step-6-validate-your-results"]], "Submit 
Feedback": [[1, "submit-feedback"]], "Submodules": [[13, "submodules"]], "Subset Data": [[10, "subset-data"]], "Tips": [[1, "tips"]], "Tutorials:": [[2, null]], "Types of Contributions": [[1, "types-of-contributions"]], "Usage": [[14, null]], "Usage of the Python API": [[14, "usage-of-the-python-api"]], "Using Anaconda or Miniconda": [[8, "using-anaconda-or-miniconda"]], "Using Mamba (recommended)": [[8, "using-mamba-recommended"]], "Validate Your Results": [[4, "validate-your-results"]], "Windows using WSL": [[8, "windows-using-wsl"]], "Write Documentation": [[1, "write-documentation"]], "sarvey package": [[13, null]], "sarvey.coherence module": [[13, "module-sarvey.coherence"]], "sarvey.config module": [[13, "sarvey-config-module"]], "sarvey.console module": [[13, "module-sarvey.console"]], "sarvey.densification module": [[13, "module-sarvey.densification"]], "sarvey.filtering module": [[13, "module-sarvey.filtering"]], "sarvey.geolocation module": [[13, "module-sarvey.geolocation"]], "sarvey.ifg_network module": [[13, "module-sarvey.ifg_network"]], "sarvey.objects module": [[13, "module-sarvey.objects"]], "sarvey.osm_utils module": [[13, "module-sarvey.osm_utils"]], "sarvey.preparation module": [[13, "module-sarvey.preparation"]], "sarvey.processing module": [[13, "sarvey-processing-module"]], "sarvey.sarvey_export module": [[13, "sarvey-sarvey-export-module"]], "sarvey.sarvey_mask module": [[13, "module-sarvey.sarvey_mask"]], "sarvey.sarvey_mti module": [[13, "sarvey-sarvey-mti-module"]], "sarvey.sarvey_osm module": [[13, "module-sarvey.sarvey_osm"]], "sarvey.sarvey_plot module": [[13, "sarvey-sarvey-plot-module"]], "sarvey.triangulation module": [[13, "module-sarvey.triangulation"]], "sarvey.unwrapping module": [[13, "module-sarvey.unwrapping"]], "sarvey.utils module": [[13, "module-sarvey.utils"]], "sarvey.version module": [[13, "module-sarvey.version"]], "sarvey.viewer module": [[13, "module-sarvey.viewer"]]}, "docnames": ["authors", "contributing", "demo/demo_masjed_dam", "demo/demo_masjed_dam_detailed_guide", "demo/demo_masjed_dam_fast_track", "demo_datasets", "history", "index", "installation", "modules", "preparation", "processing", "readme", "sarvey", "usage"], "envversion": {"sphinx": 62, "sphinx.domains.c": 3, "sphinx.domains.changeset": 1, "sphinx.domains.citation": 1, "sphinx.domains.cpp": 9, "sphinx.domains.index": 1, "sphinx.domains.javascript": 3, "sphinx.domains.math": 2, "sphinx.domains.python": 4, "sphinx.domains.rst": 2, "sphinx.domains.std": 2, "sphinx.ext.intersphinx": 1, "sphinx.ext.todo": 2, "sphinx.ext.viewcode": 1}, "filenames": ["authors.rst", "contributing.rst", "demo/demo_masjed_dam.rst", "demo/demo_masjed_dam_detailed_guide.rst", "demo/demo_masjed_dam_fast_track.rst", "demo_datasets.rst", "history.rst", "index.rst", "installation.rst", "modules.rst", "preparation.rst", "processing.rst", "readme.rst", "sarvey.rst", "usage.rst"], "indexentries": {"addpointsfromobj() (sarvey.objects.points method)": [[13, "sarvey.objects.Points.addPointsFromObj", false]], "amplitudeimage (class in sarvey.objects)": [[13, "sarvey.objects.AmplitudeImage", false]], "basestack (class in sarvey.objects)": [[13, "sarvey.objects.BaseStack", false]], "calculategeolocationcorrection() (in module sarvey.geolocation)": [[13, "sarvey.geolocation.calculateGeolocationCorrection", false]], "checkifrequiredfilesexist() (in module sarvey.utils)": [[13, "sarvey.utils.checkIfRequiredFilesExist", false]], "close() (sarvey.objects.basestack method)": [[13, "sarvey.objects.BaseStack.close", 
false]], "computearcobservations() (sarvey.objects.network method)": [[13, "sarvey.objects.Network.computeArcObservations", false]], "computeavgcoherenceperpoint() (in module sarvey.unwrapping)": [[13, "sarvey.unwrapping.computeAvgCoherencePerPoint", false]], "computeifgs() (in module sarvey.coherence)": [[13, "sarvey.coherence.computeIfgs", false]], "computeifgsandtemporalcoherence() (in module sarvey.coherence)": [[13, "sarvey.coherence.computeIfgsAndTemporalCoherence", false]], "computelastroadpixel() (in module sarvey.sarvey_mask)": [[13, "sarvey.sarvey_mask.computeLastRoadPixel", false]], "computenumarcsperpoints() (in module sarvey.unwrapping)": [[13, "sarvey.unwrapping.computeNumArcsPerPoints", false]], "configure() (sarvey.ifg_network.delaunaynetwork method)": [[13, "sarvey.ifg_network.DelaunayNetwork.configure", false]], "configure() (sarvey.ifg_network.smallbaselinenetwork method)": [[13, "sarvey.ifg_network.SmallBaselineNetwork.configure", false]], "configure() (sarvey.ifg_network.smallbaselineyearlynetwork method)": [[13, "sarvey.ifg_network.SmallBaselineYearlyNetwork.configure", false]], "configure() (sarvey.ifg_network.smalltemporalbaselinesnetwork method)": [[13, "sarvey.ifg_network.SmallTemporalBaselinesNetwork.configure", false]], "configure() (sarvey.ifg_network.starnetwork method)": [[13, "sarvey.ifg_network.StarNetwork.configure", false]], "convertbboxtoblock() (in module sarvey.utils)": [[13, "sarvey.utils.convertBboxToBlock", false]], "converttoradarcoord() (in module sarvey.sarvey_mask)": [[13, "sarvey.sarvey_mask.convertToRadarCoord", false]], "converttoradarcoordpolygon() (in module sarvey.sarvey_mask)": [[13, "sarvey.sarvey_mask.convertToRadarCoordPolygon", false]], "coord_xy (sarvey.objects.points attribute)": [[13, "sarvey.objects.Points.coord_xy", false]], "coordinatesearch (class in sarvey.sarvey_mask)": [[13, "sarvey.sarvey_mask.CoordinateSearch", false]], "coordinatesutm (class in sarvey.objects)": [[13, "sarvey.objects.CoordinatesUTM", false]], "create_parser() (in module sarvey.sarvey_mask)": [[13, "sarvey.sarvey_mask.create_parser", false]], "create_parser() (in module sarvey.sarvey_osm)": [[13, "sarvey.sarvey_osm.create_parser", false]], "createarcsbetweenpoints() (in module sarvey.preparation)": [[13, "sarvey.preparation.createArcsBetweenPoints", false]], "createmask() (in module sarvey.sarvey_mask)": [[13, "sarvey.sarvey_mask.createMask", false]], "createmask() (sarvey.objects.points method)": [[13, "sarvey.objects.Points.createMask", false]], "createsearchtree() (sarvey.sarvey_mask.coordinatesearch method)": [[13, "sarvey.sarvey_mask.CoordinateSearch.createSearchTree", false]], "createspatialgrid() (in module sarvey.utils)": [[13, "sarvey.utils.createSpatialGrid", false]], "createtimemaskfromdates() (in module sarvey.preparation)": [[13, "sarvey.preparation.createTimeMaskFromDates", false]], "delaunaynetwork (class in sarvey.ifg_network)": [[13, "sarvey.ifg_network.DelaunayNetwork", false]], "densificationinitializer() (in module sarvey.densification)": [[13, "sarvey.densification.densificationInitializer", false]], "densifynetwork() (in module sarvey.densification)": [[13, "sarvey.densification.densifyNetwork", false]], "detectvalidareas() (in module sarvey.utils)": [[13, "sarvey.utils.detectValidAreas", false]], "downloadosm() (in module sarvey.sarvey_osm)": [[13, "sarvey.sarvey_osm.downloadOSM", false]], "estimateatmosphericphasescreen() (in module sarvey.filtering)": [[13, "sarvey.filtering.estimateAtmosphericPhaseScreen", false]], 
"estimateparameters() (in module sarvey.utils)": [[13, "sarvey.utils.estimateParameters", false]], "eucldist() (in module sarvey.sarvey_mask)": [[13, "sarvey.sarvey_mask.euclDist", false]], "file_path (sarvey.objects.points attribute)": [[13, "sarvey.objects.Points.file_path", false]], "findlastroadpixel() (in module sarvey.sarvey_mask)": [[13, "sarvey.sarvey_mask.findLastRoadPixel", false]], "findoptimum() (in module sarvey.unwrapping)": [[13, "sarvey.unwrapping.findOptimum", false]], "getarcsfromadjmat() (sarvey.triangulation.pointnetworktriangulation method)": [[13, "sarvey.triangulation.PointNetworkTriangulation.getArcsFromAdjMat", false]], "getdesignmatrix() (sarvey.ifg_network.ifgnetwork method)": [[13, "sarvey.ifg_network.IfgNetwork.getDesignMatrix", false]], "getheading() (in module sarvey.geolocation)": [[13, "sarvey.geolocation.getHeading", false]], "getmeandistancebetweenpixels() (sarvey.sarvey_mask.coordinatesearch method)": [[13, "sarvey.sarvey_mask.CoordinateSearch.getMeanDistanceBetweenPixels", false]], "getnearestneighbour() (sarvey.sarvey_mask.coordinatesearch method)": [[13, "sarvey.sarvey_mask.CoordinateSearch.getNearestNeighbour", false]], "getshape() (sarvey.objects.basestack method)": [[13, "sarvey.objects.BaseStack.getShape", false]], "getspatialextend() (in module sarvey.osm_utils)": [[13, "sarvey.osm_utils.getSpatialExtend", false]], "gradientsearchtemporalcoherence() (in module sarvey.unwrapping)": [[13, "sarvey.unwrapping.gradientSearchTemporalCoherence", false]], "gridsearchtemporalcoherence() (in module sarvey.unwrapping)": [[13, "sarvey.unwrapping.gridSearchTemporalCoherence", false]], "ifg_list (sarvey.ifg_network.ifgnetwork attribute)": [[13, "sarvey.ifg_network.IfgNetwork.ifg_list", false]], "ifgnetwork (class in sarvey.ifg_network)": [[13, "sarvey.ifg_network.IfgNetwork", false]], "initfiguremap() (sarvey.viewer.timeseriesviewer method)": [[13, "sarvey.viewer.TimeSeriesViewer.initFigureMap", false]], "initfiguretimeseries() (sarvey.viewer.timeseriesviewer method)": [[13, "sarvey.viewer.TimeSeriesViewer.initFigureTimeseries", false]], "invertifgnetwork() (in module sarvey.utils)": [[13, "sarvey.utils.invertIfgNetwork", false]], "isconnected() (sarvey.triangulation.pointnetworktriangulation method)": [[13, "sarvey.triangulation.PointNetworkTriangulation.isConnected", false]], "launchambiguityfunctionsearch() (in module sarvey.unwrapping)": [[13, "sarvey.unwrapping.launchAmbiguityFunctionSearch", false]], "launchconvolve2d() (in module sarvey.coherence)": [[13, "sarvey.coherence.launchConvolve2d", false]], "launchdensifynetworkconsistencycheck() (in module sarvey.densification)": [[13, "sarvey.densification.launchDensifyNetworkConsistencyCheck", false]], "launchinvertifgnetwork() (in module sarvey.utils)": [[13, "sarvey.utils.launchInvertIfgNetwork", false]], "launchspatialfiltering() (in module sarvey.filtering)": [[13, "sarvey.filtering.launchSpatialFiltering", false]], "launchspatialunwrapping() (in module sarvey.unwrapping)": [[13, "sarvey.unwrapping.launchSpatialUnwrapping", false]], "length (sarvey.objects.points attribute)": [[13, "sarvey.objects.Points.length", false]], "main() (in module sarvey.sarvey_mask)": [[13, "sarvey.sarvey_mask.main", false]], "main() (in module sarvey.sarvey_osm)": [[13, "sarvey.sarvey_osm.main", false]], "module": [[13, "module-sarvey", false], [13, "module-sarvey.coherence", false], [13, "module-sarvey.console", false], [13, "module-sarvey.densification", false], [13, "module-sarvey.filtering", false], [13, 
"module-sarvey.geolocation", false], [13, "module-sarvey.ifg_network", false], [13, "module-sarvey.objects", false], [13, "module-sarvey.osm_utils", false], [13, "module-sarvey.preparation", false], [13, "module-sarvey.sarvey_mask", false], [13, "module-sarvey.sarvey_osm", false], [13, "module-sarvey.triangulation", false], [13, "module-sarvey.unwrapping", false], [13, "module-sarvey.utils", false], [13, "module-sarvey.version", false], [13, "module-sarvey.viewer", false]], "network (class in sarvey.objects)": [[13, "sarvey.objects.Network", false]], "networkparameter (class in sarvey.objects)": [[13, "sarvey.objects.NetworkParameter", false]], "node (class in sarvey.sarvey_mask)": [[13, "sarvey.sarvey_mask.Node", false]], "num_points (sarvey.objects.points attribute)": [[13, "sarvey.objects.Points.num_points", false]], "objfunctemporalcoherence() (in module sarvey.unwrapping)": [[13, "sarvey.unwrapping.objFuncTemporalCoherence", false]], "onclick() (sarvey.viewer.timeseriesviewer method)": [[13, "sarvey.viewer.TimeSeriesViewer.onClick", false]], "onedimsearchtemporalcoherence() (in module sarvey.unwrapping)": [[13, "sarvey.unwrapping.oneDimSearchTemporalCoherence", false]], "open() (sarvey.ifg_network.ifgnetwork method)": [[13, "sarvey.ifg_network.IfgNetwork.open", false]], "open() (sarvey.objects.amplitudeimage method)": [[13, "sarvey.objects.AmplitudeImage.open", false]], "open() (sarvey.objects.coordinatesutm method)": [[13, "sarvey.objects.CoordinatesUTM.open", false]], "open() (sarvey.objects.network method)": [[13, "sarvey.objects.Network.open", false]], "open() (sarvey.objects.networkparameter method)": [[13, "sarvey.objects.NetworkParameter.open", false]], "open() (sarvey.objects.points method)": [[13, "sarvey.objects.Points.open", false]], "openexternaldata() (sarvey.objects.network method)": [[13, "sarvey.objects.Network.openExternalData", false]], "openexternaldata() (sarvey.objects.points method)": [[13, "sarvey.objects.Points.openExternalData", false]], "parameterbasednoisypointremoval() (in module sarvey.unwrapping)": [[13, "sarvey.unwrapping.parameterBasedNoisyPointRemoval", false]], "phase (sarvey.objects.points attribute)": [[13, "sarvey.objects.Points.phase", false]], "plot() (sarvey.ifg_network.ifgnetwork method)": [[13, "sarvey.ifg_network.IfgNetwork.plot", false]], "plot() (sarvey.objects.amplitudeimage method)": [[13, "sarvey.objects.AmplitudeImage.plot", false]], "plotcoloredpointnetwork() (in module sarvey.viewer)": [[13, "sarvey.viewer.plotColoredPointNetwork", false]], "plotgridfromboxlist() (in module sarvey.viewer)": [[13, "sarvey.viewer.plotGridFromBoxList", false]], "plotifgs() (in module sarvey.viewer)": [[13, "sarvey.viewer.plotIfgs", false]], "plotmap() (sarvey.viewer.timeseriesviewer method)": [[13, "sarvey.viewer.TimeSeriesViewer.plotMap", false]], "plotpointtimeseries() (sarvey.viewer.timeseriesviewer method)": [[13, "sarvey.viewer.TimeSeriesViewer.plotPointTimeseries", false]], "plotscatter() (in module sarvey.viewer)": [[13, "sarvey.viewer.plotScatter", false]], "point_id (sarvey.objects.points attribute)": [[13, "sarvey.objects.Points.point_id", false]], "pointnetworktriangulation (class in sarvey.triangulation)": [[13, "sarvey.triangulation.PointNetworkTriangulation", false]], "points (class in sarvey.objects)": [[13, "sarvey.objects.Points", false]], "predictphase() (in module sarvey.utils)": [[13, "sarvey.utils.predictPhase", false]], "predictphasecore() (in module sarvey.utils)": [[13, "sarvey.utils.predictPhaseCore", false]], 
"predictphasesingle() (in module sarvey.utils)": [[13, "sarvey.utils.predictPhaseSingle", false]], "prepare() (sarvey.objects.amplitudeimage method)": [[13, "sarvey.objects.AmplitudeImage.prepare", false]], "prepare() (sarvey.objects.coordinatesutm method)": [[13, "sarvey.objects.CoordinatesUTM.prepare", false]], "prepare() (sarvey.objects.networkparameter method)": [[13, "sarvey.objects.NetworkParameter.prepare", false]], "prepare() (sarvey.objects.points method)": [[13, "sarvey.objects.Points.prepare", false]], "preparedataset() (sarvey.objects.basestack method)": [[13, "sarvey.objects.BaseStack.prepareDataset", false]], "preparepatches() (in module sarvey.utils)": [[13, "sarvey.utils.preparePatches", false]], "printcurrentconfig() (in module sarvey.console)": [[13, "sarvey.console.printCurrentConfig", false]], "printstep() (in module sarvey.console)": [[13, "sarvey.console.printStep", false]], "read() (sarvey.objects.basestack method)": [[13, "sarvey.objects.BaseStack.read", false]], "readcoherencefrommiaplpy() (in module sarvey.preparation)": [[13, "sarvey.preparation.readCoherenceFromMiaplpy", false]], "readphasepatchwise() (in module sarvey.utils)": [[13, "sarvey.utils.readPhasePatchwise", false]], "readslcfrommiaplpy() (in module sarvey.preparation)": [[13, "sarvey.preparation.readSlcFromMiaplpy", false]], "removearcs() (sarvey.objects.network method)": [[13, "sarvey.objects.Network.removeArcs", false]], "removearcsbypointmask() (in module sarvey.unwrapping)": [[13, "sarvey.unwrapping.removeArcsByPointMask", false]], "removegrossoutliers() (in module sarvey.unwrapping)": [[13, "sarvey.unwrapping.removeGrossOutliers", false]], "removelongarcs() (sarvey.triangulation.pointnetworktriangulation method)": [[13, "sarvey.triangulation.PointNetworkTriangulation.removeLongArcs", false]], "removepoints() (sarvey.objects.points method)": [[13, "sarvey.objects.Points.removePoints", false]], "runosmquery() (in module sarvey.osm_utils)": [[13, "sarvey.osm_utils.runOsmQuery", false]], "runosmquerybridge() (in module sarvey.osm_utils)": [[13, "sarvey.osm_utils.runOsmQueryBridge", false]], "sarvey": [[13, "module-sarvey", false]], "sarvey.coherence": [[13, "module-sarvey.coherence", false]], "sarvey.console": [[13, "module-sarvey.console", false]], "sarvey.densification": [[13, "module-sarvey.densification", false]], "sarvey.filtering": [[13, "module-sarvey.filtering", false]], "sarvey.geolocation": [[13, "module-sarvey.geolocation", false]], "sarvey.ifg_network": [[13, "module-sarvey.ifg_network", false]], "sarvey.objects": [[13, "module-sarvey.objects", false]], "sarvey.osm_utils": [[13, "module-sarvey.osm_utils", false]], "sarvey.preparation": [[13, "module-sarvey.preparation", false]], "sarvey.sarvey_mask": [[13, "module-sarvey.sarvey_mask", false]], "sarvey.sarvey_osm": [[13, "module-sarvey.sarvey_osm", false]], "sarvey.triangulation": [[13, "module-sarvey.triangulation", false]], "sarvey.unwrapping": [[13, "module-sarvey.unwrapping", false]], "sarvey.utils": [[13, "module-sarvey.utils", false]], "sarvey.version": [[13, "module-sarvey.version", false]], "sarvey.viewer": [[13, "module-sarvey.viewer", false]], "savemask() (in module sarvey.sarvey_mask)": [[13, "sarvey.sarvey_mask.saveMask", false]], "selectbestpointsingrid() (in module sarvey.utils)": [[13, "sarvey.utils.selectBestPointsInGrid", false]], "selectpixels() (in module sarvey.preparation)": [[13, "sarvey.preparation.selectPixels", false]], "setreferencetopeakofhistogram() (in module sarvey.utils)": [[13, 
"sarvey.utils.setReferenceToPeakOfHistogram", false]], "showlogosarvey() (in module sarvey.console)": [[13, "sarvey.console.showLogoSARvey", false]], "simpleinterpolation() (in module sarvey.filtering)": [[13, "sarvey.filtering.simpleInterpolation", false]], "smallbaselinenetwork (class in sarvey.ifg_network)": [[13, "sarvey.ifg_network.SmallBaselineNetwork", false]], "smallbaselineyearlynetwork (class in sarvey.ifg_network)": [[13, "sarvey.ifg_network.SmallBaselineYearlyNetwork", false]], "smalltemporalbaselinesnetwork (class in sarvey.ifg_network)": [[13, "sarvey.ifg_network.SmallTemporalBaselinesNetwork", false]], "spatialparameterintegration() (in module sarvey.unwrapping)": [[13, "sarvey.unwrapping.spatialParameterIntegration", false]], "spatialparameterintegrationiterative() (in module sarvey.unwrapping)": [[13, "sarvey.unwrapping.spatialParameterIntegrationIterative", false]], "spatialunwrapping() (in module sarvey.unwrapping)": [[13, "sarvey.unwrapping.spatialUnwrapping", false]], "spatiotemporalconsistency() (in module sarvey.utils)": [[13, "sarvey.utils.spatiotemporalConsistency", false]], "splitdatasetforparallelprocessing() (in module sarvey.utils)": [[13, "sarvey.utils.splitDatasetForParallelProcessing", false]], "splitimageintoboxesrngaz() (in module sarvey.utils)": [[13, "sarvey.utils.splitImageIntoBoxesRngAz", false]], "starnetwork (class in sarvey.ifg_network)": [[13, "sarvey.ifg_network.StarNetwork", false]], "temporalautocorrelation() (in module sarvey.utils)": [[13, "sarvey.utils.temporalAutoCorrelation", false]], "temporalunwrapping() (in module sarvey.unwrapping)": [[13, "sarvey.unwrapping.temporalUnwrapping", false]], "times (sarvey.objects.points attribute)": [[13, "sarvey.objects.Points.times", false]], "timeseriesviewer (class in sarvey.viewer)": [[13, "sarvey.viewer.TimeSeriesViewer", false]], "triangulateglobal() (sarvey.triangulation.pointnetworktriangulation method)": [[13, "sarvey.triangulation.PointNetworkTriangulation.triangulateGlobal", false]], "triangulateknn() (sarvey.triangulation.pointnetworktriangulation method)": [[13, "sarvey.triangulation.PointNetworkTriangulation.triangulateKnn", false]], "updatebuttonstatus() (sarvey.viewer.timeseriesviewer method)": [[13, "sarvey.viewer.TimeSeriesViewer.updateButtonStatus", false]], "updatereference() (sarvey.viewer.timeseriesviewer method)": [[13, "sarvey.viewer.TimeSeriesViewer.updateReference", false]], "wavelength (sarvey.objects.points attribute)": [[13, "sarvey.objects.Points.wavelength", false]], "width (sarvey.objects.points attribute)": [[13, "sarvey.objects.Points.width", false]], "writetofile() (sarvey.ifg_network.ifgnetwork method)": [[13, "sarvey.ifg_network.IfgNetwork.writeToFile", false]], "writetofile() (sarvey.objects.basestack method)": [[13, "sarvey.objects.BaseStack.writeToFile", false]], "writetofile() (sarvey.objects.network method)": [[13, "sarvey.objects.Network.writeToFile", false]], "writetofile() (sarvey.objects.networkparameter method)": [[13, "sarvey.objects.NetworkParameter.writeToFile", false]], "writetofile() (sarvey.objects.points method)": [[13, "sarvey.objects.Points.writeToFile", false]], "writetofileblock() (sarvey.objects.basestack method)": [[13, "sarvey.objects.BaseStack.writeToFileBlock", false]]}, "objects": {"": [[13, 0, 0, "-", "sarvey"]], "sarvey": [[13, 0, 0, "-", "coherence"], [13, 0, 0, "-", "console"], [13, 0, 0, "-", "densification"], [13, 0, 0, "-", "filtering"], [13, 0, 0, "-", "geolocation"], [13, 0, 0, "-", "ifg_network"], [13, 0, 0, "-", "objects"], [13, 0, 
0, "-", "osm_utils"], [13, 0, 0, "-", "preparation"], [13, 0, 0, "-", "sarvey_mask"], [13, 0, 0, "-", "sarvey_osm"], [13, 0, 0, "-", "triangulation"], [13, 0, 0, "-", "unwrapping"], [13, 0, 0, "-", "utils"], [13, 0, 0, "-", "version"], [13, 0, 0, "-", "viewer"]], "sarvey.coherence": [[13, 1, 1, "", "computeIfgs"], [13, 1, 1, "", "computeIfgsAndTemporalCoherence"], [13, 1, 1, "", "launchConvolve2d"]], "sarvey.console": [[13, 1, 1, "", "printCurrentConfig"], [13, 1, 1, "", "printStep"], [13, 1, 1, "", "showLogoSARvey"]], "sarvey.densification": [[13, 1, 1, "", "densificationInitializer"], [13, 1, 1, "", "densifyNetwork"], [13, 1, 1, "", "launchDensifyNetworkConsistencyCheck"]], "sarvey.filtering": [[13, 1, 1, "", "estimateAtmosphericPhaseScreen"], [13, 1, 1, "", "launchSpatialFiltering"], [13, 1, 1, "", "simpleInterpolation"]], "sarvey.geolocation": [[13, 1, 1, "", "calculateGeolocationCorrection"], [13, 1, 1, "", "getHeading"]], "sarvey.ifg_network": [[13, 2, 1, "", "DelaunayNetwork"], [13, 2, 1, "", "IfgNetwork"], [13, 2, 1, "", "SmallBaselineNetwork"], [13, 2, 1, "", "SmallBaselineYearlyNetwork"], [13, 2, 1, "", "SmallTemporalBaselinesNetwork"], [13, 2, 1, "", "StarNetwork"]], "sarvey.ifg_network.DelaunayNetwork": [[13, 3, 1, "", "configure"]], "sarvey.ifg_network.IfgNetwork": [[13, 3, 1, "", "getDesignMatrix"], [13, 4, 1, "", "ifg_list"], [13, 3, 1, "", "open"], [13, 3, 1, "", "plot"], [13, 3, 1, "", "writeToFile"]], "sarvey.ifg_network.SmallBaselineNetwork": [[13, 3, 1, "", "configure"]], "sarvey.ifg_network.SmallBaselineYearlyNetwork": [[13, 3, 1, "", "configure"]], "sarvey.ifg_network.SmallTemporalBaselinesNetwork": [[13, 3, 1, "", "configure"]], "sarvey.ifg_network.StarNetwork": [[13, 3, 1, "", "configure"]], "sarvey.objects": [[13, 2, 1, "", "AmplitudeImage"], [13, 2, 1, "", "BaseStack"], [13, 2, 1, "", "CoordinatesUTM"], [13, 2, 1, "", "Network"], [13, 2, 1, "", "NetworkParameter"], [13, 2, 1, "", "Points"]], "sarvey.objects.AmplitudeImage": [[13, 3, 1, "", "open"], [13, 3, 1, "", "plot"], [13, 3, 1, "", "prepare"]], "sarvey.objects.BaseStack": [[13, 3, 1, "", "close"], [13, 3, 1, "", "getShape"], [13, 3, 1, "", "prepareDataset"], [13, 3, 1, "", "read"], [13, 3, 1, "", "writeToFile"], [13, 3, 1, "", "writeToFileBlock"]], "sarvey.objects.CoordinatesUTM": [[13, 3, 1, "", "open"], [13, 3, 1, "", "prepare"]], "sarvey.objects.Network": [[13, 3, 1, "", "computeArcObservations"], [13, 3, 1, "", "open"], [13, 3, 1, "", "openExternalData"], [13, 3, 1, "", "removeArcs"], [13, 3, 1, "", "writeToFile"]], "sarvey.objects.NetworkParameter": [[13, 3, 1, "", "open"], [13, 3, 1, "", "prepare"], [13, 3, 1, "", "writeToFile"]], "sarvey.objects.Points": [[13, 3, 1, "", "addPointsFromObj"], [13, 4, 1, "", "coord_xy"], [13, 3, 1, "", "createMask"], [13, 4, 1, "", "file_path"], [13, 4, 1, "", "length"], [13, 4, 1, "", "num_points"], [13, 3, 1, "", "open"], [13, 3, 1, "", "openExternalData"], [13, 4, 1, "", "phase"], [13, 4, 1, "", "point_id"], [13, 3, 1, "", "prepare"], [13, 3, 1, "", "removePoints"], [13, 4, 1, "", "times"], [13, 4, 1, "", "wavelength"], [13, 4, 1, "", "width"], [13, 3, 1, "", "writeToFile"]], "sarvey.osm_utils": [[13, 1, 1, "", "getSpatialExtend"], [13, 1, 1, "", "runOsmQuery"], [13, 1, 1, "", "runOsmQueryBridge"]], "sarvey.preparation": [[13, 1, 1, "", "createArcsBetweenPoints"], [13, 1, 1, "", "createTimeMaskFromDates"], [13, 1, 1, "", "readCoherenceFromMiaplpy"], [13, 1, 1, "", "readSlcFromMiaplpy"], [13, 1, 1, "", "selectPixels"]], "sarvey.sarvey_mask": [[13, 2, 1, "", 
"CoordinateSearch"], [13, 2, 1, "", "Node"], [13, 1, 1, "", "computeLastRoadPixel"], [13, 1, 1, "", "convertToRadarCoord"], [13, 1, 1, "", "convertToRadarCoordPolygon"], [13, 1, 1, "", "createMask"], [13, 1, 1, "", "create_parser"], [13, 1, 1, "", "euclDist"], [13, 1, 1, "", "findLastRoadPixel"], [13, 1, 1, "", "main"], [13, 1, 1, "", "saveMask"]], "sarvey.sarvey_mask.CoordinateSearch": [[13, 3, 1, "", "createSearchTree"], [13, 3, 1, "", "getMeanDistanceBetweenPixels"], [13, 3, 1, "", "getNearestNeighbour"]], "sarvey.sarvey_osm": [[13, 1, 1, "", "create_parser"], [13, 1, 1, "", "downloadOSM"], [13, 1, 1, "", "main"]], "sarvey.triangulation": [[13, 2, 1, "", "PointNetworkTriangulation"]], "sarvey.triangulation.PointNetworkTriangulation": [[13, 3, 1, "", "getArcsFromAdjMat"], [13, 3, 1, "", "isConnected"], [13, 3, 1, "", "removeLongArcs"], [13, 3, 1, "", "triangulateGlobal"], [13, 3, 1, "", "triangulateKnn"]], "sarvey.unwrapping": [[13, 1, 1, "", "computeAvgCoherencePerPoint"], [13, 1, 1, "", "computeNumArcsPerPoints"], [13, 1, 1, "", "findOptimum"], [13, 1, 1, "", "gradientSearchTemporalCoherence"], [13, 1, 1, "", "gridSearchTemporalCoherence"], [13, 1, 1, "", "launchAmbiguityFunctionSearch"], [13, 1, 1, "", "launchSpatialUnwrapping"], [13, 1, 1, "", "objFuncTemporalCoherence"], [13, 1, 1, "", "oneDimSearchTemporalCoherence"], [13, 1, 1, "", "parameterBasedNoisyPointRemoval"], [13, 1, 1, "", "removeArcsByPointMask"], [13, 1, 1, "", "removeGrossOutliers"], [13, 1, 1, "", "spatialParameterIntegration"], [13, 1, 1, "", "spatialParameterIntegrationIterative"], [13, 1, 1, "", "spatialUnwrapping"], [13, 1, 1, "", "temporalUnwrapping"]], "sarvey.utils": [[13, 1, 1, "", "checkIfRequiredFilesExist"], [13, 1, 1, "", "convertBboxToBlock"], [13, 1, 1, "", "createSpatialGrid"], [13, 1, 1, "", "detectValidAreas"], [13, 1, 1, "", "estimateParameters"], [13, 1, 1, "", "invertIfgNetwork"], [13, 1, 1, "", "launchInvertIfgNetwork"], [13, 1, 1, "", "predictPhase"], [13, 1, 1, "", "predictPhaseCore"], [13, 1, 1, "", "predictPhaseSingle"], [13, 1, 1, "", "preparePatches"], [13, 1, 1, "", "readPhasePatchwise"], [13, 1, 1, "", "selectBestPointsInGrid"], [13, 1, 1, "", "setReferenceToPeakOfHistogram"], [13, 1, 1, "", "spatiotemporalConsistency"], [13, 1, 1, "", "splitDatasetForParallelProcessing"], [13, 1, 1, "", "splitImageIntoBoxesRngAz"], [13, 1, 1, "", "temporalAutoCorrelation"]], "sarvey.viewer": [[13, 2, 1, "", "TimeSeriesViewer"], [13, 1, 1, "", "plotColoredPointNetwork"], [13, 1, 1, "", "plotGridFromBoxList"], [13, 1, 1, "", "plotIfgs"], [13, 1, 1, "", "plotScatter"]], "sarvey.viewer.TimeSeriesViewer": [[13, 3, 1, "", "initFigureMap"], [13, 3, 1, "", "initFigureTimeseries"], [13, 3, 1, "", "onClick"], [13, 3, 1, "", "plotMap"], [13, 3, 1, "", "plotPointTimeseries"], [13, 3, 1, "", "updateButtonStatus"], [13, 3, 1, "", "updateReference"]]}, "objnames": {"0": ["py", "module", "Python module"], "1": ["py", "function", "Python function"], "2": ["py", "class", "Python class"], "3": ["py", "method", "Python method"], "4": ["py", "attribute", "Python attribute"]}, "objtypes": {"0": "py:module", "1": "py:function", "2": "py:class", "3": "py:method", "4": "py:attribute"}, "terms": {"": [8, 10, 11, 13], "0": [4, 7, 10, 12, 13], "001": 13, "009": 2, "01": 2, "02": 13, "04": [2, 3], "05": [2, 3], "06": 3, "08": 7, "09": 2, "0_ifg": 3, "1": [2, 4, 7, 8, 12, 13], "10": [2, 3, 11, 12], "100": [2, 3, 10, 13], "1000": 10, "1016": [2, 11], "105291": 11, "11": [3, 11], "1109": 11, "1124": 11, "1137": 11, "12": [7, 13], 
"12189041": [3, 4], "12544131": 12, "13": 2, "137": 3, "138": 3, "139": 3, "143": 2, "15": 13, "16": 11, "171": 11, "1780": 11, "180": 13, "19": 3, "190": 13, "1_ifg": 3, "1d": 13, "1e": 13, "2": [8, 12, 13], "20": 11, "200": 3, "2001": 11, "2002": [2, 11], "2004": [11, 12], "2006": 11, "2007": [11, 13], "2008": 13, "2014": [11, 13], "2015": 2, "2017": 2, "2018": [2, 11], "2019": 11, "2020": 12, "2021": 1, "2022": 11, "2023": 11, "2024": [1, 3, 7, 11, 12], "2375": 11, "2383": 11, "26": 11, "261": 2, "273": 2, "28": 3, "2920536": 11, "2d": 13, "3": [1, 4, 8, 12, 13], "305": 3, "39": 11, "3d": 13, "4": [4, 8, 12, 13], "40": [3, 11], "4326": 13, "5": [12, 13, 14], "50": [3, 13], "500": 10, "5281": 12, "57": 11, "6": [1, 8, 13], "60": [3, 11], "64": 8, "698": 11, "7": [1, 3, 4], "709": 11, "8": [1, 3, 4, 11], "800": 10, "8350": 11, "8361": 11, "888351": 11, "9": [1, 3, 11], "A": [1, 3, 10, 11, 12, 14], "As": [3, 8, 11], "Be": 3, "By": 11, "FOR": 1, "For": [3, 4, 11, 12, 13], "If": [1, 3, 4, 8, 11, 12, 13], "In": [3, 4, 11, 12], "It": [1, 3, 11, 12, 13, 14], "Its": 11, "On": [8, 12], "One": [12, 13], "Or": [11, 13], "The": [1, 2, 3, 4, 5, 10, 11, 12, 13, 14], "Their": 11, "Then": 8, "There": [2, 11], "These": 4, "To": [1, 11, 13, 14], "_____": 3, "_______": 3, "_________": 3, "_t": 4, "ab": 13, "about": [1, 10, 12, 13], "abov": [3, 11], "absolut": [11, 13], "abstract": 13, "accord": 13, "acquisit": [11, 13], "activ": 8, "actual": 13, "ad": [6, 10, 11], "adapt": [6, 13], "add": [1, 13], "addit": [10, 13], "addition": 13, "addpointsfromobj": [9, 13], "adjac": 13, "adjust": [4, 11, 13], "adopt": 13, "advanc": [2, 3, 4], "after": [3, 10, 11, 12, 14], "afterward": [11, 13], "again": [3, 13], "aim": 12, "al": [2, 11, 13], "algorithm": [11, 12], "all": [1, 3, 4, 5, 6, 8, 10, 11, 12, 13, 14], "all_arc": 13, "all_weight": 13, "allow": [1, 6, 10, 13], "along": 1, "alreadi": [10, 13], "also": [1, 3, 4, 8, 10, 11, 12, 13], "altern": [8, 10, 11], "although": [3, 11], "alwai": [1, 8], "ambigu": 13, "amelung": 11, "among": [11, 13], "amplitud": [3, 4, 11, 13], "amplitudeimag": [9, 13], "an": [1, 3, 4, 8, 10, 11, 12, 13, 14], "analysi": [3, 4, 10, 11, 12], "analyz": 12, "andrea": [0, 1, 12], "angl": 13, "ani": [1, 3, 4, 8, 12, 13], "anoth": 13, "anyth": 1, "ap": [11, 13], "api": 7, "append": 13, "appl": 7, "appli": [11, 12, 13], "applic": [11, 12], "apply_aps_filt": 11, "apply_temporal_unwrap": [3, 11], "appreci": 1, "approach": 11, "approxim": [3, 11], "aps1": 13, "aps2": 13, "ar": [1, 2, 3, 4, 5, 8, 11, 12, 13, 14], "arbitrari": [11, 13], "arc": [11, 13], "arc_idx_rang": 13, "arc_unwrapping_coher": 11, "arcs_per_point": 13, "area": [4, 10, 11, 12, 13, 14], "arg": 13, "argument": [12, 13], "arm": 7, "arm64": 8, "around": 14, "arrai": 13, "articl": 1, "ascend": 13, "assess": 4, "assign": 13, "assist": 3, "associ": 3, "assum": [1, 11, 13], "ath": 13, "atmospher": [11, 12, 13], "atr": 13, "attribut": 13, "auto": 13, "auto_corr": 13, "autocorrel": [11, 13], "automat": [1, 8], "avaiabl": 13, "avail": [3, 5, 8, 14], "averag": 13, "avg_neighbour": 13, "avoid": 11, "ax": 13, "axi": 13, "azimuth": [3, 10, 13], "b": [1, 11, 13], "back": 11, "background": [3, 4, 13], "background_map": [3, 11], "barra": 11, "base": [3, 4, 8, 11, 12, 13], "baselin": [10, 11, 13], "basemap": 13, "basestack": [9, 13], "bassol": 11, "bbox": 13, "been": [3, 8, 10], "befor": [1, 6, 8], "beginn": 2, "behaviour": 11, "behind": 11, "being": 11, "below": [2, 11, 12, 13], "berardino": 11, "best": [1, 5, 11, 13], "better": 11, "between": [10, 11, 
13], "big": 7, "bin": 13, "binari": 13, "biouca": 11, "bit": 1, "block": 13, "blog": 1, "bmap_obj": 13, "bmapobj": 13, "book": 1, "bool": 13, "bool_plot": 13, "boolean": 13, "border": 13, "botei": 11, "both": [10, 11, 13, 14], "bottleneck": 11, "bound": [11, 13], "box": 13, "box_list": 13, "boykov": [11, 12], "boykov04": [1, 12], "branch": 1, "bridg": [13, 14], "bridge_highwai": 13, "bridge_railwai": 13, "brief": 10, "buffer": [13, 14], "bugfix": 1, "button": 13, "c": [1, 11, 12], "cageo": 11, "calcul": [3, 11, 13], "calculategeolocationcorrect": [9, 13], "call": [1, 3], "can": [1, 3, 4, 8, 10, 11, 12, 13, 14], "cand_mask": 13, "cand_mask_spars": 13, "candid": [11, 13], "cannot": 3, "carefulli": 3, "carri": 12, "case": [10, 11], "caus": 8, "caution": 10, "cautiou": [3, 4], "cb": 13, "cbar": 13, "cd": [1, 3, 4, 8, 12], "cell": 11, "centr": 12, "certain": [10, 11], "chang": [4, 6, 7, 11, 12, 13], "changelog": 7, "character": 2, "characterist": 11, "check": [1, 3, 6, 12, 13], "checkifrequiredfilesexist": [9, 13], "checkout": 1, "choic": 8, "choos": [3, 8], "chosen": 11, "chunk": 13, "ci": 6, "ci_dock": 8, "cite": [1, 7], "class": 13, "clear": 3, "click": [3, 4], "clim": 13, "clone": [1, 8], "close": [9, 10, 11, 13], "closest": [11, 13], "cm": 2, "cmap": 13, "cmap_nam": 13, "cmy": 13, "code": [1, 7, 8, 12], "coh": [4, 6], "coher": [3, 4, 9, 10, 11], "coherence_p1": 11, "coherence_p2": [3, 4, 11], "color": 13, "colorbar": 13, "colormap": 13, "colour": 13, "column": 13, "com": [1, 8], "combin": 6, "command": [3, 4, 7, 8, 11, 12], "comment": [6, 8], "commit": 7, "commnad": 3, "compar": [3, 4, 11], "comparison": [11, 12], "compat": 8, "complet": [10, 11], "complex": 11, "compon": [11, 12], "comprehens": [2, 3], "comput": [3, 11, 13], "computation": [10, 11], "computearcobserv": [9, 13], "computeavgcoherenceperpoint": [9, 13], "computeifg": [9, 13], "computeifgsandtemporalcoher": [9, 13], "computelastroadpixel": [9, 13], "computenumarcsperpoint": [9, 13], "concatenate_patch": 10, "conda": [3, 4, 8], "config": [6, 9, 10, 11, 12], "config_sect": 13, "config_section_default": 13, "configur": [3, 7, 9, 13], "connect": [11, 13], "consecut": [11, 13], "consid": [11, 13], "consist": [3, 12, 13], "consistency_check": 11, "consol": [3, 9], "consortium": 12, "construct": 2, "consum": [10, 11], "consumpt": 11, "contain": [1, 3, 10, 11, 12, 13, 14], "content": 9, "context": [1, 8], "continu": [6, 10, 12], "contribut": [7, 11, 12], "contributor": 7, "converg": 13, "convert": [10, 13], "convertbboxtoblock": [9, 13], "converttoradarcoord": [9, 13], "converttoradarcoordpolygon": [9, 13], "convex": 13, "convolut": 13, "cookiecutt": 12, "coord": 13, "coord_correct": 13, "coord_utm": 13, "coord_utm1": 13, "coord_utm2": 13, "coord_utm_img": 13, "coord_utmxi": 13, "coord_xi": [9, 13], "coordin": [10, 13, 14], "coordinates_utm": [3, 11, 13], "coordinatesearch": [9, 13], "coordinatesutm": [9, 13], "copi": [1, 3, 4], "copyright": 1, "core": [1, 3, 11, 12, 13], "coregist": [10, 12], "corner": 13, "correct": [10, 11, 12, 13], "correctli": [3, 4, 8], "correl": [11, 13], "correspond": [3, 4, 11, 13, 14], "could": [1, 11], "cover": 5, "coverag": 12, "creat": [1, 8, 11, 12, 13, 14], "create_pars": [9, 13], "createarcsbetweenpoint": [9, 13], "createmask": [9, 13], "createsearchtre": [9, 13], "createspatialgrid": [9, 13], "createtimemaskfromd": [9, 13], "creation": 8, "credit": [1, 7], "crippa": 11, "criteria": 13, "criterion": 13, "crop": 10, "crosetto": 11, "cross": [8, 12], "csearch": 13, "cubic": 13, "cueva": 11, "cur_nod": 
13, "current": [10, 11, 12, 13], "cut": [11, 12], "d": [10, 11], "dai": 13, "dam": [5, 7], "data": [2, 7, 11, 12, 13, 14], "databas": 13, "dataset": [3, 7, 10, 12, 13, 14], "dataset_nam": 13, "date": [2, 11, 13], "date_list": 13, "de": [0, 1, 8, 12], "decid": [3, 4, 11], "default": [3, 11, 12, 13], "defin": [11, 13], "definit": 13, "deform": [2, 3, 11], "delaunai": [3, 11, 13], "delaunaynetwork": [9, 13], "dem": [3, 4, 10, 11, 13], "dem_error_bound": 11, "demerr": 13, "demerr_bound": 13, "demerr_grid": 13, "demerr_p1": 13, "demerr_p2": 13, "demerr_rang": 13, "demo": [2, 7, 12], "demod_phase1": 13, "demodul": 13, "densif": 9, "densifi": [11, 13], "densificationiniti": [9, 13], "densifynetwork": [9, 13], "densiti": [3, 4], "depend": [8, 11, 13], "depict": 14, "depsi": 13, "depth": [11, 12], "deriv": [1, 11, 12, 13, 14], "descend": [2, 13], "descent": 11, "describ": [11, 12, 13], "descript": [1, 2, 11, 12, 14], "design": [11, 13], "design_mat": 13, "desir": [11, 12], "detail": [1, 2, 4, 5, 11, 12, 14], "detect": 13, "detectvalidarea": [9, 13], "devanth\u00e9ri": 11, "develop": [1, 7, 12], "dia": 11, "dialog": 1, "dict": 13, "dictionari": 13, "differ": [3, 4, 11, 12, 13], "differenti": 11, "digit": [1, 12], "dim": 13, "dimens": 13, "dimension": 13, "dinsar": 11, "direct": [2, 13], "directli": [3, 14], "directori": [4, 8, 10, 11, 12, 13], "discret": 11, "disk": 13, "displac": [1, 11, 12, 14], "displai": 11, "dist_thrsh": 13, "distanc": [11, 13], "distribut": [1, 10, 11, 12], "do": [3, 5, 8], "doc": [1, 12], "docstr": 1, "document": [3, 6, 8], "doi": [2, 11, 12], "domain": 11, "done": [1, 8, 13], "dostep": 10, "doubl": [11, 13], "download": [8, 12, 13, 14], "downloadosm": [9, 13], "draw": [3, 4, 10], "drawn": 10, "driven": 1, "dshape": 13, "dtype": 13, "dure": [11, 13], "e": [8, 11, 12, 13], "each": [3, 4, 11, 12, 13], "easier": 1, "echo": 8, "edg": 13, "edgecolor": 13, "editor": 8, "effect": [5, 11], "effici": 13, "either": [1, 11, 13], "emadali": 2, "embank": 2, "en": 1, "enabl": [11, 13], "enclos": 10, "encount": 3, "end": [2, 3, 11, 13], "end_dat": [3, 11], "energi": [11, 12], "engin": [2, 12], "engstruct": 2, "enhanc": 1, "ensur": [3, 4, 11, 13], "entri": 13, "env": 8, "environ": [3, 8], "environment_sarvei": 8, "epsg": 13, "equal": 13, "error": [3, 4, 11, 13], "esc": 10, "especi": 11, "estim": [3, 4, 11, 13], "estimate_atmospheric_phase_screen": 13, "estimate_ref_atmo": 13, "estimateatmosphericphasescreen": [9, 13], "estimateparamet": [9, 13], "et": [2, 11, 13], "etc": 13, "eucldist": [9, 13], "euclidean": 13, "even": 1, "event": 13, "everi": [1, 5, 13], "everyth": [3, 4], "exampl": [3, 4, 10, 11, 12, 14], "except": 12, "execut": [3, 10, 11], "exist": [6, 13], "expens": 11, "experiment": [11, 12], "explain": [1, 12], "explan": 4, "export": [8, 12, 14], "extend": [1, 11, 12, 13, 14], "extent": [13, 14], "extern": 13, "f": [3, 4, 8, 11, 12], "facilit": 12, "factor": 13, "fals": [11, 13], "fast": [2, 3], "fattahi": 11, "featur": 7, "feder": [1, 12], "fern": [1, 12], "fernlab": [0, 1, 12], "ferretti": 11, "fig": 13, "figur": [10, 13], "file": [1, 6, 7, 8, 10, 12, 13, 14], "file_path": [9, 13], "filenam": [11, 13], "filenotfounderror": 13, "fill": 2, "filter": 9, "filter_kernel": 13, "filter_window_s": [3, 11], "final": [3, 4, 11], "find": [3, 11, 12, 13], "findlastroadpixel": [9, 13], "findoptimum": [9, 13], "fine": [3, 10], "finish": 3, "first": [3, 4, 6, 11, 13, 14], "fit": [1, 11, 13], "five": [3, 11], "fj": 11, "flag": [3, 4, 11, 12, 13], "flake8": 1, "float": 13, "flow": [11, 12], 
"focus": 2, "follow": [1, 3, 4, 8, 10, 11, 12, 13, 14], "fork": 1, "form": 11, "format": [7, 12], "fornaro": 11, "found": [1, 3, 11, 12, 13], "found_nod": 13, "foundat": 1, "fraction": 13, "frame": 13, "free": 1, "frequent": 13, "fring": 3, "from": [1, 3, 4, 6, 8, 10, 11, 12, 13, 14], "fuer": [1, 12], "full": 10, "fullfil": 13, "fulli": 8, "function": [1, 10, 13], "fund": [1, 12], "further": [11, 13], "futur": [10, 12], "g": [3, 4, 11, 12, 13], "gamma": [2, 12, 13], "gamma_p2": 13, "gcc_linux": 8, "gdf_infra": 13, "gener": [1, 3, 4, 6, 11, 12, 13], "generate_mask": 10, "geodatafram": 13, "geodet": 11, "geograph": [13, 14], "geoinform": [1, 11, 12], "geoloc": 9, "geom": 14, "geom_fil": [13, 14], "geom_refer": 10, "geometri": [10, 12, 13, 14], "geometryradar": [3, 10, 12, 13, 14], "geopackag": 14, "geoscienc": [11, 12], "german": [1, 12], "get": [1, 13, 14], "getarcsfromadjmat": [9, 13], "getdesignmatrix": [9, 13], "gethead": [9, 13], "getmeandistancebetweenpixel": [9, 13], "getnearestneighbour": [9, 13], "getshap": [9, 13], "getspatialextend": [9, 13], "gfz": [0, 1, 12], "gi": 12, "git": [1, 8], "github": [6, 8], "githubusercont": 8, "gitlab": [1, 6, 8], "give": 13, "given": [1, 11, 13, 14], "global": 13, "gnss": 2, "gnu": [1, 12], "go": 12, "gonz\u00e1lez": 11, "good": 3, "gpd": 13, "gpkg": 13, "gpl": [1, 12], "gplv3": 12, "gradient": 11, "gradientsearchtemporalcoher": [9, 13], "graph": [11, 13], "greatli": 1, "grei": 14, "grid": [11, 13], "grid_siz": [11, 13], "griddata": 13, "gridsearchtemporalcoher": [9, 13], "gross": 13, "guid": [2, 5, 8], "guidelin": 7, "h": [0, 3, 8, 10, 11, 12], "h5": [3, 4, 10, 11, 12, 13, 14], "ha": [1, 3, 8, 10, 11, 12, 13], "haghighi": [0, 2, 11, 12], "haghshena": [11, 12], "handl": [7, 12, 13], "handler": 13, "hannov": [0, 1, 8, 12], "hanssen": 13, "have": [1, 2, 3, 8, 10, 11, 13], "hdf5": 13, "head": 13, "heading_angl": 13, "heavi": 10, "height": 10, "help": [1, 3, 5, 8], "here": [1, 11, 12, 14], "hgt": 10, "high": [2, 11, 13], "higher": 3, "highest": [11, 13], "highli": 11, "highwai": [13, 14], "hint": 13, "histogram": [11, 13], "histori": 7, "hold": 10, "holstein": [1, 12], "hooper": 13, "hope": 1, "how": [3, 4, 5, 7, 10, 11, 14], "howev": [3, 4, 11], "html": 12, "http": [1, 3, 4, 8, 12], "hull": 13, "hw": 3, "hypothesi": 13, "i": [1, 2, 3, 4, 8, 10, 11, 12, 13, 14], "iarg": 13, "id": [1, 13], "idea": [11, 13], "identifi": 11, "idx": 13, "idx_rang": 13, "ieee": [11, 12], "ifg": [3, 13], "ifg_arrai": 13, "ifg_list": [9, 13], "ifg_net_obj": 13, "ifg_network": [3, 9, 11], "ifg_network_typ": [3, 11], "ifg_spac": 13, "ifg_stack": [3, 11], "ifgnetwork": [9, 13], "ifgram_invers": 13, "ilp": 13, "imag": [2, 3, 10, 11, 13, 14], "imageri": 2, "img": 13, "img_np": 13, "impact": 11, "implement": [11, 12, 13], "impli": 1, "import": [1, 14], "improv": [4, 6], "incid": 13, "inclu": 10, "includ": [1, 3, 4, 10, 11], "incoher": 13, "increas": [3, 4, 11], "independ": 11, "index": [7, 13], "indic": [2, 3, 12, 13], "individu": 4, "inf": 13, "info": [3, 10], "inform": [10, 11, 12, 13], "infrastructur": [11, 12, 13], "init": 13, "initfiguremap": [9, 13], "initfiguretimeseri": [9, 13], "initi": 13, "innov": 12, "input": [3, 10, 11, 12, 13, 14], "input_fil": [13, 14], "input_path": [3, 13], "inputs_crop": 10, "insar": [1, 2, 3, 4, 7, 10, 12, 14], "insarlab": 8, "insid": [3, 13], "instal": [1, 3, 7, 12], "instanc": [4, 13], "institut": 12, "instruct": [3, 4, 5, 8, 10, 12], "int": 13, "integ": 13, "integr": [11, 12, 13], "intellig": [11, 12], "interest": [11, 13, 14], "interfac": [11, 
13], "interferogram": [3, 11, 13], "interferometr": [11, 13], "interferometri": 11, "intermedi": [6, 13], "interp_method": 13, "interpol": [11, 13], "interpolation_method": 11, "invers": 13, "invert": [10, 11, 13], "inverted_crop": 10, "inverted_path": 11, "invertifgnetwork": [9, 13], "investig": 2, "ipi": [0, 1, 8, 12], "iran": 2, "isc": 12, "isce2": 8, "isce_processed_data": 10, "isconnect": [9, 13], "issu": [1, 3], "iter": 13, "its": [3, 4], "itself": [12, 13], "iw": 10, "j": [2, 11], "jet_r": 13, "jj": 11, "jm": 11, "join": 11, "joint": 11, "jointli": 11, "journal": [11, 12], "json": [4, 6, 11, 12], "just": 1, "k": [11, 13], "karun": 2, "kd": 13, "kdtree": 13, "keep": [1, 13], "keep_id": 13, "kei": 10, "kept": 13, "kernel": 13, "keyword": 13, "knn": 13, "known": 13, "kolmogorov": [1, 11, 12], "krige": 13, "kwarg": 13, "l": 2, "lab": [1, 12], "lag": 13, "lanari": 11, "landesamt": [1, 12], "landesbetrieb": [1, 12], "larg": [10, 11], "larger": 13, "last": [4, 13], "lat": [10, 13], "later": [1, 10, 11], "latest": [8, 12], "latitud": 13, "launch": 13, "launch_spatial_filt": 13, "launchambiguityfunctionsearch": [9, 13], "launchconvolve2d": [9, 13], "launchdensifynetworkconsistencycheck": [9, 13], "launchinvertifgnetwork": [9, 13], "launchspatialfilt": [9, 13], "launchspatialunwrap": [9, 13], "lead": [3, 4, 7], "learn": [3, 5], "least": [11, 13], "left": [10, 13], "leibniz": 12, "leijen": [11, 13], "length": [9, 13], "less": [11, 13], "lesser": 1, "level": [11, 13], "librari": [1, 12], "licens": 7, "like": [3, 13], "limit": [10, 11, 13], "line": [2, 3, 7, 8, 11, 12, 13], "linear": [11, 13], "linearsegmentedcolormap": 13, "linestr": 14, "linewidth": 13, "link": [2, 3, 4, 11, 12, 13], "lint": 1, "linux": [3, 7, 12], "list": [1, 3, 13], "literatur": 7, "littl": 1, "ll_corner_wg": 13, "lo": 10, "load": [7, 11, 13], "load_data": [10, 12], "loc_inc": 13, "local": [1, 13], "locat": [11, 13, 14], "log": [3, 11, 13], "logger": 13, "logo": 13, "lon": [10, 13], "long": 13, "longer": 13, "longitud": 13, "look": [1, 3, 10, 11], "low": [3, 4, 11, 13], "lower": [3, 13], "lowest": 11, "m": [1, 2, 8, 11, 12, 13], "m2": 7, "mac": 8, "machin": [11, 12], "maco": [3, 7, 12], "magnitud": 11, "mahdi": 12, "mahmud": [0, 12], "mai": [3, 4], "main": [8, 9, 12, 13], "major": 12, "make": [1, 10, 11], "mallorqui": 11, "mani": [1, 11, 13], "manner": 13, "map": [10, 13], "masj": [5, 7], "mask": [11, 12, 13, 14], "mask_p1_fil": 11, "mask_p2_fil": 11, "mask_phase_linking_fil": 11, "mask_ps_fil": 11, "mask_railwai": 14, "mask_valid_area": 13, "maskp": [10, 11], "maskroad": 13, "matplotlib": 13, "matrix": 13, "max": [11, 12], "max_arc_length": [11, 13], "max_dist": 13, "max_dist_p1": 13, "max_distance_to_p1": 11, "max_rm_fract": 13, "max_tbas": [3, 13], "max_temporal_autocorrel": 11, "maxim": [11, 13], "maximum": [2, 13], "mean": [3, 11, 13], "mean_amp_img": 13, "mean_gamma_point": 13, "meant": 11, "measur": [2, 3], "meet": 1, "memori": [11, 13], "merchant": 1, "mere": 11, "merg": [7, 8, 10], "merge_request": 1, "messag": [3, 8, 13], "metadata": 13, "method": [8, 11, 12, 13], "methodolog": 12, "methodologi": 12, "mh": 2, "miaplpi": [7, 8, 11, 12, 13], "miaplpy_path": 8, "miaplpy_template_fil": 10, "miaplpyapp": 10, "might": [1, 10], "min": [11, 12], "min_dist": 13, "min_num_arc": [11, 13], "minim": [2, 11, 12, 13, 14], "minimum": 13, "ministri": [1, 12], "minor": 12, "mintpi": [10, 13], "mirzae": 11, "miss": 13, "mkdir": 8, "mkvirtualenv": 1, "mm": 13, "mode": 13, "model": [11, 13], "modifi": [1, 4, 10, 12], "modul": [7, 9], 
"monitor": [11, 12], "monserrat": 11, "more": [1, 4, 10, 11, 12], "moreov": 11, "most": [8, 13], "motagh": [2, 11, 12], "motion": 13, "move": 10, "mr\u00f3z": 11, "mti": [3, 14], "mtinsar": 10, "much": [3, 4], "multi": 14, "multiprocess": 13, "multitempor": [1, 3, 4, 7], "must": [11, 13], "my_mask": 14, "my_shapefil": 14, "n": [3, 8, 11], "name": [1, 3, 4, 6, 8, 11, 13], "narrow": 1, "navig": 8, "ncpu": 3, "ndarrai": 13, "nearest": [11, 13], "necessarili": 13, "need": [3, 11, 13], "neighbor": [11, 13], "neighbour": 13, "neighbourhood": 11, "net_obj": 13, "net_par_obj": 13, "network": [3, 9, 11, 13], "networkparamet": [9, 13], "new": [1, 3, 8, 10, 11, 13], "new_coord_xi": 13, "new_lat": 13, "new_lon": 13, "new_num_point": 13, "new_phas": 13, "new_point_id": 13, "newer": [3, 4], "next": 3, "node": [9, 13, 14], "node1": 13, "node2": 13, "node_idx": 13, "nois": 11, "noisi": [3, 4], "non": 11, "none": [3, 13], "normal": 3, "note": [1, 3, 8, 11], "now": [1, 3], "np": 13, "nproc": 3, "num_bin": 13, "num_box": 13, "num_box_az": 13, "num_box_rng": 13, "num_conn_p1": 13, "num_connections_to_p1": 11, "num_cor": [3, 4, 11, 13], "num_ifg": [3, 13], "num_imag": 13, "num_link": 13, "num_nearest_neighbour": 11, "num_optimization_sampl": 11, "num_patch": [3, 11, 13], "num_point": [9, 13], "num_points1": 13, "num_points2": 13, "num_points_p1": 13, "num_points_p2": 13, "num_points_remov": 13, "num_sampl": 13, "num_sibl": 11, "num_slc": 13, "num_tim": 13, "num_time_step": 13, "number": [2, 3, 11, 12, 13], "o": [3, 4, 10, 11, 12, 14], "obj": 13, "object": 9, "objfunctemporalcoher": [9, 13], "obs_phas": 13, "observ": 13, "occur": 13, "odd": 13, "off": 1, "offer": 5, "offici": 1, "offset": 13, "omega": 13, "onclick": [9, 13], "one": [2, 3, 6, 7, 13], "onedimsearchtemporalcoher": [9, 13], "onli": [1, 3, 10, 11, 12, 13], "only_vel": 13, "onward": 8, "open": [1, 2, 3, 8, 9, 12, 13], "openexternaldata": [9, 13], "openstreetmap": [13, 14], "oper": 1, "opt_val": 13, "optim": 13, "option": [1, 7, 8, 13, 14], "orbit": 2, "orbit_direct": 13, "order": [3, 4, 6, 11, 13], "org": [1, 3, 4, 12], "origin": 1, "osm": [12, 13], "osm_util": 9, "other": [1, 3, 4, 8, 11, 12, 13, 14], "other_file_path": 13, "our": 1, "out": [8, 12], "out_file_nam": [13, 14], "outlier": [11, 13], "output": [3, 10, 11, 12, 13, 14], "output_path": 3, "outsid": 13, "over": 13, "overpi": 13, "overview": 7, "p": [3, 4, 8, 11, 13], "p1_ap": [3, 11], "p1_ifg_t": 11, "p1_ifg_unw": [3, 11], "p1_ifg_wr": [3, 11], "p1_t": 3, "p1_ts_filt": [3, 11], "p2_coh": 6, "p2_coh70_t": 3, "p2_coh80": 12, "p2_coh80_ap": [3, 11], "p2_coh80_ifg_unw": 11, "p2_coh80_ifg_wr": [3, 11], "p2_coh80_t": [3, 4, 11, 12], "p2_cohxx_ap": 11, "p2_cohxx_ifg_unw": 11, "p2_cohxx_ifg_wr": 11, "p2_cohxx_t": 11, "p_mask": 13, "packag": [1, 7, 8, 9, 12], "page": 7, "pami": 12, "paper": [2, 11, 12], "parallel": [11, 13], "paramet": [3, 4, 6, 10, 11, 12], "parameterbasednoisypointremov": [9, 13], "part": [1, 13], "particular": 1, "particularli": 10, "pass": [1, 11, 13], "patch": [11, 12, 13], "patchwis": 13, "path": [8, 11, 12, 13, 14], "path_geom": 13, "path_ifg": 13, "path_slc": 13, "path_temp_coh": 13, "path_to_fil": 13, "pattern": [11, 12], "pbase": 13, "peak": [11, 13], "per": 13, "perform": [5, 10, 11, 12, 13], "perman": 11, "perpendicular": [11, 13], "persist": [3, 11], "pfg": [11, 12], "phase": [9, 11, 13], "phase_link": [10, 11], "phase_point": 13, "phase_seri": [10, 13], "phase_t": 13, "phd": 11, "photogrammetri": [11, 12], "pi": 13, "pic": [3, 4], "pie": [3, 4, 7], "pip": [1, 8], 
"piter": [0, 1, 11, 12], "pixel": [3, 11, 13, 14], "place": 11, "placehold": 11, "plan": [10, 12], "platform": [8, 12], "pleas": [1, 4, 8, 10, 12], "plot": [9, 12, 13, 14], "plot_point_timeseri": 13, "plotcoloredpointnetwork": [9, 13], "plotgridfromboxlist": [9, 13], "plotifg": [9, 13], "plotmap": [9, 13], "plotpointtimeseri": [9, 13], "plotscatt": [9, 13], "plt": 13, "plugin": [3, 4], "png": 3, "point": [3, 4, 6, 9, 10, 11, 13], "point1_obj": 13, "point2_obj": 13, "point_id": [9, 13], "point_id_img": 13, "point_network": [3, 11], "point_network_paramet": [3, 11], "point_obj": 13, "pointnetworktriangul": [9, 13], "pointsparamet": 13, "polygon": [10, 13, 14], "polylin": 13, "poor": [3, 4], "possibl": [1, 13], "post": [1, 2], "potenti": [3, 4], "potsdam": [0, 1, 12], "practic": [3, 5], "prati": 11, "pred_phas": 13, "pred_phase_demerr": 13, "pred_phase_vel": 13, "predict": 13, "predictphas": [9, 13], "predictphasecor": [9, 13], "predictphasesingl": [9, 13], "prefer": [4, 8], "prepar": [4, 7, 9, 12], "preparedataset": [9, 13], "preparepatch": [9, 13], "preprocess": [7, 12, 13], "press": [10, 13], "prev_nod": 13, "previou": [2, 13], "principl": 12, "print": 13, "print_msg": 13, "printcurrentconfig": [9, 13], "printstep": [9, 13], "problem": 8, "proce": [3, 4], "procedur": 11, "process": [3, 4, 5, 6, 7, 9, 10, 14], "processor": [2, 10, 12], "product": [3, 4, 10], "program": 1, "progress": 10, "project": [1, 12, 14], "projekt": [1, 8], "projektpag": 12, "prompt": 3, "propos": [1, 13], "protocol": 12, "provid": [2, 3, 4, 5, 8, 11], "psnetwork": 13, "pspoint": 13, "public": [1, 12], "publish": 1, "pull": 1, "puma": [3, 12, 13], "purpos": [1, 11, 12], "push": 1, "put": 1, "py": [1, 3, 10], "pycharm": 1, "pymaxflow": [1, 12], "pypackag": 12, "pytest": [1, 12], "python": [1, 7, 8, 12], "pythonpath": 8, "qgi": [3, 4, 12], "qualiti": [3, 4, 11, 13], "quality_thrsh": 13, "queri": 13, "r": 11, "rad": 13, "radar": [2, 13, 14], "radian": 13, "railwai": [13, 14], "rang": [3, 4, 10, 13], "rate": 2, "raw": 8, "rdr": 10, "re": [1, 11], "read": [3, 9, 11, 13, 14], "readcoherencefrommiaplpi": [9, 13], "readm": 1, "readphasepatchwis": [9, 13], "readslcfrommiaplpi": [9, 13], "reason": [3, 4], "receiv": [1, 11], "recent": [8, 12], "recommend": 11, "record": [3, 4], "recurs": 13, "redistribut": 1, "reduc": [3, 4, 11, 13], "redund": 11, "ref_atmo": 13, "ref_idx": 13, "refer": [3, 4, 7, 10, 11, 13], "referenc": 11, "refin": 11, "regular": 11, "rel": [3, 11], "relat": [10, 11, 12, 13], "releas": [6, 8], "rememb": 1, "remot": [11, 12], "remov": [11, 13], "removearc": [9, 13], "removearcsbypointmask": [9, 13], "removegrossoutli": [9, 13], "removelongarc": [9, 13], "removepoint": [9, 13], "repo": 1, "report": 12, "repositori": [7, 8], "reproduc": 1, "request": 7, "requir": [1, 8, 10, 11, 12, 13, 14], "required_fil": 13, "rerun": 4, "res_tol": 13, "resampl": 11, "research": [1, 12], "residu": [11, 13], "resolut": 2, "resolv": 8, "respect": 11, "rest": 13, "restor": 11, "restrict": 13, "result": [1, 6, 10, 11, 12, 13, 14], "result_date_list": 13, "results_dir": 10, "retriev": 11, "review": [11, 12], "right": [3, 4, 13], "river": 2, "rm": 8, "rmse": 13, "rmse_thrsh": 13, "road": 13, "rocca": 11, "rock": 2, "roipoli": 10, "rst": 1, "run": [1, 8, 10, 11, 12, 13, 14], "runosmqueri": [9, 13], "runosmquerybridg": [9, 13], "same": [8, 10, 11, 13], "sampl": 13, "sansosti": 11, "sar": [7, 11, 13, 14], "sar4infra": [1, 8, 12], "sarvei": [1, 5, 8, 9, 10, 11, 14], "sarvey_export": [3, 4, 9, 12, 14], 
"sarvey_input_data_masjed_soleyman_dam_s1_dsc_2015_2018": [3, 4], "sarvey_mask": [9, 12, 14], "sarvey_mti": 9, "sarvey_osm": [9, 12, 14], "sarvey_plot": [3, 4, 9, 12, 14], "satellit": 13, "satisfi": 3, "save": [13, 14], "savemask": [9, 13], "sb": [3, 11], "sba": 3, "scalar": 13, "scale": 13, "scale_demerr": 13, "scale_vel": 13, "scatter": [10, 11, 13], "scheme": 12, "schleswig": [1, 12], "scienc": [11, 12], "scientif": 12, "scipi": 13, "scm": 1, "scope": 1, "screen": [11, 13], "script": 13, "search": [7, 11, 13, 14], "second": [3, 4, 6, 11, 13], "secondari": 13, "section": [3, 4, 6, 11, 12, 13], "sed": 8, "see": [1, 3, 4, 12], "seismic": 13, "sel_min": 13, "select": [10, 11, 13], "selectbestpointsingrid": [9, 13], "selected_pixels_temp_coh_0": 3, "selection_method": 13, "selectpixel": [9, 13], "semant": 12, "send": 1, "sens": [11, 12], "sensor": 2, "sentinel": [2, 11, 12], "separ": 3, "septemb": 12, "seri": [1, 11, 12, 13, 14], "serv": 5, "set": [1, 4, 6, 8, 11, 13], "setreferencetopeakofhistogram": [9, 13], "settlement": 2, "setup": 1, "sever": [5, 13], "sh": 8, "shall": 13, "shape": 13, "shapefil": [3, 4, 12, 13, 14], "share": 3, "shortest": 13, "should": [1, 3, 8, 10, 11, 13], "showlogosarvei": [9, 13], "shown": [2, 8, 13], "shp": [3, 4, 12, 13, 14], "sibl": 11, "sight": 2, "silicon": 7, "similar": [11, 13], "similarli": 11, "simpl": 13, "simpleinterpol": [9, 13], "sinc": 11, "singl": [10, 11, 13], "size": [11, 13], "skip": [8, 11], "slant": 13, "slant_rang": 13, "slc": [2, 10, 11, 12, 13, 14], "slc_stack_obj": 13, "slcstack": [3, 10, 11, 12, 13], "slightli": [3, 4], "small": [10, 11, 12, 13], "smallbaselin": 13, "smallbaselinenetwork": [9, 13], "smallbaselineyearlynetwork": [9, 13], "smalltemporalbaselinesnetwork": [9, 13], "snap": 12, "snapshot": 4, "softwar": [1, 3, 4, 5, 8, 12, 13, 14], "soleyman": [5, 7], "solut": 13, "solv": 13, "sourc": [7, 8, 12, 13], "source_sarvei": 8, "southwest": 2, "space": 13, "span": 11, "spars": [11, 13], "spatial": [11, 13, 14], "spatial_ref_id": 13, "spatial_ref_idx": 13, "spatial_unwrapping_method": [3, 11], "spatialparameterintegr": [9, 13], "spatialparameterintegrationit": [9, 13], "spatialunwrap": [9, 13], "spatiotempor": 13, "spatiotemporalconsist": [9, 13], "specif": 5, "specifi": [3, 4, 11, 12, 13, 14], "split": [11, 13], "split2box": 13, "splitdatasetforparallelprocess": [9, 13], "splitimageintobox": 13, "splitimageintoboxesrngaz": [9, 13], "spotlight": 2, "squar": [11, 13], "src": 8, "stabl": [8, 11, 13], "stack": [2, 4, 10, 11, 12, 13, 14], "stack_obj": 13, "stamp": 13, "star": [11, 13], "starnetwork": [9, 13], "start": [2, 3, 10, 11, 13], "start_dat": [3, 11, 13], "statu": 7, "stb": 11, "stb_yearli": 11, "stc": 13, "step": [1, 4, 7, 12, 13, 14], "step_0_amplitude_imag": 3, "step_0_interferogram_network": 3, "step_0_temporal_phase_coher": 3, "step_1_arc_coher": 3, "step_1_arc_coherence_reduc": 3, "step_1_mask_p1": 3, "step_1_rmse_dem_error_0th_it": 3, "step_1_rmse_vel_0th_it": 3, "step_2_estimation_dem_error": 3, "step_2_estimation_veloc": 3, "step_3_mask_p2_coh80": 3, "step_3_stable_point": 3, "step_3_temporal_autocorrel": 3, "step_dict": 13, "still": 13, "stop": 13, "stop_dat": 13, "store": [3, 4, 11, 12, 13], "str": 13, "strassenbau": [1, 12], "strategi": [5, 11], "strawberri": [3, 4, 7], "street": 13, "structur": [2, 10, 12], "studi": [11, 12], "submodul": 9, "subplot": 13, "subsequ": 11, "subset": [1, 11, 13], "subsystem": [8, 12], "sum": 13, "sun\u00e9": 11, "supervis": 12, "support": [1, 10, 11, 12, 14], "suppos": 11, "sure": [1, 10], 
"surfac": 11, "survei": 7, "symmetr": 13, "sysctl": 3, "system": 1, "t": [3, 4, 12], "tabl": 2, "tag": 1, "take": 11, "tbase": 13, "technologi": 12, "temp_coh": 13, "templat": [10, 12], "tempor": [3, 4, 10, 11, 13, 14], "temporal_coher": [3, 10, 11, 13], "temporalautocorrel": [9, 13], "temporalunwrap": [9, 13], "temporari": 13, "term": [1, 11], "termin": [8, 10, 14], "terrasar": 2, "test": [1, 8, 12, 13], "test_process": 1, "tgr": 11, "than": [3, 11, 13], "thei": [1, 3, 5, 10], "them": [1, 3, 4, 10, 11, 13], "theori": 11, "therebi": [11, 13], "therefor": [3, 8, 11], "thesi": 11, "thi": [1, 2, 3, 4, 8, 10, 11, 12, 13, 14], "those": 11, "three": 12, "threshold": [11, 13], "through": 1, "thrsh": 13, "time": [1, 8, 9, 10, 12, 13, 14], "time_mask": 13, "timeseri": [1, 8, 12], "timeseries_path": 8, "timeseriesview": [9, 13], "tip": [7, 11], "titl": 13, "togeth": 1, "too": [3, 4, 13], "tool": [1, 7, 10], "top": [10, 11, 13], "tox": 1, "tpami": 11, "track": [2, 3, 14], "transact": [11, 12], "transfer": 12, "transport": [1, 11, 12, 13], "tree": 13, "tree_p1": 13, "triangul": 9, "triangulateglob": [9, 13], "triangulateknn": [9, 13], "troubleshoot": 1, "true": [3, 11, 13], "try": 10, "ttl": 13, "tune": [3, 4], "tupl": 13, "tutori": [3, 4], "two": [2, 7, 12, 13], "txt": 10, "type": [7, 11, 13], "type_list": 13, "typeerror": 13, "und": [1, 12], "under": [1, 11, 12], "undergo": 2, "understand": 3, "uni": [0, 1, 8, 12], "union": 13, "unit": 13, "univers": 12, "until": 11, "unw_phas": 13, "unwrap": [7, 9, 12], "unzip": [3, 4], "up": [1, 8], "updat": [1, 8, 13], "updatebuttonstatu": [9, 13], "updaterefer": [9, 13], "upper": 13, "ur_corner_wg": 13, "urlcheck": 1, "us": [1, 2, 3, 4, 5, 7, 10, 11, 12, 13, 14], "usag": [7, 12, 13], "use_arcs_from_temporal_unwrap": 11, "use_moving_point": 11, "use_p": 11, "use_phase_linking_result": 11, "user": [2, 3, 4, 6], "usual": 13, "util": 9, "utm": 13, "v": 11, "v2": 1, "v_hat": 13, "val": 13, "val_arc": 13, "val_point": 13, "val_rang": 13, "valadao": 11, "valid": 13, "valu": [3, 4, 11, 13], "valueerror": 13, "van": [11, 13], "vari": 11, "variabl": 13, "variogram": 13, "variou": [3, 4, 11], "vel": 13, "vel_grid": 13, "vel_p1": 13, "vel_p2": 13, "vel_rang": 13, "vel_scal": 13, "veloc": [3, 4, 11, 13], "velocity_bound": [11, 13], "verkehr": [1, 12], "vermessung": [1, 12], "version": [1, 3, 4, 6, 8, 9, 12], "vertex": 10, "via": 11, "view": [3, 10], "viewer": [3, 4, 9], "virtual": 8, "virtualenv": 1, "virtualenvwrapp": 1, "vision": [11, 12], "visual": [3, 4, 10, 12], "vladimir": [1, 12], "vmax": 13, "vmin": 13, "volunt": 1, "v\u00e1zquez": 11, "w": 13, "wa": [1, 12], "wai": 1, "want": [1, 3, 8, 10], "warn": 3, "warranti": 1, "wavelength": [9, 13], "wdw_size": 13, "we": [3, 12], "web": 1, "websit": 1, "weight": 13, "welcom": 1, "well": 13, "were": 11, "wget": [3, 4, 8], "wgs84": 13, "when": [1, 3, 13], "where": [8, 11], "whether": 1, "which": [3, 10, 11, 12, 13, 14], "whoever": 1, "whole": 13, "whose": 13, "width": [9, 13, 14], "window": [7, 10, 11, 12, 13], "wish": [4, 10, 11], "within": [10, 11, 12, 13], "without": [1, 11, 13], "work": [1, 3, 4, 10, 12, 13], "work_dir": 13, "workflow": [3, 4, 7, 12, 14], "working_dir": 12, "would": [1, 11], "wrap": [11, 13], "wrapper": 13, "write": 13, "write2hdf5_block": 13, "writetofil": [9, 13], "writetofileblock": [9, 13], "wrong": 13, "wsl": [7, 12], "www": 1, "x": [2, 10, 13], "x0": 13, "x1": 13, "x_correct": 13, "xend": 13, "xmax": 13, "xmin": 13, "xml": 10, "xstart": 13, "xx": 11, "y": [10, 11, 13], "y0": 13, "y1": 13, "y_correct": 
13, "year": 2, "yearli": [11, 13], "yend": 13, "yield": 11, "ymax": 13, "ymin": 13, "yml": 8, "you": [1, 3, 4, 5, 8, 10, 11, 12, 14], "your": [8, 11, 12], "ystart": 13, "yuri": 12, "yyyymmdd": 10, "yyyymmdd_yyyymmdd": 10, "z0": 13, "z1": 13, "zend": 13, "zenodo": [3, 4, 12], "zero": 11, "zhao": 11, "zip": [3, 4], "zstart": 13}, "titles": ["Credits", "Contributing", "Masjed Soleyman dam", "Detailed Guide for Masjed Soleyman Dam", "Fast Track Guide for Masjed Soleyman Dam", "Demo Datasets", "History", "SARvey documentation", "Installation", "Python API reference", "Preparation", "Multitemporal InSAR processing workflow", "SARvey - survey with SAR", "sarvey package", "Usage"], "titleterms": {"0": [3, 6, 11], "08": 6, "1": [3, 6, 11], "12": 6, "2": [3, 11], "2024": 6, "3": [3, 11], "4": [3, 11], "5": 3, "6": 3, "activ": [3, 4], "anaconda": 8, "api": [9, 14], "appl": 8, "arm": 8, "befor": 3, "big": 11, "bug": 1, "chang": [1, 3], "changelog": 12, "check": [4, 10, 11], "cite": 12, "coher": 13, "command": 14, "commit": 1, "config": [3, 4, 13], "configur": 11, "consist": 11, "consol": 13, "content": [7, 13], "contribut": 1, "contributor": 0, "creat": [3, 4, 10], "credit": [0, 12], "dam": [2, 3, 4], "data": [3, 4, 10], "dataset": [2, 5, 11], "demo": 5, "densif": [11, 13], "detail": 3, "develop": 0, "directori": 3, "document": [1, 7, 12], "download": [3, 4], "environ": 4, "export": [3, 4], "fast": 4, "featur": [1, 12], "feedback": 1, "file": [3, 4, 11], "filter": [11, 13], "fix": 1, "format": [3, 4, 10], "gamma": 10, "geoloc": 13, "gi": [3, 4], "guid": [3, 4], "guidelin": 1, "handl": 11, "header": 1, "histori": [6, 12], "how": [1, 12], "ifg_network": 13, "implement": 1, "indic": 7, "insar": 11, "instal": 8, "isc": 10, "json": 3, "lead": 0, "licens": [1, 12], "line": 14, "link": 10, "linux": 8, "literatur": [2, 11], "load": 10, "m2": 8, "maco": 8, "mamba": 8, "manual": 10, "masj": [2, 3, 4], "mask": 10, "merg": 1, "miaplpi": 10, "miniconda": 8, "modifi": 3, "modul": 13, "multitempor": 11, "object": 13, "one": 11, "option": [10, 11], "osm_util": 13, "output": 4, "overview": 12, "packag": 13, "paramet": 13, "phase": 10, "pie": 6, "plot": [3, 4], "prepar": [3, 10, 11, 13], "preprocess": 10, "process": [11, 12, 13], "python": [9, 14], "rais": 13, "recommend": 8, "refer": 9, "report": 1, "request": 1, "rerun": 3, "result": [3, 4], "return": 13, "run": [3, 4], "sar": 12, "sarvei": [3, 4, 7, 12, 13], "sarvey_export": 13, "sarvey_mask": 13, "sarvey_mti": 13, "sarvey_osm": 13, "sarvey_plot": 13, "seri": [3, 4], "sign": 1, "silicon": 8, "snap": 10, "soleyman": [2, 3, 4], "space": 11, "statu": 12, "step": [3, 10, 11], "strawberri": 6, "submit": 1, "submodul": 13, "subset": 10, "survei": 12, "tabl": 7, "time": [3, 4, 11], "tip": 1, "tool": 14, "track": 4, "triangul": 13, "tutori": 2, "two": 11, "type": 1, "unwrap": [11, 13], "us": 8, "usag": 14, "util": 13, "valid": [3, 4], "version": 13, "viewer": 13, "window": 8, "workflow": 11, "write": 1, "wsl": 8, "your": [1, 3, 4]}}) \ No newline at end of file diff --git a/doc/usage.html b/doc/usage.html new file mode 100644 index 0000000..a089a55 --- /dev/null +++ b/doc/usage.html @@ -0,0 +1,175 @@ + + + + + + + + Usage — SARvey 1.0.0 documentation + + + + + + + + + + + + + + + +

Usage

[Figure: SARvey workflow]

Processing workflow for using the SARvey software to derive displacement time series. The minimal required processing steps and datasets are depicted in grey. All other steps are optional.


Command-line tools


The following command-line tools are available and can be run directly in the terminal.

sarvey

A tool to derive displacements from the SLC stack with Multi-Temporal InSAR (MTI). A detailed description of the processing steps is given in the Multitemporal InSAR processing workflow section.
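Here is a sketch of a typical sarvey call; the config file name and the step range 0 4 are placeholder values, and the exact arguments are described in the processing workflow documentation:

sarvey -f config.json 0 4        # run processing steps 0 to 4 with the parameters from config.json (step range assumed for this sketch)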
sarvey_plot

A tool to plot the results from sarvey processing.
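Here is a sketch of how sarvey_plot might be called; the result file name and the -t flag for the interactive time series viewer are assumptions in this example:

sarvey_plot outputs/p2_coh80_ts.h5 -t   # assumed: open the results in the interactive time series viewer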

sarvey_export

A tool to export the results from sarvey processing to shapefile or geopackage.
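Here is a sketch of how sarvey_export might be called; the input and output paths are assumptions in this example:

sarvey_export outputs/p2_coh80_ts.h5 -o outputs/shp/p2_coh80.shp   # export the time series results to a shapefile (paths assumed)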

sarvey_mask

A tool to create a mask from a shapefile containing the area of interest, which can be used in sarvey processing. The tool reads from an input file, which is a shapefile or geopackage containing the geographic data. It supports both 'LineString' and 'Polygon' geometries. The tool first gets the spatial extent of the geographic data and searches for the locations of the polygon/line nodes in the image coordinates of the radar image. A buffer around the polygon/line is created, specified by a width in pixels. The buffer is then used to create the mask.


Here is an example of how to use the sarvey_mask tool:

sarvey_mask --input_file my_shapefile.shp --geom_file ./inputs/geometryRadar.h5 --out_file_name my_mask.h5 --width 5
sarvey_osm

A tool to download OpenStreetMap data for the area of interest, specified by the spatial extent of the SLC stack. The tool first gets the spatial extent of the SAR image from the geometry file. It then uses this spatial extent to download the OpenStreetMap data for the corresponding area. The download of railway tracks, highways, and bridges is supported. After downloading the data, the tool saves it to a shapefile.


After downloading the OpenStreetMap data with sarvey_osm, you can use the sarvey_mask tool to create a mask from the shapefile.


Here is an example of how to use the sarvey_osm tool:

sarvey_osm --geom ./geometryRadar.h5 --railway                       # download railway
sarvey_osm --geom ./geometryRadar.h5 --highway                       # download highway
sarvey_osm --geom ./geometryRadar.h5 --railway --bridge              # download railway bridge
sarvey_osm --geom ./geometryRadar.h5 --railway -o mask_railway.shp   # specify output path

Usage of the Python API


To use SARvey in a project:

import sarvey
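The functionality is organized into submodules, which are documented in the Python API reference. A minimal sketch of importing some of them follows; which submodules you need depends on your task:

from sarvey import objects, utils, viewer   # submodules listed in the API reference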
\ No newline at end of file