diff --git a/docs/Makefile b/doc/Makefile
similarity index 96%
rename from docs/Makefile
rename to doc/Makefile
index c271f08..d4bb2cb 100644
--- a/docs/Makefile
+++ b/doc/Makefile
@@ -6,7 +6,7 @@
 SPHINXOPTS ?=
 SPHINXBUILD ?= sphinx-build
 SOURCEDIR = .
-BUILDDIR = .
+BUILDDIR = _build
 
 # Put it first so that "make" without argument is like "make help".
 help:
diff --git a/docs/acknowledgments.rst b/doc/acknowledgments.rst
similarity index 100%
rename from docs/acknowledgments.rst
rename to doc/acknowledgments.rst
diff --git a/docs/api-reference.rst b/doc/api-reference.rst
similarity index 100%
rename from docs/api-reference.rst
rename to doc/api-reference.rst
diff --git a/docs/api/helpers.rst b/doc/api/helpers.rst
similarity index 100%
rename from docs/api/helpers.rst
rename to doc/api/helpers.rst
diff --git a/docs/api/plotting.rst b/doc/api/plotting.rst
similarity index 100%
rename from docs/api/plotting.rst
rename to doc/api/plotting.rst
diff --git a/docs/api/radarspec.rst b/doc/api/radarspec.rst
similarity index 100%
rename from docs/api/radarspec.rst
rename to doc/api/radarspec.rst
diff --git a/docs/api/readers.rst b/doc/api/readers.rst
similarity index 100%
rename from docs/api/readers.rst
rename to doc/api/readers.rst
diff --git a/docs/api/simulator.rst b/doc/api/simulator.rst
similarity index 100%
rename from docs/api/simulator.rst
rename to doc/api/simulator.rst
diff --git a/docs/api/suborbital.rst b/doc/api/suborbital.rst
similarity index 100%
rename from docs/api/suborbital.rst
rename to doc/api/suborbital.rst
diff --git a/docs/api/writers.rst b/doc/api/writers.rst
similarity index 100%
rename from docs/api/writers.rst
rename to doc/api/writers.rst
diff --git a/docs/conf.py b/doc/conf.py
similarity index 100%
rename from docs/conf.py
rename to doc/conf.py
diff --git a/docs/doctrees/acknowledgments.doctree b/doc/doctrees/acknowledgments.doctree
similarity index 100%
rename from docs/doctrees/acknowledgments.doctree
rename to doc/doctrees/acknowledgments.doctree
diff --git a/docs/doctrees/api-reference.doctree b/doc/doctrees/api-reference.doctree
similarity index 100%
rename from docs/doctrees/api-reference.doctree
rename to doc/doctrees/api-reference.doctree
diff --git a/docs/doctrees/api/helpers.doctree b/doc/doctrees/api/helpers.doctree
similarity index 100%
rename from docs/doctrees/api/helpers.doctree
rename to doc/doctrees/api/helpers.doctree
diff --git a/docs/doctrees/api/plotting.doctree b/doc/doctrees/api/plotting.doctree
similarity index 100%
rename from docs/doctrees/api/plotting.doctree
rename to doc/doctrees/api/plotting.doctree
diff --git a/docs/doctrees/api/radarspec.doctree b/doc/doctrees/api/radarspec.doctree
similarity index 100%
rename from docs/doctrees/api/radarspec.doctree
rename to doc/doctrees/api/radarspec.doctree
diff --git a/docs/doctrees/api/readers.doctree b/doc/doctrees/api/readers.doctree
similarity index 100%
rename from docs/doctrees/api/readers.doctree
rename to doc/doctrees/api/readers.doctree
diff --git a/docs/doctrees/api/simulator.doctree b/doc/doctrees/api/simulator.doctree
similarity index 100%
rename from docs/doctrees/api/simulator.doctree
rename to doc/doctrees/api/simulator.doctree
diff --git a/docs/doctrees/api/suborbital.doctree b/doc/doctrees/api/suborbital.doctree
similarity index 100%
rename from docs/doctrees/api/suborbital.doctree
rename to doc/doctrees/api/suborbital.doctree
diff --git a/docs/doctrees/api/writers.doctree b/doc/doctrees/api/writers.doctree
similarity index 100%
rename from docs/doctrees/api/writers.doctree
rename to doc/doctrees/api/writers.doctree
diff --git a/docs/doctrees/environment.pickle b/doc/doctrees/environment.pickle
similarity index 100%
rename from docs/doctrees/environment.pickle
rename to doc/doctrees/environment.pickle
diff --git a/docs/doctrees/getting_started.doctree b/doc/doctrees/getting_started.doctree
similarity index 100%
rename from docs/doctrees/getting_started.doctree
rename to doc/doctrees/getting_started.doctree
diff --git a/docs/doctrees/index.doctree b/doc/doctrees/index.doctree
similarity index 100%
rename from docs/doctrees/index.doctree
rename to doc/doctrees/index.doctree
diff --git a/docs/doctrees/installation.doctree b/doc/doctrees/installation.doctree
similarity index 100%
rename from docs/doctrees/installation.doctree
rename to doc/doctrees/installation.doctree
diff --git a/docs/doctrees/output_description.doctree b/doc/doctrees/output_description.doctree
similarity index 100%
rename from docs/doctrees/output_description.doctree
rename to doc/doctrees/output_description.doctree
diff --git a/docs/doctrees/overview.doctree b/doc/doctrees/overview.doctree
similarity index 100%
rename from docs/doctrees/overview.doctree
rename to doc/doctrees/overview.doctree
diff --git a/docs/getting_started.rst b/doc/getting_started.rst
similarity index 100%
rename from docs/getting_started.rst
rename to doc/getting_started.rst
diff --git a/docs/graphics/example_joyce_20210406_210km_280km.png b/doc/graphics/example_joyce_20210406_210km_280km.png
similarity index 100%
rename from docs/graphics/example_joyce_20210406_210km_280km.png
rename to doc/graphics/example_joyce_20210406_210km_280km.png
diff --git a/docs/graphics/pic_flow_chart_suborbital_radar.png b/doc/graphics/pic_flow_chart_suborbital_radar.png
similarity index 100%
rename from docs/graphics/pic_flow_chart_suborbital_radar.png
rename to doc/graphics/pic_flow_chart_suborbital_radar.png
diff --git a/docs/index.rst b/doc/index.rst
similarity index 100%
rename from docs/index.rst
rename to doc/index.rst
diff --git a/docs/installation.rst b/doc/installation.rst
similarity index 100%
rename from docs/installation.rst
rename to doc/installation.rst
diff --git a/docs/make.bat b/doc/make.bat
similarity index 100%
rename from docs/make.bat
rename to doc/make.bat
diff --git a/docs/output_description.rst b/doc/output_description.rst
similarity index 100%
rename from docs/output_description.rst
rename to doc/output_description.rst
diff --git a/docs/overview.rst b/doc/overview.rst
similarity index 100%
rename from docs/overview.rst
rename to doc/overview.rst
diff --git a/docs/.nojekyll b/docs/.nojekyll
deleted file mode 100644
index e69de29..0000000
diff --git a/docs/html/.buildinfo b/docs/html/.buildinfo
deleted file mode 100644
index dba8d60..0000000
--- a/docs/html/.buildinfo
+++ /dev/null
@@ -1,4 +0,0 @@
-# Sphinx build info version 1
-# This file hashes the configuration used when building these files. When it is not found, a full rebuild will be done.
-config: 982191455396e23e93e51a8ae8264128
-tags: 645f666f9bcd5a90fca523b33c5a78b7
diff --git a/docs/html/_images/example_joyce_20210406_210km_280km.png b/docs/html/_images/example_joyce_20210406_210km_280km.png
deleted file mode 100644
index 3a54631..0000000
Binary files a/docs/html/_images/example_joyce_20210406_210km_280km.png and /dev/null differ
diff --git a/docs/html/_images/pic_flow_chart_suborbital_radar.png b/docs/html/_images/pic_flow_chart_suborbital_radar.png
deleted file mode 100644
index 1c9e298..0000000
Binary files a/docs/html/_images/pic_flow_chart_suborbital_radar.png and /dev/null differ
diff --git a/docs/html/_modules/index.html b/docs/html/_modules/index.html
deleted file mode 100644
index c09a3e2..0000000
--- a/docs/html/_modules/index.html
+++ /dev/null
@@ -1,115 +0,0 @@
-"""
-This module contains helper functions for the orbital radar simulator.
-"""
-
-import numpy as np
-
-
-[docs]def db2li(x):
- """
- Conversion from dB to linear.
-
- Parameters
- ----------
- x : float
- Any value or array to be converted from dB to linear unit
- """
- return 10 ** (0.1 * x)
-
-
-[docs]def li2db(x):
- """
- Conversion from linear to dB.
-
- Parameters
- ----------
- x : float
- Any value or array to be converted from linear to dB unit
- """
- return 10 * np.log10(x)
-
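As a quick check of the two helpers above, the dB-to-linear round trip should reproduce the input; a minimal sketch using the import path shown elsewhere in this diff:

import numpy as np

from orbital_radar.helpers import db2li, li2db

ze_dbz = np.array([-30.0, 0.0, 20.0])  # reflectivity in dBZ
ze_lin = db2li(ze_dbz)                 # [0.001, 1.0, 100.0] in mm6 m-3
assert np.allclose(li2db(ze_lin), ze_dbz)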
-"""
-This module contains the satellite class and functions to calculate the
-along-track and along-range averaging parameters. The main method and
-definitions are based on Lamer et al. (2020) and Schirmacher et al. (2023).
-
-Two pre-defined satellites are available: EarthCARE and CloudSat.
-
-**EarthCARE**
-
-- frequency: Kollias et al. (2014), Table 1
-- velocity: Kollias et al. (2014), Eq 4
-- antenna diameter: Kollias et al. (2014), Table 1
-- altitude: Kollias et al. (2014), Table 1
-- pulse length: Kollias et al. (2014), Table 1
-- along track resolution: Kollias et al. (2014), Table 1
-- range resolution: Kollias et al. (2014), Table 1
-- ifov_factor: Kollias et al. (2022), Table 1
- - ifov_scale: based on Tanelli et al. (2008); set to 1 as long as nothing is reported
-- detection limit: Kollias et al. (2014), Table 1
- - pulse repetition frequency (PRF): Kollias et al. (2022), Table 1
-- noise_ze: Kollias et al. (2014), Table 1
-- ze_bins: Hogan et al. (2005),
-- ze_std: Hogan et al. (2005),
-- ze_std_background:
-- vm_bins_broad: Kollias et al. (2022), Figure 7
-- vm_std_broad: Kollias et al. (2022), Figure 7
-- vm_std_broad_background: Kollias et al. (2022)
-
-**CloudSat**
-
-- frequency: Kollias et al. (2014), Table 1
-- velocity: Kollias et al. (2014), Eq 4
-- antenna diameter: Kollias et al. (2014), Table 1
-- altitude: Kollias et al. (2014), Table 1
-- pulse length: Kollias et al. (2014), Table 1
-- along track resolution: Kollias et al. (2014), Table 1
-- range resolution: Kollias et al. (2014), Table 1
- - ifov_factor: Kollias et al. (2022), Table 1; for the Arctic, Schirmacher et al. (2023)
- - ifov_scale: Tanelli et al. (2008), "integration accuracy of the pulse is 0.968 s"
- - detection limit: Kollias et al. (2014), Table 1
- - pulse repetition frequency (PRF): Kollias et al. (2014, 2022), Table 1
-- noise_ze:
-- ze_bins: Hogan et al. (2005),
-- ze_std: Hogan et al. (2005),
-- ze_std_background:
-- vm_bins_broad: not used
-- vm_std_broad: not used
-- vm_std_broad_background: not used
-
-References
-----------
-Hogan et al. (2005) : https://doi.org/10.1175/JTECH1768.1
-Kollias et al. (2014) : https://doi.org/10.1175/JTECH-D-11-00202.1
-Kollias et al. (2022) : https://doi.org/10.3389/frsen.2022.860284
-Lamer et al. (2020) : https://doi.org/10.5194/amt-13-2363-2020
-Schirmacher et al. (2023) : https://doi.org/10.5194/egusphere-2023-636
-Tanelli et al. (2008) : https://doi.org/10.1109/TGRS.2008.2002030
-"""
-
-from dataclasses import dataclass
-from pathlib import Path
-from typing import List, Union, Optional
-
-import numpy as np
-
-from orbital_radar.helpers import db2li
-from orbital_radar.readers.rangewf import read_range_weighting_function
-
-SPEED_OF_LIGHT = 299792458.0 # unit: m s-1
-RADARS_PREDEFINED = {
- "earthcare": {
- "name": "EarthCARE",
- "frequency": 94.05e9,
- "velocity": 7600,
- "antenna_diameter": 2.5,
- "altitude": 400000,
- "pulse_length": 500,
- "along_track_resolution": 500,
- "range_resolution": 100,
- "ifov_factor": 74.5,
- "ifov_scale": 1,
- "detection_limit": -37,
- "nyquist_velocity": 5.7,
- "pulse_repetition_frequency": 7150,
- "noise_ze": -37.01,
- "ze_bins": [-37, -25, -13],
- "ze_std": [0.5, 0.3, 0.2],
- "ze_std_background": 0.2176,
- "vm_bins_broad": [
- -37,
- -34,
- -31,
- -28,
- -25,
- -22,
- -19,
- -16,
- -13,
- -10,
- -7,
- -4,
- ],
- "vm_std_broad": [
- 3.27,
- 3.12,
- 2.83,
- 2.35,
- 1.63,
- 1.09,
- 0.76,
- 0.59,
- 0.52,
- 0.49,
- 0.48,
- 0.47,
- ],
- "vm_std_broad_background": 1.09,
- },
- "cloudsat": {
- "name": "CloudSat",
- "frequency": 94.05e9,
- "velocity": 6800,
- "antenna_diameter": 1.85,
- "altitude": 705000,
- "pulse_length": 480,
- "along_track_resolution": 1093,
- "range_resolution": 240,
- "ifov_factor": 67,
- "ifov_scale": 0.968,
- "detection_limit": -27,
- "nyquist_velocity": 5.7,
- "pulse_repetition_frequency": 7150,
- "noise_ze": -27.0,
- "ze_bins": [
- -37,
- -34,
- -31,
- -28,
- -25,
- -22,
- -19,
- -16,
- -13,
- -10,
- -7,
- -4,
- ],
- "ze_std": [
- 9.24,
- 4.77,
- 2.54,
- 1.41,
- 0.85,
- 0.56,
- 0.42,
- 0.35,
- 0.32,
- 0.3,
- 0.29,
- 0.28,
- ],
- "ze_std_background": 0.2176,
- "vm_bins_broad": [],
- "vm_std_broad": [],
- "vm_std_broad_background": np.nan,
- },
-}
-
-
-[docs]@dataclass
-class RadarSpec:
- """
- This class contains the satellite parameters.
-
- Units of radar specification
- ----------------------------
- - frequency: radar frequency [Hz]
- - velocity: satellite velocity [m s-1]
- - antenna diameter: radar antenna diameter [m]
- - altitude: satellite altitude [m]
- - pulse length: radar pulse length [m]
- - along track resolution: radar along track resolution [m]
- - range resolution: radar range resolution [m]
- - detection limit: radar detection limit [dBZ]
- - noise_ze: radar noise floor [dBZ]
- - ze_bins: radar Ze lookup table [dBZ]
- - ze_std: radar standard deviation lookup table [dBZ]
- - ze_std_background: radar standard deviation background [dBZ]
- - vm_bins_broad: radar reflectivity bin of vm_std_broad [dBZ]
- - vm_std_broad: Doppler velocity broadening due to platform motion [m s-1]
- - vm_std_broad_background: radar standard deviation background [m s-1]
- - nyquist velocity: radar nyquist velocity [m s-1]
- - pulse repetition frequency: radar pulse repetition frequency [Hz]
- """
-
- name: str
- frequency: float
- velocity: int
- antenna_diameter: float
- altitude: int
- pulse_length: int
- along_track_resolution: int
- range_resolution: int
- ifov_factor: float
- ifov_scale: float
- detection_limit: float
- noise_ze: float
- ze_bins: Union[List[float], np.ndarray]
- ze_std: Union[List[float], np.ndarray]
- ze_std_background: float
- vm_bins_broad: Union[List[float], np.ndarray]
- vm_std_broad: Union[List[float], np.ndarray]
- vm_std_broad_background: float
- nyquist_velocity: float = np.nan
- pulse_repetition_frequency: float = np.nan
-
-
-[docs]class RadarBeam:
- """
- This class manages the satellite specifications from pre-defined or user-
- specified space-borne radars. It also contains transformation functions
- for along-track and along-range averaging.
- """
-
- def __init__(
- self,
- file_earthcare=None,
- sat_name=None,
- nyquist_from_prf=False,
- **sat_params,
- ):
- """
- Initializes the satellite parameters and calculates along-track and
- along-range weighting functions, and the velocity error due to
- satellite velocity.
-
- The function requires along-track and along-range bins.
-
- The following parameters will be derived for later use in the simulator
-
- - instantaneous field of view
- - normalized along-track weighting function
- - along track resolution
- - normalized along-range weighting function
- - range resolution
- - satellite velocity error
-
- Parameters
- ----------
- file_earthcare : str
- path to file containing EarthCARE CPR weighting function. This
- file is used if the satellite name is 'earthcare'.
- sat_name : str
- name of the satellite, e.g. 'earthcare' or 'cloudsat'
- nyquist_from_prf : bool
- if True, the Nyquist velocity is calculated from the pulse
- repetition frequency. If False, the Nyquist velocity must be given
- as a parameter. Default is False.
- **sat_params: keyword arguments to overwrite the predefined satellite
- """
-
- # check if either sat_name or sat_params is given
- if sat_name is None and not sat_params:
- raise ValueError("Either sat_name or sat_params must be given")
-
- # check if sat_name is valid
- if sat_name is not None and sat_name not in RADARS_PREDEFINED.keys():
- raise ValueError(
- f"Unknown satellite name: {sat_name}. "
- f"Valid names are: {RADARS_PREDEFINED.keys()}"
- )
-
- # check if sat_params are valid
- for key in sat_params.keys():
- if key not in RADARS_PREDEFINED["earthcare"].keys():
- raise ValueError(f"Unknown parameter: {key}")
-
- # check if all keys are given if no satellite name is given
- if sat_name is None and sat_params is not None:
- for key in RADARS_PREDEFINED["earthcare"].keys():
- if key not in sat_params.keys():
- raise ValueError(f"Parameter {key} missing")
-
- # check if file to EarthCARE CPR weighting function exists
- if sat_name == "earthcare" and file_earthcare is not None:
- if not Path(file_earthcare).exists():
- raise ValueError(
- f"EarthCARE CPR weighting function file does not exist: "
- f"{file_earthcare}"
- )
-
- # warn if file for EarthCARE CPR weighting function is not given
- if sat_name == "earthcare" and file_earthcare is None:
- print(
- "Warning: EarthCARE CPR weighting function file is not given. "
- "Gaussian range weighting function will be used instead."
- )
-
- # warn that earthcare weighting function is not used
- if sat_name != "earthcare" and file_earthcare is not None:
- print(
- "Warning: EarthCARE CPR weighting function file is not used "
- "because satellite name is not 'earthcare'"
- )
-
- # set satellite parameters from pre-defined satellites
- if sat_name is not None:
- # update pre-defined satellite parameters with sat_params
- radar_predefined = RADARS_PREDEFINED[sat_name].copy()
- radar_predefined.update(sat_params)
- self.spec = RadarSpec(**radar_predefined)
-
- # set satellite parameters from user-specified satellite
- else:
- self.spec = RadarSpec(**sat_params)
-
- # convert lookup tables to numpy arrays
- self.spec.ze_bins = np.array(self.spec.ze_bins)
- self.spec.ze_std = np.array(self.spec.ze_std)
- self.spec.vm_bins_broad = np.array(self.spec.vm_bins_broad)
- self.spec.vm_std_broad = np.array(self.spec.vm_std_broad)
-
- self.sat_name = sat_name
-
- # add range weighting function file
- self.file_earthcare = file_earthcare
-
- # initialize along-track and along-range averaging parameters
- self.atrack_bins = np.array([])
- self.atrack_weights = np.array([])
- self.range_weights = np.array([])
- self.range_bins = np.array([])
-
- # initialize derived parameters
- self.wavelength = np.nan
- self.ifov = np.nan
- self.theta_along = np.nan
- self.velocity_error = np.nan
-
- # calculate derived parameters
- self.calculate_wavelength()
-
- # calculate Nyquist velocity from pulse repetition frequency
- if nyquist_from_prf:
- print(
- "Nyquist velocity is calculated from pulse repetition frequency."
- )
- self.calculate_nyquist_velocity()
-
- else:
- print("Nyquist velocity parameter is used instead of pulse "
- "repition frequency.")
-
- # show summary of satellite parameters
- self.params
-
- @property
- def params(self):
- """Prints a summary of the satellite parameters"""
-
- print(
- f"Satellite: {self.spec.name}\n"
- f"Frequency: {self.spec.frequency*1e-9} GHz\n"
- f"Velocity: {self.spec.velocity} m s-1\n"
- f"Antenna diameter: {self.spec.antenna_diameter} m\n"
- f"Altitude: {self.spec.altitude} m\n"
- f"Pulse length: {self.spec.pulse_length} m\n"
- f"Horizontal resolution: {self.spec.along_track_resolution} m\n"
- f"Vertical resolution: {self.spec.range_resolution} m\n"
- f"Nyquist velocity: {np.round(self.spec.nyquist_velocity, 2)} m s-1\n"
- f"Pulse repetition frequency: {np.round(self.spec.pulse_repetition_frequency, 0)} Hz\n"
- )
-
-[docs] def calculate_wavelength(self):
- """
- Calculates the radar wavelength from the radar frequency.
-
- Units:
- - frequency: radar frequency [Hz]
- - wavelength: radar wavelength [m]
- - speed of light: speed of light [m s-1]
- """
-
- self.wavelength = SPEED_OF_LIGHT / self.spec.frequency
-
-[docs] def calculate_nyquist_velocity(self):
- """
- Calculates the Nyquist velocity from the pulse repetition frequency
- and the radar wavelength.
- """
-
- self.spec.nyquist_velocity = (
- self.wavelength * self.spec.pulse_repetition_frequency / 4
- )
-
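With the pre-defined EarthCARE numbers above (94.05 GHz, PRF of 7150 Hz), the relation nyquist_velocity = wavelength * PRF / 4 can be checked by hand; a stand-alone sketch:

SPEED_OF_LIGHT = 299792458.0  # m s-1

wavelength = SPEED_OF_LIGHT / 94.05e9     # ~3.19e-3 m
nyquist_velocity = wavelength * 7150 / 4  # m s-1
print(round(nyquist_velocity, 2))         # 5.7, matching RADARS_PREDEFINED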
-[docs] def calculate_ifov(self):
- """
- Calculates the instantaneous field of view (IFOV) from the satellite
- altitude, antenna diameter, wavelength, and ifov factor.
- """
-
- # constant for ifov calculation
- self.theta_along = (
- self.spec.ifov_factor * self.wavelength
- ) / self.spec.antenna_diameter
-
- # instantaneous field of view
- self.ifov = (
- self.spec.altitude
- * np.tan(np.pi * self.theta_along / 180)
- * self.spec.ifov_scale
- )
-
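The same can be done for the IFOV: theta_along comes out in degrees, hence the degree-to-radian conversion inside the tangent. A stand-alone sketch with the EarthCARE defaults from RADARS_PREDEFINED:

import numpy as np

wavelength = 299792458.0 / 94.05e9     # m
theta_along = 74.5 * wavelength / 2.5  # ifov_factor * wavelength / antenna_diameter, degrees
ifov = 400000 * np.tan(np.pi * theta_along / 180) * 1  # altitude * tan(theta) * ifov_scale
print(round(ifov))                     # ~663 m footprint along track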
-[docs] def create_along_track_grid(self, along_track_coords):
- """
- Creates the along-track grid.
-
- The along-track grid is defined from -ifov/2 to ifov/2. The spacing
- is defined by the along-track resolution. The outermost along-track
- bins relative to the line of sight always lie within the IFOV.
-
- If the along-track grid is not equidistant, the along-track weighting
- function cannot be calculated.
-
- Parameters
- ----------
- along_track_coords : array
- along-track coordinates of the ground-based radar [m]
- """
-
- assert len(np.unique(np.diff(along_track_coords))) == 1, (
- "Along-track grid is not equidistant. "
- "Along-track weighting function cannot be calculated."
- )
-
- # grid with size of ifov centered around zero
- step = np.diff(along_track_coords)[0]
- self.atrack_bins = np.append(
- np.arange(-step, -self.ifov / 2, -step)[::-1],
- np.arange(0, self.ifov / 2, step),
- )
-
-[docs] def create_along_range_grid(self, range_coords):
- """
- Creates range grid at which range weighting function is evaluated.
-
- The range grid is defined from -pulse_length to pulse_length. The
- spacing is defined by the range resolution of the ground-based radar.
-
- If the range grid is not equidistant, the range weighting function
- cannot be calculated.
-
- Parameters
- ----------
- range_coords : array
- range coordinates of the ground-based radar [m]
- """
-
- assert len(np.unique(np.diff(range_coords))) == 1, (
- "Range grid is not equidistant. "
- "Range weighting function cannot be calculated."
- )
-
- # grid with size of two pulse lengths centered around zero
- step = np.diff(range_coords)[0]
- self.range_bins = np.arange(
- -self.spec.pulse_length,
- self.spec.pulse_length + step,
- step,
- )
-
-[docs] def calculate_along_track(self, along_track_coords):
- """
- Calculates along-track averaging parameters.
-
- Parameters
- ----------
- along_track_coords : array
- along-track coordinates of the ground-based radar [m]
- """
-
- # instantaneous field of view
- self.calculate_ifov()
-
- # calculate along-track grid
- self.create_along_track_grid(along_track_coords=along_track_coords)
-
- # along-track weighting function
- w_at = np.exp(
- -2 * np.log(2) * (self.atrack_bins / (self.ifov / 2)) ** 2
- )
- self.atrack_weights = w_at / np.sum(w_at) # normalization
-
- assert (
- abs(np.sum(self.atrack_weights) - 1) < 1e-10
- ), "Along-track weighting function is not normalized"
-
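The Gaussian along-track weighting defined above can be reproduced in isolation; a minimal sketch with a hypothetical 100 m input spacing and an 800 m IFOV:

import numpy as np

ifov = 800.0  # m, hypothetical instantaneous field of view
step = 100.0  # m, along-track spacing of the ground-based data

atrack_bins = np.append(
    np.arange(-step, -ifov / 2, -step)[::-1],
    np.arange(0, ifov / 2, step),
)
w_at = np.exp(-2 * np.log(2) * (atrack_bins / (ifov / 2)) ** 2)
atrack_weights = w_at / w_at.sum()  # normalized weights
assert abs(atrack_weights.sum() - 1) < 1e-10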
-[docs] def calculate_velocity_error(self):
- """
- Calculates the velocity error due to satellite velocity.
- """
-
- # velocity error due to satellite velocity
- self.velocity_error = (
- self.spec.velocity / self.spec.altitude
- ) * self.atrack_bins
-
-[docs] def calculate_along_range(self, range_coords):
- """
- Calculates along-range averaging parameters.
-
- Parameters
- ----------
- range_coords : array
- range coordinates of the ground-based radar [m]
- """
-
- self.create_along_range_grid(range_coords=range_coords)
-
- # range weighting function
- if self.sat_name == "earthcare" and self.file_earthcare is not None:
- self.range_weights = (
- self.normalized_range_weighting_function_earthcare()
- )
-
- else:
- self.range_weights = (
- self.normalized_range_weighting_function_default(
- pulse_length=self.spec.pulse_length,
- range_bins=self.range_bins,
- )
- )
-
-[docs] def normalized_range_weighting_function_earthcare(self):
- """
- Prepares EarthCARE range weighting function for along-range averaging.
-
- The high-resolution weighting function is interpolated to the
- range resolution of the ground-based radar.
-
- Returns
- -------
- range_weights : array
- normalized range weighting function
- """
-
- ds_wf = read_range_weighting_function(self.file_earthcare)
-
- # linearize the weighting function
- da_wf = db2li(ds_wf["response"])
-
- # convert from tau factor to range and set height as dimension
- da_wf["height"] = da_wf["tau_factor"] * self.spec.pulse_length
- da_wf = da_wf.swap_dims({"tau_factor": "height"})
-
- # interpolate to range grid of ground-based radar
- da_wf = da_wf.interp(height=self.range_bins, method="linear")
- da_wf = da_wf.fillna(0)
-
- # normalize the linear weighting function
- da_wf /= da_wf.sum()
-
- range_weights = da_wf.values
-
- return range_weights
-
-[docs] @staticmethod
- def normalized_range_weighting_function_default(pulse_length, range_bins):
- """
- Defines the range weighting function for the along-range averaging.
- """
-
- # calculate along-range weighting function
- w_const = -(np.pi**2) / (2.0 * np.log(2) * pulse_length**2)
- range_weights = np.exp(w_const * range_bins**2)
- range_weights = range_weights / np.sum(range_weights) # normalization
-
- return range_weights
-
-[docs] def calculate_weighting_functions(self, along_track_coords, range_coords):
- """
- Calculates the along-track and along-range weighting functions.
-
- Parameters
- ----------
- along_track_coords : array
- along-track coordinates of the ground-based radar [m]
- range_coords : array
- range coordinates of the ground-based radar [m]
- """
-
- # calculate along-track averaging parameters
- self.calculate_along_track(along_track_coords=along_track_coords)
-
- # calculate velocity error due to satellite velocity
- self.calculate_velocity_error()
-
- # calculate along-range averaging parameters
- self.calculate_along_range(range_coords=range_coords)
-
-"""
-This script contains functions to read cloudnet data.
-"""
-
-import os
-from glob import glob
-
-import numpy as np
-import pandas as pd
-import xarray as xr
-
-FILENAMES = {
- "cloudnet_ecmwf": "ecmwf",
- "cloudnet_categorize": "categorize",
-}
-
-
-[docs]def read_cloudnet(
- attenuation_correction_input, date, site_name, path, add_date=True
-):
- """
- Reads Cloudnet data.
-
- The following file naming is expected (e.g. for 2022-02-14 at Mindelo):
- 20220214_mindelo_ecmwf.nc
- 20220214_mindelo_categorize.nc
-
- Parameters
- ----------
- attenuation_correction_input: str
- Cloudnet product to read. Either 'cloudnet_categorize' or 'cloudnet_ecmwf'.
- date: np.datetime64
- Date for which data is read.
- site_name: str
- Name of the site.
- path: str
- Path to the Cloudnet data. The path should contain the year, month, and
- day as subdirectories.
- add_date: bool, optional
- If True, the date is added to the path. Default is True.
-
- Returns
- -------
- ds: xarray.Dataset
- Cloudnet data.
- """
-
- if add_date:
- path = os.path.join(
- path,
- pd.Timestamp(date).strftime(r"%Y"),
- pd.Timestamp(date).strftime(r"%m"),
- pd.Timestamp(date).strftime(r"%d"),
- )
-
- if not os.path.exists(path):
- print(f"Warning: The cloudnet data path {path} does not exist")
- print("Warning: No attenuation correction will be applied")
-
- return None
-
- files = glob(
- os.path.join(path, f"*{FILENAMES[attenuation_correction_input]}.nc")
- )
-
- # return none if no files are found
- if len(files) == 0:
- # print warning
- print(
- f"No {attenuation_correction_input} Cloudnet files found "
- f"for {date} at "
- f"{site_name}"
- )
-
- return None
-
- # warn if more than one file is found
- if len(files) > 1:
- print(
- f"More than one {attenuation_correction_input} Cloudnet file "
- f"found for "
- f"{date} at {site_name}. Reading first file."
- )
-
- file = files[0]
-
- print(f"Reading {attenuation_correction_input} Cloudnet data: {file}")
-
- # model_time unit for older cloudnetpy versions in bad format
- if attenuation_correction_input == "cloudnet_categorize":
- ds = xr.open_dataset(file, decode_times=False)
-
- if (
- ds["model_time"].units == "decimal hours since midnight"
- or ds["model_time"].units == f"hours since {str(date)} +00:00"
- ):
- # model time
- ds = convert_time(
- ds=ds,
- time_variable="model_time",
- base_time=np.datetime64(date),
- factor=60 * 60 * 1e9,
- )
-
- # radar time
- ds = convert_time(
- ds=ds,
- time_variable="time",
- base_time=np.datetime64(date),
- factor=60 * 60 * 1e9,
- )
-
- # make sure that difference between first and last time is more than 12 h
- if (
- ds["model_time"].values[-1] - ds["model_time"].values[0]
- ) < np.timedelta64(12, "h"):
- print(
- f"Warning: The time difference between the first and last time "
- f"step is less than 12 hours for {date} at {site_name}. "
- f"Check if time format is being read correctly."
- )
-
- return None
-
- if (ds["time"].values[-1] - ds["time"].values[0]) < np.timedelta64(
- 12, "h"
- ):
- print(
- f"Warning: The time difference between the first and last time "
- f"step is less than 12 hours for {date} at {site_name}. "
- f"Check if time format is being read correctly."
- )
-
- return None
-
- # problem did not occur for ecmwf data
- else:
-
- ds = xr.open_dataset(file)
-
- return ds
-
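A typical call to this reader might look as follows; the data path is hypothetical and must contain yyyy/mm/dd sub-directories because add_date defaults to True:

import numpy as np

from orbital_radar.readers.cloudnet import read_cloudnet

ds_cloudnet = read_cloudnet(
    attenuation_correction_input="cloudnet_ecmwf",
    date=np.datetime64("2022-02-14"),
    site_name="mindelo",
    path="/data/cloudnet/mindelo",  # hypothetical path
)
# returns None (with a printed warning) if the path or daily file is missing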
-
-[docs]def convert_time(ds, time_variable, base_time, factor=1):
- """
- Convert time in seconds since base_time to datetime64.
-
- Parameters
- ----------
- ds : xarray.Dataset
- Dataset containing the time variable.
- time_variable : str
- Name of the time variable.
- base_time : str
- Base time as string (e.g. "1970-01-01")
- factor : float, optional
- Factor to convert time to nanoseconds. Default is 1.
- """
-
- ds[time_variable] = (ds[time_variable] * factor).astype(
- "timedelta64[ns]"
- ) + np.datetime64(base_time)
-
- return ds
-
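For example, a "decimal hours since midnight" axis is converted by passing a factor of 3600e9 nanoseconds per hour, as done in read_cloudnet above; a small sketch with synthetic data:

import numpy as np
import xarray as xr

from orbital_radar.readers.cloudnet import convert_time

ds = xr.Dataset({"model_time": ("step", [0.0, 0.5, 1.0])})  # decimal hours
ds = convert_time(
    ds,
    time_variable="model_time",
    base_time=np.datetime64("2022-02-14"),
    factor=60 * 60 * 1e9,  # hours to nanoseconds
)
# model_time is now 2022-02-14T00:00, 00:30, and 01:00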
-"""
-Reads TOML configuration
-"""
-
-import os
-
-# use tomllib (only for Python >= 3.11) if available, otherwise use toml
-try:
- import tomllib as toml
-
- MODE = "rb"
-except ImportError:
- import toml
-
- MODE = "r"
-
-
-[docs]def read_config(filename):
- """
- Reads user configuration from TOML file.
-
- Parameters
- ----------
- filename: str
- Name of the TOML file
-
- Returns
- -------
- config: dict
- Configuration dictionary
- """
-
- # use filename if environment variable does not exist, otherwise combine
- if os.getenv("ORBITAL_RADAR_CONFIG_PATH") is not None:
- # this uses filename if it is an absolute path, otherwise it uses
- # the path from the environment variable
- filename = os.path.join(
- os.getenv("ORBITAL_RADAR_CONFIG_PATH"), filename
- )
-
- else:
- # check if filename is an absolute path, otherwise use the current
- # working directory
- if not os.path.isabs(filename):
- filename = os.path.join(os.getcwd(), filename)
-
- # make sure that file exists
- if not os.path.isfile(filename):
- raise FileNotFoundError(f"Config file {filename} not found")
-
- with open(filename, MODE) as f:
- config = toml.load(f)
-
- # validate config
- check_config(config)
-
- return config
-
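A minimal configuration that passes check_config below could look like this; the file name and module path are assumptions, and a real configuration contains many more sections:

from orbital_radar.readers.config import read_config  # module path assumed

# orbital_radar_config.toml (minimal sketch):
#   [prepare.general]
#   attenuation_correction = true
#   attenuation_correction_input = "cloudnet_ecmwf"

config = read_config("orbital_radar_config.toml")
print(config["prepare"]["general"]["attenuation_correction_input"])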
-
-[docs]def check_config(config):
- """
- Check config file for consistency
- """
-
- # validity checks
- # make sure that cloudnet product is either cloudnet_categorize or cloudnet_ecmwf
- if config["prepare"]["general"]["attenuation_correction_input"] not in [
- "cloudnet_categorize",
- "cloudnet_ecmwf",
- ]:
- raise ValueError(
- "attenuation_correction_input must be either "
- "'cloudnet_categorize' or 'cloudnet_ecmwf'"
- )
-
- # type checks
- # make sure that attenuation correction is boolean
- if not isinstance(
- config["prepare"]["general"]["attenuation_correction"], bool
- ):
- raise ValueError("attenuation_correction must be boolean")
-
-"""
-This script contains all reader functions for the different radar formats.
-These functions are wrapped by the main function, which picks the correct
-reader depending on the radar site.
-
-The final output is always an xarray.Dataset with the two variables radar
-reflectivity "ze" in [mm6 m-3] and "vm" in [m s-1] as a function of range and
-time.
-
-The input Doppler velocity should be negative for downward motion and positive
-for upward motion. This is changed to negative upward and positive downward to
-match spaceborne convention.
-"""
-
-import os
-import os.path
-from glob import glob
-from scipy.interpolate import interp1d
-
-import numpy as np
-import pandas as pd
-import xarray as xr
-
-from orbital_radar.readers.cloudnet import read_cloudnet
-
-
-[docs]class Radar:
- """
- This class selects the reading function for the provided site and
- performs quality-checks of the imported data. The output contains "ze"
- and "vm" variables.
-
- Implemented readers
- -------------------
- bco: Barbados Cloud Observatory, Barbados
- jue: JOYCE, Juelich, Germany
- mag: Magurele, Romania
- min: Mindelo, Cape Verde
- nor: Norunda, Sweden
- nya: Ny-Alesund, Svalbard
- mirac_p5: Polar 5, Mirac radar
- rasta: Falcon, RASTA radar
- arm: ARM sites
- pamtra: PAMTRA simulations
- cloudnet: cloudnet format
- """
-
- def __init__(self, date, site_name, path, input_radar_format) -> None:
- """
- Reads hourly radar data for a specific site and date with these
- standardized output variables:
- - Radar reflectivity (ze) in mm6 m-3
- - Mean Doppler velocity (vm) in m s-1
- - Range as height above NN (range) in m
- - Time (time)
-
- Parameters
- ----------
- date : pd.Timestamp
- Radar data will be read for this day
- site_name : str
- Name of the radar site (e.g. Mindelo)
- path : str
- Directory of radar data. The NetCDF files are expected inside a
- sub-folder structure starting from path: "path/yyyy/mm/dd/*.nc".
- Other options are not implemented yet.
- input_radar_format : str
- Format of input NetCDF radar data (uoc_v0, uoc_v1, uoc_v2, geoms,
- bco, mirac_p5, cloudnet, pamtra, rasta, arm). Default is cloudnet.
- Otherwise, these formats might be correct: uoc_v0 for nya, uoc_v1
- for jue, uoc_v2 for nor and mag, geoms for min, bco for bco,
- mirac_p5 for mp5.
- """
-
- self.date = pd.Timestamp(date)
- self.site_name = site_name
- self.path = path
- self.make_date_path()
- self.ds_rad = xr.Dataset()
-
- # defines the reader for each site
- readers = {
- "uoc_v0": self.read_uoc_v0,
- "uoc_v1": self.read_uoc_v1,
- "uoc_v2": self.read_uoc_v2,
- "geoms": self.read_geoms,
- "bco": self.read_bco,
- "mirac_p5": self.read_mirac_p5,
- "cloudnet": self.read_cloudnet,
- "pamtra": self.read_pamtra,
- "rasta": self.read_rasta,
- "arm": self.read_arm,
- }
-
- reader = readers.get(input_radar_format)
- if reader is None:
- raise NotImplementedError(
- f"No reader that handles input format {input_radar_format}. "
- f"Please choose one of {list(readers.keys())}."
- )
- reader()
-
- if not self.ds_rad.data_vars:
- print("No radar data found.")
- self.ds_rad = None
-
- else:
- print("Vm sign convention: negative=upward, " "positive=downward")
-
- self.ds_rad["vm"] = -self.ds_rad["vm"]
-
- print(f"Quality checks for {self.site_name} radar data.")
-
- # ensure ze and vm variables exist
- assert "ze" in list(self.ds_rad)
- assert "vm" in list(self.ds_rad)
-
- # ensure same dimension order
- if "height" in list(self.ds_rad.dims):
- dim_order = ["time", "height"]
- else:
- dim_order = ["time", "range"]
- self.ds_rad["ze"] = self.ds_rad.ze.transpose(*dim_order)
- self.ds_rad["vm"] = self.ds_rad.vm.transpose(*dim_order)
-
- # ensure reasonable value ranges
- assert (
- self.ds_rad.ze.isnull().all() or self.ds_rad.ze.min() >= 0
- ), "Ze out of range."
- assert self.ds_rad.ze.isnull().all() or (
- 10 * np.log10(self.ds_rad.ze.max()) < 100
- ), "Ze out of range."
-
- assert (
- self.ds_rad.vm.isnull().all() or self.ds_rad.vm.min() > -80
- ), "Vm values out of range."
- assert (
- self.ds_rad.vm.isnull().all() or self.ds_rad.vm.max() < 80
- ), "Vm values out of range."
-
- # make sure that alt is in the data
- assert "alt" in list(self.ds_rad), "Altitude not found."
-
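Reading one day of ground-based data then reduces to choosing the matching format string; a sketch with a hypothetical data path and an assumed module path:

import pandas as pd

from orbital_radar.readers.radar import Radar  # module path assumed

radar = Radar(
    date=pd.Timestamp("2021-04-06"),
    site_name="jue",
    path="/data/obs/jue",  # hypothetical path with yyyy/mm/dd sub-folders
    input_radar_format="cloudnet",
)
ds = radar.ds_rad  # None if no files were found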
-[docs] def make_date_path(self):
- """
- Creates path with date structure if it exists. Otherwise, uses regular
- path without date extension.
- """
-
- date_path = os.path.join(
- self.path,
- self.date.strftime(r"%Y"),
- self.date.strftime(r"%m"),
- self.date.strftime(r"%d"),
- )
-
- if os.path.exists(date_path):
- self.path = date_path
-
- elif os.path.exists(self.path):
- pass # use regular path without date extension
-
- else:
- raise FileNotFoundError(
- f"The radar data path {self.path} does not exist"
- )
-
-[docs] def get_all_files(self, pattern):
- """
- Lists all radar files in the directory.
-
- Parameters
- ----------
- pattern : str
- Specific file pattern depending on the product.
-
- Returns
- -------
- list
- list of all files inside the directory
- """
-
- pattern_path = os.path.join(self.path, pattern)
- files = sorted(glob(pattern_path))
-
- if len(files) == 0:
- Warning(f"No files found with pattern: {pattern_path}")
-
- return files
-
-[docs] @staticmethod
- def status_message(i, file, files):
- """
- This message will be printed while reading the radar data.
- """
-
- print(f"Reading radar file {i+1}/{len(files)}: {file}")
-
-[docs] @staticmethod
- def remove_duplicate_times(ds):
- """
- Removes duplicate times.
-
- Parameters
- ----------
- ds : xr.Dataset
- Any data with a "time" coordinate.
- """
-
- _, index = np.unique(ds["time"], return_index=True)
- ds = ds.isel(time=index)
-
- return ds
-
-[docs] def convert_and_sort_time(self, base_time):
- """
- Convert time in seconds since base to np.datetime64 format and sort
- time.
-
- Parameters
- ----------
- base_time : str
- Base time as string (e.g. "1970-01-01")
- """
-
- self.ds_rad["time"] = self.ds_rad["time"].astype(
- "timedelta64[s]"
- ) + np.datetime64(base_time)
-
- self.ds_rad = self.ds_rad.sel(time=np.sort(self.ds_rad.time))
-
- # ensure that time of files matches provided date
- assert np.abs(
- self.date.to_datetime64() - self.ds_rad.time
- ).max() < np.timedelta64(2, "D")
-
-[docs] def read_uoc_v2(self):
- """
- This function reads the radar netCDF files of the RPG W-band radar.
- The data are processed with the Matlab code of the University of
- Cologne (UoC). They correspond to the level 2 version of the data
- processing.
-
- Units of file:
- ze unit: mm6 m-3
- vm unit: m s-1
-
- Note: fill value -999 in attributes not recognized by xarray.
- """
-
- files = self.get_all_files("*compact_v2.nc")
-
- if len(files) == 0:
- return None
-
- for i, file in enumerate(files):
- self.status_message(i, file, files)
-
- with xr.open_dataset(file, decode_times=False) as ds:
- ds.load()
-
- ds = self.remove_duplicate_times(ds)
-
- # override keeps attributes from the last file opened
- self.ds_rad = xr.merge(
- [ds[["ze", "vm"]], self.ds_rad], combine_attrs="override"
- )
-
- # extract instrument location and altitude
- self.ds_rad["lon"] = ds["lon"]
- self.ds_rad["lat"] = ds["lat"]
- self.ds_rad["alt"] = ds["zsl"]
-
- self.convert_and_sort_time(base_time="2001-01-01")
-
- # replace fill_value by nan
- self.ds_rad = self.ds_rad.where(self.ds_rad != -999)
-
-[docs] def read_uoc_v0(self):
- """
- This function reads the radar netCDF files of the RPG W-band radar.
- The data are processed with the Matlab code of the University of
- Cologne (UoC). They correspond to the level 2 version of the data
- processing.
-
- Units of file:
- ze unit: dBZ
- vm unit: m s-1
-
- Note: Only one file per day
- """
-
- files = self.get_all_files("*joyrad94_nya_lv1b_*")
-
- if len(files) == 0:
- return None
-
- for i, file in enumerate(files):
- self.status_message(i, file, files)
-
- with xr.open_dataset(file, decode_times=False) as ds:
- ds.load()
-
- ds = ds.rename({"height": "range"})
-
- # round times to full seconds
- ds["time"] = np.around(ds["time"]).astype("int")
-
- ds = self.remove_duplicate_times(ds)
-
- # override keeps attributes from the last file opened
- self.ds_rad = xr.merge(
- [ds[["ze", "vm"]], self.ds_rad], combine_attrs="override"
- )
-
- # extract instrument location and altitude
- self.ds_rad["lon"] = ds["lon"]
- self.ds_rad["lat"] = ds["lat"]
- self.ds_rad["alt"] = ds["instrument_altitude"]
-
- self.convert_and_sort_time(base_time="2001-01-01")
-
- # convert from dB to linear units
- self.ds_rad["ze"] = 10 ** (0.1 * self.ds_rad["ze"])
-
-[docs] def read_uoc_v1(self):
- """
- This function reads the radar netCDF files of the RPG W-band radar.
- The data are processed with the Matlab code of the University of
- Cologne (UoC). They correspond to the level 2 version of the data
- processing.
-
- Units of file:
- ze unit: mm6 m-3
- vm unit: m s-1
-
- Note: Doppler spectra are not read to improve performance.
- """
-
- files = self.get_all_files("*ZEN_v2.nc")
-
- if len(files) == 0:
- return None
-
- for i, file in enumerate(files):
- self.status_message(i, file, files)
-
- xr_kwds = dict(drop_variables=["sze"], decode_times=False)
- with xr.open_dataset(file, **xr_kwds) as ds:
- ds.load()
-
- ds = self.remove_duplicate_times(ds)
-
- # override keeps attributes from the last file opened
- self.ds_rad = xr.merge(
- [ds[["ze", "vm"]], self.ds_rad], combine_attrs="override"
- )
-
- # extract instrument location and altitude
- self.ds_rad["lon"] = ds["lon"]
- self.ds_rad["lat"] = ds["lat"]
- self.ds_rad["alt"] = ds["zsl"]
-
- self.convert_and_sort_time(base_time="2001-01-01")
-
-[docs] def read_geoms(self):
- """
- This function reads the radar netCDF files of the RPG W-band radar.
- The data are processed with the Matlab code of the University of
- Cologne (UoC). They correspond to the level 2 version of the data
- processing.
-
- Units of file:
- ze unit: mm6 m-3
- vm unit: m s-1
-
- Note: fill value -999 in attributes not recognized by xarray.
- """
-
- files = self.get_all_files("*groundbased_radar_profiler*")
-
- if len(files) == 0:
- return None
-
- for i, file in enumerate(files):
- self.status_message(i, file, files)
-
- with xr.open_dataset(file, decode_times=False) as ds:
- ds.load()
-
- ds = ds.rename(
- {
- "RANGE": "range",
- "DATETIME": "time",
- "RADAR.REFLECTIVITY.FACTOR": "ze",
- "DOPPLER.VELOCITY_MEAN": "vm",
- }
- )
- ds = self.remove_duplicate_times(ds)
-
- # add range and time as coordinates
- ds.coords["range"] = ds["range"]
- ds.coords["time"] = ds["time"]
-
- # override keeps attributes from the last file opened
- self.ds_rad = xr.merge(
- [ds[["ze", "vm"]], self.ds_rad], combine_attrs="override"
- )
-
- # extract instrument location and altitude
- self.ds_rad["lon"] = ds["LONGITUDE.INSTRUMENT"]
- self.ds_rad["lat"] = ds["LATITUDE.INSTRUMENT"]
- self.ds_rad["alt"] = ds["ALTITUDE.INSTRUMENT"]
-
- self.convert_and_sort_time(base_time="2001-01-01")
-
- # replace fill_value by nan
- self.ds_rad = self.ds_rad.where(self.ds_rad != -999)
-
-[docs] def read_bco(self):
- """
- This function reads the radar netCDF files of the RPG W-band radar.
- The data are processed with the Matlab code of the University of
- Cologne (UoC). They correspond to the level 2 version of the data
- processing.
-
- Units of file:
- ze unit: dBZ
- vm unit: m s-1
-
- Note: Only one file per day
- """
-
- files = self.get_all_files(f'*{self.date.strftime(r"%Y%m%d")}.nc')
-
- if len(files) == 0:
- return None
-
- for i, file in enumerate(files):
- self.status_message(i, file, files)
-
- with xr.open_dataset(file, decode_times=False) as ds:
- ds.load()
-
- ds = ds.rename({"Ze": "ze", "VEL": "vm"})
-
- ds = self.remove_duplicate_times(ds)
-
- # override keeps attributes from the last file opened
- self.ds_rad = xr.merge(
- [ds[["ze", "vm"]], self.ds_rad], combine_attrs="override"
- )
-
- # extract instrument location and altitude
- self.ds_rad["lon"] = ds["lon"]
- self.ds_rad["lat"] = ds["lat"]
- self.ds_rad["alt"] = np.nan
-
- self.convert_and_sort_time(base_time="1970-01-01")
-
- # convert from dB to linear units
- self.ds_rad["ze"] = 10 ** (0.1 * self.ds_rad["ze"])
-
- # apply dBz threshold
- dbz_threshold = 10 ** (95.5 / 10.0)
- dbz_noise_level = self.ds_rad["range"] ** 2 / dbz_threshold
- dbz_noise_level = xr.DataArray(
- dbz_noise_level,
- dims="range",
- coords={"range": self.ds_rad["range"]},
- )
- self.ds_rad["ze"] = self.ds_rad["ze"].where(
- self.ds_rad["ze"] > dbz_noise_level
- )
- self.ds_rad["vm"] = self.ds_rad["vm"].where(
- self.ds_rad["ze"] > dbz_noise_level
- )
-
-[docs] def read_mirac_p5(self):
- """
- This function reads the radar netCDF files of the RPG W-band radar
- onboard the Polar 5 aircraft.
-
- Units of file:
- ze unit: dBZ
-
- Note: no mean Doppler velocity available.
- """
-
- files = self.get_all_files(f'*{self.date.strftime(r"%Y%m%d")}*.nc')
-
- if len(files) == 0:
- return None
-
- for i, file in enumerate(files):
- self.status_message(i, file, files)
-
- with xr.open_dataset(file, decode_times=False) as ds:
- ds.load()
-
- ds = ds.rename({"Ze": "ze"})
-
- ds = self.remove_duplicate_times(ds)
-
- # override keeps attributes from the last file opened
- self.ds_rad = xr.merge(
- [ds[["ze"]], self.ds_rad], combine_attrs="override"
- )
-
- # extract instrument location and altitude
- self.ds_rad["lon"] = ds["lon"]
- self.ds_rad["lat"] = ds["lat"]
- self.ds_rad["alt"] = ds["alt"]
-
- self.convert_and_sort_time(base_time="2017-01-01")
-
- # replace fill_value by nan
- self.ds_rad = self.ds_rad.where(self.ds_rad != -999)
-
- # add dummy variable for Doppler velocity
- self.ds_rad["vm"] = xr.DataArray(
- np.zeros(self.ds_rad["ze"].shape),
- dims=["time", "height"],
- coords={
- "time": self.ds_rad["time"],
- "height": self.ds_rad["height"],
- },
- )
-
-[docs] def read_cloudnet(self):
- """
- Reads radar reflectivity and Doppler velocity from Cloudnet categorize
- files.
-
- Note: Cloudnet height is already in height above mean sea level.
- """
-
- ds = read_cloudnet(
- attenuation_correction_input="cloudnet_categorize",
- date=self.date,
- site_name=self.site_name,
- path=self.path,
- add_date=False,
- )
-
- ds = ds.rename({"Z": "ze", "v": "vm"})
- ds = self.remove_duplicate_times(ds)
-
- self.ds_rad = ds[["ze", "vm"]]
-
- # extract instrument location and altitude
- self.ds_rad["lon"] = ds["longitude"]
- self.ds_rad["lat"] = ds["latitude"]
- self.ds_rad["alt"] = ds["altitude"]
-
- # convert from dB to linear units
- self.ds_rad["ze"] = 10 ** (0.1 * self.ds_rad["ze"])
-
- # set inf ze to nan
- self.ds_rad["ze"] = self.ds_rad["ze"].where(
- self.ds_rad["ze"] != np.inf
- )
-
- # set very low vm to nan
- self.ds_rad["vm"] = self.ds_rad["vm"].where(self.ds_rad["vm"] > -500)
-
- self.ds_rad["vm"] = self.ds_rad["vm"].where(self.ds_rad["vm"] < 500)
-
-[docs] def read_pamtra(self):
- """
- Reads PAMTRA simulation for a point location as a function of time.
-
- Attenuation: Depending on the PAMTRA settings, the stored radar
- reflectivity may already include attenuation (bottom-up or top-down
- view). The output provides the unattenuated reflectivity plus both
- attenuated views.
-
- Convention of output:
- ze: radar reflectivity without attenuation
- ze_top_down: radar reflectivity with attenuation for top-down view
- ze_bottom_up: radar reflectivity with attenuation for bottom-up view
-
- Units of file:
- ze unit: dBZ
- vm unit: m s-1
- """
-
- files = self.get_all_files(f'*{self.date.strftime(r"%Y%m%d")}*v1.nc')
-
- if len(files) == 0:
- files = self.get_all_files(f'*{self.date.strftime(r"%Y%m%d")}*v0.nc')
-
- if len(files) == 0:
- return None
-
- for i, file in enumerate(files):
- self.status_message(i, file, files)
-
- with xr.open_dataset(file) as ds:
- ds.load()
-
- # currently, only 94 GHz simulations are supported
- assert ds.grid_y.size == 1
- ds = ds.isel(grid_y=0)
-
- # change heightbins to actual height values
- assert (ds.height.diff("grid_x") == 0).all()
- ds["height"] = ds["height"].isel(grid_x=0)
- ds = ds.swap_dims({"heightbins": "height"})
-
- # rename variables
- ds = ds.rename(
- {
- "Ze": "ze",
- "Radar_MeanDopplerVel": "vm",
- "datatime": "time",
- "longitude": "lon",
- "latitude": "lat",
- }
- )
-
- assert ds.frequency == 94
- assert ds.radar_polarisation.size == 1
- assert ds.radar_peak_number.size == 1
-
- ds["ze"] = ds["ze"].isel(
- frequency=0, radar_polarisation=0, radar_peak_number=0
- )
- ds["vm"] = ds["vm"].isel(
- frequency=0, radar_polarisation=0, radar_peak_number=0
- )
-
- ds = ds.swap_dims({"grid_x": "time"})
- ds = ds.reset_coords()
-
- # check if attenuation correction was performed by pamtra
- props = {
- p.split(": ")[0]: p.split(": ")[1]
- for p in ds.attrs["properties"]
- .replace("'", "")[1:-1]
- .split(", ")
- }
-
- # two-way attenuation handling
- da_att = 2 * (
- ds.Attenuation_Hydrometeors.isel(frequency=0)
- + ds.Attenuation_Atmosphere.isel(frequency=0)
- )
- da_att_bu = da_att.cumsum("height")
- da_att_td = (
- da_att.sel(height=np.flip(ds.height))
- .cumsum("height")
- .sel(height=ds.height)
- )
-
- # radar reflectivity contains no attenuation
- if props["radar_attenuation"] == "disabled":
- pass
-
- # radar reflectivity contains attenuation for top-down view
- elif props["radar_attenuation"] == "top-down":
- ds["ze"] = ds["ze"] + da_att_td
-
- # radar reflectivity contains attenuation for bottom-up view
- elif props["radar_attenuation"] == "bottom-up":
- ds["ze"] = ds["ze"] + da_att_bu
-
- else:
- raise ValueError(
- f"Attenuation correction {props['radar_attenuation']} not "
- "supported."
- )
-
- ds["ze_bottom_up"] = ds["ze"] - da_att_bu
- ds["ze_top_down"] = ds["ze"] - da_att_td
-
- # override keeps attributes from the last file opened
- self.ds_rad = xr.merge(
- [ds[["ze", "vm", "ze_bottom_up", "ze_top_down"]], self.ds_rad],
- combine_attrs="override",
- )
-
- # add longitude and latitude
- assert (ds["lon"].diff("time") == 0).all()
- assert (ds["lat"].diff("time") == 0).all()
-
- self.ds_rad["lon"] = ds["lon"].isel(time=0).reset_coords(drop=True)
- self.ds_rad["lat"] = ds["lat"].isel(time=0).reset_coords(drop=True)
-
- if "alt" not in list(self.ds_rad):
- print("No altitude found in PAMTRA file. Setting alt to 0 m.")
- self.ds_rad["alt"] = 0
-
- # convert from dB to linear units
- self.ds_rad["ze"] = 10 ** (0.1 * self.ds_rad["ze"])
- self.ds_rad["ze_bottom_up"] = 10 ** (0.1 * self.ds_rad["ze_bottom_up"])
- self.ds_rad["ze_top_down"] = 10 ** (0.1 * self.ds_rad["ze_top_down"])
-
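The bottom-up versus top-down handling above boils down to cumulative sums of the two-way attenuation taken from opposite ends of the column; a toy illustration of the same xarray pattern:

import numpy as np
import xarray as xr

height = np.array([0.0, 500.0, 1000.0, 1500.0])
da_att = xr.DataArray(
    [0.1, 0.2, 0.3, 0.4], dims="height", coords={"height": height}
)  # two-way attenuation per layer [dB]

att_bottom_up = da_att.cumsum("height")  # 0.1, 0.3, 0.6, 1.0 (ground-based view)
att_top_down = (
    da_att.sel(height=np.flip(height)).cumsum("height").sel(height=height)
)  # 1.0, 0.9, 0.7, 0.4 (spaceborne view)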
-[docs] def read_rasta(self):
- """
- This function reads the radar netCDF files of the airborne RASTA radar.
- The time-dependent height is interpolated onto a regular height grid.
- No correction for gas attenuation is applied (gaseous_twowayatt).
- The horizontal aircraft speed is also read.
-
- The following two flags are applied to ze and vm:
-
- First flag array: flag (1 and 2 are removed)
- -4: nadir not available
- -3: Last gates not valid
- -2: First gates not valid
- -1: beyond maximum range
- 0: no cloud
- 1: cloud or precipitation
- 2: possible cloud
- 3: ground echo
- 4: ghost ground echo (downward domain)
- 5: ghost ground echo (upward domain)
- 6: underground signal
- 7: underground noise
- 8: Z can be interpolated
-
- Second flag array: flag_Z_interpolated (2 and 3 are removed)
- 0: no interpolation
- 1: interpolated but data available
- 2: interpolated and no data available
- 3: ghost ground echo (upward) is interpolated
-
- Units of file:
- ze unit: dBZ
- vm unit: m s-1
- ac_speed unit: m s-1
-
- Note: only the vertical-pointing antennas are used.
- """
-
- files = self.get_all_files(f'*{self.date.strftime(r"%Y%m%d")}*.nc')
-
- if len(files) == 0:
- return None
-
- for i, file in enumerate(files):
- self.status_message(i, file, files)
-
- with xr.open_dataset(file) as ds:
- ds.load()
-
- ds = self.remove_duplicate_times(ds)
-
- # filter data
- keep_data = (
- (ds.flag != 1)
- & (ds.flag != 2)
- & (ds.flag_Z_interpolated != 2)
- & (ds.flag_Z_interpolated != 3)
- )
- ds["Z_interpolated"] = ds["Z_interpolated"].where(keep_data)
- ds["Vz"] = ds["Vz"].where(keep_data)
-
- # change doppler velocity from positive upward to negative upward
- ds["Vz"] = -ds["Vz"]
-
- # rename variables
- ds = ds.rename(
- {
- "longitude": "lon",
- "latitude": "lat",
- "height": "height_index", # this is not actual height
- "aircraft_vh": "ac_speed",
- "altitude": "alt",
- }
- )
-
- # interpolate onto regular height grid
- ds.coords["height"] = np.arange(-2000, 15000, 60)
- ze_height = np.zeros((len(ds["time"]), len(ds["height"])))
- vm_height = np.zeros((len(ds["time"]), len(ds["height"])))
- for i in range(len(ds.time)):
-
- # interpolate ze in linear space
- f_ze = interp1d(
- ds["height_2D"].isel(time=i).values * 1e3,
- 10 ** (0.1 * ds["Z_interpolated"].isel(time=i).values),
- kind="linear",
- fill_value=np.nan,
- bounds_error=False,
- )
- ze_height[i, :] = f_ze(ds["height"].values)
-
- # interpolate vm
- f_vm = interp1d(
- ds["height_2D"].isel(time=i).values * 1e3,
- ds["Vz"].isel(time=i).values,
- kind="linear",
- fill_value=np.nan,
- bounds_error=False,
- )
- vm_height[i, :] = f_vm(ds["height"].values)
-
- # add variables to dataset and name them ze and vm
- ds["ze"] = xr.DataArray(
- ze_height,
- dims=["time", "height"],
- coords={"time": ds["time"], "height": ds["height"]},
- )
- ds["vm"] = xr.DataArray(
- vm_height,
- dims=["time", "height"],
- coords={"time": ds["time"], "height": ds["height"]},
- )
-
- # override keeps attributes from the last file opened
- self.ds_rad = xr.merge(
- [
- ds[["ze", "vm", "lon", "lat", "alt", "ac_speed"]],
- self.ds_rad,
- ],
- combine_attrs="override",
- )
-
- self.convert_and_sort_time(base_time=self.date)
-
-[docs] def read_arm(self):
- """
- Reader for ARM radar data. The reader uses the best estimate of the
- radar reflectivity.
-
- Units of file:
- ze unit: dBZ
- vm unit: m s-1
- """
-
- files = self.get_all_files(f'*{self.date.strftime(r"%Y%m%d")}*.cdf')
-
- if len(files) == 0:
- return None
-
- for i, file in enumerate(files):
- self.status_message(i, file, files)
-
- with xr.open_dataset(file) as ds:
- ds.load()
-
- ds = self.remove_duplicate_times(ds)
-
- ds = ds.rename(
- {
- "mean_doppler_velocity": "vm",
- "reflectivity_best_estimate": "ze",
- }
- )
-
- # override keeps attributes from the last file opened
- self.ds_rad = xr.merge(
- [ds[["ze", "vm"]], self.ds_rad], combine_attrs="override"
- )
-
- # add longitude and latitude
- self.ds_rad["lon"] = ds["lon"]
- self.ds_rad["lat"] = ds["lat"]
- self.ds_rad["alt"] = ds["alt"]
-
- self.ds_rad["ze"] = 10 ** (0.1 * self.ds_rad["ze"])
-
-"""
-Reads weighting function from EarthCARE CPR.
-"""
-
-import numpy as np
-import xarray as xr
-
-
-[docs]def read_range_weighting_function(file):
- """
- Reads EarthCARE CPR range weighting function. The pulse length factor
- is reversed to match the sign convention of the groundbased radar.
-
- Parameters
- ----------
- file : str
- Path to file containing weighting function
-
- Returns
- -------
- wf : xarray.Dataset
- Weighting function
- """
-
- wf = np.loadtxt(file)
-
- ds_wf = xr.Dataset()
- ds_wf.coords["tau_factor"] = -wf[:, 0]
- ds_wf["response"] = ("tau_factor", wf[:, 1])
-
- ds_wf.tau_factor.attrs = dict(
- long_name="pulse length factor",
- short_name="tau_factor",
- description="multiply by tau to get height relative to pulse center",
- )
-
- ds_wf.response.attrs = dict(
- long_name="weighting function",
- short_name="response",
- units="dB",
- description="weighting function for CPR",
- )
-
- return ds_wf
-
-"""
-This script reads the output of the simulator
-"""
-
-import xarray as xr
-
-
-[docs]def read_spaceview(file):
- """
- Reads the output of the simulator.
-
- Parameters
- ----------
- file: str
- Path to the NetCDF file.
-
- Returns
- -------
- ds: xarray.Dataset
- Dataset containing the output of the simulator.
- """
-
- with xr.open_dataset(file) as ds:
- ds.load()
-
- return ds
-
-"""
-Runs the orbital radar simulator.
-"""
-
-import numpy as np
-import xarray as xr
-from scipy import stats
-from scipy.interpolate import interp1d
-
-from orbital_radar.helpers import db2li, li2db
-from orbital_radar.plotting.curtains import plot_along_track
-from orbital_radar.plotting.histogram import plot_histogram
-from orbital_radar.plotting.scatter import plot_scatter
-from orbital_radar.radarspec import RadarBeam
-from orbital_radar.version import __version__
-
-
-[docs]class Simulator:
- """
- Runs the orbital radar simulator.
- """
-
- def __init__(
- self,
- sat_name,
- file_earthcare=None,
- nyquist_from_prf=False,
- ms_threshold=12,
- ms_threshold_integral=41,
- **radar_specs,
- ):
- """
- Initialize the simulator class. The input dataset will be extended with
- intermediate simulation steps.
-
- To run the simulator:
- - initialize the class
- - run the transform method
-
- Requirement:
- - all nan values should be filled with zeros
- - along-track and height coordinates should be monotonically increasing,
- evenly spaced, and multiples of the satellite resolution to ensure
- that each satellite bin contains the same number of high-resolution
- bins (e.g. 0, 100, 200, 300... --> 0, 500, 1000)
-
- Parameters
- ----------
- sat_name : str
- Name of the satellite. This is used to get the radar specifications
- from the config file.
- nyquist_from_prf : bool
- If True, the Nyquist velocity is calculated from the pulse
- repetition frequency (PRF).
- file_earthcare : str
- path to file containing EarthCARE CPR weighting function. This
- file is used if the satellite name is 'earthcare'.
- radar_specs : dict
- Dictionary with radar specifications.
- """
-
- # initialize class variables
- self.sat_name = sat_name
- self.radar_specs = radar_specs
- self.ds = xr.Dataset()
-
- self.ms_threshold = ms_threshold
- self.ms_threshold_integral = ms_threshold_integral
-
- # get radar specifications
- self.beam = RadarBeam(
- sat_name=self.sat_name,
- file_earthcare=file_earthcare,
- nyquist_from_prf=nyquist_from_prf,
- **self.radar_specs,
- )
-
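For orientation, initializing the simulator for a pre-defined satellite could look like the sketch below; the module path and the attribute assignment of the input dataset are assumptions (only the grid requirements follow from the docstring above):

import numpy as np
import xarray as xr

from orbital_radar.simulator import Simulator  # module path assumed

sim = Simulator(sat_name="earthcare")

# evenly spaced grids whose spacing divides the EarthCARE resolutions
# (500 m along track, 100 m in range)
along_track = np.arange(0, 50000.0, 100.0)  # m
height = np.arange(0, 12000.0, 25.0)        # m
shape = (along_track.size, height.size)

sim.ds = xr.Dataset(  # attribute assignment is an assumption
    {
        "ze": (("along_track", "height"), np.zeros(shape)),  # linear units
        "vm": (("along_track", "height"), np.zeros(shape)),  # m s-1
    },
    coords={"along_track": along_track, "height": height},
)
# sim.transform()  # entry point referenced in the class docstring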
-[docs] def check_input_dataset(self):
- """
- Check user input for consistency.
- """
-
- # make sure that ds is an xarray dataset
- assert isinstance(self.ds, xr.Dataset)
-
- # make sure that dimensions are named correctly
- assert self.ds["ze"].dims == ("along_track", "height")
-
- # make sure that variables exist
- assert "ze" in self.ds
-
- # make sure that radar reflectivity is in linear units
- assert self.ds["ze"].min() >= 0
-
- # check if satellite resolution is a multiple of the range resolution
- assert (
- self.beam.spec.range_resolution
- % self.ds["height"].diff("height")[0]
- == 0
- ), (
- f"Height resolution is not a multiple of the satellite resolution: "
- f"{self.ds['height'].diff('height')[0]} m"
- )
-
- # check if range resolution is smaller or equal to satellite resolution
- assert (
- self.ds["height"].diff("height")[0]
- <= self.beam.spec.range_resolution
- ), (
- f"Range resolution is larger than the satellite resolution: "
- f"{self.ds['height'].diff('height')[0]} m"
- )
-
-[docs] def prepare_input_dataset(self):
- """
- Prepares input dataset for computations. This only includes replacing
- nan values by zero in both ze and vm.
- """
-
- self.ds = self.ds.fillna(0)
-
- # make sure that ze has no nan values
- assert not self.ds["ze"].isnull().any()
-
- # make sure that vm has no nan values
- assert not self.ds["vm"].isnull().any()
-
-[docs] def calculate_along_track_sat_bin_edges(self):
- """
- Calculate the bin edges of the along-track satellite grid. The edges
- are constructed in the same way as for the height grid.
- """
-
- along_track_sat_bin_edges = np.append(
- self.ds["along_track_sat"]
- - self.beam.spec.along_track_resolution / 2,
- self.ds["along_track_sat"][-1]
- + self.beam.spec.along_track_resolution / 2,
- )
-
- return along_track_sat_bin_edges
-
-[docs] def calculate_height_sat_bin_edges(self):
- """
- Calculate the bin edges of the height satellite grid. The edges are
- constructed in the same way as for the along-track grid.
- """
-
- height_sat_bin_edges = np.append(
- self.ds["height_sat"] - self.beam.spec.range_resolution / 2,
- self.ds["height_sat"][-1] + self.beam.spec.range_resolution / 2,
- )
-
- return height_sat_bin_edges
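-
- # A small numeric illustration of the bin-edge construction used by the two
- # methods above: N bin centers spaced by the satellite resolution become
- # N + 1 edges.
- import numpy as np
-
- centers = np.array([250.0, 750.0, 1250.0])  # e.g. height_sat at 500 m resolution
- resolution = 500.0
- edges = np.append(centers - resolution / 2, centers[-1] + resolution / 2)
- print(edges)  # [   0.  500. 1000. 1500.]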
-
-[docs] def convolve_along_track(self):
- """
- Calculates the along-track convolution from the input suborbital data
- using the along-track weighting function of the spaceborne radar.
- Further, the function calculates the error due to satellite velocity.
- """
-
- ds = self.ds[["ze", "vm"]].copy()
- ds = ds.fillna(0)
-
- # create dask array by splitting height to reduce memory when expanding
- # window dimension
- ds = ds.chunk(chunks={"height": 50})
-
- # create new dataset with window dimension stacked as third dimension
- weight = xr.DataArray(self.beam.atrack_weights, dims=["window"])
- ds = ds.rolling(along_track=len(weight), center=True).construct(
- "window"
- )
-
- # add error due to satellite motion to each doppler velocity window
- da_vel_error = xr.DataArray(self.beam.velocity_error, dims=["window"])
- ds["vm_err"] = ds["vm"] + da_vel_error
-
- # calculate along-track convolution and convert dask to xarray
- self.ds["ze_acon"] = ds["ze"].dot(weight).compute()
- self.ds["vm_acon"] = ds["vm"].dot(weight).compute()
- self.ds["vm_acon_err"] = ds["vm_err"].dot(weight).compute()
-
-[docs] def integrate_along_track(self):
- """
- Integrates the along-track convolved data to profiles, which represent
- the satellite's footprint. The integration length is given by the
- satellite's along-track resolution.
-
- The satellite along-track bins refer to the center of the field of view.
- """
-
- # create bin edges for along-track integration
- # the last radar bin is created only if it is included in the input
- # grid. the same convention is applied to the height grid
- along_track_sat_edges = np.arange(
- self.ds["along_track"][0],
- self.ds["along_track"][-1],
- self.beam.spec.along_track_resolution,
- )
-
- # create bin centers for along-track integration
- along_track_bin_center = (
- along_track_sat_edges[:-1] + along_track_sat_edges[1:]
- ) / 2
-
- # along-track integration onto satellite along-track grid
- kwds = {
- "group": "along_track",
- "bins": along_track_sat_edges,
- "labels": along_track_bin_center,
- }
-
- self.ds["ze_aconint"] = self.ds["ze_acon"].groupby_bins(**kwds).mean()
- self.ds["vm_aconint"] = self.ds["vm_acon"].groupby_bins(**kwds).mean()
- self.ds["vm_aconint_err"] = (
- self.ds["vm_acon_err"].groupby_bins(**kwds).mean()
- )
-
- # rename along-track dimension
- self.ds = self.ds.rename({"along_track_bins": "along_track_sat"})
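-
- # A toy example of the groupby_bins averaging used above: values on a fine
- # along-track grid are averaged into coarser satellite bins labelled by the
- # bin centers.
- import xarray as xr
-
- da = xr.DataArray([0.0, 1.0, 2.0, 3.0, 4.0, 5.0], dims="along_track", coords={"along_track": [50, 150, 250, 350, 450, 550]})
- print(da.groupby_bins("along_track", bins=[0, 300, 600], labels=[150, 450]).mean().values)
- # -> [1. 4.]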
-
-[docs] def convolve_height(self):
- """
- Convolution of the along-track integrated data with the range
- weighting function of the spaceborne radar.
- """
-
- # this defines the weights for the range gates that will be averaged
- da_range_weights = xr.DataArray(
- data=self.beam.range_weights,
- coords={"pulse_center_distance": self.beam.range_bins},
- dims=["pulse_center_distance"],
- name="range_weights",
- )
-
- # this defines the rolling window stride, i.e., the factor by which
- # the input height resolution is reduced to the satellite resolution
- stride = int(
- self.beam.spec.range_resolution
- / self.ds["height"].diff("height")[0]
- )
-
- # create new dimension with all range gates that contribute to the
- # along-height convolution at each range gate
- ds = (
- self.ds[["ze_aconint", "vm_aconint", "vm_aconint_err"]]
- .rolling(height=len(da_range_weights), center=True)
- .construct("pulse_center_distance", stride=stride)
- )
- ds = ds.rename({"height": "height_sat"})
-
- # calculate along-range convolution
- self.ds["ze_sat"] = ds["ze_aconint"].dot(da_range_weights)
- self.ds["vm_sat"] = ds["vm_aconint"].dot(da_range_weights)
- self.ds["vm_sat_vel"] = ds["vm_aconint_err"].dot(da_range_weights)
-
-[docs] def calculate_nubf(self):
- r"""
- Calculates the non-uniform beam filling from the standard
- deviation of Ze within the radar volume.
-
- Currently, the flag is expressed as standard deviation only and no
- threshold to indicate high standard deviation is applied. This may
- be added in the future to reduce the output file size.
- """
-
- # create labels for each satellite pixel (height_sat x along_track_sat)
- labels = np.arange(
- self.ds["height_sat"].size * self.ds["along_track_sat"].size
- ).reshape(self.ds["ze_sat"].shape)
-
- # calculate bin edges of satellite grid
- along_track_sat_bin_edges = self.calculate_along_track_sat_bin_edges()
- height_sat_bin_edges = self.calculate_height_sat_bin_edges()
-
- # assign satellite pixel label to each input pixel of suborbital radar
- ix_along_track = np.searchsorted(
- along_track_sat_bin_edges[:-1],
- self.ds["along_track"].values,
- side="left",
- )
- ix_height = np.searchsorted(
- height_sat_bin_edges[:-1],
- self.ds["height"].values,
- side="left",
- )
-
- # adjust index at first position
- ix_height[ix_height == 0] = 1
- ix_along_track[ix_along_track == 0] = 1
-
- ix_height = ix_height - 1
- ix_along_track = ix_along_track - 1
-
- # expand the 1d indices to the 2d (along_track, height) input grid
- ix_along_track_2d, ix_height_2d = np.meshgrid(
- ix_along_track, ix_height, indexing="ij"
- )
- labels_input_grid = labels[ix_along_track_2d, ix_height_2d]
-
- # calculate standard deviation of ze on input grid in linear units
- # this is done with pandas for faster performance
- df_ze = (
- self.ds["ze"]
- .stack({"x": ("along_track", "height")})
- .to_dataframe()
- )
- df_ze["labels"] = labels_input_grid.flatten()
- df_nubf = li2db(df_ze["ze"]).groupby(df_ze["labels"]).std()
-
- # convert to xarray
- self.ds["nubf"] = xr.DataArray(
- df_nubf.values.reshape(labels.shape),
- dims=["along_track_sat", "height_sat"],
- coords={
- "along_track_sat": self.ds["along_track_sat"],
- "height_sat": self.ds["height_sat"],
- },
- )
-
-[docs] def calculate_nubf_flag(self, threshold=1):
- """
- Calculate non-uniform beam filling flag. The flag is 1 if the
- non-uniform beam filling is higher than a certain threshold, and 0
- otherwise.
-
- Parameters
- ----------
- threshold : float
- Threshold for non-uniform beam filling. The default is 1 dB.
- """
-
- self.ds["nubf_flag"] = (self.ds["nubf"] > threshold).astype("int")
-
-[docs] def calculate_vm_bias(self):
- """
- Calculate the satellite Doppler velocity bias between the estimate
- with and without satellite motion error.
- """
-
- self.ds["vm_bias"] = self.ds["vm_sat"] - self.ds["vm_sat_vel"]
-
-[docs] def calculate_vm_bias_flag(self, threshold=0.5):
- """
- Calculate the satellite Doppler velocity bias flag. The flag is 1 if
- the absolute satellite Doppler velocity bias exceeds the threshold,
- and 0 otherwise.
-
- Parameters
- ----------
- threshold : float
- Threshold for satellite Doppler velocity bias. The default is 0.5
- m s-1.
- """
-
- self.ds["vm_bias_flag"] = (
- np.abs(self.ds["vm_bias"]) > threshold
- ).astype("int")
-
-[docs] def calculate_signal_fraction(self):
- """
- Calculates the fraction of bins that contain a ze signal above the
- detection limit of the spaceborne radar. The fraction is 1 if all
- bins contain signal, and 0 if no bins contain signal.
- """
-
- # calculate bin edges of satellite grid
- along_track_sat_bin_edges = self.calculate_along_track_sat_bin_edges()
- height_sat_bin_edges = self.calculate_height_sat_bin_edges()
-
- # calculate fraction of bins that contain signal
- self.ds["signal_fraction"] = self.ds["ze"] > 0
-
- self.ds["signal_fraction"] = (
- self.ds["signal_fraction"]
- .groupby_bins(
- "along_track",
- bins=along_track_sat_bin_edges,
- labels=self.ds["along_track_sat"].values,
- )
- .mean()
- ).rename({"along_track_bins": "along_track_sat"})
-
- self.ds["signal_fraction"] = (
- self.ds["signal_fraction"]
- .groupby_bins(
- "height",
- bins=height_sat_bin_edges,
- labels=self.ds["height_sat"].values,
- )
- .mean()
- ).rename({"height_bins": "height_sat"})
-
-[docs] def calculate_ms_flag(self):
- """
- Calculates the multiple scattering flag. The flag is 1 if multiple
- scattering occurs, and 0 if no multiple scattering occurs.
-
- The flag is calculated from the radar reflectivity of the spaceborne
- radar from these steps:
- - Calculate integral of radar reflectivity above a certain threshold
- from the top of the atmosphere (TOA) down to the surface.
- - Multiple scattering occurs if the integral reaches a critical value
- at a certain height.
- """
-
- # get ze above multiple scattering threshold
- da_ze_above_threshold = self.ds["ze_sat"] > db2li(self.ms_threshold)
-
- # integrate from top to bottom (this requires sel)
- self.ds["ms_flag"] = (
- self.ds["ze_sat"]
- .where(da_ze_above_threshold)
- .sel(height_sat=self.ds["height_sat"][::-1])
- .cumsum("height_sat")
- .sel(height_sat=self.ds["height_sat"])
- ) * self.beam.spec.range_resolution
-
- # convert to dBZ and calculate flag
- self.ds["ms_flag"] = (
- li2db(self.ds["ms_flag"]) > self.ms_threshold_integral
- ).astype("int")
-
- # set flag to 0 below the surface
- subsurface = self.ds["height_sat"].where(
- self.ds["height_sat"] < 0, drop=True
- )
- self.ds["ms_flag"].loc[{"height_sat": subsurface}] = 0
-
-[docs] def apply_detection_limit(self, var_ze, var_other: list):
- """
- Applies the detection limit of the spaceborne radar to the along-height
- convoluted data.
-
- Parameters
- ----------
- var_ze : str
- Radar reflectivity variable name
- var_other : list
- List with other variables that should be masked with the radar
- reflectivity detection limit.
- """
-
- # apply radar reflectivity detection limit
- ix = self.ds[var_ze] > db2li(self.beam.spec.detection_limit)
-
- for var in var_other:
- self.ds[var] = self.ds[var].where(ix)
-
-[docs] @staticmethod
- def add_noise(x, x_std, noise):
- """
- Equation to calculate the noise from values without noise, the
- uncertainty of the values, and random noise.
-
- Parameters
- ----------
- x : xr.DataArray
- Radar reflectivity [dB] or doppler velocity [m s-1]
- x_std : float
- Radar reflectivity uncertainty [dB] or doppler velocity uncertainty
- [m s-1]
- noise : np.array
- Random noise with shape equal to x.
-
- Returns
- -------
- x_noise : xr.DataArray
- Radar reflectivity with added noise [dB]
- """
-
- x_noise = x + x_std * noise
-
- return x_noise
-
-[docs] def calculate_vm_std_nubf(self):
- """
- Calculate the remaining error in correcting mean Doppler velocity
- biases caused by non-uniform beam filling.
-
- The calculation is based on the horizontal radar reflectivity gradient
- at the input resolution. The gradient is calculated along the along-
- track direction. The gradient is then averaged onto the satellite grid
- and the absolute value is taken. The error is then calculated as 0.15
- times the gradient divided by 3 dBZ/km. Bins without reflectivity are
- set to 0 before averaging onto satellite resolution.
- """
-
- # calculate bin edges of satellite grid
- along_track_sat_bin_edges = self.calculate_along_track_sat_bin_edges()
- height_sat_bin_edges = self.calculate_height_sat_bin_edges()
-
- # calculate horizontal ze gradient on input grid in dBZ/km
- ze_gradient = li2db(self.ds["ze"]).diff("along_track") / (
- self.ds["along_track"].diff("along_track").mean() / 1000
- )
-
- # fill nan values with zero
- ze_gradient = ze_gradient.fillna(0)
- ze_gradient = (
- ze_gradient.groupby_bins(
- "along_track",
- bins=along_track_sat_bin_edges,
- labels=self.ds["along_track_sat"].values,
- )
- .mean()
- .groupby_bins(
- "height",
- bins=height_sat_bin_edges,
- labels=self.ds["height_sat"].values,
- )
- .mean()
- )
- ze_gradient = ze_gradient.rename(
- {
- "along_track_bins": "along_track_sat",
- "height_bins": "height_sat",
- }
- )
-
- # calculate absolute value of ze gradient
- ze_gradient = np.abs(ze_gradient)
-
- vm_std_nubf = 0.15 * ze_gradient / 3
-
- return vm_std_nubf
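-
- # A quick numeric check of the relation above: a horizontal reflectivity
- # gradient of 6 dBZ/km corresponds to a non-uniform beam filling velocity
- # uncertainty of 0.15 * 6 / 3 = 0.3 m s-1.
- print(0.15 * 6.0 / 3)  # ~0.3 m s-1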
-
-[docs] def vm_uncertainty_equation(self, vm_std_broad, vm_std_nubf):
- """
- Calculates the total Doppler velocity uncertainty based on the
- broadening Doppler velocity uncertainty and the non-uniform beam
- filling Doppler velocity uncertainty.
-
- Based on Equation (4) in
-
- Parameters
- ----------
- vm_std_broad : float, np.array
- Doppler velocity uncertainty due to broadening [m s-1]
- vm_std_nubf : float, np.array
- Doppler velocity uncertainty due to non-uniform beam filling
- [m s-1]
- """
-
- vm_std = np.sqrt(vm_std_broad**2 + vm_std_nubf**2)
-
- return vm_std
-
-[docs] def calculate_ze_noise(self):
- """
- Adds noise to satellite radar reflectivity based on the pre-defined
- lookup table with noise values for different radar reflectivity bins.
- Empty bins are filled with noise according to the noise level.
- """
-
- # generate noise
- lower = -3
- upper = 3
- mu = 0
- sigma = 1
- n = np.prod(self.ds["ze_sat"].shape)
- noise = np.array(
- stats.truncnorm.rvs(
- a=(lower - mu) / sigma,
- b=(upper - mu) / sigma,
- loc=mu,
- scale=sigma,
- size=n,
- )
- ).reshape(self.ds["ze_sat"].shape)
-
- # interpolates discrete standard deviations
- f = interp1d(
- self.beam.spec.ze_bins,
- self.beam.spec.ze_std,
- kind="linear",
- fill_value="extrapolate", # type: ignore
- )
-
- # apply noise
- self.ds["ze_sat_noise"] = db2li(
- self.add_noise(
- x=li2db(self.ds["ze_sat"]),
- x_std=f(li2db(self.ds["ze_sat"])),
- noise=noise,
- )
- )
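-
- # A toy example of the noise model used above, with illustrative lookup
- # values: reflectivity-dependent standard deviations are interpolated from a
- # small table and scaled by truncated-normal random noise.
- import numpy as np
- from scipy import stats
- from scipy.interpolate import interp1d
-
- ze_bins = np.array([-40.0, -20.0, 0.0])  # [dBZ], assumed lookup table
- ze_std = np.array([2.0, 1.0, 0.5])       # [dB]
- f = interp1d(ze_bins, ze_std, kind="linear", fill_value="extrapolate")
- noise = stats.truncnorm.rvs(a=-3, b=3, loc=0, scale=1, size=4)
- ze_db = np.array([-35.0, -25.0, -10.0, -5.0])
- print(ze_db + f(ze_db) * noise)  # noisy reflectivity in dBZ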
-
-[docs] def calculate_vm_noise(self):
- """
- Adds noise to satellite Doppler velocity based on the pre-defined
- lookup table with noise values for different radar reflectivity bins.
-
- Note:
- The noise is added to the satellite Doppler velocity with the satellite
- motion error.
- """
-
- lower = -self.beam.spec.nyquist_velocity
- upper = self.beam.spec.nyquist_velocity
- mu = 0
- sigma = 1
- n = np.prod(self.ds["vm_sat_vel"].shape)
- noise = np.array(
- stats.truncnorm.rvs(
- a=(lower - mu) / sigma,
- b=(upper - mu) / sigma,
- loc=mu,
- scale=sigma,
- size=n,
- )
- ).reshape(self.ds["vm_sat_vel"].shape)
-
- # interpolates discrete standard deviations
- f = interp1d(
- self.beam.spec.vm_bins_broad,
- self.beam.spec.vm_std_broad,
- kind="linear",
- fill_value="extrapolate", # type: ignore
- )
-
- # calculate uncertainty due to broadening
- vm_std_broad = f(li2db(self.ds["ze_sat"]))
-
- # calculate uncertainty due to non-uniform beam filling
- vm_std_nubf = self.calculate_vm_std_nubf()
-
- # calculate total Doppler velocity uncertainty
- vm_std = self.vm_uncertainty_equation(
- vm_std_broad=vm_std_broad,
- vm_std_nubf=vm_std_nubf,
- )
-
- # add Doppler velocity error
- self.ds["vm_sat_noise"] = self.add_noise(
- x=self.ds["vm_sat_vel"], x_std=vm_std, noise=noise
- )
-
-[docs] def fold_vm(self):
- """
- Simulates Doppler velocity folding: velocities outside the Nyquist
- interval are wrapped back into [-v_nyquist, +v_nyquist], as a real
- radar would observe them.
- """
-
- # keys: nyquist velocity offset added for folding
- # values: velocity bin edges as multiple of the nyquist velocity
- folding_dct = {
- -2: [1, 3],
- -4: [3, 5],
- -6: [5, 7],
- -8: [7, 9],
- 2: [-3, -1],
- 4: [-5, -3],
- 6: [-7, -5],
- 8: [-9, -7],
- }
-
- # data array with folded velocity
- self.ds["vm_sat_folded"] = self.ds["vm_sat_noise"].copy()
-
- # folding flag
- self.ds["folding_flag"] = xr.zeros_like(self.ds["vm_sat_noise"])
-
- for offset, (v0, v1) in folding_dct.items():
- # convert factors to doppler velocity
- v0 = v0 * self.beam.spec.nyquist_velocity
- v1 = v1 * self.beam.spec.nyquist_velocity
- vm_offset = offset * self.beam.spec.nyquist_velocity
-
- # this is true if folding is applied
- in_interval = (self.ds["vm_sat_folded"] >= v0) & (
- self.ds["vm_sat_folded"] < v1
- )
-
- # assign folding factor to flag
- self.ds["folding_flag"] = xr.where(
- in_interval,
- 1,
- self.ds["folding_flag"],
- )
-
- # fold velocity within the given interval
- self.ds["vm_sat_folded"] = xr.where(
- in_interval,
- self.ds["vm_sat_folded"] + vm_offset,
- self.ds["vm_sat_folded"],
- )
-
- # ensure that doppler velocity is within the nyquist velocity
- assert (
- self.ds["vm_sat_folded"].min() >= -self.beam.spec.nyquist_velocity
- ), (
- f"Velocity values below the nyquist velocity: "
- f'{self.ds["vm_sat_folded"].min()}'
- )
-
- assert (
- self.ds["vm_sat_folded"].max() <= self.beam.spec.nyquist_velocity
- ), (
- f"Velocity values above the nyquist velocity: "
- f'{self.ds["vm_sat_folded"].max()}'
- )
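-
- # A quick numeric illustration of the folding table above, assuming a
- # Nyquist velocity of 5 m s-1: a velocity of -7 m s-1 falls into the
- # [-3, -1] * v_nyq interval and is therefore shifted by +2 * v_nyq,
- # ending up at +3 m s-1 inside the Nyquist interval.
- v_nyq = 5.0
- vm = -7.0
- print(vm + 2 * v_nyq)  # 3.0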
-
-[docs] def add_attributes(self):
- """
- Adds attributes to the variables of the dataset
- """
-
- # overwrite attributes of ze and vm inputs
- self.ds["ze"].attrs = dict(
- standard_name="radar_reflectivity_factor",
- long_name="Radar reflectivity factor of input",
- units="mm6 m-3",
- description="Radar reflectivity factor of input",
- )
-
- self.ds["vm"].attrs = dict(
- standard_name="mean_doppler_velocity",
- long_name="Mean Doppler velocity of input",
- units="m s-1",
- description="Mean Doppler velocity of input",
- )
-
- # add attributes to dimensions
- self.ds["along_track"].attrs = dict(
- standard_name="along_track",
- long_name="Along-track",
- units="m",
- description="Along-track distance",
- )
-
- self.ds["height"].attrs = dict(
- standard_name="height",
- long_name="height",
- units="m",
- description="Height of bin in meters above mean sea level",
- )
-
- self.ds["along_track_sat"].attrs = dict(
- standard_name="along_track",
- long_name="Along-track",
- units="m",
- description="Along-track distance at satellite resolution",
- )
-
- self.ds["height_sat"].attrs = dict(
- standard_name="height",
- long_name="height",
- units="m",
- description="Height of bin in meters above mean sea level at "
- "satellite resolution",
- )
-
- # add attributes to variables
- self.ds["nubf"].attrs = dict(
- standard_name="non_uniform_beam_filling",
- long_name="Non-uniform beam filling",
- units="dBZ",
- description="Non-uniform beam filling calculated as the standard "
- "deviation of radar reflectivity in linear units of the input "
- "data.",
- )
-
- self.ds["nubf_flag"].attrs = dict(
- standard_name="non_uniform_beam_filling_flag",
- long_name="Non-uniform beam filling flag",
- description="Non-uniform beam filling flag. 1 means non-uniform "
- "beam filling is higher than 1 dB, 0 means non-uniform beam "
- "filling is lower than 1 dB.",
- )
-
- self.ds["signal_fraction"].attrs = dict(
- standard_name="signal_fraction",
- long_name="Fraction of bins that contain signal",
- description="Fraction of bins that contain signal. 1 means all "
- "bins contain signal, 0 means no bins contain signal.",
- )
-
- self.ds["ms_flag"].attrs = dict(
- standard_name="multiple_scattering",
- long_name="Multiple scattering flag",
- description="Multiple scattering flag. 1 means multiple "
- "scattering occurs, 0 means no multiple scattering occurs. "
- "This flag only makes sense for airborne observations. "
- "Groundbased observations likely underestimate the occurrence of "
- "multiple scattering due to rain attenuation.",
- )
-
- self.ds["folding_flag"].attrs = dict(
- standard_name="folding_flag",
- long_name="Folding flag",
- description="Folding flag. 1 means velocity is folded, 0 means "
- "velocity is not folded.",
- )
-
- self.ds["ze_acon"].attrs = dict(
- standard_name="radar_reflectivity_factor",
- long_name="Convolved radar reflectivity factor",
- units="mm6 m-3",
- description="Convolved radar reflectivity factor",
- )
-
- self.ds["vm_acon"].attrs = dict(
- standard_name="mean_doppler_velocity",
- long_name="Convolved mean Doppler velocity",
- units="m s-1",
- description="Convolved mean Doppler velocity",
- )
-
- self.ds["vm_acon_err"].attrs = dict(
- standard_name="mean_doppler_velocity",
- long_name="Convolved mean Doppler velocity with satellite motion "
- "error",
- units="m s-1",
- description="Convolved mean Doppler velocity with satellite "
- "motion error",
- )
-
- self.ds["ze_aconint"].attrs = dict(
- standard_name="radar_reflectivity_factor",
- long_name="Convolved and integrated radar reflectivity factor",
- units="mm6 m-3",
- description="Convolved and integrated radar reflectivity factor",
- )
-
- self.ds["vm_aconint"].attrs = dict(
- standard_name="mean_doppler_velocity",
- long_name="Convolved and integrated mean Doppler velocity",
- units="m s-1",
- description="Convolved and integrated mean Doppler velocity",
- )
-
- self.ds["vm_aconint_err"].attrs = dict(
- standard_name="mean_doppler_velocity",
- long_name="Convolved and integrated mean Doppler velocity with "
- "satellite motion error",
- units="m s-1",
- description="Convolved and integrated mean Doppler velocity with "
- "satellite motion error",
- )
-
- self.ds["ze_sat"].attrs = dict(
- standard_name="radar_reflectivity_factor",
- long_name="Convolved and integrated radar reflectivity factor",
- units="mm6 m-3",
- description="Convolved and integrated radar reflectivity factor"
- "along height and along track",
- )
-
- self.ds["vm_sat"].attrs = dict(
- standard_name="mean_doppler_velocity",
- long_name="Convolved and integrated mean Doppler velocity",
- units="m s-1",
- description="Convolved and integrated mean Doppler velocity"
- "along height and along track",
- )
-
- self.ds["vm_sat_vel"].attrs = dict(
- standard_name="mean_doppler_velocity",
- long_name="Convolved and integrated mean Doppler velocity with "
- "satellite motion error",
- units="m s-1",
- description="Convolved and integrated mean Doppler velocity with "
- "satellite motion error along height and along track",
- )
-
- self.ds["vm_bias"].attrs = dict(
- standard_name="mean_doppler_velocity_bias",
- long_name="Doppler velocity bias",
- units="m s-1",
- description="Doppler velocity bias between the estimate with and "
- "without satellite motion error. Higher biases occur under higher "
- "non-uniform beam filling.",
- )
-
- self.ds["ze_sat_noise"].attrs = dict(
- standard_name="radar_reflectivity_factor",
- long_name="Convolved and integrated radar reflectivity factor "
- "with noise",
- units="mm6 m-3",
- description="Convolved and integrated radar reflectivity factor "
- "with noise",
- )
-
- self.ds["vm_sat_noise"].attrs = dict(
- standard_name="mean_doppler_velocity",
- long_name="Convolved and integrated mean Doppler velocity with "
- "noise and satellite motion error",
- units="m s-1",
- description="Convolved and integrated mean Doppler velocity with "
- "noise and satellite motion error",
- )
-
- self.ds["vm_sat_folded"].attrs = dict(
- standard_name="mean_doppler_velocity",
- long_name="Doppler velocity with noise, satellite motion error, "
- "and folding",
- units="m s-1",
- description="Doppler velocity with noise, satellite motion "
- "error, and folding",
- )
-
- # time encoding
- self.ds["time"].encoding = dict(
- units="seconds since 1970-01-01 00:00:00",
- calendar="gregorian",
- )
- self.ds["time"].attrs = dict(
- standard_name="time",
- long_name="Time",
- )
-
- # add variables about satellite
- self.ds["sat_ifov"] = xr.DataArray(
- self.beam.ifov,
- attrs=dict(
- standard_name="sat_ifov",
- long_name="Satellite instantaneous field of view",
- units="m",
- description="Satellite instantaneous field of view",
- ),
- )
-
- self.ds["sat_range_resolution"] = xr.DataArray(
- self.beam.spec.range_resolution,
- attrs=dict(
- standard_name="sat_range_resolution",
- long_name="Satellite range resolution",
- units="m",
- description="Satellite range resolution",
- ),
- )
-
- self.ds["sat_along_track_resolution"] = xr.DataArray(
- self.beam.spec.along_track_resolution,
- attrs=dict(
- standard_name="sat_along_track_resolution",
- long_name="Satellite along-track resolution",
- units="m",
- description="Satellite along-track resolution",
- ),
- )
-
- # global attributes
- self.ds.attrs["title"] = (
- f"{self.beam.spec.name} simulated from "
- f"suborbital observations with orbital-radar {__version__}"
- )
- self.ds.attrs["created"] = str(np.datetime64("now"))
- self.ds.attrs["description"] = (
- "Simulated spaceborne radar reflectivity and Doppler velocity "
- "from suborbital radar data. The forward simulation "
- "follows Kollias et al. (2014) and Lamer et al. (2020)"
- )
-
-[docs] def plot(self, **kwds):
- """
- Along-track plot of the simulated radar reflectivity and Doppler
- velocity.
- """
-
- fig = plot_along_track(ds=self.ds, **kwds)
-
- return fig
-
-[docs] def plot_histogram(self, **kwds):
- """
- Histogram plot of the simulated radar reflectivity and Doppler
- velocity.
- """
-
- fig = plot_histogram(ds=self.ds, **kwds)
-
- return fig
-
-[docs] def plot_scatter(self, **kwds):
- """
- Scatter plot between satellite data and suborbital data.
- """
-
- fig = plot_scatter(ds=self.ds, **kwds)
-
- return fig
-
-[docs] def transform(self, ds):
- """
- Runs the entire simulator.
-
- Parameters
- ----------
- ds : xarray.Dataset
- Data from suborbital radar interpolated to "along_track" [m] and
- "height" [m] coordinates. The dataset must contain the following
- variables:
- Radar reflectivity "ze" [mm6 m-3],
- Doppler velocity "vm" [m s-1].
- Both variables should have no nan values. Any nan's should be
- filled with zeros.
- """
-
- # add input dataset to class
- self.ds = ds
-
- # check input dataset for consistency
- print("Check input dataset")
- self.check_input_dataset()
-
- # prepare input dataset for computations
- print("Prepare input dataset")
- self.prepare_input_dataset()
-
- # compute weighting functions
- print("Compute weighting functions")
- self.beam.calculate_weighting_functions(
- range_coords=self.ds["height"],
- along_track_coords=self.ds["along_track"],
- )
-
- # detection limit
- print("Apply detection limit to input data")
- self.apply_detection_limit(var_ze="ze", var_other=["ze", "vm"])
-
- # transformations to spaceborne radar
- print("Convolve along track")
- self.convolve_along_track()
-
- print("Integrate along track")
- self.integrate_along_track()
-
- print("Convolve height")
- self.convolve_height()
-
- # detection limit
- print("Apply detection limit on satellite view")
- self.apply_detection_limit(
- var_ze="ze_sat", var_other=["ze_sat", "vm_sat", "vm_sat_vel"]
- )
-
- # noise
- print("Calculate Ze noise")
- self.calculate_ze_noise()
-
- print("Calculate Vm noise")
- self.calculate_vm_noise()
-
- # doppler velocity folding
- print("Fold Vm")
- self.fold_vm()
-
- # non-uniform beam filling
- print("Calculate non-uniform beam filling")
- self.calculate_nubf()
-
- # non-uniform beam filling flag
- print("Calculate non-uniform beam filling flag")
- self.calculate_nubf_flag()
-
- # doppler velocity bias
- print("Calculate Doppler velocity bias")
- self.calculate_vm_bias()
-
- # doppler velocity bias flag
- print("Calculate Doppler velocity bias flag")
- self.calculate_vm_bias_flag()
-
- # multiple scattering flag
- print("Calculate multiple scattering flag")
- self.calculate_ms_flag()
-
- # signal fraction
- print("Calculate signal fraction")
- self.calculate_signal_fraction()
-
- # set attributes
- print("Add attributes")
- self.add_attributes()
-
-"""
-This module contains the OrbitalRadar class that runs the simulator for
-suborbital radar data. It is a subclass of the Simulator class.
-
-Differences between ground-based and airborne radar geometry:
-- the along-track coordinate is derived from the mean wind for ground-based
-radar and from the mean flight velocity for airborne radar
-- no ground echo is added for airborne radar
-- the range grid of the airborne radar is assumed to be height above mean sea
-level, while the ground-based range grid is height above ground
-- no attenuation correction is applied for airborne radar
-- lat/lon coordinates are included as input for airborne radar
-"""
-
-import os
-
-import numpy as np
-import pandas as pd
-import xarray as xr
-
-from orbital_radar.helpers import db2li, li2db
-from orbital_radar.radarspec import RadarBeam
-from orbital_radar.readers.cloudnet import read_cloudnet
-from orbital_radar.readers.config import read_config
-from orbital_radar.readers.radar import Radar
-from orbital_radar.simulator import Simulator
-from orbital_radar.version import __version__
-from orbital_radar.writers.spaceview import write_spaceview
-
-
-[docs]class Suborbital(Simulator):
- """
- Run the simulator for suborbital radar data.
- """
-
- # list of all suborbital radar locations
- names = {
- "groundbased": [
- "bco",
- "jue",
- "nor",
- "mag",
- "min",
- "nya",
- "arm",
- "pamtra",
- ],
- "airborne": [
- "mp5",
- "rasta",
- ],
- }
-
- def __init__(
- self, geometry, name, config_file, suborbital_radar, input_radar_format
- ):
- """
- Initialize the simulator for suborbital radar data.
-
- Parameters
- ----------
- geometry : str
- Observation geometry of radar (groundbased or airborne).
- name : str
- Abbreviated name of the site (e.g. 'jue'), as listed in the class
- attribute `names` for the given geometry.
- config_file : str
- Path to the configuration file that contains the site-dependent
- parameters and directory paths.
- suborbital_radar : str
- Name of the suborbital radar (abbreviated).
- input_radar_format : str
- Format of the input radar data (e.g. cloudnet).
- """
-
- # make sure that geometry is valid
- if geometry not in self.names.keys():
- raise ValueError(
- f"Geometry {geometry} not implemented. Choose from "
- f"{list(self.names.keys())}"
- )
-
- # check if site is in list of implemented sites
- if name not in self.names[geometry]:
- raise ValueError(
- f"Site {name} not implemented. Choose from "
- f"{self.names[geometry]}"
- )
-
- # check if config file is provided and exists
- if config_file is None:
- raise ValueError("No configuration file provided")
-
- if not os.path.isfile(
- os.path.join(os.environ["ORBITAL_RADAR_CONFIG_PATH"], config_file)
- ):
- raise FileNotFoundError(
- f"Configuration file {config_file} not found"
- )
-
- # set class attributes
- self.geometry = geometry
- self.name = name
- self.suborbital_radar = suborbital_radar
- self.input_radar_format = input_radar_format
-
- # attributes that will be derived
- self.is_sea_level = False
-
- # read configuration file
- self.config = read_config(config_file)
-
- # check if output path exists
- assert os.path.exists(
- self.config["paths"][self.name]["output"]
- ), f"Output path {self.config['paths'][self.name]['output']} does not exist"
-
- self.path_out = self.config["paths"][self.name]["output"]
- self.frequency = self.config["suborbital_radar"][
- self.suborbital_radar
- ]["frequency"]
-
- # preparation of input radar data
- self.prepare = self.config["prepare"]["general"]
- self.prepare.update(self.config["prepare"][self.geometry])
-
- # overview of simulation settings
- self.summary
-
- # initialize simulator class with spaceborne radar settings
- super().__init__(
- sat_name=self.config["spaceborne_radar"]["sat_name"],
- file_earthcare=self.config["spaceborne_radar"]["file_earthcare"],
- nyquist_from_prf=self.config["spaceborne_radar"]["nyquist_from_prf"],
- ms_threshold=self.config["spaceborne_radar"]["ms_threshold"],
- ms_threshold_integral=self.config["spaceborne_radar"][
- "ms_threshold_integral"
- ],
- **self.config["spaceborne_radar"]["radar_specs"],
- )
-
- @property
- def summary(self):
- """
- Prints short summary of simulator settings.
- """
-
- print(f"Site: {self.name}")
- print("\n")
-
- print("Directory paths:")
- print(f"Input: {self.config['paths'][self.name]['radar']}")
- print(f"Output: {self.config['paths'][self.name]['output']}")
- print("\n")
-
- if self.geometry == "groundbased":
- print("Groundbased data is prepared with:")
- print(f"Mean wind: {self.prepare['mean_wind']} m/s")
-
- if self.geometry == "airborne":
- print("Airborne data is prepared with:")
- print(
- f"Mean flight velocity: "
- f"{self.prepare['mean_flight_velocity']} m/s"
- )
-
- print(f"Height min: {self.prepare['height_min']} m")
- print(f"Height max: {self.prepare['height_max']} m")
- print(f"Height res: {self.prepare['height_res']} m")
- print("\n")
-
-[docs] @staticmethod
- def prepare_dates(start_date, end_date):
- """
- Creates a date array from start to end date.
-
- Parameters
- ----------
- start_date : np.datetime64
- Start date.
- end_date : np.datetime64
- End date.
-
- Returns
- -------
- dates: pd.DatetimeIndex
- Date range from start to end date.
- """
-
- # check if start date is before end date
- if start_date > end_date:
- raise ValueError("Start date must be before end date")
-
- dates = pd.date_range(start_date, end_date)
-
- return dates
-
-[docs] def convert_frequency(self, ds):
- """
- Convert frequency from 35 to 94 GHz.
-
- The conversion is based on Kollias et al. (2019)
- (doi: https://doi.org/10.5194/amt-12-4949-2019)
-
- Parameters
- ----------
- ds : xarray.Dataset
- Data with "ze" variable in mm6/mm3. Ze was measured at 35 GHz.
-
- Returns
- -------
- ds : xarray.Dataset
- Data with converted "ze" variable. Ze is now transformed to 94 GHz.
- """
-
- # keep only reflectivities below 30 dBZ
- ds["ze"] = ds["ze"].where(ds["ze"] < db2li(30))
-
- a = -16.8251
- b = 8.4923
- ds["ze"] = db2li(
- li2db(ds["ze"]) - 10**a * (li2db(ds["ze"]) + 100) ** b
- )
-
- # set negative ze to zero
- ds["ze"] = ds["ze"].where(ds["ze"] > 0.0, 0.0)
-
- return ds
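-
- # A quick numeric check of the conversion above: for a 35 GHz reflectivity
- # of 0 dBZ, the 94 GHz estimate is 0 - 10**(-16.8251) * (0 + 100)**8.4923,
- # i.e. about -1.4 dBZ.
- a, b = -16.8251, 8.4923
- ze_35_db = 0.0
- print(ze_35_db - 10**a * (ze_35_db + 100) ** b)  # ~ -1.44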
-
-[docs] def correct_dielectric_constant(self, ds):
- r"""
- Apply correction for dielectric constant assumed in Ze calculation
- of suborbital radar to match the dielectric constant of the
- spaceborne radar.
-
- Correction equation with :math:`K_g` and :math:`K_s` as dielectric
- constants of the suborbital and spaceborne radar, respectively:
-
- .. math::
- Z_e = 10 \log_{10} \left( \frac{K_s}{K_g} \right) + Z_e
-
- Parameters
- ----------
- ds : xarray.Dataset
- Data with "ze" variable.
- """
-
- correction = (
- self.config["spaceborne_radar"]["k2"]
- / self.config["suborbital_radar"][self.suborbital_radar]["k2"]
- )
-
- ds["ze"] = db2li(li2db(ds["ze"]) + 10 * np.log10(correction))
-
- return ds
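-
- # A quick numeric check of the correction above, with illustrative
- # dielectric factors |K|^2 of 0.93 (spaceborne) and 0.88 (suborbital):
- # the reflectivity is raised by 10 * log10(0.93 / 0.88), i.e. about 0.24 dB.
- import numpy as np
- print(10 * np.log10(0.93 / 0.88))  # ~0.24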
-
-[docs] def add_vmze_attrs(self, ds):
- """
- Adds attributes to Doppler velocity and radar reflectivity variables.
-
- Parameters
- ----------
- ds : xarray.Dataset
- Data with "ze" and "vm" variables.
-
- Returns
- -------
- ds : xarray.Dataset
- Data with added attributes.
- """
-
- ds["ze"].attrs = dict(
- units="mm6 m-3",
- standard_name="radar_reflectivity",
- long_name="Radar reflectivity",
- description="Radar reflectivity",
- )
-
- ds["vm"].attrs = dict(
- units="m s-1",
- standard_name="Doppler_velocity",
- long_name="Doppler velocity",
- description="Doppler velocity",
- )
-
- return ds
-
-[docs] def check_is_sea_level(self, ds):
- """
- Check if input radar range/height grid is defined with respect to
- ground level or sea level. The input to the simulator should be wrt.
- sea level.
-
- Parameters
- ----------
- ds : xr.Dataset
- Input radar data
- """
-
- if "height" in list(ds.coords.keys()):
- self.is_sea_level = True
-
- else:
- self.is_sea_level = False
-
-[docs] def range_to_height(self, ds):
- """
- Convert range coordinate to height coordinate by adding the station
- height above mean sea level to the range coordinate.
-
- The altitude is pre-defined for each station in the configuration file.
-
- Parameters
- ----------
- ds : xarray.Dataset
- Data with "range" coordinate.
- """
-
- ds["height"] = ds["range"] + ds.alt.item()
-
- # swap range with height
- ds = ds.swap_dims({"range": "height"})
-
- # drop range coordinate
- ds = ds.reset_coords(drop=True)
-
- return ds
-
-[docs] def create_along_track(self, ds):
- """
- Creates along-track coordinates from time coordinates.
-
- Parameters
- ----------
- ds : xarray.Dataset
- Data with "time" and "height" coordinates.
-
- Returns
- -------
- ds : xarray.Dataset
- Data with "along_track" coordinate.
- """
-
- if self.geometry == "groundbased":
- print("Using mean wind for along-track coordinates")
- v = self.prepare["mean_wind"]
-
- elif self.geometry == "airborne" and "ac_speed" in list(ds):
- print("Using flight velocity for along-track coordinates")
- v = ds.ac_speed.values
-
- elif self.geometry == "airborne" and "ac_speed" not in list(ds):
- print("Using mean flight velocity for along-track coordinates")
- v = self.prepare["mean_flight_velocity"]
-
- else:
- raise ValueError(
- f"Geometry {self.geometry} not implemented. Choose from "
- f"{list(self.names.keys())}"
- )
-
- # calculate the along-track distance
- dt = ds.time.diff("time") / np.timedelta64(1, "s")
- dt = xr.align(ds.time, dt, join="outer")[1].fillna(0) # start is dt=0
- arr_along_track = np.cumsum(v * dt)
-
- da_along_track = xr.DataArray(
- arr_along_track, dims="time", coords=[ds.time], name="along_track"
- )
- da_along_track.attrs = dict(
- standard_name="along_track_distance",
- long_name="Along track distance",
- units="m",
- description="Distance along track of the suborbital radar",
- )
-
- # swap from time to along track
- ds = ds.assign_coords(along_track=da_along_track)
- ds = ds.swap_dims({"time": "along_track"})
-
- # add time as variable
- ds = ds.reset_coords()
-
- return ds
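-
- # A toy example of the along-track construction above: with a mean wind of
- # 10 m s-1 and time steps of 2 s, the cumulative sum of v * dt gives
- # along-track distances of 0, 20, 40, 60 m.
- import numpy as np
-
- v = 10.0
- dt = np.array([0.0, 2.0, 2.0, 2.0])  # first step is dt = 0 by construction
- print(np.cumsum(v * dt))  # [ 0. 20. 40. 60.]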
-
-[docs] def create_regular_height(self):
- """
- Creates regular height coordinate for suborbital radar.
-
- Returns
- -------
- xarray.DataArray
- Regular height coordinate for suborbital radar.
- """
-
- height_regular = np.arange(
- self.prepare["height_min"],
- self.prepare["height_max"],
- self.prepare["height_res"],
- )
-
- da_height_regular = xr.DataArray(
- height_regular, dims="height", coords=[height_regular]
- )
- da_height_regular.attrs = dict(
- units="m",
- standard_name="height",
- long_name="Height of radar bin above sea level",
- description="Height of radar bin above sea level",
- )
-
- return da_height_regular
-
-[docs] def create_regular_along_track(self, ds):
- """
- Creates regular along-track coordinate for suborbital radar.
-
- Parameters
- ----------
- ds : xarray.Dataset
- Data with "along_track" coordinate.
-
- Returns
- -------
- xarray.DataArray
- Regular along-track coordinate for suborbital radar.
- """
-
- along_track_res = np.round(
- ds.along_track.diff("along_track").median().item()
- )
- along_track_max = ds.along_track.max().item()
-
- along_track_regular = np.arange(
- 0,
- along_track_max,
- along_track_res,
- )
-
- da_along_track_regular = xr.DataArray(
- along_track_regular,
- dims="along_track",
- coords=[along_track_regular],
- )
- da_along_track_regular.attrs = ds.along_track.attrs
-
- return da_along_track_regular
-
-[docs] def interpolate_to_regular_grid(self, ds):
- """
- Interpolates radar data to regular grid in along-track and height.
-
- Parameters
- ----------
- ds : xarray.Dataset
- Data with "time" and "height" coordinates.
-
- Returns
- -------
- ds : xarray.Dataset
- Data with interpolated "along_track" and "height" coordinates.
- """
-
- da_height_regular = self.create_regular_height()
- da_along_track_regular = self.create_regular_along_track(ds=ds)
-
- # interpolation along-track and height
- # workaround for time: interpolate the data variables on the regular
- # grid, then assign the nearest original time stamp to each regular
- # along-track point
- da_time = ds["time"]
- ds = ds.interp(
- along_track=da_along_track_regular,
- height=da_height_regular,
- method="nearest",
- )
- # get nearest time for each regular along track grid point
- ds.coords["time"] = (
- ("along_track"),
- da_time.sel(along_track=ds.along_track, method="nearest").values,
- )
-
- # add attributes
- ds = self.add_vmze_attrs(ds)
-
- return ds
-
-[docs] def add_ground_echo(self, ds):
- """
- Calculates artificial ground echo inside ground-based radar range
- grid. The values are chosen such that the final ground echo after
- the along-range convolution is equal to the ground echo of the
- satellite. The pulse length used here is not the same as the pulse
- length of the satellite.
-
- Parameters
- ----------
- ds : xarray.Dataset
- Data with "ze" and "vm" variables.
-
- Returns
- -------
- ds : xarray.Dataset
- Data with added ground echo.
- """
-
- assert len(np.unique(np.diff(ds.height))) == 1, (
- "Height grid is not equidistant. "
- "Range weighting function cannot be calculated."
- )
-
- # grid with size of two pulse lengths centered around zero
- height_bins = np.arange(
- -self.prepare["ground_echo_pulse_length"],
- self.prepare["ground_echo_pulse_length"]
- + self.prepare["height_res"],
- self.prepare["height_res"],
- )
-
- # calculate range weighting function
- weights = RadarBeam.normalized_range_weighting_function_default(
- pulse_length=self.prepare["ground_echo_pulse_length"],
- range_bins=height_bins,
- )
-
- ground_echo = weights * db2li(self.prepare["ground_echo_ze_max"])
-
- # add ground echo to dataset shifted by one height bin to place the
- # maximum below the surface
- # get closest height bin to ground
- idx = (np.abs(ds.height - ds.alt.item())).argmin()
- base = ds.height[idx].item()
-
- # insert half of the calculated ground echo and shift maximum below the
- # surface
- ground_echo = ground_echo[int(len(ground_echo) / 2) :]
- height_bins = (
- base
- + height_bins[int(len(height_bins) / 2) :]
- - self.prepare["height_res"]
- )
-
- # add ground echo to dataset (first fill nan values with zero in this
- # height interval)
- ds["ze"].loc[{"height": height_bins}] = (
- ds["ze"].loc[{"height": height_bins}].fillna(0)
- )
- ds["ze"].loc[{"height": height_bins}] += ground_echo
-
- return ds
-
-[docs] def add_groundbased_variables(self):
- """
- Add variables specific to groundbased simulator to the dataset, i.e.,
- the mean horizontal wind.
- """
-
- self.ds["mean_wind"] = xr.DataArray(
- self.prepare["mean_wind"],
- attrs=dict(
- standard_name="v_hor",
- long_name="Mean horizontal wind",
- units="m s-1",
- description="Mean horizontal wind",
- ),
- )
-
-[docs] def add_airborne_variables(self):
- """
- Add variables specific to airborne simulator to the dataset, i.e.,
- the mean flight velocity.
- """
-
- self.ds["mean_flight_velocity"] = xr.DataArray(
- self.prepare["mean_flight_velocity"],
- attrs=dict(
- standard_name="v_hor",
- long_name="Mean flight velocity",
- units="m s-1",
- description="Mean flight velocity",
- ),
- )
-
-[docs] def to_netcdf(self, date):
- """
- Writes dataset to netcdf file. Note that not all variables are stored.
-
- Parameters
- ----------
- date : np.datetime64
- Date of the simulation. Used to create the filename.
- """
-
- output_variables = [
- "sat_ifov",
- "sat_range_resolution",
- "sat_along_track_resolution",
- "ze",
- "vm",
- "ze_sat",
- "vm_sat",
- "vm_sat_vel",
- "ze_sat_noise",
- "vm_sat_noise",
- "vm_sat_folded",
- "nubf_flag",
- "ms_flag",
- "folding_flag",
- ]
-
- if self.geometry == "groundbased":
- output_variables += ["mean_wind"]
-
- if self.geometry == "airborne":
- output_variables += ["mean_flight_velocity"]
-
- # name of output nc file
- filename = (
- "_".join(
- [
- "ora",
- __version__,
- self.config["spaceborne_radar"]["sat_name"],
- "l1",
- self.geometry,
- self.name,
- self.suborbital_radar,
- pd.Timestamp(date).strftime("%Y%m%d") + "T000000",
- pd.Timestamp(date).strftime("%Y%m%d") + "T235959",
- ]
- )
- + ".nc"
- )
-
- filename = os.path.join(
- self.path_out,
- filename,
- )
-
- write_spaceview(ds=self.ds[output_variables], filename=filename)
-
-[docs] def add_attenuation(self, ds, da_gas_atten):
- """
- Add attenuation to dataset.
-
- Parameters
- ----------
- ds : xarray.Dataset
- Data with "ze" variable. Unit: mm6 m-3
- da_gas_atten : xarray.DataArray
- Interpolated gas attenuation data on the same grid as ds.
- Unit: dBZ.
-
- Returns
- -------
- ds : xarray.Dataset
- Data with added attenuation.
- """
-
- # add attenuation in dB and convert back to linear units
- ds["ze"] = db2li(li2db(ds["ze"]) + da_gas_atten)
-
- return ds
-
-[docs] def attenuation_correction(self, ds, ds_cloudnet):
- """
- Gas attenuation correction based on Cloudnet.
-
- Cloudnet contains gas attenuation (gas_atten) as a function of 137
- ERA5 levels. The height of each level varies with time. Therefore,
- the height is first interpolated onto the height grid of the radar
- data. Then, the gas attenuation is interpolated onto the time and
- height grid of the radar data. Finally, the gas attenuation is added
- to the radar reflectivity.
-
- There are major differences between the cloudnet_ecmwf and
- cloudnet_categorize attenuation products:
- - ecmwf height is time-dependent, categorize height is not
- - ecmwf height is wrt ground, categorize height is wrt mean sea level
- - ecmwf variable is named "radar_gas_atten", categorize variable is
- named "gas_atten"
- - ecmwf is calculated for both frequencies, categorize only for 94 GHz
-
- Parameters
- ----------
- ds : xarray.Dataset
- Data with "ze" variable. Unit: mm6 m-3
- ds_cloudnet : xarray.Dataset
- Cloudnet data with "gas_atten" variable. Unit: dBZ
-
- Returns
- -------
- ds : xarray.Dataset
- Data with added attenuation.
- """
-
- # select 94 GHz frequency
- if "frequency" in list(ds_cloudnet.dims):
- ds_cloudnet = ds_cloudnet.sel(frequency=94, method="nearest")
-
- if self.prepare["attenuation_correction_input"] == "cloudnet_ecmwf":
- lst_da_gas_atten = []
- for time in ds_cloudnet.time:
-
- # interpolate gas attenuation to radar height grid
- ds_cloudnet_t = ds_cloudnet.sel(time=time).swap_dims(
- {"level": "height"}
- )
-
- da_cloudnet_gas_atten_t = ds_cloudnet_t.gas_atten.interp(
- height=ds.height, method="linear"
- )
-
- lst_da_gas_atten.append(da_cloudnet_gas_atten_t)
-
- # merge all time steps
- da_gas_atten = xr.concat(lst_da_gas_atten, dim="time")
-
- # drop levels
- da_gas_atten = da_gas_atten.reset_coords(drop=True)
-
- elif (
- self.prepare["attenuation_correction_input"]
- == "cloudnet_categorize"
- ):
- # rename to match ecmwf variable
- ds_cloudnet = ds_cloudnet.rename({"radar_gas_atten": "gas_atten"})
-
- # interpolate to radar height grid
- da_gas_atten = ds_cloudnet.gas_atten.interp(
- height=ds.height, method="linear"
- )
-
- else:
- raise ValueError(
- f"Attenuation correction input "
- f"{self.prepare['attenuation_correction_input']} not "
- f"implemented"
- )
-
- # interpolate to radar time grid and extrapolate if needed
- da_gas_atten = da_gas_atten.interp(
- time=ds.time, method="linear", kwargs={"fill_value": "extrapolate"}
- )
-
- # ensure that every time step contains attenuation in the range column
- has_atten = ~da_gas_atten.isnull().all("height")
- assert (
- has_atten.all()
- ), f"Attenuation missing for times: {da_gas_atten.time[~has_atten]}"
-
- # fill nan values with zero
- da_gas_atten = da_gas_atten.fillna(0)
-
- # apply attenuation correction
- ds = self.add_attenuation(ds=ds, da_gas_atten=da_gas_atten)
-
- return ds
-
-[docs] def run_date(self, date, write_output=True):
- """
- Runs simulation for a single day.
-
- Parameters
- ----------
- date : np.datetime64
- Date to simulate.
- write_output : bool
- If True, write output to netcdf file.
- """
-
- # read radar data
- if self.input_radar_format == "cloudnet":
- radar_path = self.config["paths"][self.name]["cloudnet"]
- else:
- radar_path = self.config["paths"][self.name]["radar"]
-
- rad = Radar(
- date=date,
- site_name=self.name,
- path=radar_path,
- input_radar_format=self.input_radar_format,
- )
-
- # skip if radar data does not exist
- if rad.ds_rad is None:
- print(f"{date}: No radar data found")
- return
-
- # read cloudnet data
- if self.geometry == "groundbased":
- ds_cloudnet = read_cloudnet(
- attenuation_correction_input=self.prepare[
- "attenuation_correction_input"
- ],
- date=date,
- site_name=self.name,
- path=self.config["paths"][self.name]["cloudnet"],
- )
-
- if ds_cloudnet is None:
- self.prepare["attenuation_correction"] = False
-
- # frequency conversion
- if self.frequency == 35:
- rad.ds_rad = self.convert_frequency(rad.ds_rad)
-
- # correct dielectric constant
- if self.frequency == 94:
- rad.ds_rad = self.correct_dielectric_constant(rad.ds_rad)
-
- # range to height
- self.check_is_sea_level(rad.ds_rad)
-
- if (self.geometry == "groundbased") and not self.is_sea_level:
- print("Converting radar range grid to height above mean sea level")
- rad.ds_rad = self.range_to_height(rad.ds_rad)
- else:
- print(
- "Assume that input radar grid is defined as height above mean "
- "sea level"
- )
-
- # create along track dimension
- rad.ds_rad = self.create_along_track(ds=rad.ds_rad)
-
- # interpolate to regular grid
- ds = self.interpolate_to_regular_grid(rad.ds_rad)
-
- if self.geometry == "groundbased":
- # attenuation correction
- # no attenuation correction with 35 GHz radar reflectivity
- if self.frequency == 35:
- self.prepare["attenuation_correction"] = False
-
- if self.prepare["attenuation_correction"]:
- ds = self.attenuation_correction(ds, ds_cloudnet)
-
- # add ground echo
- ds = self.add_ground_echo(ds)
-
- # run simulator
- self.transform(ds)
-
- # add horizontal wind variable
- if self.geometry == "groundbased":
- self.add_groundbased_variables()
-
- if self.geometry == "airborne":
- self.add_airborne_variables()
-
- # write output to file
- if write_output:
- self.to_netcdf(date=date)
-
-[docs] def run(self, start_date, end_date, write_output=True):
- """
- Runs simulation for all days in the time frame.
-
- Parameters
- ----------
- start_date : np.datetime64
- Start date.
- end_date : np.datetime64
- End date (inclusive).
- write_output : bool
- If True, write daily output to a NetCDF file.
- """
-
- dates = self.prepare_dates(start_date, end_date)
-
- for i, date in enumerate(dates):
- print(f"Processing {date} ({i+1}/{len(dates)})")
-
- self.run_date(date, write_output=write_output)
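-
- # A minimal usage sketch of the class above, assuming the environment
- # variable ORBITAL_RADAR_CONFIG_PATH points to a directory with a
- # configuration file; the file name, the radar name 'joyrad94', and the
- # dates are hypothetical and only illustrate the call signature.
- sub = Suborbital(
- geometry="groundbased",
- name="jue",
- config_file="orbital_radar_config.yaml",
- suborbital_radar="joyrad94",
- input_radar_format="cloudnet",
- )
- sub.run(start_date="2021-01-01", end_date="2021-01-03")  # writes daily NetCDF files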
-
-"""
-This module writes the output of the orbital radar simulator to a NetCDF
-file.
-"""
-
-import os
-
-import xarray as xr
-
-
-[docs]def write_spaceview(ds, filename):
- """
- Writes dataset to NetCDF file at the user-specified directory.
-
- Parameters
- ----------
- ds: xarray.Dataset
- Dataset containing the output of the orbital radar simulator
- filename: str
- Name of the output file
- """
-
- ds.to_netcdf(filename, mode="w")
-
- print(f"Written file: {filename}")
-