
Commit

Merge pull request #8 from Daafip/dev
Adding snow model
Daafip authored Apr 8, 2024
2 parents 19fec73 + 720700b commit 7ddb7df
Showing 3 changed files with 133 additions and 104 deletions.
4 changes: 3 additions & 1 deletion README.md
@@ -53,4 +53,6 @@ Be aware of the non-intuitive [BMI](https://github.com/eWaterCycle/grpc4bmi) imp
## v1.3.1
- Fix bug in time indexing
## v1.3.2
- typo in update updating_dict_var_obj: was getting values wrong
- typo in update updating_dict_var_obj: was getting values wrong
## v1.4.0
- adding snow reservoir (see the sketch below)
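
The snow reservoir added in this release follows a degree-day scheme (a threshold temperature plus a melt factor). A minimal standalone sketch of the idea, with a hypothetical helper name and illustrative default values, not the package API:

def snow_step(Sp, P_dt, Tmean, Tt=-0.5, FM=3.0, dt=1.0):
    """Degree-day snow bucket: returns the updated snowpack and the rain reaching the ground."""
    if Tmean < Tt:                              # below threshold: precipitation falls as snow
        return Sp + P_dt, 0.0
    melt = min(Sp / dt, FM * (Tmean - Tt))      # melt limited by what the pack holds
    return Sp - melt, P_dt + melt               # melt is routed together with the rainfall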
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -7,7 +7,7 @@ name = "HBV"
description = "Dev version for a HBV hydrological model using BMI for eWaterCycle."
readme = "README.md"
license = "Apache-2.0"
version = "1.3.2"
version = "1.4.0"
authors = [
{ name = "David Haasnoot", email = "[email protected]" },
]
231 changes: 129 additions & 102 deletions src/HBV/HBV_bmi.py
@@ -4,25 +4,27 @@
import numpy as np
import warnings


DICT_VAR_UNITS = {"Imax":"mm",
"Ce": "-",
"Sumax": "mm",
"Beta": "-",
"Pmax": "mm",
"Tlag": "d",
"Kf": "-",
"Ks": "-",
"Si": "mm",
"Su": "mm",
"Sf": "mm",
"Ss": "mm",
"Ei_dt": "mm/d",
"Ea_dt": "mm/d",
"Qs_dt": "mm/d",
"Qf_dt": "mm/d",
"Q_tot_dt": "mm/d",
"Q": "mm/d"}
DICT_VAR_UNITS = {"Imax": "mm",
"Ce": "-",
"Sumax": "mm",
"Beta": "-",
"Pmax": "mm",
"Tlag": "d",
"Kf": "-",
"Ks": "-",
"FM": "mm/deg/d",
"Si": "mm",
"Su": "mm",
"Sf": "mm",
"Ss": "mm",
"Sp": "mm",
"M_dt": "mm/d",
"Ei_dt": "mm/d",
"Ea_dt": "mm/d",
"Qs_dt": "mm/d",
"Qf_dt": "mm/d",
"Q_tot_dt": "mm/d",
"Q": "mm/d"}


class HBV(Bmi):
@@ -44,18 +46,19 @@ def initialize(self, config_file: str) -> None:
# store forcing & obs
self.P = utils.load_var(self.config["precipitation_file"], "pr")

# add Tas, Tmin and Tmax support for the snow component?
self.EP = utils.load_var(self.config["potential_evaporation_file"], "pev")

self.Tmean = utils.load_var(self.config["mean_temperature_file"], "tasmean")

# set up times
self.Ts = self.P['time'].astype("datetime64[s]")
self.end_timestep = len(self.Ts.values)
self.time = self.P['time'].astype("datetime64[s]")
self.end_timestep = len(self.time.values)
self.current_timestep = 0

# time step size, converted from seconds to days (to be able to do unit conversions)
self.dt = (
self.Ts.values[1] - self.Ts.values[0]
) / np.timedelta64(1, "s") / 24 / 3600
self.time.values[1] - self.time.values[0]
) / np.timedelta64(1, "s") / 24 / 3600

# define parameters
self.set_pars(np.array(self.config['parameters'].split(','), dtype=np.float64))
@@ -67,77 +70,95 @@ def initialize(self, config_file: str) -> None:
s_in = np.array(self.config['initial_storage'].split(','), dtype=np.float64)
self.set_storage(s_in)

# set other flows for initial step
self.Ei_dt = 0 # interception evaporation
self.Ea_dt = 0 # actual evaporation
self.Qs_dt = 0 # slow flow
self.Qf_dt = 0 # fast flow
self.Q_tot_dt = 0 # total flow
self.Q = 0 # Final model prediction
# set other flows for initial step
self.M_dt = 0 # snow melt
self.Ei_dt = 0 # interception evaporation
self.Ea_dt = 0 # actual evaporation
self.Qs_dt = 0 # slow flow
self.Qf_dt = 0 # fast flow
self.Q_tot_dt = 0 # total flow
self.Q = 0 # Final model prediction

# stores corresponding objects for variables
# other constant for snow:
self.Tt = -0.5 # threshold temperature, set for now; can range between -1 and 1

def set_pars(self, par) -> None:
self.I_max = par[0] # maximum interception
self.Ce = par[1] # Ea = Su / (sumax * Ce) * Ep
self.Su_max = par[2] # ''
self.beta = par[3] # Cr = (su/sumax)**beta
self.P_max = par[4] # Qus = Pmax * (Su/Sumax)
self.T_lag = self.set_tlag(par[5]) # used in triangular transfer function
self.Kf = par[6] # Qf=kf*sf
self.Ks = par[7] # Qs=Ks*
self.I_max = par[0] # maximum interception
self.Ce = par[1] # Ea = Su / (sumax * Ce) * Ep
self.Su_max = par[2] # ''
self.beta = par[3] # Cr = (su/sumax)**beta
self.P_max = par[4] # Qus = Pmax * (Su/Sumax)
self.T_lag = self.set_tlag(par[5]) # used in triangular transfer function
self.Kf = par[6] # Qf=kf*sf
self.Ks = par[7] # Qs=Ks*
self.FM = par[8] # degree-day factor: melt = FM * (Tmean - Tt)

def set_storage(self, stor) -> None:
self.Si = stor[0] # Interception storage
self.Su = stor[1] # Unsaturated Rootzone Storage
self.Sf = stor[2] # Fastflow storage
self.Ss = stor[3] # Groundwater storage
self.Si = stor[0] # Interception storage
self.Su = stor[1] # Unsaturated Rootzone Storage
self.Sf = stor[2] # Fastflow storage
self.Ss = stor[3] # Groundwater storage
self.Sp = stor[4] # SnowPack storage
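# Hedged usage sketch (illustrative values, not part of this commit): the
# parameter vector now carries 9 entries and the storage vector 5, e.g.
#   pars   = [Imax, Ce, Sumax, beta, Pmax, Tlag, Kf,  Ks,   FM ]
#          = [2.0,  0.5, 100.0, 2.0,  0.5,  5.0,  0.1, 0.01, 3.0]
#   stores = [Si, Su, Sf, Ss, Sp] = [0.0, 50.0, 0.0, 5.0, 0.0]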

def update(self) -> None:
""" Updates model one timestep
Old documentation:
Function to run the update part of one timestep of the HBV model
par: array/list containing 8 parameters: Imax, Ce, Sumax, beta, Pmax, T_lag, Kf, Ks (floats)
s_in: array/list containing 4 storage terms which are input to the timestep: Si, Su, Sf, Ss (floats)
storage_terms: list of arrays which store: Si, Su, Sf, Ss, Ei_dt, Ea_dt, Qs_dt_lst, Qf_dt_lst, Q_tot_dt
par: array/list containing 9 parameters: Imax, Ce, Sumax, beta, Pmax, T_lag, Kf, Ks, FM (floats)
s_in: array/list containing 5 storage terms which are input to the timestep: Si, Su, Sf, Ss, Sp (floats)
storage_terms: list of arrays which store: Si, Su, Sf, Ss, Sp, Ei_dt, Ea_dt, Qs_dt_lst, Qf_dt_lst, Q_tot_dt
step_n - nth step which the forward model takes: used to determine which precipitation & evaporation to use
"""
if self.current_timestep < self.end_timestep:
self.P_dt = self.P.isel(time=self.current_timestep).to_numpy() * self.dt
self.P_dt = self.P.isel(time=self.current_timestep).to_numpy() * self.dt
self.Ep_dt = self.EP.isel(time=self.current_timestep).to_numpy() * self.dt
self.Tmean_i = self.Tmean.isel(time=self.current_timestep).to_numpy() * self.dt

# split P into rain and snow:
if self.Tmean_i < self.Tt:
self.Pr = 0 # if snowing, no rainfall
self.Ps = self.P_dt # all precip goes into snow
self.M_dt = 0 # too cold to melt
self.Sp += self.Ps
else:
self.Pr = self.P_dt # if not snowing, all rainfall
self.Ps = 0 # No snow
self.M_dt = min(self.Sp / self.dt, self.FM * (self.Tmean_i - self.Tt)) # melt factor * diff in temp
self.Sp -= self.M_dt # remove melt from snowpack content
self.Pr += self.M_dt # add it to `rainfall`: snow melt can also be intercepted
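# Illustrative numbers only (assumed, not from this commit): with FM = 3 mm/deg/d,
# Tmean_i = 2 degC and Tt = -0.5 degC, the potential melt is 3 * 2.5 = 7.5 mm/d,
# capped at Sp / dt whenever the snowpack holds less than that.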

# Interception Reservoir
if self.P_dt > 0:
if self.Pr > 0:
# if there is rain, no evap
self.Si = self.Si + self.P_dt # increase the storage
self.Si = self.Si + self.Pr # increase the storage
self.Pe_dt = max((self.Si - self.I_max) / self.dt, 0)
self.Si = self.Si - self.Pe_dt
self.Ei_dt = 0 # if rainfall, evaporation = 0 as too moist
self.Si = self.Si - self.Pe_dt
self.Ei_dt = 0 # if rainfall, evaporation = 0 as too moist
else:
# Evaporation only when there is no rainfall
self.Pe_dt = 0 # nothing flows in so must be 0
self.Ei_dt = min(self.Ep_dt, self.Si / self.dt) # evaporation limited by storage
self.Si = self.Si - self.Ei_dt
self.Pe_dt = 0 # nothing flows in so must be 0
self.Ei_dt = min(self.Ep_dt, self.Si / self.dt) # evaporation limited by storage
self.Si = self.Si - self.Ei_dt

# split flow into Unsaturated Reservoir and Fast flow
if self.Pe_dt > 0:
cr = (self.Su / self.Su_max)**self.beta
Qiu_dt = (1 - cr ) * self.Pe_dt # flux from Ir to Ur
self.Su = self.Su + Qiu_dt
Quf_dt = cr * self.Pe_dt # flux from Su to Sf
cr = (self.Su / self.Su_max) ** self.beta
Qiu_dt = (1 - cr) * self.Pe_dt # flux from Ir to Ur
self.Su = self.Su + Qiu_dt
Quf_dt = cr * self.Pe_dt # flux from Su to Sf
else:
Quf_dt = 0

# Transpiration
self.Ep_dt = max(0, self.Ep_dt - self.Ei_dt) # Transpiration
self.Ep_dt = max(0, self.Ep_dt - self.Ei_dt) # Transpiration
self.Ea_dt = self.Ep_dt * (self.Su / (self.Su_max * self.Ce))
self.Ea_dt = min(self.Su, self.Ea_dt) # limited by water in soil
self.Su = self.Su - self.Ea_dt
self.Ea_dt = min(self.Su, self.Ea_dt) # limited by water in soil
self.Su = self.Su - self.Ea_dt

# Percolation
Qus_dt = self.P_max * (self.Su / self.Su_max) * self.dt # Flux from Su to Ss
self.Su = self.Su - Qus_dt
Qus_dt = self.P_max * (self.Su / self.Su_max) * self.dt # Flux from Su to Ss
self.Su = self.Su - Qus_dt

# Fast Reservoir
self.Sf = self.Sf + Quf_dt
@@ -161,53 +182,57 @@ def update(self) -> None:
def updating_dict_var_obj(self) -> None:
"""Function which makes getting the objects more readable- but adds more boiler plate.."""
self.dict_var_obj = {
"Imax": self.I_max,
"Ce": self.Ce,
"Sumax": self.Su_max,
"Beta": self.beta,
"Pmax": self.P_max,
"Tlag": self.T_lag,
"Kf": self.Kf,
"Ks": self.Ks,
"Si": self.Si,
"Su": self.Su,
"Sf": self.Sf,
"Ss": self.Ss,
"Ei_dt": self.Ei_dt,
"Ea_dt": self.Ea_dt,
"Qs_dt": self.Qs_dt,
"Qf_dt": self.Qf_dt,
"Q_tot_dt": self.Q_tot_dt,
"Q": self.Q,
}
"Imax": self.I_max,
"Ce": self.Ce,
"Sumax": self.Su_max,
"Beta": self.beta,
"Pmax": self.P_max,
"Tlag": self.T_lag,
"Kf": self.Kf,
"Ks": self.Ks,
"FM": self.FM,
"Si": self.Si,
"Su": self.Su,
"Sf": self.Sf,
"Ss": self.Ss,
"Sp": self.Sp,
"M_dt": self.M_dt,
"Ei_dt": self.Ei_dt,
"Ea_dt": self.Ea_dt,
"Qs_dt": self.Qs_dt,
"Qf_dt": self.Qf_dt,
"Q_tot_dt": self.Q_tot_dt,
"Q": self.Q,
}

def updating_obj_from_dict_var(self) -> None:
"""Function which inverts the dictionary above & sets objects correctly"""
param_names = ["Imax", "Ce", "Sumax", "Beta", "Pmax", "Tlag", "Kf", "Ks"]
stor_names = ["Si", "Su", "Sf", "Ss"]
param_names = ["Imax", "Ce", "Sumax", "Beta", "Pmax", "Tlag", "Kf", "Ks", "FM"]
stor_names = ["Si", "Su", "Sf", "Ss", "SP"]
self.set_pars([self.dict_var_obj[par] for par in param_names])
self.set_storage([self.dict_var_obj[stor] for stor in stor_names])

def weight_function(self):
"""Generates weights for convolution"""
"""Generates weights for convolution using generates a weibull weight function"""

n_max = int(np.ceil(self.T_lag))
if n_max == 1:
weights = float(1)
else:
weights = np.zeros(n_max)
th = self.T_lag/2
th = self.T_lag / 2
nh = int(np.floor(th))
for i in range(0, nh):
weights[i] = (float(i+1)-0.5)/th
weights[i] = (float(i + 1) - 0.5) / th
i = nh

weights[i] = (1 + (float(i+1) - 1) / th) * (th - int(np.floor(th)))/2 + (1 + (self.T_lag - float(i + 1)) / th) * (int(np.floor(th)) + 1 - th) / 2
for i in range(nh+1, int(np.floor(self.T_lag))):
weights[i] = (1 + (float(i + 1) - 1) / th) * (th - int(np.floor(th))) / 2 + (
1 + (self.T_lag - float(i + 1)) / th) * (int(np.floor(th)) + 1 - th) / 2
for i in range(nh + 1, int(np.floor(self.T_lag))):
weights[i] = (self.T_lag - float(i + 1) + 0.5) / th

if self.T_lag > int(np.floor(self.T_lag)):
weights[int(np.floor(self.T_lag))] = (self.T_lag - int(np.floor(self.T_lag)))**2 / (2*th)
weights[int(np.floor(self.T_lag))] = (self.T_lag - int(np.floor(self.T_lag))) ** 2 / (2 * th)

weights = weights / sum(weights)
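# Worked example (illustrative, not part of this commit): for T_lag = 3 the raw
# weights come out as [1/3, 5/6, 1/3]; after normalisation this is the symmetric
# triangular kernel [2/9, 5/9, 2/9], i.e. roughly [0.22, 0.56, 0.22].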

@@ -230,7 +255,7 @@ def add_time_lag(self) -> None:
self.memory_vector_lag[-1] = 0

def set_empty_memory_vector_lag(self):
self.weights = self.weight_function() # generates weights using a weibull weight function
self.weights = self.weight_function()
return np.zeros(self.T_lag)

def get_component_name(self) -> str:
@@ -250,7 +275,8 @@ def get_value(self, var_name: str, dest: np.ndarray) -> np.ndarray:
if mem_index < len(self.memory_vector_lag):
dest[:] = self.memory_vector_lag[mem_index]
else:
raise IndexError(f'{mem_index} is out of range for memory vector size {len(self.memory_vector_lag)}')
raise IndexError(
f'{mem_index} is out of range for memory vector size {len(self.memory_vector_lag)}')

return dest
# otherwise return the variable from the dictionary
@@ -300,7 +326,8 @@ def set_value(self, var_name: str, src: np.ndarray) -> None:
if mem_index < len(self.memory_vector_lag):
self.memory_vector_lag[mem_index] = src[0]
else:
raise IndexError(f'{mem_index} is out of range for memory vector size {len(self.memory_vector_lag)}')
raise IndexError(
f'{mem_index} is out of range for memory vector size {len(self.memory_vector_lag)}')

# all other values can be set here
elif var_name in self.dict_var_obj:
@@ -319,25 +346,25 @@ def get_output_var_names(self) -> Tuple[str]:
# The BMI has to have some time-related functionality:
def get_start_time(self) -> float:
"""Return end time in seconds since 1 january 1970."""
return get_unixtime(self.Ts.isel(time=0).values) # type: ignore
return get_unixtime(self.time.isel(time=0).values) # type: ignore

def get_end_time(self) -> float:
"""Return end time in seconds since 1 january 1970."""
return get_unixtime(self.Ts.isel(time=-1).values) # type: ignore
return get_unixtime(self.time.isel(time=-1).values) # type: ignore

def get_current_time(self) -> float:
"""Return current time in seconds since 1 january 1970."""
# we get the timestep from the data, but the stopping condition requires it to go one beyond.
return get_unixtime(self.Ts.isel(time=self.current_timestep).values) # type: ignore
return get_unixtime(self.time.isel(time=self.current_timestep).values) # type: ignore

def set_tlag(self, T_lag_in) -> int:
"""Ensures T_lag is an integer of at minimum 1"""
T_lag = max(1, int(round(T_lag_in, 0)))
return T_lag

def get_time_step(self) -> float:
if len(self.Ts) > 1:
return float((self.Ts.values[1] - self.Ts.values[0]) / np.timedelta64(1, "s"))
if len(self.time) > 1:
return float((self.time.values[1] - self.time.values[0]) / np.timedelta64(1, "s"))
else:
message = "No time series defined"
warnings.warn(message=message, category=ImportWarning)
@@ -348,7 +375,7 @@ def get_time_units(self) -> str:

# TODO implement setting different timestep?
def get_value_at_indices(
self, name: str, dest: np.ndarray, inds: np.ndarray) -> np.ndarray:
self, name: str, dest: np.ndarray, inds: np.ndarray) -> np.ndarray:
raise NotImplementedError()

# TODO implement
@@ -444,10 +471,10 @@ def get_grid_face_nodes(self, grid: int, face_nodes: np.ndarray) -> np.ndarray:
raise NotImplementedError()

def get_grid_nodes_per_face(
self, grid: int, nodes_per_face: np.ndarray) -> np.ndarray:
self, grid: int, nodes_per_face: np.ndarray) -> np.ndarray:
raise NotImplementedError()


def get_unixtime(Ts: np.datetime64) -> int:
"""Get unix timestamp (seconds since 1 january 1970) from a np.datetime64."""
return np.datetime64(Ts).astype("datetime64[s]").astype("int")
return np.datetime64(Ts).astype("datetime64[s]").astype("int")
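# Example (illustrative): get_unixtime(np.datetime64("1970-01-02")) returns 86400,
# i.e. one day after the Unix epoch.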
