From 60fa0c32e7bb8ad453039578df2b8b09790ce9ee Mon Sep 17 00:00:00 2001 From: Bart de Koning Date: Wed, 29 Jan 2025 11:28:36 +0100 Subject: [PATCH 1/8] Add source priority config options --- core/src/config.jl | 5 +++++ core/test/docs.toml | 9 +++++++-- docs/reference/usage.qmd | 1 + 3 files changed, 13 insertions(+), 2 deletions(-) diff --git a/core/src/config.jl b/core/src/config.jl index 7ea4ce0f4..a790d996f 100644 --- a/core/src/config.jl +++ b/core/src/config.jl @@ -131,6 +131,11 @@ end @option struct Allocation <: TableOption timestep::Float64 = 86400 use_allocation::Bool = false + default_source_priority_user_demand::Int = 1000 + default_source_priority_boundary::Int = 2000 + default_source_priority_level_demand::Int = 3000 + default_source_priority_flow_demand::Int = 4000 + default_source_priority_subnetwork_inlet::Int = 5000 end @option struct Experimental <: TableOption diff --git a/core/test/docs.toml b/core/test/docs.toml index 927ab0178..fe60216a1 100644 --- a/core/test/docs.toml +++ b/core/test/docs.toml @@ -21,8 +21,13 @@ ribasim_version = "2025.1.0" # required time = "basin/time.arrow" [allocation] -timestep = 86400 # optional (required if use_allocation = true), default 86400 -use_allocation = false # optional, default false +timestep = 86400 # optional (required if use_allocation = true), default 86400 +use_allocation = false # optional, default false +default_source_priority_user_demand = 1000 # optional, default 1000 +default_source_priority_boundary = 2000 # optional, default 2000 +default_source_priority_level_demand = 3000 # optional, default 3000 +default_source_priority_flow_demand = 4000 # optional, default 4000 +default_source_priority_subnetwork_inlet = 5000 # optional, default 5000 [solver] algorithm = "QNDF" # optional, default "QNDF" diff --git a/docs/reference/usage.qmd b/docs/reference/usage.qmd index d9f24c625..00703425e 100644 --- a/docs/reference/usage.qmd +++ b/docs/reference/usage.qmd @@ -61,6 +61,7 @@ While physically 
incorrect, it is useful for a first correctness check on a mode Currently there are the following allocation settings: - `use_allocation`: A boolean which says whether allocation should be used or not; - `timestep`: a float value in seconds which dictates the update interval for allocations. +- `default_source_priority_*`: an integer per source type for the allocation algorithm: `user_demand`, `boundary`, `level_demand`, `flow_demand`, `subnetwork_inlet`. Flow boundaries and level boundaries are combined in the single category `boundary`. ## Results settings From 7154ac986db3ac66dd2c7b4be95e3b18e5ddecbe Mon Sep 17 00:00:00 2001 From: Bart de Koning Date: Wed, 29 Jan 2025 11:45:37 +0100 Subject: [PATCH 2/8] Rename priority to demand_priority --- core/src/allocation_init.jl | 12 +- core/src/allocation_optim.jl | 169 ++++++++------- core/src/parameter.jl | 30 +-- core/src/read.jl | 44 ++-- core/src/schema.jl | 21 +- core/src/solve.jl | 6 +- core/src/util.jl | 34 +-- core/src/validation.jl | 25 ++- core/src/write.jl | 8 +- core/test/allocation_test.jl | 43 ++-- core/test/validation_test.jl | 14 +- python/ribasim/ribasim/config.py | 12 +- python/ribasim/ribasim/model.py | 18 +- python/ribasim/ribasim/schemas.py | 193 +++++++++++++++++- python/ribasim/tests/test_io.py | 20 +- .../ribasim_testmodels/allocation.py | 82 ++++---- .../ribasim_testmodels/invalid.py | 2 +- ribasim_qgis/core/nodes.py | 12 +- 18 files changed, 490 insertions(+), 255 deletions(-) diff --git a/core/src/allocation_init.jl b/core/src/allocation_init.jl index d80660861..6b5ebdb16 100644 --- a/core/src/allocation_init.jl +++ b/core/src/allocation_init.jl @@ -1,7 +1,7 @@ """Find the edges from the main network to a subnetwork.""" function find_subnetwork_connections!(p::Parameters)::Nothing (; allocation, graph, allocation) = p - n_priorities = length(allocation.priorities) + n_demand_priorities = length(allocation.demand_priorities) (; subnetwork_demands, subnetwork_allocateds) = allocation # Find 
edges (node_id, outflow_id) where the source node has subnetwork id 1 and the # destination node subnetwork id ≠1 @@ -12,10 +12,10 @@ function find_subnetwork_connections!(p::Parameters)::Nothing get_main_network_connections(p, graph[outflow_id].subnetwork_id) edge = (node_id, outflow_id) push!(main_network_source_edges, edge) - # Allocate memory for the demands and priorities + # Allocate memory for the demands and demand priorities # from the subnetwork via this edge - subnetwork_demands[edge] = zeros(n_priorities) - subnetwork_allocateds[edge] = zeros(n_priorities) + subnetwork_demands[edge] = zeros(n_demand_priorities) + subnetwork_allocateds[edge] = zeros(n_demand_priorities) end end end @@ -244,7 +244,7 @@ Add capacity constraints to the outflow edge of UserDemand nodes. The constraint indices are the UserDemand node IDs. Constraint: -flow over UserDemand edge outflow edge <= cumulative return flow from previous priorities +flow over UserDemand edge outflow edge <= cumulative return flow from previous demand priorities """ function add_constraints_user_source!( problem::JuMP.Model, @@ -487,7 +487,7 @@ function get_sources_in_order( subnetwork_id::Integer, )::OrderedDict{Tuple{NodeID, NodeID}, AllocationSource} # NOTE: return flow has to be done before other sources, to prevent that - # return flow is directly used within the same priority + # return flow is directly used within the same source priority (; basin, user_demand, graph, allocation) = p diff --git a/core/src/allocation_optim.jl b/core/src/allocation_optim.jl index 6e2404862..a70dca7e9 100644 --- a/core/src/allocation_optim.jl +++ b/core/src/allocation_optim.jl @@ -25,14 +25,14 @@ function add_objective_term!( end """ -Set the objective for the given priority. +Set the objective for the given demand priority. 
""" -function set_objective_priority!( +function set_objective_demand_priority!( allocation_model::AllocationModel, u::ComponentVector, p::Parameters, t::Float64, - priority_idx::Int, + demand_priority_idx::Int, )::Nothing (; problem, subnetwork_id, capacity) = allocation_model (; graph, user_demand, flow_demand, allocation, basin) = p @@ -49,7 +49,7 @@ function set_objective_priority!( # Loop over the connections between main and subnetwork for connections_subnetwork in main_network_connections[2:end] for connection in connections_subnetwork - d = subnetwork_demands[connection][priority_idx] + d = subnetwork_demands[connection][demand_priority_idx] F_inlet = F[connection] add_objective_term!(ex, d, F_inlet) end @@ -63,7 +63,7 @@ function set_objective_priority!( if to_node_id.type == NodeType.UserDemand # UserDemand user_demand_idx = to_node_id.idx - d = demand_reduced[user_demand_idx, priority_idx] + d = demand_reduced[user_demand_idx, demand_priority_idx] F_ud = F[edge] add_objective_term!(ex, d, F_ud) else @@ -71,9 +71,9 @@ function set_objective_priority!( has_external_demand(graph, to_node_id, :flow_demand) # FlowDemand if has_demand - flow_priority_idx = get_external_priority_idx(p, to_node_id) + flow_demand_priority_idx = get_external_demand_priority_idx(p, to_node_id) d = - priority_idx == flow_priority_idx ? + demand_priority_idx == flow_demand_priority_idx ? flow_demand.demand[demand_node_id.idx] : 0.0 F_fd = F_flow_buffer_in[to_node_id] @@ -85,9 +85,9 @@ function set_objective_priority!( # Terms for LevelDemand nodes F_basin_in = problem[:F_basin_in] for node_id in only(F_basin_in.axes) - basin_priority_idx = get_external_priority_idx(p, node_id) + basin_demand_priority_idx = get_external_demand_priority_idx(p, node_id) d = - basin_priority_idx == priority_idx ? + basin_demand_priority_idx == demand_priority_idx ? 
get_basin_demand(allocation_model, u, p, t, node_id) : 0.0 basin.demand[node_id.idx] = d F_ld = F_basin_in[node_id] @@ -106,7 +106,7 @@ Assign the allocations to the UserDemand or subnetwork as determined by the solu function assign_allocations!( allocation_model::AllocationModel, p::Parameters, - priority_idx::Int, + demand_priority_idx::Int, optimization_type::OptimizationType.T, )::Nothing (; subnetwork_id, capacity, flow) = allocation_model @@ -129,13 +129,14 @@ function assign_allocations!( if optimization_type == OptimizationType.collect_demands if edge in main_network_source_edges allocated = flow[edge] - subnetwork_demands[edge][priority_idx] += allocated + subnetwork_demands[edge][demand_priority_idx] += allocated end elseif optimization_type == OptimizationType.allocate user_demand_node_id = edge[2] if user_demand_node_id.type == NodeType.UserDemand allocated = flow[edge] - user_demand.allocated[user_demand_node_id.idx, priority_idx] = allocated + user_demand.allocated[user_demand_node_id.idx, demand_priority_idx] = + allocated end end end @@ -149,7 +150,7 @@ function assign_allocations!( continue end for edge_id in main_network_source_edges - subnetwork_allocateds[edge_id][priority_idx] = flow[edge_id] + subnetwork_allocateds[edge_id][demand_priority_idx] = flow[edge_id] end end end @@ -183,7 +184,7 @@ function set_initial_capacities_inlet!( # Set the source capacity to effectively unlimited if subnetwork demands are being collected Inf elseif optimization_type == OptimizationType.allocate - # Set the source capacity to the sum over priorities of the values allocated to the subnetwork over this edge + # Set the source capacity to the sum over demand priorities of the values allocated to the subnetwork over this edge sum(subnetwork_allocateds[edge_id]) end source = sources[edge_id] @@ -297,7 +298,7 @@ function set_initial_capacities_edge!( end """ -Before an allocation solve, subtract the flow used by allocation for the previous priority +Before an 
allocation solve, subtract the flow used by allocation for the previous demand priority from the edge capacities. """ function reduce_edge_capacities!(allocation_model::AllocationModel)::Nothing @@ -306,7 +307,7 @@ function reduce_edge_capacities!(allocation_model::AllocationModel)::Nothing F = problem[:F] for edge_id in only(constraints_capacity.axes) - # Before an allocation solve, subtract the flow used by allocation for the previous priority + # Before an allocation solve, subtract the flow used by allocation for the previous demand priority # from the edge capacities JuMP.set_normalized_rhs( constraints_capacity[edge_id], @@ -443,8 +444,9 @@ function set_initial_demands_user!( # for users for which the demand comes from there for id in node_id if demand_from_timeseries[id.idx] && graph[id].subnetwork_id == subnetwork_id - for priority_idx in eachindex(allocation.priorities) - demand[id.idx, priority_idx] = demand_itp[id.idx][priority_idx](t) + for demand_priority_idx in eachindex(allocation.demand_priorities) + demand[id.idx, demand_priority_idx] = + demand_itp[id.idx][demand_priority_idx](t) end end end @@ -498,12 +500,12 @@ end """ Before an allocation solve, subtract the flow trough the node with a flow demand -from the total flow demand (which will be used at the priority of the flow demand only). +from the total flow demand (which will be used at the demand priority of the flow demand only). 
""" function reduce_demands!( allocation_model::AllocationModel, p::Parameters, - priority_idx::Int, + demand_priority_idx::Int, user_demand::UserDemand, )::Nothing (; problem, subnetwork_id) = allocation_model @@ -516,10 +518,10 @@ function reduce_demands!( if graph[id].subnetwork_id == subnetwork_id d = max( 0.0, - demand_reduced[id.idx, priority_idx] - + demand_reduced[id.idx, demand_priority_idx] - JuMP.value(F[(inflow_id(graph, id), id)]), ) - demand_reduced[id.idx, priority_idx] = d + demand_reduced[id.idx, demand_priority_idx] = d end end return nothing @@ -574,7 +576,7 @@ end """ Reduce the flow demand based on flow trough the node with the demand. -Flow from any priority counts. +Flow from any demand priority counts. """ function reduce_demands!( allocation_model::AllocationModel, @@ -622,16 +624,16 @@ end """ Save the demands and allocated flows for UserDemand and Basin. -Note: Basin supply (negative demand) is only saved for the first priority. +Note: Basin supply (negative demand) is only saved for the first demand priority. 
""" function save_demands_and_allocations!( p::Parameters, allocation_model::AllocationModel, t::Float64, - priority_idx::Int, + demand_priority_idx::Int, )::Nothing (; graph, allocation, user_demand, flow_demand, basin) = p - (; record_demand, priorities, mean_realized_flows) = allocation + (; record_demand, demand_priorities, mean_realized_flows) = allocation (; subnetwork_id, sources, flow) = allocation_model node_ids = graph[].node_ids[subnetwork_id] @@ -642,23 +644,23 @@ function save_demands_and_allocations!( if node_id.type == NodeType.UserDemand # UserDemand nodes has_demand = true - demand = user_demand.demand[node_id.idx, priority_idx] - allocated = user_demand.allocated[node_id.idx, priority_idx] + demand = user_demand.demand[node_id.idx, demand_priority_idx] + allocated = user_demand.allocated[node_id.idx, demand_priority_idx] realized = mean_realized_flows[(inflow_id(graph, node_id), node_id)] elseif node_id.type == NodeType.Basin && has_external_demand(graph, node_id, :level_demand)[1] # Basins - basin_priority_idx = get_external_priority_idx(p, node_id) + basin_demand_priority_idx = get_external_demand_priority_idx(p, node_id) - if priority_idx == 1 || basin_priority_idx == priority_idx + if demand_priority_idx == 1 || basin_demand_priority_idx == demand_priority_idx has_demand = true demand = 0.0 - if priority_idx == 1 + if demand_priority_idx == 1 # Basin surplus demand -= sources[(node_id, node_id)].capacity[] end - if priority_idx == basin_priority_idx + if demand_priority_idx == basin_demand_priority_idx # Basin demand demand += basin.demand[node_id.idx] end @@ -672,9 +674,9 @@ function save_demands_and_allocations!( has_external_demand(graph, node_id, :flow_demand) if has_demand # Full demand, not the possibly reduced demand - flow_priority_idx = get_external_priority_idx(p, node_id) + flow_demand_priority_idx = get_external_demand_priority_idx(p, node_id) demand = - priority_idx == flow_priority_idx ? 
+ demand_priority_idx == flow_demand_priority_idx ? flow_demand.demand[flow_demand_node_id.idx,] : 0.0 allocated = flow[(inflow_id(graph, node_id), node_id)] realized = mean_realized_flows[(inflow_id(graph, node_id), node_id)] @@ -687,7 +689,7 @@ function save_demands_and_allocations!( push!(record_demand.subnetwork_id, subnetwork_id) push!(record_demand.node_type, string(node_id.type)) push!(record_demand.node_id, Int32(node_id)) - push!(record_demand.priority, priorities[priority_idx]) + push!(record_demand.demand_priority, demand_priorities[demand_priority_idx]) push!(record_demand.demand, demand) push!(record_demand.allocated, allocated) push!(record_demand.realized, realized) @@ -703,7 +705,7 @@ function save_allocation_flows!( p::Parameters, t::Float64, allocation_model::AllocationModel, - priority::Int32, + demand_priority::Int32, optimization_type::OptimizationType.T, )::Nothing (; flow, subnetwork_id, sources) = allocation_model @@ -754,7 +756,7 @@ function save_allocation_flows!( push!(record_flow.to_node_type, string(id_to.type)) push!(record_flow.to_node_id, Int32(id_to)) push!(record_flow.subnetwork_id, subnetwork_id) - push!(record_flow.priority, priority) + push!(record_flow.demand_priority, demand_priority) push!(record_flow.flow_rate, flow_rate) push!(record_flow.optimization_type, string(optimization_type)) end @@ -771,7 +773,7 @@ function save_allocation_flows!( push!(record_flow.to_node_type, string(NodeType.Basin)) push!(record_flow.to_node_id, node_id) push!(record_flow.subnetwork_id, subnetwork_id) - push!(record_flow.priority, priority) + push!(record_flow.demand_priority, demand_priority) push!(record_flow.flow_rate, flow_rate) push!(record_flow.optimization_type, string(optimization_type)) end @@ -783,7 +785,7 @@ end function allocate_to_users_from_connected_basin!( allocation_model::AllocationModel, p::Parameters, - priority_idx::Int, + demand_priority_idx::Int, )::Nothing (; flow, problem, sources) = allocation_model (; graph, 
user_demand) = p @@ -797,8 +799,8 @@ function allocate_to_users_from_connected_basin!( upstream_basin_id = user_demand.inflow_edge[node_id.idx].edge[1] if has_external_demand(graph, upstream_basin_id, :level_demand)[1] - # The demand of the UserDemand node at the current priority - demand = user_demand.demand_reduced[node_id.idx, priority_idx] + # The demand of the UserDemand node at the current demand priority + demand = user_demand.demand_reduced[node_id.idx, demand_priority_idx] # The capacity of the upstream basin source = sources[(upstream_basin_id, upstream_basin_id)] @@ -809,7 +811,7 @@ function allocate_to_users_from_connected_basin!( allocated = min(demand, capacity) # Subtract the allocated amount from the user demand and basin capacity - user_demand.demand_reduced[node_id.idx, priority_idx] -= allocated + user_demand.demand_reduced[node_id.idx, demand_priority_idx] -= allocated source.capacity -= allocated # Add the allocated flow @@ -869,20 +871,20 @@ function set_source_capacity!( end """ -Solve the allocation problem for a single priority by optimizing for each source +Solve the allocation problem for a single demand priority by optimizing for each source in the subnetwork individually. 
""" function optimize_per_source!( allocation::Allocation, allocation_model::AllocationModel, - priority_idx::Integer, + demand_priority_idx::Integer, u::ComponentVector, p::Parameters, t::AbstractFloat, optimization_type::OptimizationType.T, )::Nothing (; problem, sources, subnetwork_id, flow) = allocation_model - (; priorities) = allocation + (; demand_priorities) = allocation F_basin_in = problem[:F_basin_in] F_basin_out = problem[:F_basin_out] @@ -893,7 +895,7 @@ function optimize_per_source!( end end - priority = priorities[priority_idx] + demand_priority = demand_priorities[demand_priority_idx] for source in values(sources) # Skip source when it has no capacity @@ -907,7 +909,7 @@ function optimize_per_source!( # of an existing objective function because this is not supported for # quadratic terms: # https://jump.dev/JuMP.jl/v1.16/manual/objective/#Modify-an-objective-coefficient - set_objective_priority!(allocation_model, u, p, t, priority_idx) + set_objective_demand_priority!(allocation_model, u, p, t, demand_priority_idx) # Set only the capacity of the current source to nonzero set_source_capacity!(allocation_model, source, optimization_type) @@ -916,11 +918,11 @@ function optimize_per_source!( @debug JuMP.solution_summary(problem) if JuMP.termination_status(problem) !== JuMP.OPTIMAL error( - "Allocation of subnetwork $subnetwork_id, priority $priority, source $source couldn't find optimal solution.", + "Allocation of subnetwork $subnetwork_id, demand priority $demand_priority, source $source couldn't find optimal solution.", ) end - # Add the values of the flows at this priority + # Add the values of the flows at this demand priority for edge in only(problem[:F].axes) flow[edge] += max(JuMP.value(problem[:F][edge]), 0.0) end @@ -934,7 +936,7 @@ function optimize_per_source!( for parameter in propertynames(p) demand_node = getfield(p, parameter) if demand_node isa AbstractDemandNode - reduce_demands!(allocation_model, p, priority_idx, demand_node) + 
reduce_demands!(allocation_model, p, demand_priority_idx, demand_node) end end @@ -969,53 +971,53 @@ function increase_allocateds!(basin::Basin, problem::JuMP.Model)::Nothing return nothing end -function optimize_priority!( +function optimize_demand_priority!( allocation_model::AllocationModel, u::ComponentVector, p::Parameters, t::Float64, - priority_idx::Int, + demand_priority_idx::Int, optimization_type::OptimizationType.T, )::Nothing (; flow) = allocation_model (; allocation, basin) = p - (; priorities) = allocation + (; demand_priorities) = allocation - # Start the values of the flows at this priority at 0.0 + # Start the values of the flows at this demand priority at 0.0 for edge in keys(flow.data) flow[edge] = 0.0 end - # Start the allocated amounts to basins at this priority at 0.0 + # Start the allocated amounts to basins at this demand priority at 0.0 basin.allocated .= 0.0 # Allocate to UserDemand nodes from the directly connected basin # This happens outside the JuMP optimization - allocate_to_users_from_connected_basin!(allocation_model, p, priority_idx) + allocate_to_users_from_connected_basin!(allocation_model, p, demand_priority_idx) - # Solve the allocation problem for this priority + # Solve the allocation problem for this demand priority optimize_per_source!( allocation, allocation_model, - priority_idx, + demand_priority_idx, u, p, t, optimization_type, ) - # Assign the allocations to the UserDemand or subnetwork for this priority - assign_allocations!(allocation_model, p, priority_idx, optimization_type) + # Assign the allocations to the UserDemand or subnetwork for this demand priority + assign_allocations!(allocation_model, p, demand_priority_idx, optimization_type) # Save the demands and allocated flows for all nodes that have these - save_demands_and_allocations!(p, allocation_model, t, priority_idx) + save_demands_and_allocations!(p, allocation_model, t, demand_priority_idx) # Save the flows over all edges in the subnetwork 
save_allocation_flows!( p, t, allocation_model, - priorities[priority_idx], + demand_priorities[demand_priority_idx], optimization_type, ) return nothing @@ -1079,16 +1081,23 @@ function collect_demands!( )::Nothing (; allocation) = p (; subnetwork_id) = allocation_model - (; priorities, subnetwork_demands) = allocation + (; demand_priorities, subnetwork_demands) = allocation ## Find internal sources optimization_type = OptimizationType.internal_sources set_initial_capacities_inlet!(allocation_model, p, optimization_type) set_initial_values!(allocation_model, u, p, t) - # Loop over priorities - for priority_idx in eachindex(priorities) - optimize_priority!(allocation_model, u, p, t, priority_idx, optimization_type) + # Loop over demand priorities + for demand_priority_idx in eachindex(demand_priorities) + optimize_demand_priority!( + allocation_model, + u, + p, + t, + demand_priority_idx, + optimization_type, + ) end ## Collect demand @@ -1109,9 +1118,16 @@ function collect_demands!( # from the main to subnetwork connections empty_sources!(allocation_model, allocation) - # Loop over priorities - for priority_idx in eachindex(priorities) - optimize_priority!(allocation_model, u, p, t, priority_idx, optimization_type) + # Loop over demand priorities + for demand_priority_idx in eachindex(demand_priorities) + optimize_demand_priority!( + allocation_model, + u, + p, + t, + demand_priority_idx, + optimization_type, + ) end end @@ -1123,14 +1139,21 @@ function allocate_demands!( )::Nothing optimization_type = OptimizationType.allocate (; allocation) = p - (; priorities) = allocation + (; demand_priorities) = allocation set_initial_capacities_inlet!(allocation_model, p, optimization_type) set_initial_values!(allocation_model, u, p, t) - # Loop over the priorities - for priority_idx in eachindex(priorities) - optimize_priority!(allocation_model, u, p, t, priority_idx, optimization_type) + # Loop over the demand priorities + for demand_priority_idx in 
eachindex(demand_priorities) + optimize_demand_priority!( + allocation_model, + u, + p, + t, + demand_priority_idx, + optimization_type, + ) end end diff --git a/core/src/parameter.jl b/core/src/parameter.jl index 0ecaec28a..c98727831 100644 --- a/core/src/parameter.jl +++ b/core/src/parameter.jl @@ -148,7 +148,7 @@ edge: The outflow edge of the source type: The type of source (edge, basin, main_to_sub, user_return, buffer) capacity: The initial capacity of the source as determined by the physical layer capacity_reduced: The capacity adjusted by passed optimizations -basin_flow_rate: The total outflow rate of a basin when optimized over all sources for one priority. +basin_flow_rate: The total outflow rate of a basin when optimized over all sources for one demand priority. Ignored when the source is not a basin. """ @kwdef mutable struct AllocationSource @@ -169,7 +169,7 @@ Store information for a subnetwork used for allocation. subnetwork_id: The ID of this allocation network capacity: The capacity per edge of the allocation network, as constrained by nodes that have a max_flow_rate -flow: The flows over all the edges in the subnetwork for a certain priority (used for allocation_flow output) +flow: The flows over all the edges in the subnetwork for a certain demand priority (used for allocation_flow output) sources: source data in preferred order of optimization problem: The JuMP.jl model for solving the allocation problem Δt_allocation: The time interval between consecutive allocation solves @@ -189,7 +189,7 @@ subnetwork_ids: The unique sorted allocation network IDs allocation_models: The allocation models for the main network and subnetworks corresponding to subnetwork_ids main_network_connections: (from_id, to_id) from the main network to the subnetwork per subnetwork -priorities: All used priority values. +demand_priorities: All used demand priority values. 
subnetwork_demands: The demand of an edge from the main network to a subnetwork subnetwork_allocateds: The allocated flow of an edge from the main network to a subnetwork mean_input_flows: Per subnetwork, flows averaged over Δt_allocation over edges that are allocation sources @@ -203,7 +203,7 @@ record_flow: A record of all flows computed by allocation optimization, eventual allocation_models::Vector{AllocationModel} = AllocationModel[] main_network_connections::Vector{Vector{Tuple{NodeID, NodeID}}} = Vector{Tuple{NodeID, NodeID}}[] - priorities::Vector{Int32} + demand_priorities::Vector{Int32} subnetwork_demands::Dict{Tuple{NodeID, NodeID}, Vector{Float64}} = Dict() subnetwork_allocateds::Dict{Tuple{NodeID, NodeID}, Vector{Float64}} = Dict() mean_input_flows::Vector{Dict{Tuple{NodeID, NodeID}, Float64}} @@ -213,7 +213,7 @@ record_flow: A record of all flows computed by allocation optimization, eventual subnetwork_id::Vector{Int32}, node_type::Vector{String}, node_id::Vector{Int32}, - priority::Vector{Int32}, + demand_priority::Vector{Int32}, demand::Vector{Float64}, allocated::Vector{Float64}, realized::Vector{Float64}, @@ -222,7 +222,7 @@ record_flow: A record of all flows computed by allocation optimization, eventual subnetwork_id = Int32[], node_type = String[], node_id = Int32[], - priority = Int32[], + demand_priority = Int32[], demand = Float64[], allocated = Float64[], realized = Float64[], @@ -235,7 +235,7 @@ record_flow: A record of all flows computed by allocation optimization, eventual to_node_type::Vector{String}, to_node_id::Vector{Int32}, subnetwork_id::Vector{Int32}, - priority::Vector{Int32}, + demand_priority::Vector{Int32}, flow_rate::Vector{Float64}, optimization_type::Vector{String}, } = (; @@ -246,7 +246,7 @@ record_flow: A record of all flows computed by allocation optimization, eventual to_node_type = String[], to_node_id = Int32[], subnetwork_id = Int32[], - priority = Int32[], + demand_priority = Int32[], flow_rate = Float64[], 
optimization_type = String[], ) @@ -846,14 +846,14 @@ inflow_edge: incoming flow edge outflow_edge: outgoing flow edge metadata The ID of the source node is always the ID of the UserDemand node active: whether this node is active and thus demands water -demand: water flux demand of UserDemand per priority (node_idx, priority_idx) - Each UserDemand has a demand for all priorities, +demand: water flux demand of UserDemand per demand priority (node_idx, demand_priority_idx) + Each UserDemand has a demand for all demand priorities, which is 0.0 if it is not provided explicitly. demand_reduced: the total demand reduced by allocated flows. This is used for goal programming, and requires separate memory from `demand` since demands can come from the BMI demand_itp: Timeseries interpolation objects for demands demand_from_timeseries: If false the demand comes from the BMI or is fixed -allocated: water flux currently allocated to UserDemand per priority (node_idx, priority_idx) +allocated: water flux currently allocated to UserDemand per demand priority (node_idx, demand_priority_idx) return_factor: the factor in [0,1] of how much of the abstracted water is given back to the system min_level: The level of the source Basin below which the UserDemand does not abstract concentration: matrix with boundary concentrations for each Basin and substance @@ -879,26 +879,26 @@ end node_id: node ID of the LevelDemand node min_level: The minimum target level of the connected basin(s) max_level: The maximum target level of the connected basin(s) -priority: If in a shortage state, the priority of the demand of the connected basin(s) +demand_priority: If in a shortage state, the priority of the demand of the connected basin(s) """ @kwdef struct LevelDemand <: AbstractDemandNode node_id::Vector{NodeID} min_level::Vector{ScalarInterpolation} = fill(-Inf, length(node_id)) max_level::Vector{ScalarInterpolation} = fill(Inf, length(node_id)) - priority::Vector{Int32} + 
demand_priority::Vector{Int32} end """ node_id: node ID of the FlowDemand node demand_itp: The time interpolation of the demand of the node demand: The current demand of the node -priority: The priority of the demand of the node +demand_priority: The priority of the demand of the node """ @kwdef struct FlowDemand <: AbstractDemandNode node_id::Vector{NodeID} demand_itp::Vector{ScalarInterpolation} demand::Vector{Float64} - priority::Vector{Int32} + demand_priority::Vector{Int32} end "Subgrid linearly interpolates basin levels." diff --git a/core/src/read.jl b/core/src/read.jl index 7e0b7712a..3f02107bb 100644 --- a/core/src/read.jl +++ b/core/src/read.jl @@ -1055,7 +1055,7 @@ function user_demand_static!( min_level::Vector{Float64}, static::StructVector{UserDemandStaticV1}, ids::Vector{Int32}, - priorities::Vector{Int32}, + demand_priorities::Vector{Int32}, )::Nothing for group in IterTools.groupby(row -> row.node_id, static) first_row = first(group) @@ -1072,16 +1072,16 @@ function user_demand_static!( min_level[user_demand_idx] = first_row.min_level for row in group - priority_idx = findsorted(priorities, row.priority) + demand_priority_idx = findsorted(demand_priorities, row.demand_priority) demand_row = coalesce(row.demand, 0.0) - demand_itp_old = demand_itp[user_demand_idx][priority_idx] - demand_itp[user_demand_idx][priority_idx] = LinearInterpolation( + demand_itp_old = demand_itp[user_demand_idx][demand_priority_idx] + demand_itp[user_demand_idx][demand_priority_idx] = LinearInterpolation( fill(demand_row, 2), demand_itp_old.t; extrapolate = true, cache_parameters = true, ) - demand[user_demand_idx, priority_idx] = demand_row + demand[user_demand_idx, demand_priority_idx] = demand_row end end return nothing @@ -1096,13 +1096,13 @@ function user_demand_time!( min_level::Vector{Float64}, time::StructVector{UserDemandTimeV1}, ids::Vector{Int32}, - priorities::Vector{Int32}, + demand_priorities::Vector{Int32}, config::Config, )::Bool errors = false t_end = 
seconds_since(config.endtime, config.starttime) - for group in IterTools.groupby(row -> (row.node_id, row.priority), time) + for group in IterTools.groupby(row -> (row.node_id, row.demand_priority), time) first_row = first(group) user_demand_idx = findsorted(ids, first_row.node_id) @@ -1120,7 +1120,7 @@ function user_demand_time!( min_level[user_demand_idx] = first_row.min_level - priority_idx = findsorted(priorities, first_row.priority) + demand_priority_idx = findsorted(demand_priorities, first_row.demand_priority) demand_p_itp = get_scalar_interpolation( config.starttime, t_end, @@ -1130,8 +1130,8 @@ function user_demand_time!( default_value = 0.0, interpolation_type = LinearInterpolation, ) - demand[user_demand_idx, priority_idx] = demand_p_itp(0.0) - demand_itp[user_demand_idx][priority_idx] = demand_p_itp + demand[user_demand_idx, demand_priority_idx] = demand_p_itp(0.0) + demand_itp[user_demand_idx][demand_priority_idx] = demand_p_itp end return errors end @@ -1149,21 +1149,21 @@ function UserDemand(db::DB, config::Config, graph::MetaGraph)::UserDemand end # Initialize vectors for UserDemand fields - priorities = get_all_priorities(db, config) + demand_priorities = get_all_demand_priorities(db, config) n_user = length(node_ids) - n_priority = length(priorities) + n_demand_priority = length(demand_priorities) active = fill(true, n_user) - demand = zeros(n_user, n_priority) - demand_reduced = zeros(n_user, n_priority) + demand = zeros(n_user, n_demand_priority) + demand_reduced = zeros(n_user, n_demand_priority) trivial_timespan = [0.0, prevfloat(Inf)] demand_itp = [ ScalarInterpolation[ LinearInterpolation(zeros(2), trivial_timespan; cache_parameters = true) for - i in eachindex(priorities) + i in eachindex(demand_priorities) ] for j in eachindex(node_ids) ] demand_from_timeseries = fill(false, n_user) - allocated = fill(Inf, n_user, n_priority) + allocated = fill(Inf, n_user, n_demand_priority) return_factor = [ LinearInterpolation(zeros(2), 
trivial_timespan; cache_parameters = true) for i in eachindex(node_ids) @@ -1179,7 +1179,7 @@ function UserDemand(db::DB, config::Config, graph::MetaGraph)::UserDemand min_level, static, ids, - priorities, + demand_priorities, ) # Process time table @@ -1192,7 +1192,7 @@ function UserDemand(db::DB, config::Config, graph::MetaGraph)::UserDemand min_level, time, ids, - priorities, + demand_priorities, config, ) @@ -1202,7 +1202,7 @@ function UserDemand(db::DB, config::Config, graph::MetaGraph)::UserDemand concentration[:, Substance.UserDemand] .= 1.0 set_concentrations!(concentration, concentration_time, substances, node_ids) - if errors || !valid_demand(node_ids, demand_itp, priorities) + if errors || !valid_demand(node_ids, demand_itp, demand_priorities) error("Errors occurred when parsing UserDemand data.") end @@ -1247,7 +1247,7 @@ function LevelDemand(db::DB, config::Config)::LevelDemand NodeID.(NodeType.LevelDemand, node_id, eachindex(node_id)), parsed_parameters.min_level, parsed_parameters.max_level, - parsed_parameters.priority, + parsed_parameters.demand_priority, ) end @@ -1275,7 +1275,7 @@ function FlowDemand(db::DB, config::Config)::FlowDemand node_id = NodeID.(NodeType.FlowDemand, node_id, eachindex(node_id)), demand_itp = parsed_parameters.demand, demand, - parsed_parameters.priority, + parsed_parameters.demand_priority, ) end @@ -1501,7 +1501,7 @@ function Allocation(db::DB, config::Config, graph::MetaGraph)::Allocation end return Allocation(; - priorities = get_all_priorities(db, config), + demand_priorities = get_all_demand_priorities(db, config), mean_input_flows, mean_realized_flows, ) diff --git a/core/src/schema.jl b/core/src/schema.jl index 1d4edde81..d7af5a880 100644 --- a/core/src/schema.jl +++ b/core/src/schema.jl @@ -36,6 +36,9 @@ @schema "ribasim.userdemand.static" UserDemandStatic @schema "ribasim.userdemand.time" UserDemandTime +# This schema is not specific to a node type +@schema "ribasim.allocationsourceorder" AllocationSourceOrder + 
const delimiter = " / " tablename(sv::Type{SchemaVersion{T, N}}) where {T, N} = tablename(sv()) tablename(sv::SchemaVersion{T, N}) where {T, N} = @@ -76,6 +79,12 @@ function nodetype( return Symbol(node[begin:length(n)]), k end +@version AllocationSourceOrderV1 begin + subnetwork_id::Int32 + node_id::Int32 + source_priority::Int32 +end + @version PumpStaticV1 begin node_id::Int32 active::Union{Missing, Bool} @@ -299,7 +308,7 @@ end demand::Union{Missing, Float64} return_factor::Float64 min_level::Float64 - priority::Union{Missing, Int32} + demand_priority::Union{Missing, Int32} end @version UserDemandTimeV1 begin @@ -308,7 +317,7 @@ end demand::Float64 return_factor::Float64 min_level::Float64 - priority::Union{Missing, Int32} + demand_priority::Union{Missing, Int32} end @version UserDemandConcentrationV1 begin @@ -322,7 +331,7 @@ end node_id::Int32 min_level::Union{Missing, Float64} max_level::Union{Missing, Float64} - priority::Union{Missing, Int32} + demand_priority::Union{Missing, Int32} end @version LevelDemandTimeV1 begin @@ -330,18 +339,18 @@ end time::DateTime min_level::Union{Missing, Float64} max_level::Union{Missing, Float64} - priority::Union{Missing, Int32} + demand_priority::Union{Missing, Int32} end @version FlowDemandStaticV1 begin node_id::Int demand::Float64 - priority::Union{Missing, Int32} + demand_priority::Union{Missing, Int32} end @version FlowDemandTimeV1 begin node_id::Int time::DateTime demand::Float64 - priority::Union{Missing, Int32} + demand_priority::Union{Missing, Int32} end diff --git a/core/src/solve.jl b/core/src/solve.jl index 274e8cdcf..7c1155dae 100644 --- a/core/src/solve.jl +++ b/core/src/solve.jl @@ -350,9 +350,9 @@ function formulate_flow!( # and the current demand. # If allocation is not optimized then allocated = Inf, so the result is always # effectively allocated = demand. 
- for priority_idx in eachindex(allocation.priorities) - alloc_prio = allocated[priority_idx] - demand_prio = get_demand(user_demand, id, priority_idx, t) + for demand_priority_idx in eachindex(allocation.demand_priorities) + alloc_prio = allocated[demand_priority_idx] + demand_prio = get_demand(user_demand, id, demand_priority_idx, t) alloc = min(alloc_prio, demand_prio) q += alloc end diff --git a/core/src/util.jl b/core/src/util.jl index 03b1b636d..0a291c609 100644 --- a/core/src/util.jl +++ b/core/src/util.jl @@ -358,10 +358,10 @@ function is_main_network(subnetwork_id::Int32)::Bool return subnetwork_id == 1 end -function get_all_priorities(db::DB, config::Config)::Vector{Int32} - priorities = Set{Int32}() +function get_all_demand_priorities(db::DB, config::Config)::Vector{Int32} + demand_priorities = Set{Int32}() is_valid = true - # TODO: Is there a way to automatically grab all tables with a priority column? + # TODO: Is there a way to automatically grab all tables with a demand priority column? for (type, name) in [ (UserDemandStaticV1, "UserDemand / static"), (UserDemandTimeV1, "UserDemand / time"), @@ -370,23 +370,23 @@ function get_all_priorities(db::DB, config::Config)::Vector{Int32} (FlowDemandStaticV1, "FlowDemand / static"), (FlowDemandTimeV1, "FlowDemand / time"), ] - priority_col = load_structvector(db, config, type).priority - priority_col = Int32.(coalesce.(priority_col, Int32(0))) - if valid_priorities(priority_col, config.allocation.use_allocation) - union!(priorities, priority_col) + demand_priority_col = load_structvector(db, config, type).demand_priority + demand_priority_col = Int32.(coalesce.(demand_priority_col, Int32(0))) + if valid_demand_priorities(demand_priority_col, config.allocation.use_allocation) + union!(demand_priorities, demand_priority_col) else is_valid = false - @error "Missing priority parameter(s) for a $name node in the allocation problem." 
+ @error "Missing demand_priority parameter(s) for a $name node in the allocation problem." end end if is_valid - return sort(collect(priorities)) + return sort(collect(demand_priorities)) else error("Priority parameter is missing") end end -function get_external_priority_idx(p::Parameters, node_id::NodeID)::Int +function get_external_demand_priority_idx(p::Parameters, node_id::NodeID)::Int (; graph, level_demand, flow_demand, allocation) = p inneighbor_control_ids = inneighbor_labels_type(graph, node_id, EdgeType.control) if isempty(inneighbor_control_ids) @@ -395,14 +395,14 @@ function get_external_priority_idx(p::Parameters, node_id::NodeID)::Int inneighbor_control_id = only(inneighbor_control_ids) type = inneighbor_control_id.type if type == NodeType.LevelDemand - priority = level_demand.priority[inneighbor_control_id.idx] + demand_priority = level_demand.demand_priority[inneighbor_control_id.idx] elseif type == NodeType.FlowDemand - priority = flow_demand.priority[inneighbor_control_id.idx] + demand_priority = flow_demand.demand_priority[inneighbor_control_id.idx] else - error("Nodes of type $type have no priority.") + error("Nodes of type $type have no demand_priority.") end - return findsorted(allocation.priorities, priority) + return findsorted(allocation.demand_priorities, demand_priority) end """ @@ -1062,12 +1062,12 @@ function isoutofdomain(u, p, t) any(<(0), current_storage) end -function get_demand(user_demand, id, priority_idx, t)::Float64 +function get_demand(user_demand, id, demand_priority_idx, t)::Float64 (; demand_from_timeseries, demand_itp, demand) = user_demand if demand_from_timeseries[id.idx] - demand_itp[id.idx][priority_idx](t) + demand_itp[id.idx][demand_priority_idx](t) else - demand[id.idx, priority_idx] + demand[id.idx, demand_priority_idx] end end diff --git a/core/src/validation.jl b/core/src/validation.jl index 2ac800f0b..2eb5401bd 100644 --- a/core/src/validation.jl +++ b/core/src/validation.jl @@ -129,16 +129,16 @@ 
sort_by(::StructVector{FlowBoundaryConcentrationV1}) = x -> (x.node_id, x.substa sort_by(::StructVector{FlowBoundaryStaticV1}) = x -> (x.node_id) sort_by(::StructVector{FlowBoundaryTimeV1}) = x -> (x.node_id, x.time) -sort_by(::StructVector{FlowDemandStaticV1}) = x -> (x.node_id, x.priority) -sort_by(::StructVector{FlowDemandTimeV1}) = x -> (x.node_id, x.priority, x.time) +sort_by(::StructVector{FlowDemandStaticV1}) = x -> (x.node_id, x.demand_priority) +sort_by(::StructVector{FlowDemandTimeV1}) = x -> (x.node_id, x.demand_priority, x.time) sort_by(::StructVector{LevelBoundaryConcentrationV1}) = x -> (x.node_id, x.substance, x.time) sort_by(::StructVector{LevelBoundaryStaticV1}) = x -> (x.node_id) sort_by(::StructVector{LevelBoundaryTimeV1}) = x -> (x.node_id, x.time) -sort_by(::StructVector{LevelDemandStaticV1}) = x -> (x.node_id, x.priority) -sort_by(::StructVector{LevelDemandTimeV1}) = x -> (x.node_id, x.priority, x.time) +sort_by(::StructVector{LevelDemandStaticV1}) = x -> (x.node_id, x.demand_priority) +sort_by(::StructVector{LevelDemandTimeV1}) = x -> (x.node_id, x.demand_priority, x.time) sort_by(::StructVector{LinearResistanceStaticV1}) = x -> (x.node_id, x.control_state) @@ -156,8 +156,8 @@ sort_by(::StructVector{TabulatedRatingCurveStaticV1}) = sort_by(::StructVector{TabulatedRatingCurveTimeV1}) = x -> (x.node_id, x.time, x.level) sort_by(::StructVector{UserDemandConcentrationV1}) = x -> (x.node_id, x.substance, x.time) -sort_by(::StructVector{UserDemandStaticV1}) = x -> (x.node_id, x.priority) -sort_by(::StructVector{UserDemandTimeV1}) = x -> (x.node_id, x.priority, x.time) +sort_by(::StructVector{UserDemandStaticV1}) = x -> (x.node_id, x.demand_priority) +sort_by(::StructVector{UserDemandTimeV1}) = x -> (x.node_id, x.demand_priority, x.time) """ Depending on if a table can be sorted, either sort it or assert that it is sorted. 
@@ -371,14 +371,14 @@ end function valid_demand( node_id::Vector{NodeID}, demand_itp::Vector{Vector{ScalarInterpolation}}, - priorities::Vector{Int32}, + demand_priorities::Vector{Int32}, )::Bool errors = false for (col, id) in zip(demand_itp, node_id) - for (demand_p_itp, p_itp) in zip(col, priorities) + for (demand_p_itp, p_itp) in zip(col, demand_priorities) if any(demand_p_itp.u .< 0.0) - @error "Demand of $id with priority $p_itp should be non-negative" + @error "Demand of $id with demand_priority $p_itp should be non-negative" errors = true end end @@ -631,8 +631,11 @@ function valid_discrete_control(p::Parameters, config::Config)::Bool return !errors end -function valid_priorities(priorities::Vector{Int32}, use_allocation::Bool)::Bool - if use_allocation && any(iszero, priorities) +function valid_demand_priorities( + demand_priorities::Vector{Int32}, + use_allocation::Bool, +)::Bool + if use_allocation && any(iszero, demand_priorities) return false else return true diff --git a/core/src/write.jl b/core/src/write.jl index 91e2e2775..92872b1b6 100644 --- a/core/src/write.jl +++ b/core/src/write.jl @@ -311,7 +311,7 @@ function allocation_table( subnetwork_id::Vector{Int32}, node_type::Vector{String}, node_id::Vector{Int32}, - priority::Vector{Int32}, + demand_priority::Vector{Int32}, demand::Vector{Float64}, allocated::Vector{Float64}, realized::Vector{Float64}, @@ -325,7 +325,7 @@ function allocation_table( record_demand.subnetwork_id, record_demand.node_type, record_demand.node_id, - record_demand.priority, + record_demand.demand_priority, record_demand.demand, record_demand.allocated, record_demand.realized, @@ -342,7 +342,7 @@ function allocation_flow_table( to_node_type::Vector{String}, to_node_id::Vector{Int32}, subnetwork_id::Vector{Int32}, - priority::Vector{Int32}, + demand_priority::Vector{Int32}, flow_rate::Vector{Float64}, optimization_type::Vector{String}, } @@ -359,7 +359,7 @@ function allocation_flow_table( record_flow.to_node_type, 
record_flow.to_node_id, record_flow.subnetwork_id, - record_flow.priority, + record_flow.demand_priority, record_flow.flow_rate, record_flow.optimization_type, ) diff --git a/core/test/allocation_test.jl b/core/test/allocation_test.jl index 05cb76ce3..21f6e6055 100644 --- a/core/test/allocation_test.jl +++ b/core/test/allocation_test.jl @@ -19,7 +19,7 @@ t = 0.0 Ribasim.allocate_demands!(p, allocation_model, t, u) - # Last priority (= 2) flows + # Last demand priority (= 2) flows @test flow[(NodeID(:Basin, 2, p), NodeID(:Pump, 5, p))] ≈ 0.0 @test flow[(NodeID(:Basin, 2, p), NodeID(:UserDemand, 10, p))] ≈ 0.5 @test flow[(NodeID(:Basin, 8, p), NodeID(:UserDemand, 12, p))] ≈ 3.0 rtol = 1e-5 @@ -50,7 +50,7 @@ end (; user_demand) = p allocation_model = p.allocation.allocation_models[1] Ribasim.set_initial_values!(allocation_model, u, p, t) - Ribasim.set_objective_priority!(allocation_model, u, p, t, 1) + Ribasim.set_objective_demand_priority!(allocation_model, u, p, t, 1) objective = JuMP.objective_function(allocation_model.problem) @test objective isa JuMP.QuadExpr # Quadratic expression F = allocation_model.problem[:F] @@ -153,7 +153,14 @@ end main_source = allocation_model.sources[(NodeID(:FlowBoundary, 1, p), NodeID(:Basin, 2, p))] main_source.capacity_reduced = 4.5 - Ribasim.optimize_priority!(allocation_model, u, p, t, 1, OptimizationType.allocate) + Ribasim.optimize_demand_priority!( + allocation_model, + u, + p, + t, + 1, + OptimizationType.allocate, + ) # Main network objective function F = problem[:F] @@ -357,8 +364,10 @@ end flow_table_user_3.flow_rate, Ribasim.seconds_since.(flow_table_user_3.time, model.config.starttime), ) - df_user_3 = - record_demand[(record_demand.node_id .== 3) .&& (record_demand.priority .== 1), :] + df_user_3 = record_demand[ + (record_demand.node_id .== 3) .&& (record_demand.demand_priority .== 1), + :, + ] realized_numeric = diff(integral.(Ref(itp_user_3), df_user_3.time)) ./ Δt_allocation @test 
all(isapprox.(realized_numeric[3:end], df_user_3.realized[4:end], atol = 5e-4)) end @@ -416,7 +425,7 @@ end 2e-3 # Priority 1 - Ribasim.optimize_priority!( + Ribasim.optimize_demand_priority!( allocation_model, model.integrator.u, p, @@ -434,7 +443,7 @@ end @test flow_demand.demand[1] ≈ flow_demand.demand_itp[1](t) - 0.001 rtol = 1e-3 ## Priority 2 - Ribasim.optimize_priority!( + Ribasim.optimize_demand_priority!( allocation_model, model.integrator.u, p, @@ -448,7 +457,7 @@ end @test JuMP.value(only(F_flow_buffer_in)) ≈ only(flow_demand.demand) atol = 1e-10 ## Priority 3 - Ribasim.optimize_priority!( + Ribasim.optimize_demand_priority!( allocation_model, model.integrator.u, p, @@ -456,7 +465,7 @@ end 3, optimization_type, ) - # The flow from the source is used up in previous priorities + # The flow from the source is used up in previous demand priorities @test flow[(NodeID(:LevelBoundary, 1, p), node_id_with_flow_demand)] ≈ 0 atol = 1e-10 # So flow from the flow buffer is used for UserDemand #4 @test JuMP.value(F_flow_buffer_out[node_id_with_flow_demand]) ≈ 0.001 rtol = 1e-3 @@ -467,7 +476,7 @@ end 1e-10 ## Priority 4 - Ribasim.optimize_priority!( + Ribasim.optimize_demand_priority!( allocation_model, model.integrator.u, p, @@ -494,7 +503,7 @@ end :subnetwork_id, :node_type, :node_id, - :priority, + :demand_priority, :demand, :allocated, :realized, @@ -510,7 +519,7 @@ end :to_node_type, :to_node_id, :subnetwork_id, - :priority, + :demand_priority, :flow_rate, :optimization_type, ), @@ -576,12 +585,16 @@ end (; p) = model.integrator t = 0.0 u = model.integrator.u - priority_idx = 2 + demand_priority_idx = 2 allocation_model = first(p.allocation.allocation_models) Ribasim.set_initial_values!(allocation_model, u, p, t) - Ribasim.set_objective_priority!(allocation_model, u, p, t, priority_idx) - Ribasim.allocate_to_users_from_connected_basin!(allocation_model, p, priority_idx) + Ribasim.set_objective_demand_priority!(allocation_model, u, p, t, demand_priority_idx) + 
Ribasim.allocate_to_users_from_connected_basin!( + allocation_model, + p, + demand_priority_idx, + ) flow_data = allocation_model.flow.data @test flow_data[(NodeID(:FlowBoundary, 1, p), NodeID(:Basin, 2, p))] == 0.0 @test flow_data[(NodeID(:Basin, 2, p), NodeID(:UserDemand, 3, p))] == 0.0015 diff --git a/core/test/validation_test.jl b/core/test/validation_test.jl index 295d59542..705c3abe5 100644 --- a/core/test/validation_test.jl +++ b/core/test/validation_test.jl @@ -331,14 +331,14 @@ end with_logger(logger) do node_id = [NodeID(:UserDemand, 1, 1)] demand_itp = [[LinearInterpolation([-5.0, -5.0], [-1.8, 1.8])]] - priorities = Int32[1] - @test !valid_demand(node_id, demand_itp, priorities) + demand_priorities = Int32[1] + @test !valid_demand(node_id, demand_itp, demand_priorities) end @test length(logger.logs) == 1 @test logger.logs[1].level == Error @test logger.logs[1].message == - "Demand of UserDemand #1 with priority 1 should be non-negative" + "Demand of UserDemand #1 with demand_priority 1 should be non-negative" end @testitem "negative storage" begin @@ -438,7 +438,7 @@ end @test occursin("Pump #52 = ", output) end -@testitem "Missing priority when allocation is active" begin +@testitem "Missing demand priority when allocation is active" begin using Ribasim using Logging using IOCapture: capture @@ -454,11 +454,11 @@ end @test length(logger.logs) == 3 @test logger.logs[1].level == Error @test logger.logs[1].message == - "Missing priority parameter(s) for a UserDemand / static node in the allocation problem." + "Missing demand_priority parameter(s) for a UserDemand / static node in the allocation problem." @test logger.logs[2].message == - "Missing priority parameter(s) for a LevelDemand / static node in the allocation problem." + "Missing demand_priority parameter(s) for a LevelDemand / static node in the allocation problem." @test logger.logs[3].message == - "Missing priority parameter(s) for a FlowDemand / static node in the allocation problem." 
+ "Missing demand_priority parameter(s) for a FlowDemand / static node in the allocation problem." end @testitem "Node ID not in Node table" begin diff --git a/python/ribasim/ribasim/config.py b/python/ribasim/ribasim/config.py index d86bf8ee6..30eecc0dd 100644 --- a/python/ribasim/ribasim/config.py +++ b/python/ribasim/ribasim/config.py @@ -343,11 +343,11 @@ class TabulatedRatingCurve(MultiNodeModel): class UserDemand(MultiNodeModel): static: TableModel[UserDemandStaticSchema] = Field( default_factory=TableModel[UserDemandStaticSchema], - json_schema_extra={"sort_keys": ["node_id", "priority"]}, + json_schema_extra={"sort_keys": ["node_id", "demand_priority"]}, ) time: TableModel[UserDemandTimeSchema] = Field( default_factory=TableModel[UserDemandTimeSchema], - json_schema_extra={"sort_keys": ["node_id", "priority", "time"]}, + json_schema_extra={"sort_keys": ["node_id", "demand_priority", "time"]}, ) concentration: TableModel[UserDemandConcentrationSchema] = Field( default_factory=TableModel[UserDemandConcentrationSchema], @@ -358,11 +358,11 @@ class UserDemand(MultiNodeModel): class LevelDemand(MultiNodeModel): static: TableModel[LevelDemandStaticSchema] = Field( default_factory=TableModel[LevelDemandStaticSchema], - json_schema_extra={"sort_keys": ["node_id", "priority"]}, + json_schema_extra={"sort_keys": ["node_id", "demand_priority"]}, ) time: TableModel[LevelDemandTimeSchema] = Field( default_factory=TableModel[LevelDemandTimeSchema], - json_schema_extra={"sort_keys": ["node_id", "priority", "time"]}, + json_schema_extra={"sort_keys": ["node_id", "demand_priority", "time"]}, ) @@ -384,11 +384,11 @@ class FlowBoundary(MultiNodeModel): class FlowDemand(MultiNodeModel): static: TableModel[FlowDemandStaticSchema] = Field( default_factory=TableModel[FlowDemandStaticSchema], - json_schema_extra={"sort_keys": ["node_id", "priority"]}, + json_schema_extra={"sort_keys": ["node_id", "demand_priority"]}, ) time: TableModel[FlowDemandTimeSchema] = Field( 
default_factory=TableModel[FlowDemandTimeSchema], - json_schema_extra={"sort_keys": ["node_id", "priority", "time"]}, + json_schema_extra={"sort_keys": ["node_id", "demand_priority", "time"]}, ) diff --git a/python/ribasim/ribasim/model.py b/python/ribasim/ribasim/model.py index 548368244..d7eb1fdcb 100644 --- a/python/ribasim/ribasim/model.py +++ b/python/ribasim/ribasim/model.py @@ -701,7 +701,13 @@ def _add_allocation(self, uds): alloc_flow_df = pd.read_feather( alloc_flow_path, - columns=["time", "edge_id", "flow_rate", "optimization_type", "priority"], + columns=[ + "time", + "edge_id", + "flow_rate", + "optimization_type", + "demand_priority", + ], dtype_backend="pyarrow", ) _time_in_ns(alloc_flow_df) @@ -711,7 +717,7 @@ def _add_allocation(self, uds): edge_lookup = _edge_lookup(uds) alloc_flow_df[edge_dim] = edge_lookup[alloc_flow_df["edge_id"]].to_numpy() - # "flow_rate_allocated" is the sum of all allocated flow rates over the priorities + # "flow_rate_allocated" is the sum of all allocated flow rates over the demand priorities allocate_df = alloc_flow_df.loc[ alloc_flow_df["optimization_type"] == "allocate" ] @@ -719,12 +725,12 @@ def _add_allocation(self, uds): allocate_df.groupby(["time", edge_dim])["flow_rate"].sum().to_xarray() ) - # also add the individual priorities and optimization types + # also add the individual demand priorities and optimization types # added as separate variables to ensure QGIS / MDAL compatibility - for (optimization_type, priority), group in alloc_flow_df.groupby( - ["optimization_type", "priority"] + for (optimization_type, demand_priority), group in alloc_flow_df.groupby( + ["optimization_type", "demand_priority"] ): - varname = f"{optimization_type}_priority_{priority}" + varname = f"{optimization_type}_priority_{demand_priority}" da = group.set_index(["time", edge_dim])["flow_rate"].to_xarray() uds[varname] = da diff --git a/python/ribasim/ribasim/schemas.py b/python/ribasim/ribasim/schemas.py index bb00752c2..03ad91fed 
100644 --- a/python/ribasim/ribasim/schemas.py +++ b/python/ribasim/ribasim/schemas.py @@ -34,17 +34,37 @@ def migrate(cls, df: Any, schema_version: int) -> Any: return f(df, schema_version) +class AllocationSourceOrderSchema(_BaseSchema): + fid: Index[Int32] = pa.Field(default=1, check_name=True, coerce=True) + + subnetwork_id: Series[Annotated[pd.ArrowDtype, pyarrow.int32()]] = pa.Field( + nullable=False + ) + + node_id: Series[Annotated[pd.ArrowDtype, pyarrow.int32()]] = pa.Field( + nullable=False, default=0 + ) + + source_priority: Series[Annotated[pd.ArrowDtype, pyarrow.int32()]] = pa.Field( + nullable=False + ) + + class BasinConcentrationExternalSchema(_BaseSchema): fid: Index[Int32] = pa.Field(default=1, check_name=True, coerce=True) + node_id: Series[Annotated[pd.ArrowDtype, pyarrow.int32()]] = pa.Field( nullable=False, default=0 ) + time: Series[Annotated[pd.ArrowDtype, pyarrow.timestamp("ms")]] = pa.Field( nullable=False ) + substance: Series[Annotated[pd.ArrowDtype, pyarrow.string()]] = pa.Field( nullable=False ) + concentration: Series[Annotated[pd.ArrowDtype, pyarrow.float64()]] = pa.Field( nullable=True ) @@ -52,12 +72,15 @@ class BasinConcentrationExternalSchema(_BaseSchema): class BasinConcentrationStateSchema(_BaseSchema): fid: Index[Int32] = pa.Field(default=1, check_name=True, coerce=True) + node_id: Series[Annotated[pd.ArrowDtype, pyarrow.int32()]] = pa.Field( nullable=False, default=0 ) + substance: Series[Annotated[pd.ArrowDtype, pyarrow.string()]] = pa.Field( nullable=False ) + concentration: Series[Annotated[pd.ArrowDtype, pyarrow.float64()]] = pa.Field( nullable=True ) @@ -65,18 +88,23 @@ class BasinConcentrationStateSchema(_BaseSchema): class BasinConcentrationSchema(_BaseSchema): fid: Index[Int32] = pa.Field(default=1, check_name=True, coerce=True) + node_id: Series[Annotated[pd.ArrowDtype, pyarrow.int32()]] = pa.Field( nullable=False, default=0 ) + time: Series[Annotated[pd.ArrowDtype, pyarrow.timestamp("ms")]] = pa.Field( 
nullable=False ) + substance: Series[Annotated[pd.ArrowDtype, pyarrow.string()]] = pa.Field( nullable=False ) + drainage: Series[Annotated[pd.ArrowDtype, pyarrow.float64()]] = pa.Field( nullable=True ) + precipitation: Series[Annotated[pd.ArrowDtype, pyarrow.float64()]] = pa.Field( nullable=True ) @@ -84,10 +112,13 @@ class BasinConcentrationSchema(_BaseSchema): class BasinProfileSchema(_BaseSchema): fid: Index[Int32] = pa.Field(default=1, check_name=True, coerce=True) + node_id: Series[Annotated[pd.ArrowDtype, pyarrow.int32()]] = pa.Field( nullable=False, default=0 ) + area: Series[Annotated[pd.ArrowDtype, pyarrow.float64()]] = pa.Field(nullable=False) + level: Series[Annotated[pd.ArrowDtype, pyarrow.float64()]] = pa.Field( nullable=False ) @@ -95,9 +126,11 @@ class BasinProfileSchema(_BaseSchema): class BasinStateSchema(_BaseSchema): fid: Index[Int32] = pa.Field(default=1, check_name=True, coerce=True) + node_id: Series[Annotated[pd.ArrowDtype, pyarrow.int32()]] = pa.Field( nullable=False, default=0 ) + level: Series[Annotated[pd.ArrowDtype, pyarrow.float64()]] = pa.Field( nullable=False ) @@ -105,18 +138,23 @@ class BasinStateSchema(_BaseSchema): class BasinStaticSchema(_BaseSchema): fid: Index[Int32] = pa.Field(default=1, check_name=True, coerce=True) + node_id: Series[Annotated[pd.ArrowDtype, pyarrow.int32()]] = pa.Field( nullable=False, default=0 ) + drainage: Series[Annotated[pd.ArrowDtype, pyarrow.float64()]] = pa.Field( nullable=True ) + potential_evaporation: Series[Annotated[pd.ArrowDtype, pyarrow.float64()]] = ( pa.Field(nullable=True) ) + infiltration: Series[Annotated[pd.ArrowDtype, pyarrow.float64()]] = pa.Field( nullable=True ) + precipitation: Series[Annotated[pd.ArrowDtype, pyarrow.float64()]] = pa.Field( nullable=True ) @@ -124,18 +162,23 @@ class BasinStaticSchema(_BaseSchema): class BasinSubgridTimeSchema(_BaseSchema): fid: Index[Int32] = pa.Field(default=1, check_name=True, coerce=True) + subgrid_id: Series[Annotated[pd.ArrowDtype, 
pyarrow.int32()]] = pa.Field( nullable=False ) + node_id: Series[Annotated[pd.ArrowDtype, pyarrow.int32()]] = pa.Field( nullable=False, default=0 ) + time: Series[Annotated[pd.ArrowDtype, pyarrow.timestamp("ms")]] = pa.Field( nullable=False ) + basin_level: Series[Annotated[pd.ArrowDtype, pyarrow.float64()]] = pa.Field( nullable=False ) + subgrid_level: Series[Annotated[pd.ArrowDtype, pyarrow.float64()]] = pa.Field( nullable=False ) @@ -143,15 +186,19 @@ class BasinSubgridTimeSchema(_BaseSchema): class BasinSubgridSchema(_BaseSchema): fid: Index[Int32] = pa.Field(default=1, check_name=True, coerce=True) + subgrid_id: Series[Annotated[pd.ArrowDtype, pyarrow.int32()]] = pa.Field( nullable=False ) + node_id: Series[Annotated[pd.ArrowDtype, pyarrow.int32()]] = pa.Field( nullable=False, default=0 ) + basin_level: Series[Annotated[pd.ArrowDtype, pyarrow.float64()]] = pa.Field( nullable=False ) + subgrid_level: Series[Annotated[pd.ArrowDtype, pyarrow.float64()]] = pa.Field( nullable=False ) @@ -159,21 +206,27 @@ class BasinSubgridSchema(_BaseSchema): class BasinTimeSchema(_BaseSchema): fid: Index[Int32] = pa.Field(default=1, check_name=True, coerce=True) + node_id: Series[Annotated[pd.ArrowDtype, pyarrow.int32()]] = pa.Field( nullable=False, default=0 ) + time: Series[Annotated[pd.ArrowDtype, pyarrow.timestamp("ms")]] = pa.Field( nullable=False ) + drainage: Series[Annotated[pd.ArrowDtype, pyarrow.float64()]] = pa.Field( nullable=True ) + potential_evaporation: Series[Annotated[pd.ArrowDtype, pyarrow.float64()]] = ( pa.Field(nullable=True) ) + infiltration: Series[Annotated[pd.ArrowDtype, pyarrow.float64()]] = pa.Field( nullable=True ) + precipitation: Series[Annotated[pd.ArrowDtype, pyarrow.float64()]] = pa.Field( nullable=True ) @@ -181,15 +234,19 @@ class BasinTimeSchema(_BaseSchema): class ContinuousControlFunctionSchema(_BaseSchema): fid: Index[Int32] = pa.Field(default=1, check_name=True, coerce=True) + node_id: Series[Annotated[pd.ArrowDtype, pyarrow.int32()]] = 
pa.Field( nullable=False, default=0 ) + input: Series[Annotated[pd.ArrowDtype, pyarrow.float64()]] = pa.Field( nullable=False ) + output: Series[Annotated[pd.ArrowDtype, pyarrow.float64()]] = pa.Field( nullable=False ) + controlled_variable: Series[Annotated[pd.ArrowDtype, pyarrow.string()]] = pa.Field( nullable=False ) @@ -197,18 +254,23 @@ class ContinuousControlFunctionSchema(_BaseSchema): class ContinuousControlVariableSchema(_BaseSchema): fid: Index[Int32] = pa.Field(default=1, check_name=True, coerce=True) + node_id: Series[Annotated[pd.ArrowDtype, pyarrow.int32()]] = pa.Field( nullable=False, default=0 ) + listen_node_id: Series[Annotated[pd.ArrowDtype, pyarrow.int32()]] = pa.Field( nullable=False ) + variable: Series[Annotated[pd.ArrowDtype, pyarrow.string()]] = pa.Field( nullable=False ) + weight: Series[Annotated[pd.ArrowDtype, pyarrow.float64()]] = pa.Field( nullable=True ) + look_ahead: Series[Annotated[pd.ArrowDtype, pyarrow.float64()]] = pa.Field( nullable=True ) @@ -216,12 +278,15 @@ class ContinuousControlVariableSchema(_BaseSchema): class DiscreteControlConditionSchema(_BaseSchema): fid: Index[Int32] = pa.Field(default=1, check_name=True, coerce=True) + node_id: Series[Annotated[pd.ArrowDtype, pyarrow.int32()]] = pa.Field( nullable=False, default=0 ) + compound_variable_id: Series[Annotated[pd.ArrowDtype, pyarrow.int32()]] = pa.Field( nullable=False ) + greater_than: Series[Annotated[pd.ArrowDtype, pyarrow.float64()]] = pa.Field( nullable=False ) @@ -229,12 +294,15 @@ class DiscreteControlConditionSchema(_BaseSchema): class DiscreteControlLogicSchema(_BaseSchema): fid: Index[Int32] = pa.Field(default=1, check_name=True, coerce=True) + node_id: Series[Annotated[pd.ArrowDtype, pyarrow.int32()]] = pa.Field( nullable=False, default=0 ) + truth_state: Series[Annotated[pd.ArrowDtype, pyarrow.string()]] = pa.Field( nullable=False ) + control_state: Series[Annotated[pd.ArrowDtype, pyarrow.string()]] = pa.Field( nullable=False ) @@ -242,21 +310,27 @@ class 
DiscreteControlLogicSchema(_BaseSchema): class DiscreteControlVariableSchema(_BaseSchema): fid: Index[Int32] = pa.Field(default=1, check_name=True, coerce=True) + node_id: Series[Annotated[pd.ArrowDtype, pyarrow.int32()]] = pa.Field( nullable=False, default=0 ) + compound_variable_id: Series[Annotated[pd.ArrowDtype, pyarrow.int32()]] = pa.Field( nullable=False ) + listen_node_id: Series[Annotated[pd.ArrowDtype, pyarrow.int32()]] = pa.Field( nullable=False ) + variable: Series[Annotated[pd.ArrowDtype, pyarrow.string()]] = pa.Field( nullable=False ) + weight: Series[Annotated[pd.ArrowDtype, pyarrow.float64()]] = pa.Field( nullable=True ) + look_ahead: Series[Annotated[pd.ArrowDtype, pyarrow.float64()]] = pa.Field( nullable=True ) @@ -264,15 +338,19 @@ class DiscreteControlVariableSchema(_BaseSchema): class FlowBoundaryConcentrationSchema(_BaseSchema): fid: Index[Int32] = pa.Field(default=1, check_name=True, coerce=True) + node_id: Series[Annotated[pd.ArrowDtype, pyarrow.int32()]] = pa.Field( nullable=False, default=0 ) + time: Series[Annotated[pd.ArrowDtype, pyarrow.timestamp("ms")]] = pa.Field( nullable=False ) + substance: Series[Annotated[pd.ArrowDtype, pyarrow.string()]] = pa.Field( nullable=False ) + concentration: Series[Annotated[pd.ArrowDtype, pyarrow.float64()]] = pa.Field( nullable=False ) @@ -280,10 +358,13 @@ class FlowBoundaryConcentrationSchema(_BaseSchema): class FlowBoundaryStaticSchema(_BaseSchema): fid: Index[Int32] = pa.Field(default=1, check_name=True, coerce=True) + node_id: Series[Annotated[pd.ArrowDtype, pyarrow.int32()]] = pa.Field( nullable=False, default=0 ) + active: Series[Annotated[pd.ArrowDtype, pyarrow.bool_()]] = pa.Field(nullable=True) + flow_rate: Series[Annotated[pd.ArrowDtype, pyarrow.float64()]] = pa.Field( nullable=False ) @@ -291,12 +372,15 @@ class FlowBoundaryStaticSchema(_BaseSchema): class FlowBoundaryTimeSchema(_BaseSchema): fid: Index[Int32] = pa.Field(default=1, check_name=True, coerce=True) + node_id: 
Series[Annotated[pd.ArrowDtype, pyarrow.int32()]] = pa.Field( nullable=False, default=0 ) + time: Series[Annotated[pd.ArrowDtype, pyarrow.timestamp("ms")]] = pa.Field( nullable=False ) + flow_rate: Series[Annotated[pd.ArrowDtype, pyarrow.float64()]] = pa.Field( nullable=False ) @@ -304,44 +388,55 @@ class FlowBoundaryTimeSchema(_BaseSchema): class FlowDemandStaticSchema(_BaseSchema): fid: Index[Int32] = pa.Field(default=1, check_name=True, coerce=True) + node_id: Series[Annotated[pd.ArrowDtype, pyarrow.int32()]] = pa.Field( nullable=False, default=0 ) + demand: Series[Annotated[pd.ArrowDtype, pyarrow.float64()]] = pa.Field( nullable=False ) - priority: Series[Annotated[pd.ArrowDtype, pyarrow.int32()]] = pa.Field( + + demand_priority: Series[Annotated[pd.ArrowDtype, pyarrow.int32()]] = pa.Field( nullable=True ) class FlowDemandTimeSchema(_BaseSchema): fid: Index[Int32] = pa.Field(default=1, check_name=True, coerce=True) + node_id: Series[Annotated[pd.ArrowDtype, pyarrow.int32()]] = pa.Field( nullable=False, default=0 ) + time: Series[Annotated[pd.ArrowDtype, pyarrow.timestamp("ms")]] = pa.Field( nullable=False ) + demand: Series[Annotated[pd.ArrowDtype, pyarrow.float64()]] = pa.Field( nullable=False ) - priority: Series[Annotated[pd.ArrowDtype, pyarrow.int32()]] = pa.Field( + + demand_priority: Series[Annotated[pd.ArrowDtype, pyarrow.int32()]] = pa.Field( nullable=True ) class LevelBoundaryConcentrationSchema(_BaseSchema): fid: Index[Int32] = pa.Field(default=1, check_name=True, coerce=True) + node_id: Series[Annotated[pd.ArrowDtype, pyarrow.int32()]] = pa.Field( nullable=False, default=0 ) + time: Series[Annotated[pd.ArrowDtype, pyarrow.timestamp("ms")]] = pa.Field( nullable=False ) + substance: Series[Annotated[pd.ArrowDtype, pyarrow.string()]] = pa.Field( nullable=False ) + concentration: Series[Annotated[pd.ArrowDtype, pyarrow.float64()]] = pa.Field( nullable=False ) @@ -349,10 +444,13 @@ class LevelBoundaryConcentrationSchema(_BaseSchema): class 
LevelBoundaryStaticSchema(_BaseSchema): fid: Index[Int32] = pa.Field(default=1, check_name=True, coerce=True) + node_id: Series[Annotated[pd.ArrowDtype, pyarrow.int32()]] = pa.Field( nullable=False, default=0 ) + active: Series[Annotated[pd.ArrowDtype, pyarrow.bool_()]] = pa.Field(nullable=True) + level: Series[Annotated[pd.ArrowDtype, pyarrow.float64()]] = pa.Field( nullable=False ) @@ -360,12 +458,15 @@ class LevelBoundaryStaticSchema(_BaseSchema): class LevelBoundaryTimeSchema(_BaseSchema): fid: Index[Int32] = pa.Field(default=1, check_name=True, coerce=True) + node_id: Series[Annotated[pd.ArrowDtype, pyarrow.int32()]] = pa.Field( nullable=False, default=0 ) + time: Series[Annotated[pd.ArrowDtype, pyarrow.timestamp("ms")]] = pa.Field( nullable=False ) + level: Series[Annotated[pd.ArrowDtype, pyarrow.float64()]] = pa.Field( nullable=False ) @@ -373,51 +474,65 @@ class LevelBoundaryTimeSchema(_BaseSchema): class LevelDemandStaticSchema(_BaseSchema): fid: Index[Int32] = pa.Field(default=1, check_name=True, coerce=True) + node_id: Series[Annotated[pd.ArrowDtype, pyarrow.int32()]] = pa.Field( nullable=False, default=0 ) + min_level: Series[Annotated[pd.ArrowDtype, pyarrow.float64()]] = pa.Field( nullable=True ) + max_level: Series[Annotated[pd.ArrowDtype, pyarrow.float64()]] = pa.Field( nullable=True ) - priority: Series[Annotated[pd.ArrowDtype, pyarrow.int32()]] = pa.Field( + + demand_priority: Series[Annotated[pd.ArrowDtype, pyarrow.int32()]] = pa.Field( nullable=True ) class LevelDemandTimeSchema(_BaseSchema): fid: Index[Int32] = pa.Field(default=1, check_name=True, coerce=True) + node_id: Series[Annotated[pd.ArrowDtype, pyarrow.int32()]] = pa.Field( nullable=False, default=0 ) + time: Series[Annotated[pd.ArrowDtype, pyarrow.timestamp("ms")]] = pa.Field( nullable=False ) + min_level: Series[Annotated[pd.ArrowDtype, pyarrow.float64()]] = pa.Field( nullable=True ) + max_level: Series[Annotated[pd.ArrowDtype, pyarrow.float64()]] = pa.Field( nullable=True ) - 
priority: Series[Annotated[pd.ArrowDtype, pyarrow.int32()]] = pa.Field( + + demand_priority: Series[Annotated[pd.ArrowDtype, pyarrow.int32()]] = pa.Field( nullable=True ) class LinearResistanceStaticSchema(_BaseSchema): fid: Index[Int32] = pa.Field(default=1, check_name=True, coerce=True) + node_id: Series[Annotated[pd.ArrowDtype, pyarrow.int32()]] = pa.Field( nullable=False, default=0 ) + active: Series[Annotated[pd.ArrowDtype, pyarrow.bool_()]] = pa.Field(nullable=True) + resistance: Series[Annotated[pd.ArrowDtype, pyarrow.float64()]] = pa.Field( nullable=False ) + max_flow_rate: Series[Annotated[pd.ArrowDtype, pyarrow.float64()]] = pa.Field( nullable=True ) + control_state: Series[Annotated[pd.ArrowDtype, pyarrow.string()]] = pa.Field( nullable=True ) @@ -425,22 +540,29 @@ class LinearResistanceStaticSchema(_BaseSchema): class ManningResistanceStaticSchema(_BaseSchema): fid: Index[Int32] = pa.Field(default=1, check_name=True, coerce=True) + node_id: Series[Annotated[pd.ArrowDtype, pyarrow.int32()]] = pa.Field( nullable=False, default=0 ) + active: Series[Annotated[pd.ArrowDtype, pyarrow.bool_()]] = pa.Field(nullable=True) + length: Series[Annotated[pd.ArrowDtype, pyarrow.float64()]] = pa.Field( nullable=False ) + manning_n: Series[Annotated[pd.ArrowDtype, pyarrow.float64()]] = pa.Field( nullable=False ) + profile_width: Series[Annotated[pd.ArrowDtype, pyarrow.float64()]] = pa.Field( nullable=False ) + profile_slope: Series[Annotated[pd.ArrowDtype, pyarrow.float64()]] = pa.Field( nullable=False ) + control_state: Series[Annotated[pd.ArrowDtype, pyarrow.string()]] = pa.Field( nullable=True ) @@ -448,25 +570,33 @@ class ManningResistanceStaticSchema(_BaseSchema): class OutletStaticSchema(_BaseSchema): fid: Index[Int32] = pa.Field(default=1, check_name=True, coerce=True) + node_id: Series[Annotated[pd.ArrowDtype, pyarrow.int32()]] = pa.Field( nullable=False, default=0 ) + active: Series[Annotated[pd.ArrowDtype, pyarrow.bool_()]] = pa.Field(nullable=True) + 
flow_rate: Series[Annotated[pd.ArrowDtype, pyarrow.float64()]] = pa.Field( nullable=False ) + min_flow_rate: Series[Annotated[pd.ArrowDtype, pyarrow.float64()]] = pa.Field( nullable=True ) + max_flow_rate: Series[Annotated[pd.ArrowDtype, pyarrow.float64()]] = pa.Field( nullable=True ) + min_upstream_level: Series[Annotated[pd.ArrowDtype, pyarrow.float64()]] = pa.Field( nullable=True ) + max_downstream_level: Series[Annotated[pd.ArrowDtype, pyarrow.float64()]] = ( pa.Field(nullable=True) ) + control_state: Series[Annotated[pd.ArrowDtype, pyarrow.string()]] = pa.Field( nullable=True ) @@ -474,25 +604,33 @@ class OutletStaticSchema(_BaseSchema): class PidControlStaticSchema(_BaseSchema): fid: Index[Int32] = pa.Field(default=1, check_name=True, coerce=True) + node_id: Series[Annotated[pd.ArrowDtype, pyarrow.int32()]] = pa.Field( nullable=False, default=0 ) + active: Series[Annotated[pd.ArrowDtype, pyarrow.bool_()]] = pa.Field(nullable=True) + listen_node_id: Series[Annotated[pd.ArrowDtype, pyarrow.int32()]] = pa.Field( nullable=False ) + target: Series[Annotated[pd.ArrowDtype, pyarrow.float64()]] = pa.Field( nullable=False ) + proportional: Series[Annotated[pd.ArrowDtype, pyarrow.float64()]] = pa.Field( nullable=False ) + integral: Series[Annotated[pd.ArrowDtype, pyarrow.float64()]] = pa.Field( nullable=False ) + derivative: Series[Annotated[pd.ArrowDtype, pyarrow.float64()]] = pa.Field( nullable=False ) + control_state: Series[Annotated[pd.ArrowDtype, pyarrow.string()]] = pa.Field( nullable=True ) @@ -500,27 +638,35 @@ class PidControlStaticSchema(_BaseSchema): class PidControlTimeSchema(_BaseSchema): fid: Index[Int32] = pa.Field(default=1, check_name=True, coerce=True) + node_id: Series[Annotated[pd.ArrowDtype, pyarrow.int32()]] = pa.Field( nullable=False, default=0 ) + listen_node_id: Series[Annotated[pd.ArrowDtype, pyarrow.int32()]] = pa.Field( nullable=False ) + time: Series[Annotated[pd.ArrowDtype, pyarrow.timestamp("ms")]] = pa.Field( nullable=False ) + target: 
Series[Annotated[pd.ArrowDtype, pyarrow.float64()]] = pa.Field( nullable=False ) + proportional: Series[Annotated[pd.ArrowDtype, pyarrow.float64()]] = pa.Field( nullable=False ) + integral: Series[Annotated[pd.ArrowDtype, pyarrow.float64()]] = pa.Field( nullable=False ) + derivative: Series[Annotated[pd.ArrowDtype, pyarrow.float64()]] = pa.Field( nullable=False ) + control_state: Series[Annotated[pd.ArrowDtype, pyarrow.string()]] = pa.Field( nullable=True ) @@ -528,25 +674,33 @@ class PidControlTimeSchema(_BaseSchema): class PumpStaticSchema(_BaseSchema): fid: Index[Int32] = pa.Field(default=1, check_name=True, coerce=True) + node_id: Series[Annotated[pd.ArrowDtype, pyarrow.int32()]] = pa.Field( nullable=False, default=0 ) + active: Series[Annotated[pd.ArrowDtype, pyarrow.bool_()]] = pa.Field(nullable=True) + flow_rate: Series[Annotated[pd.ArrowDtype, pyarrow.float64()]] = pa.Field( nullable=False ) + min_flow_rate: Series[Annotated[pd.ArrowDtype, pyarrow.float64()]] = pa.Field( nullable=True ) + max_flow_rate: Series[Annotated[pd.ArrowDtype, pyarrow.float64()]] = pa.Field( nullable=True ) + min_upstream_level: Series[Annotated[pd.ArrowDtype, pyarrow.float64()]] = pa.Field( nullable=True ) + max_downstream_level: Series[Annotated[pd.ArrowDtype, pyarrow.float64()]] = ( pa.Field(nullable=True) ) + control_state: Series[Annotated[pd.ArrowDtype, pyarrow.string()]] = pa.Field( nullable=True ) @@ -554,19 +708,25 @@ class PumpStaticSchema(_BaseSchema): class TabulatedRatingCurveStaticSchema(_BaseSchema): fid: Index[Int32] = pa.Field(default=1, check_name=True, coerce=True) + node_id: Series[Annotated[pd.ArrowDtype, pyarrow.int32()]] = pa.Field( nullable=False, default=0 ) + active: Series[Annotated[pd.ArrowDtype, pyarrow.bool_()]] = pa.Field(nullable=True) + level: Series[Annotated[pd.ArrowDtype, pyarrow.float64()]] = pa.Field( nullable=False ) + flow_rate: Series[Annotated[pd.ArrowDtype, pyarrow.float64()]] = pa.Field( nullable=False ) + max_downstream_level: 
Series[Annotated[pd.ArrowDtype, pyarrow.float64()]] = ( pa.Field(nullable=True) ) + control_state: Series[Annotated[pd.ArrowDtype, pyarrow.string()]] = pa.Field( nullable=True ) @@ -574,18 +734,23 @@ class TabulatedRatingCurveStaticSchema(_BaseSchema): class TabulatedRatingCurveTimeSchema(_BaseSchema): fid: Index[Int32] = pa.Field(default=1, check_name=True, coerce=True) + node_id: Series[Annotated[pd.ArrowDtype, pyarrow.int32()]] = pa.Field( nullable=False, default=0 ) + time: Series[Annotated[pd.ArrowDtype, pyarrow.timestamp("ms")]] = pa.Field( nullable=False ) + level: Series[Annotated[pd.ArrowDtype, pyarrow.float64()]] = pa.Field( nullable=False ) + flow_rate: Series[Annotated[pd.ArrowDtype, pyarrow.float64()]] = pa.Field( nullable=False ) + max_downstream_level: Series[Annotated[pd.ArrowDtype, pyarrow.float64()]] = ( pa.Field(nullable=True) ) @@ -593,15 +758,19 @@ class TabulatedRatingCurveTimeSchema(_BaseSchema): class UserDemandConcentrationSchema(_BaseSchema): fid: Index[Int32] = pa.Field(default=1, check_name=True, coerce=True) + node_id: Series[Annotated[pd.ArrowDtype, pyarrow.int32()]] = pa.Field( nullable=False, default=0 ) + time: Series[Annotated[pd.ArrowDtype, pyarrow.timestamp("ms")]] = pa.Field( nullable=False ) + substance: Series[Annotated[pd.ArrowDtype, pyarrow.string()]] = pa.Field( nullable=False ) + concentration: Series[Annotated[pd.ArrowDtype, pyarrow.float64()]] = pa.Field( nullable=False ) @@ -609,41 +778,53 @@ class UserDemandConcentrationSchema(_BaseSchema): class UserDemandStaticSchema(_BaseSchema): fid: Index[Int32] = pa.Field(default=1, check_name=True, coerce=True) + node_id: Series[Annotated[pd.ArrowDtype, pyarrow.int32()]] = pa.Field( nullable=False, default=0 ) + active: Series[Annotated[pd.ArrowDtype, pyarrow.bool_()]] = pa.Field(nullable=True) + demand: Series[Annotated[pd.ArrowDtype, pyarrow.float64()]] = pa.Field( nullable=True ) + return_factor: Series[Annotated[pd.ArrowDtype, pyarrow.float64()]] = pa.Field( nullable=False ) 
+ min_level: Series[Annotated[pd.ArrowDtype, pyarrow.float64()]] = pa.Field( nullable=False ) - priority: Series[Annotated[pd.ArrowDtype, pyarrow.int32()]] = pa.Field( + + demand_priority: Series[Annotated[pd.ArrowDtype, pyarrow.int32()]] = pa.Field( nullable=True ) class UserDemandTimeSchema(_BaseSchema): fid: Index[Int32] = pa.Field(default=1, check_name=True, coerce=True) + node_id: Series[Annotated[pd.ArrowDtype, pyarrow.int32()]] = pa.Field( nullable=False, default=0 ) + time: Series[Annotated[pd.ArrowDtype, pyarrow.timestamp("ms")]] = pa.Field( nullable=False ) + demand: Series[Annotated[pd.ArrowDtype, pyarrow.float64()]] = pa.Field( nullable=False ) + return_factor: Series[Annotated[pd.ArrowDtype, pyarrow.float64()]] = pa.Field( nullable=False ) + min_level: Series[Annotated[pd.ArrowDtype, pyarrow.float64()]] = pa.Field( nullable=False ) - priority: Series[Annotated[pd.ArrowDtype, pyarrow.int32()]] = pa.Field( + + demand_priority: Series[Annotated[pd.ArrowDtype, pyarrow.int32()]] = pa.Field( nullable=True ) diff --git a/python/ribasim/tests/test_io.py b/python/ribasim/tests/test_io.py index 62bd01014..08d1d2233 100644 --- a/python/ribasim/tests/test_io.py +++ b/python/ribasim/tests/test_io.py @@ -122,7 +122,7 @@ def test_extra_spatial_columns(): node, [ user_demand.Static( - demand=[1e-4], return_factor=0.9, min_level=0.9, priority=1 + demand=[1e-4], return_factor=0.9, min_level=0.9, demand_priority=1 ) ], ) @@ -141,7 +141,7 @@ def test_extra_spatial_columns(): Node(4, Point(1, -0.5), meta_id=3), [ user_demand.Static( - demand=[1e-4], return_factor=0.9, min_level=0.9, priority=1 + demand=[1e-4], return_factor=0.9, min_level=0.9, demand_priority=1 ) ], ) @@ -190,7 +190,7 @@ def test_node_autoincrement(): Node(20, Point(1, 0.5)), [ user_demand.Static( - demand=[1e-4], return_factor=0.9, min_level=0.9, priority=1 + demand=[1e-4], return_factor=0.9, min_level=0.9, demand_priority=1 ) ], ) @@ -234,7 +234,7 @@ def test_node_empty_geometry(): Node(), [ 
user_demand.Static( - demand=[1e-4], return_factor=0.9, min_level=0.9, priority=1 + demand=[1e-4], return_factor=0.9, min_level=0.9, demand_priority=1 ) ], ) @@ -243,7 +243,7 @@ def test_node_empty_geometry(): Node(2), [ user_demand.Static( - demand=[1e-4], return_factor=0.9, min_level=0.9, priority=1 + demand=[1e-4], return_factor=0.9, min_level=0.9, demand_priority=1 ) ], ) @@ -381,16 +381,16 @@ def test_arrow_dtype(): # Optional integer column df = flow_demand.Static( demand=[1, 2.2], - priority=[1, pd.NA], + demand_priority=[1, pd.NA], ).df - assert df["priority"].dtype == "int32[pyarrow]" - assert df["priority"].isna().iloc[1] + assert df["demand_priority"].dtype == "int32[pyarrow]" + assert df["demand_priority"].isna().iloc[1] # Missing optional integer column df = flow_demand.Static( demand=[1, 2.2], ).df - assert df["priority"].dtype == "int32[pyarrow]" - assert df["priority"].isna().all() + assert df["demand_priority"].dtype == "int32[pyarrow]" + assert df["demand_priority"].isna().all() diff --git a/python/ribasim_testmodels/ribasim_testmodels/allocation.py b/python/ribasim_testmodels/ribasim_testmodels/allocation.py index 76025df74..bd978c898 100644 --- a/python/ribasim_testmodels/ribasim_testmodels/allocation.py +++ b/python/ribasim_testmodels/ribasim_testmodels/allocation.py @@ -38,7 +38,7 @@ def user_demand_model() -> Model: Node(2, Point(1, 0.5)), [ user_demand.Static( - demand=[1e-4], return_factor=0.9, min_level=0.9, priority=1 + demand=[1e-4], return_factor=0.9, min_level=0.9, demand_priority=1 ) ], ) @@ -55,7 +55,7 @@ def user_demand_model() -> Model: demand=[0.0, 3e-4, 3e-4, 0.0], return_factor=0.4, min_level=0.5, - priority=1, + demand_priority=1, ) ], ) @@ -72,7 +72,7 @@ def user_demand_model() -> Model: min_level=0.0, demand=[0.0, 1e-4, 2e-4, 0.0], return_factor=[0.0, 0.1, 0.2, 0.3], - priority=1, + demand_priority=1, ) ], ) @@ -133,7 +133,7 @@ def subnetwork_model() -> Model: Node(10, Point(2, 0), subnetwork_id=2), [ user_demand.Static( - 
demand=[4.0], return_factor=0.9, min_level=0.9, priority=2 + demand=[4.0], return_factor=0.9, min_level=0.9, demand_priority=2 ) ], ) @@ -141,7 +141,7 @@ def subnetwork_model() -> Model: Node(11, Point(3, 3), subnetwork_id=2), [ user_demand.Static( - demand=[5.0], return_factor=0.5, min_level=0.9, priority=1 + demand=[5.0], return_factor=0.5, min_level=0.9, demand_priority=1 ) ], ) @@ -149,7 +149,7 @@ def subnetwork_model() -> Model: Node(12, Point(0, 4), subnetwork_id=2), [ user_demand.Static( - demand=[3.0], return_factor=0.9, min_level=0.9, priority=2 + demand=[3.0], return_factor=0.9, min_level=0.9, demand_priority=2 ) ], ) @@ -200,7 +200,7 @@ def looped_subnetwork_model() -> Model: Node(1, Point(0, 0), subnetwork_id=2), [ user_demand.Static( - demand=[1e-3], return_factor=0.9, min_level=0.9, priority=2 + demand=[1e-3], return_factor=0.9, min_level=0.9, demand_priority=2 ) ], ) @@ -224,7 +224,7 @@ def looped_subnetwork_model() -> Model: Node(12, Point(-2, 4), subnetwork_id=2), [ user_demand.Static( - demand=[1e-3], return_factor=0.9, min_level=0.9, priority=1 + demand=[1e-3], return_factor=0.9, min_level=0.9, demand_priority=1 ) ], ) @@ -244,7 +244,7 @@ def looped_subnetwork_model() -> Model: Node(18, Point(-1, 6), subnetwork_id=2), [ user_demand.Static( - demand=[1e-3], return_factor=0.9, min_level=0.9, priority=3 + demand=[1e-3], return_factor=0.9, min_level=0.9, demand_priority=3 ) ], ) @@ -255,7 +255,7 @@ def looped_subnetwork_model() -> Model: Node(20, Point(2, 6), subnetwork_id=2), [ user_demand.Static( - demand=[1e-3], return_factor=0.9, min_level=0.9, priority=3 + demand=[1e-3], return_factor=0.9, min_level=0.9, demand_priority=3 ) ], ) @@ -266,7 +266,7 @@ def looped_subnetwork_model() -> Model: Node(24, Point(3, 3), subnetwork_id=2), [ user_demand.Static( - demand=[1e-3], return_factor=0.9, min_level=0.9, priority=2 + demand=[1e-3], return_factor=0.9, min_level=0.9, demand_priority=2 ) ], ) @@ -333,7 +333,7 @@ def minimal_subnetwork_model() -> Model: 
Node(5, Point(-1, 4), subnetwork_id=2), [ user_demand.Static( - demand=[1e-3], return_factor=0.9, min_level=0.9, priority=1 + demand=[1e-3], return_factor=0.9, min_level=0.9, demand_priority=1 ) ], ) @@ -345,7 +345,7 @@ def minimal_subnetwork_model() -> Model: demand=[1e-3, 2e-3], return_factor=0.9, min_level=0.9, - priority=1, + demand_priority=1, ) ], ) @@ -384,7 +384,7 @@ def allocation_example_model() -> Model: Node(3, Point(1, 1), subnetwork_id=2), [ user_demand.Static( - demand=[1.5], return_factor=0.0, min_level=-1.0, priority=1 + demand=[1.5], return_factor=0.0, min_level=-1.0, demand_priority=1 ) ], ) @@ -397,7 +397,7 @@ def allocation_example_model() -> Model: Node(6, Point(3, 1), subnetwork_id=2), [ user_demand.Static( - demand=[1.0], return_factor=0.0, min_level=-1.0, priority=3 + demand=[1.0], return_factor=0.0, min_level=-1.0, demand_priority=3 ) ], ) @@ -492,7 +492,7 @@ def main_network_with_subnetworks_model() -> Model: Node(20, Point(2, 3), subnetwork_id=3), [ user_demand.Static( - demand=[4.0], return_factor=0.9, min_level=0.9, priority=2 + demand=[4.0], return_factor=0.9, min_level=0.9, demand_priority=2 ) ], ) @@ -500,7 +500,7 @@ def main_network_with_subnetworks_model() -> Model: Node(21, Point(3, 6), subnetwork_id=3), [ user_demand.Static( - demand=[5.0], return_factor=0.9, min_level=0.9, priority=1 + demand=[5.0], return_factor=0.9, min_level=0.9, demand_priority=1 ) ], ) @@ -508,7 +508,7 @@ def main_network_with_subnetworks_model() -> Model: Node(22, Point(0, 7), subnetwork_id=3), [ user_demand.Static( - demand=[3.0], return_factor=0.9, min_level=0.9, priority=2 + demand=[3.0], return_factor=0.9, min_level=0.9, demand_priority=2 ) ], ) @@ -534,7 +534,7 @@ def main_network_with_subnetworks_model() -> Model: demand=[1e-3, 2e-3], return_factor=0.9, min_level=0.9, - priority=1, + demand_priority=1, ) ], ) @@ -542,7 +542,7 @@ def main_network_with_subnetworks_model() -> Model: Node(34, Point(26, 3), subnetwork_id=7), [ user_demand.Static( - 
demand=[1e-3], return_factor=0.9, min_level=0.9, priority=2 + demand=[1e-3], return_factor=0.9, min_level=0.9, demand_priority=2 ) ], ) @@ -575,7 +575,7 @@ def main_network_with_subnetworks_model() -> Model: Node(45, Point(24, 7), subnetwork_id=7), [ user_demand.Static( - demand=[1e-3], return_factor=0.9, min_level=0.9, priority=1 + demand=[1e-3], return_factor=0.9, min_level=0.9, demand_priority=1 ) ], ) @@ -597,7 +597,7 @@ def main_network_with_subnetworks_model() -> Model: Node(51, Point(25, 9), subnetwork_id=7), [ user_demand.Static( - demand=[1e-3], return_factor=0.9, min_level=0.9, priority=3 + demand=[1e-3], return_factor=0.9, min_level=0.9, demand_priority=3 ) ], ) @@ -609,7 +609,7 @@ def main_network_with_subnetworks_model() -> Model: Node(53, Point(28, 9), subnetwork_id=7), [ user_demand.Static( - demand=[1e-3], return_factor=0.9, min_level=0.9, priority=3 + demand=[1e-3], return_factor=0.9, min_level=0.9, demand_priority=3 ) ], ) @@ -623,7 +623,7 @@ def main_network_with_subnetworks_model() -> Model: Node(57, Point(29, 6), subnetwork_id=7), [ user_demand.Static( - demand=[1e-3], return_factor=0.9, min_level=0.9, priority=2 + demand=[1e-3], return_factor=0.9, min_level=0.9, demand_priority=2 ) ], ) @@ -631,7 +631,7 @@ def main_network_with_subnetworks_model() -> Model: # Missing demand model.user_demand.add( Node(60, Point(21, -1), subnetwork_id=1), - [user_demand.Static(return_factor=[0.9], priority=2, min_level=0.0)], + [user_demand.Static(return_factor=[0.9], demand_priority=2, min_level=0.0)], ) model.edge.add(model.flow_boundary[1], model.basin[2]) @@ -747,13 +747,13 @@ def level_demand_model() -> Model: Node(3, Point(2, 0), subnetwork_id=2), [ user_demand.Static( - demand=[1.5e-3], return_factor=0.2, min_level=0.2, priority=2 + demand=[1.5e-3], return_factor=0.2, min_level=0.2, demand_priority=2 ) ], ) model.level_demand.add( Node(4, Point(1, -1), subnetwork_id=2), - [level_demand.Static(min_level=[1.0], max_level=1.5, priority=1)], + 
[level_demand.Static(min_level=[1.0], max_level=1.5, demand_priority=1)], ) model.basin.add( Node(5, Point(2, -1), subnetwork_id=2), @@ -763,7 +763,7 @@ def level_demand_model() -> Model: # Isolated LevelDemand + Basin pair to test optional min_level model.level_demand.add( Node(6, Point(3, -1), subnetwork_id=3), - [level_demand.Static(max_level=[1.0], priority=1)], + [level_demand.Static(max_level=[1.0], demand_priority=1)], ) model.basin.add( Node(7, Point(3, 0), subnetwork_id=3), @@ -814,7 +814,7 @@ def flow_demand_model() -> Model: Node(4, Point(3, 0), subnetwork_id=2), [ user_demand.Static( - priority=[3], demand=1e-3, return_factor=1.0, min_level=0.2 + demand_priority=[3], demand=1e-3, return_factor=1.0, min_level=0.2 ) ], ) @@ -822,7 +822,7 @@ def flow_demand_model() -> Model: Node(6, Point(2, -1), subnetwork_id=2), [ user_demand.Static( - priority=[1], demand=1e-3, return_factor=1.0, min_level=0.2 + demand_priority=[1], demand=1e-3, return_factor=1.0, min_level=0.2 ) ], ) @@ -830,14 +830,14 @@ def flow_demand_model() -> Model: Node(8, Point(3, -2), subnetwork_id=2), [ user_demand.Static( - priority=[4], demand=2e-3, return_factor=1.0, min_level=0.2 + demand_priority=[4], demand=2e-3, return_factor=1.0, min_level=0.2 ) ], ) model.flow_demand.add( Node(5, Point(1, -1), subnetwork_id=2), - [flow_demand.Static(demand=2e-3, priority=[2])], + [flow_demand.Static(demand=2e-3, demand_priority=[2])], ) model.edge.add( @@ -882,7 +882,7 @@ def linear_resistance_demand_model(): model.flow_demand.add( Node(4, Point(1, 1), subnetwork_id=2), - [flow_demand.Static(priority=[1], demand=2.0)], + [flow_demand.Static(demand_priority=[1], demand=2.0)], ) model.edge.add(model.basin[1], model.linear_resistance[2]) @@ -939,7 +939,7 @@ def fair_distribution_model(): Node(6, Point(2, 1), subnetwork_id=1), [ user_demand.Static( - priority=[1], demand=1.0, return_factor=1.0, min_level=0.2 + demand_priority=[1], demand=1.0, return_factor=1.0, min_level=0.2 ) ], ) @@ -948,7 +948,7 @@ 
def fair_distribution_model(): Node(7, Point(2, -1), subnetwork_id=1), [ user_demand.Static( - priority=[1], demand=2.0, return_factor=1.0, min_level=0.2 + demand_priority=[1], demand=2.0, return_factor=1.0, min_level=0.2 ) ], ) @@ -957,7 +957,7 @@ def fair_distribution_model(): Node(8, Point(4, 1), subnetwork_id=1), [ user_demand.Static( - priority=[1], demand=3.0, return_factor=1.0, min_level=0.2 + demand_priority=[1], demand=3.0, return_factor=1.0, min_level=0.2 ) ], ) @@ -966,7 +966,7 @@ def fair_distribution_model(): Node(9, Point(4, -1), subnetwork_id=1), [ user_demand.Time( - priority=1, + demand_priority=1, time=pd.date_range(start="2020-01", end="2021-01", freq="MS"), demand=np.linspace(1.0, 5.0, 13), return_factor=1.0, @@ -1057,7 +1057,7 @@ def allocation_training_model(): level_demand.Static( min_level=[2], max_level=5, - priority=1, + demand_priority=1, ) ], ) @@ -1089,7 +1089,7 @@ def allocation_training_model(): demand=[0.0, 0.0, 10, 12, 12, 0.0], return_factor=0, min_level=0, - priority=3, + demand_priority=3, time=[ "2022-01-01", "2022-03-31", @@ -1130,7 +1130,7 @@ def allocation_training_model(): demand=[2.0, 2.3, 2.3, 2.4, 3, 3, 4, 3, 2.5, 2.2, 2.0, 2.0], return_factor=0.4, min_level=0, - priority=2, + demand_priority=2, time=pd.date_range(start="2022-01-01", periods=12, freq="MS"), ) ], @@ -1145,7 +1145,7 @@ def allocation_training_model(): demand=[4, 4, 4.5, 5, 5, 6, 7.5, 8, 5, 4, 3, 2.0], return_factor=0.5, min_level=0, - priority=1, + demand_priority=1, time=pd.date_range(start="2022-01-01", periods=12, freq="MS"), ) ], diff --git a/python/ribasim_testmodels/ribasim_testmodels/invalid.py b/python/ribasim_testmodels/ribasim_testmodels/invalid.py index 1e0657f6e..8ab55af6b 100644 --- a/python/ribasim_testmodels/ribasim_testmodels/invalid.py +++ b/python/ribasim_testmodels/ribasim_testmodels/invalid.py @@ -172,7 +172,7 @@ def invalid_unstable_model() -> Model: def invalid_priorities_model() -> Model: - """Model with allocation active but missing 
priority parameter(s).""" + """Model with allocation active but missing demand_priority parameter(s).""" model = Model( starttime="2020-01-01 00:00:00", endtime="2021-01-01 00:00:00", diff --git a/ribasim_qgis/core/nodes.py b/ribasim_qgis/core/nodes.py index 058f96328..1cfeb89aa 100644 --- a/ribasim_qgis/core/nodes.py +++ b/ribasim_qgis/core/nodes.py @@ -882,7 +882,7 @@ def attributes(cls) -> list[QgsField]: QgsField("active", QVariant.Bool), QgsField("demand", QVariant.Double), QgsField("return_factor", QVariant.Double), - QgsField("priority", QVariant.Int), + QgsField("demand_priority", QVariant.Int), ] @@ -902,7 +902,7 @@ def attributes(cls) -> list[QgsField]: QgsField("time", QVariant.DateTime), QgsField("demand", QVariant.Double), QgsField("return_factor", QVariant.Double), - QgsField("priority", QVariant.Int), + QgsField("demand_priority", QVariant.Int), ] @@ -940,7 +940,7 @@ def attributes(cls) -> list[QgsField]: QgsField("node_id", QVariant.Int), QgsField("min_level", QVariant.Double), QgsField("max_level", QVariant.Double), - QgsField("priority", QVariant.Int), + QgsField("demand_priority", QVariant.Int), ] @@ -960,7 +960,7 @@ def attributes(cls) -> list[QgsField]: QgsField("time", QVariant.DateTime), QgsField("min_level", QVariant.Double), QgsField("max_level", QVariant.Double), - QgsField("priority", QVariant.Int), + QgsField("demand_priority", QVariant.Int), ] @@ -978,7 +978,7 @@ def attributes(cls) -> list[QgsField]: return [ QgsField("node_id", QVariant.Int), QgsField("demand", QVariant.Double), - QgsField("priority", QVariant.Int), + QgsField("demand_priority", QVariant.Int), ] @@ -997,7 +997,7 @@ def attributes(cls) -> list[QgsField]: QgsField("node_id", QVariant.Int), QgsField("time", QVariant.DateTime), QgsField("demand", QVariant.Double), - QgsField("priority", QVariant.Int), + QgsField("demand_priority", QVariant.Int), ] From 0cca9cbdc99dffeecb20e364d9b9451c1e82965d Mon Sep 17 00:00:00 2001 From: Bart de Koning Date: Wed, 29 Jan 2025 13:33:29 
+0100 Subject: [PATCH 3/8] Introduce `AllocationSourcePriority` table --- core/src/schema.jl | 9 ------- python/ribasim/ribasim/__init__.py | 11 +++++++- .../ribasim/allocation/source_priorities.py | 26 +++++++++++++++++++ python/ribasim/ribasim/config.py | 2 +- python/ribasim/ribasim/input_base.py | 17 +++++------- python/ribasim/ribasim/model.py | 5 ++++ python/ribasim/ribasim/schemas.py | 16 ------------ utils/gen_python.jl | 2 +- 8 files changed, 50 insertions(+), 38 deletions(-) create mode 100644 python/ribasim/ribasim/allocation/source_priorities.py diff --git a/core/src/schema.jl b/core/src/schema.jl index d7af5a880..03a0cff2e 100644 --- a/core/src/schema.jl +++ b/core/src/schema.jl @@ -36,9 +36,6 @@ @schema "ribasim.userdemand.static" UserDemandStatic @schema "ribasim.userdemand.time" UserDemandTime -# This schema is not specific to a node type -@schema "ribasim.allocationsourceorder" AllocationSourceOrder - const delimiter = " / " tablename(sv::Type{SchemaVersion{T, N}}) where {T, N} = tablename(sv()) tablename(sv::SchemaVersion{T, N}) where {T, N} = @@ -79,12 +76,6 @@ function nodetype( return Symbol(node[begin:length(n)]), k end -@version AllocationSourceOrderV1 begin - subnetwork_id::Int32 - node_id::Int32 - source_priority::Int32 -end - @version PumpStaticV1 begin node_id::Int32 active::Union{Missing, Bool} diff --git a/python/ribasim/ribasim/__init__.py b/python/ribasim/ribasim/__init__.py index 9f3c41138..73ed7f9b1 100644 --- a/python/ribasim/ribasim/__init__.py +++ b/python/ribasim/ribasim/__init__.py @@ -2,8 +2,17 @@ # Keep synced write_schema_version in ribasim_qgis/core/geopackage.py __schema_version__ = 3 +from ribasim.allocation.source_priorities import AllocationSourcePriorityTable from ribasim.config import Allocation, Logging, Node, Solver from ribasim.geometry.edge import EdgeTable from ribasim.model import Model -__all__ = ["EdgeTable", "Allocation", "Logging", "Model", "Solver", "Node"] +__all__ = [ + "AllocationSourcePriorityTable", + 
"EdgeTable", + "Allocation", + "Logging", + "Model", + "Solver", + "Node", +] diff --git a/python/ribasim/ribasim/allocation/source_priorities.py b/python/ribasim/ribasim/allocation/source_priorities.py new file mode 100644 index 000000000..8d5ff87a2 --- /dev/null +++ b/python/ribasim/ribasim/allocation/source_priorities.py @@ -0,0 +1,26 @@ +from typing import Annotated + +import pandas as pd +import pandera as pa +import pyarrow +from pandera.dtypes import Int32 +from pandera.typing import Index, Series +from ribasim.input_base import TableModel +from ribasim.schemas import _BaseSchema + + +class AllocationSourcePrioritySchema(_BaseSchema): + fid: Index[Int32] = pa.Field(default=1, check_name=True, coerce=True) + subnetwork_id: Series[Annotated[pd.ArrowDtype, pyarrow.int32()]] = pa.Field( + nullable=False + ) + node_id: Series[Annotated[pd.ArrowDtype, pyarrow.int32()]] = pa.Field( + nullable=False, default=0 + ) + source_priority: Series[Annotated[pd.ArrowDtype, pyarrow.int32()]] = pa.Field( + nullable=False + ) + + +class AllocationSourcePriorityTable(TableModel[AllocationSourcePrioritySchema]): + pass diff --git a/python/ribasim/ribasim/config.py b/python/ribasim/ribasim/config.py index 30eecc0dd..b207aa25b 100644 --- a/python/ribasim/ribasim/config.py +++ b/python/ribasim/ribasim/config.py @@ -233,7 +233,7 @@ def add( Raises ------ ValueError - When the given node ID already exists for this node type + When the given node ID already exists """ if tables is None: tables = [] diff --git a/python/ribasim/ribasim/input_base.py b/python/ribasim/ribasim/input_base.py index 8d8d1c259..10bc754d4 100644 --- a/python/ribasim/ribasim/input_base.py +++ b/python/ribasim/ribasim/input_base.py @@ -197,16 +197,13 @@ def tablename(cls) -> str: cls_string = str(cls.tableschema()) names: list[str] = re.sub("([A-Z]+)", r" \1", cls_string).split()[:-1] names_lowered = [name.lower() for name in names] - if len(names) == 1: - return names[0] - else: - for n in range(1, 
len(names_lowered) + 1): - node_name_snake_case = "_".join(names_lowered[:n]) - if node_name_snake_case in node_names_snake_case: - node_name = "".join(names[:n]) - table_name = "_".join(names_lowered[n:]) - return node_name + delimiter + table_name - raise ValueError(f"Found no known node name in {cls_string}") + for n in range(1, len(names_lowered) + 1): + node_name_snake_case = "_".join(names_lowered[:n]) + if node_name_snake_case in node_names_snake_case: + node_name = "".join(names[:n]) + table_name = "_".join(names_lowered[n:]) + return node_name + delimiter + table_name + return names[0] @model_validator(mode="before") @classmethod diff --git a/python/ribasim/ribasim/model.py b/python/ribasim/ribasim/model.py index d7eb1fdcb..ecba27596 100644 --- a/python/ribasim/ribasim/model.py +++ b/python/ribasim/ribasim/model.py @@ -21,6 +21,7 @@ ) import ribasim +from ribasim.allocation.source_priorities import AllocationSourcePriorityTable from ribasim.config import ( Allocation, Basin, @@ -107,6 +108,10 @@ class Model(FileModel): terminal: Terminal = Field(default_factory=Terminal) user_demand: UserDemand = Field(default_factory=UserDemand) + allocation_source_priority_table: AllocationSourcePriorityTable = Field( + default_factory=AllocationSourcePriorityTable + ) + edge: EdgeTable = Field(default_factory=EdgeTable) use_validation: bool = Field(default=True, exclude=True) diff --git a/python/ribasim/ribasim/schemas.py b/python/ribasim/ribasim/schemas.py index 03ad91fed..71bacbabf 100644 --- a/python/ribasim/ribasim/schemas.py +++ b/python/ribasim/ribasim/schemas.py @@ -34,22 +34,6 @@ def migrate(cls, df: Any, schema_version: int) -> Any: return f(df, schema_version) -class AllocationSourceOrderSchema(_BaseSchema): - fid: Index[Int32] = pa.Field(default=1, check_name=True, coerce=True) - - subnetwork_id: Series[Annotated[pd.ArrowDtype, pyarrow.int32()]] = pa.Field( - nullable=False - ) - - node_id: Series[Annotated[pd.ArrowDtype, pyarrow.int32()]] = pa.Field( - 
nullable=False, default=0 - ) - - source_priority: Series[Annotated[pd.ArrowDtype, pyarrow.int32()]] = pa.Field( - nullable=False - ) - - class BasinConcentrationExternalSchema(_BaseSchema): fid: Index[Int32] = pa.Field(default=1, check_name=True, coerce=True) diff --git a/utils/gen_python.jl b/utils/gen_python.jl index d102dd223..649663d61 100644 --- a/utils/gen_python.jl +++ b/utils/gen_python.jl @@ -14,7 +14,7 @@ pythontype(::Type{<:Bool}) = "Series[Annotated[pd.ArrowDtype, pyarrow.bool_()]]" pythontype(::Type{<:Enum}) = "Series[Annotated[pd.ArrowDtype, pyarrow.string()]]" pythontype(::Type{<:DateTime}) = "Series[Annotated[pd.ArrowDtype, pyarrow.timestamp('ms')]]" -isnullable(_) = "False" +isnullable(::Any) = "False" isnullable(::Type{T}) where {T >: Union{Missing}} = "True" function strip_prefix(T::DataType) From 40fb7e0ad194868951a2e8a3caa429636257a99c Mon Sep 17 00:00:00 2001 From: Bart de Koning Date: Wed, 29 Jan 2025 13:44:09 +0100 Subject: [PATCH 4/8] Some more priority specifications --- docs/guide/examples.ipynb | 12 ++++++------ docs/tutorial/irrigation-demand.ipynb | 2 +- docs/tutorial/reservoir.ipynb | 4 ++-- 3 files changed, 9 insertions(+), 9 deletions(-) diff --git a/docs/guide/examples.ipynb b/docs/guide/examples.ipynb index 3e09e5986..5bf44884b 100644 --- a/docs/guide/examples.ipynb +++ b/docs/guide/examples.ipynb @@ -1156,7 +1156,7 @@ " Node(6, Point(3.0, 1.0), subnetwork_id=1),\n", " [\n", " user_demand.Static(\n", - " demand=[1.5], return_factor=[0.0], min_level=[-1.0], priority=[1]\n", + " demand=[1.5], return_factor=[0.0], min_level=[-1.0], demand_priority=[1]\n", " )\n", " ],\n", ")\n", @@ -1167,7 +1167,7 @@ " demand=[0.0, 1.0, 1.2, 1.2],\n", " return_factor=[0.0, 0.0, 0.0, 0.0],\n", " min_level=[-1.0, -1.0, -1.0, -1.0],\n", - " priority=[1, 1, 2, 2],\n", + " demand_priority=[1, 1, 2, 2],\n", " time=2 * [\"2020-01-01\", \"2020-01-20\"],\n", " )\n", " ],\n", @@ -1288,7 +1288,7 @@ ")\n", "df_allocation_wide = df_allocation.pivot_table(\n", " 
index=\"time\",\n", - " columns=[\"node_type\", \"node_id\", \"priority\"],\n", + " columns=[\"node_type\", \"node_id\", \"demand_priority\"],\n", " values=[\"demand\", \"allocated\", \"realized\"],\n", ")\n", "df_allocation_wide = df_allocation_wide.loc[:, (df_allocation_wide != 0).any(axis=0)]\n", @@ -1297,7 +1297,7 @@ "\n", "df_allocation_wide[\"demand\"].plot(ax=axs[0], ls=\":\")\n", "df_allocation_wide[\"allocated\"].plot(ax=axs[1], ls=\"--\")\n", - "df_allocation_wide.xs(1, level=\"priority\", axis=1)[\"realized\"].plot(\n", + "df_allocation_wide.xs(1, level=\"demand_priority\", axis=1)[\"realized\"].plot(\n", " ax=axs[2], color=[\"C0\", \"C2\", \"C3\"]\n", ")\n", "\n", @@ -1443,7 +1443,7 @@ "source": [ "model.level_demand.add(\n", " Node(4, Point(1.0, -1.0), subnetwork_id=2),\n", - " [level_demand.Static(priority=[1], min_level=[1.0], max_level=[1.5])],\n", + " [level_demand.Static(demand_priority=[1], min_level=[1.0], max_level=[1.5])],\n", ")" ] }, @@ -1464,7 +1464,7 @@ " Node(3, Point(2.0, 0.0), subnetwork_id=2),\n", " [\n", " user_demand.Static(\n", - " priority=[2], demand=[1.5e-3], return_factor=[0.2], min_level=[0.2]\n", + " demand_priority=[2], demand=[1.5e-3], return_factor=[0.2], min_level=[0.2]\n", " )\n", " ],\n", ")" diff --git a/docs/tutorial/irrigation-demand.ipynb b/docs/tutorial/irrigation-demand.ipynb index 18010d1ae..b8f32fc5e 100644 --- a/docs/tutorial/irrigation-demand.ipynb +++ b/docs/tutorial/irrigation-demand.ipynb @@ -183,7 +183,7 @@ " demand=[0.0, 0.0, 10, 12, 12, 0.0],\n", " return_factor=0,\n", " min_level=0,\n", - " priority=1,\n", + " demand_priority=1,\n", " time=[\n", " starttime,\n", " \"2022-03-31\",\n", diff --git a/docs/tutorial/reservoir.ipynb b/docs/tutorial/reservoir.ipynb index 03c1673ed..98df431bf 100644 --- a/docs/tutorial/reservoir.ipynb +++ b/docs/tutorial/reservoir.ipynb @@ -124,7 +124,7 @@ " demand=[0.0, 0.0, 10, 12, 12, 0.0],\n", " return_factor=0,\n", " min_level=0,\n", - " priority=1,\n", + " 
demand_priority=1,\n", " time=[\n", " starttime,\n", " \"2022-03-31\",\n", @@ -197,7 +197,7 @@ " demand=[0.07, 0.08, 0.09, 0.10, 0.12, 0.14, 0.15, 0.14, 0.12, 0.10, 0.09, 0.08],\n", " return_factor=0.6,\n", " min_level=0,\n", - " priority=1,\n", + " demand_priority=1,\n", " time=pd.date_range(start=\"2022-01-01\", periods=12, freq=\"MS\"),\n", " )\n", " ],\n", From 88f4af6fe5cc3e02bcb72783f396dc262437940f Mon Sep 17 00:00:00 2001 From: Bart de Koning Date: Wed, 29 Jan 2025 14:40:34 +0100 Subject: [PATCH 5/8] Docs fix --- docs/concept/allocation.qmd | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/concept/allocation.qmd b/docs/concept/allocation.qmd index 91318fd84..89aaeca88 100644 --- a/docs/concept/allocation.qmd +++ b/docs/concept/allocation.qmd @@ -252,7 +252,7 @@ allocation_model = p.allocation.allocation_models[1] priority_idx = 1 Ribasim.set_initial_values!(allocation_model, u, p, t) -Ribasim.set_objective_priority!(allocation_model, u, p, t, priority_idx) +Ribasim.set_objective_demand_priority!(allocation_model, u, p, t, priority_idx) println(p.allocation.allocation_models[1].problem) ``` From 054e977dba8f2756ca0cd16d61b9e13315f5c19a Mon Sep 17 00:00:00 2001 From: Bart de Koning Date: Wed, 29 Jan 2025 15:21:17 +0100 Subject: [PATCH 6/8] Move source_priority to Node table, make source priority defaults a subsection of the allocation config --- core/src/config.jl | 14 ++++++---- python/ribasim/ribasim/__init__.py | 2 -- .../ribasim/allocation/source_priorities.py | 26 ------------------- python/ribasim/ribasim/geometry/node.py | 3 +++ python/ribasim/ribasim/input_base.py | 17 +++++++----- python/ribasim/ribasim/model.py | 5 ---- 6 files changed, 22 insertions(+), 45 deletions(-) delete mode 100644 python/ribasim/ribasim/allocation/source_priorities.py diff --git a/core/src/config.jl b/core/src/config.jl index a790d996f..94227803c 100644 --- a/core/src/config.jl +++ b/core/src/config.jl @@ -128,14 +128,18 @@ end verbosity::LogLevel = Info 
end +@option struct DefaultSourcePriority <: TableOption + user_demand::Int32 = 1000 + boundary::Int32 = 2000 # boundary = {flow_boundary, level_boundary} + level_demand::Int32 = 3000 + flow_demand::Int32 = 4000 + subnetwork_inlet::Int32 = 5000 +end + @option struct Allocation <: TableOption timestep::Float64 = 86400 use_allocation::Bool = false - default_source_priority_user_demand::Int = 1000 - default_source_priority_boundary::Int = 2000 - default_source_priority_level_demand::Int = 3000 - default_source_priority_flow_demand::Int = 4000 - default_source_priority_subnetwork_inlet::Int = 5000 + default_source_priority::DefaultSourcePriority = DefaultSourcePriority() end @option struct Experimental <: TableOption diff --git a/python/ribasim/ribasim/__init__.py b/python/ribasim/ribasim/__init__.py index 73ed7f9b1..84191897e 100644 --- a/python/ribasim/ribasim/__init__.py +++ b/python/ribasim/ribasim/__init__.py @@ -2,13 +2,11 @@ # Keep synced write_schema_version in ribasim_qgis/core/geopackage.py __schema_version__ = 3 -from ribasim.allocation.source_priorities import AllocationSourcePriorityTable from ribasim.config import Allocation, Logging, Node, Solver from ribasim.geometry.edge import EdgeTable from ribasim.model import Model __all__ = [ - "AllocationSourcePriorityTable", "EdgeTable", "Allocation", "Logging", diff --git a/python/ribasim/ribasim/allocation/source_priorities.py b/python/ribasim/ribasim/allocation/source_priorities.py deleted file mode 100644 index 8d5ff87a2..000000000 --- a/python/ribasim/ribasim/allocation/source_priorities.py +++ /dev/null @@ -1,26 +0,0 @@ -from typing import Annotated - -import pandas as pd -import pandera as pa -import pyarrow -from pandera.dtypes import Int32 -from pandera.typing import Index, Series -from ribasim.input_base import TableModel -from ribasim.schemas import _BaseSchema - - -class AllocationSourcePrioritySchema(_BaseSchema): - fid: Index[Int32] = pa.Field(default=1, check_name=True, coerce=True) - 
subnetwork_id: Series[Annotated[pd.ArrowDtype, pyarrow.int32()]] = pa.Field( - nullable=False - ) - node_id: Series[Annotated[pd.ArrowDtype, pyarrow.int32()]] = pa.Field( - nullable=False, default=0 - ) - source_priority: Series[Annotated[pd.ArrowDtype, pyarrow.int32()]] = pa.Field( - nullable=False - ) - - -class AllocationSourcePriorityTable(TableModel[AllocationSourcePrioritySchema]): - pass diff --git a/python/ribasim/ribasim/geometry/node.py b/python/ribasim/ribasim/geometry/node.py index 691dc4d19..3b2eac19e 100644 --- a/python/ribasim/ribasim/geometry/node.py +++ b/python/ribasim/ribasim/geometry/node.py @@ -25,6 +25,9 @@ class NodeSchema(_GeoBaseSchema): subnetwork_id: Series[pd.Int32Dtype] = pa.Field( default=pd.NA, nullable=True, coerce=True ) + source_priority: Series[pd.Int32Dtype] = pa.Field( + default=pd.NA, nullable=True, coerce=True + ) geometry: GeoSeries[Point] = pa.Field(default=None, nullable=True) @classmethod diff --git a/python/ribasim/ribasim/input_base.py b/python/ribasim/ribasim/input_base.py index 10bc754d4..8d8d1c259 100644 --- a/python/ribasim/ribasim/input_base.py +++ b/python/ribasim/ribasim/input_base.py @@ -197,13 +197,16 @@ def tablename(cls) -> str: cls_string = str(cls.tableschema()) names: list[str] = re.sub("([A-Z]+)", r" \1", cls_string).split()[:-1] names_lowered = [name.lower() for name in names] - for n in range(1, len(names_lowered) + 1): - node_name_snake_case = "_".join(names_lowered[:n]) - if node_name_snake_case in node_names_snake_case: - node_name = "".join(names[:n]) - table_name = "_".join(names_lowered[n:]) - return node_name + delimiter + table_name - return names[0] + if len(names) == 1: + return names[0] + else: + for n in range(1, len(names_lowered) + 1): + node_name_snake_case = "_".join(names_lowered[:n]) + if node_name_snake_case in node_names_snake_case: + node_name = "".join(names[:n]) + table_name = "_".join(names_lowered[n:]) + return node_name + delimiter + table_name + raise ValueError(f"Found no 
known node name in {cls_string}") @model_validator(mode="before") @classmethod diff --git a/python/ribasim/ribasim/model.py b/python/ribasim/ribasim/model.py index ecba27596..d7eb1fdcb 100644 --- a/python/ribasim/ribasim/model.py +++ b/python/ribasim/ribasim/model.py @@ -21,7 +21,6 @@ ) import ribasim -from ribasim.allocation.source_priorities import AllocationSourcePriorityTable from ribasim.config import ( Allocation, Basin, @@ -108,10 +107,6 @@ class Model(FileModel): terminal: Terminal = Field(default_factory=Terminal) user_demand: UserDemand = Field(default_factory=UserDemand) - allocation_source_priority_table: AllocationSourcePriorityTable = Field( - default_factory=AllocationSourcePriorityTable - ) - edge: EdgeTable = Field(default_factory=EdgeTable) use_validation: bool = Field(default=True, exclude=True) From 85d6d63716800e2cc84d97806df749d63345dd31 Mon Sep 17 00:00:00 2001 From: Bart de Koning Date: Wed, 29 Jan 2025 15:44:38 +0100 Subject: [PATCH 7/8] Add migration functions --- python/ribasim/ribasim/__init__.py | 2 +- python/ribasim/ribasim/migrations.py | 14 ++++++++++++++ ribasim_qgis/core/geopackage.py | 2 +- 3 files changed, 16 insertions(+), 2 deletions(-) diff --git a/python/ribasim/ribasim/__init__.py b/python/ribasim/ribasim/__init__.py index 84191897e..44849ddec 100644 --- a/python/ribasim/ribasim/__init__.py +++ b/python/ribasim/ribasim/__init__.py @@ -1,6 +1,6 @@ __version__ = "2025.1.0" # Keep synced write_schema_version in ribasim_qgis/core/geopackage.py -__schema_version__ = 3 +__schema_version__ = 4 from ribasim.config import Allocation, Logging, Node, Solver from ribasim.geometry.edge import EdgeTable diff --git a/python/ribasim/ribasim/migrations.py b/python/ribasim/ribasim/migrations.py index d122b7e69..49493b111 100644 --- a/python/ribasim/ribasim/migrations.py +++ b/python/ribasim/ribasim/migrations.py @@ -90,3 +90,17 @@ def outletstaticschema_migration(df: DataFrame, schema_version: int) -> DataFram 
     df.rename(columns={"min_crest_level": "min_upstream_level"}, inplace=True)
     return df
 
+
+
+for node_type in ["UserDemand", "LevelDemand", "FlowDemand"]:
+    for table_type in ["static", "time"]:
+
+        def migration_func(df: DataFrame, schema_version: int, node_type=node_type, table_type=table_type) -> DataFrame:
+            if schema_version < 4:
+                warnings.warn(
+                    f"Migrating outdated {node_type} / {table_type} table.", UserWarning
+                )
+                df.rename(columns={"priority": "demand_priority"}, inplace=True)
+            return df
+
+        globals()[f"{node_type.lower()}{table_type}schema_migration"] = migration_func
diff --git a/ribasim_qgis/core/geopackage.py b/ribasim_qgis/core/geopackage.py
index 7af9c789a..f6a04c3f9 100644
--- a/ribasim_qgis/core/geopackage.py
+++ b/ribasim_qgis/core/geopackage.py
@@ -52,7 +52,7 @@ def layers(path: Path) -> list[str]:
 
 
 # Keep version synced __schema_version__ in ribasim/__init__.py
-def write_schema_version(path: Path, version: int = 3) -> None:
+def write_schema_version(path: Path, version: int = 4) -> None:
     """Write the schema version to the geopackage."""
     with sqlite3_cursor(path) as cursor:
         cursor.execute(

From 2a19b32e3ea17f40451beadb39271555bbdbd36f Mon Sep 17 00:00:00 2001
From: Bart de Koning
Date: Thu, 30 Jan 2025 08:49:34 +0100
Subject: [PATCH 8/8] Pass node rows to `get_sources_in_order`

---
 core/src/allocation_init.jl                       |  4 +++-
 core/src/read.jl                                  | 11 +++++++---
 python/ribasim/ribasim/config.py                  | 21 +++++++++++++++++++
 .../ribasim_testmodels/allocation.py              |  2 +-
 4 files changed, 33 insertions(+), 5 deletions(-)

diff --git a/core/src/allocation_init.jl b/core/src/allocation_init.jl
index 6b5ebdb16..319f5cc43 100644
--- a/core/src/allocation_init.jl
+++ b/core/src/allocation_init.jl
@@ -484,6 +484,7 @@ TODO: Get preferred source order from input
 function get_sources_in_order(
     problem::JuMP.Model,
     p::Parameters,
+    node_rows::SQLite.Query,
     subnetwork_id::Integer,
 )::OrderedDict{Tuple{NodeID, NodeID}, AllocationSource}
     # NOTE: return flow has to be done before other sources, to prevent that
@@ -552,11 +553,12
@@ An AllocationModel object.
 function AllocationModel(
     subnetwork_id::Int32,
     p::Parameters,
+    node_rows::SQLite.Query,
     Δt_allocation::Float64,
 )::AllocationModel
     capacity = get_capacity(p, subnetwork_id)
     problem = allocation_problem(p, capacity, subnetwork_id)
-    sources = get_sources_in_order(problem, p, subnetwork_id)
+    sources = get_sources_in_order(problem, p, node_rows, subnetwork_id)
     flow = JuMP.Containers.SparseAxisArray(Dict(only(problem[:F].axes) .=> 0.0))
 
     return AllocationModel(; subnetwork_id, capacity, flow, sources, problem, Δt_allocation)
diff --git a/core/src/read.jl b/core/src/read.jl
index 3f02107bb..5e7a59df2 100644
--- a/core/src/read.jl
+++ b/core/src/read.jl
@@ -246,7 +246,7 @@ const conservative_nodetypes = Set{NodeType.T}([
     NodeType.ManningResistance,
 ])
 
-function initialize_allocation!(p::Parameters, config::Config)::Nothing
+function initialize_allocation!(p::Parameters, db::DB, config::Config)::Nothing
     (; graph, allocation) = p
     (; subnetwork_ids, allocation_models, main_network_connections) = allocation
     subnetwork_ids_ = sort(collect(keys(graph[].node_ids)))
@@ -269,10 +269,18 @@ function initialize_allocation!(p::Parameters, config::Config)::Nothing
         find_subnetwork_connections!(p)
     end
 
+    # NOTE(review): an SQLite.Query is a single-pass cursor, yet it is shared
+    # by every AllocationModel below — confirm each get_sources_in_order call
+    # consumes exactly its own subnetwork's rows, or re-execute per subnetwork.
    node_rows = execute(
         db,
         "SELECT node_id, node_type, subnetwork_id, source_priority FROM Node ORDER BY subnetwork_id, source_priority",
     )
+
     for subnetwork_id in subnetwork_ids_
         push!(
             allocation_models,
-            AllocationModel(subnetwork_id, p, config.allocation.timestep),
+            AllocationModel(subnetwork_id, p, node_rows, config.allocation.timestep),
         )
     end
     return nothing
@@ -1569,7 +1577,7 @@ function Parameters(db::DB, config::Config)::Parameters
 
     # Allocation data structures
     if config.allocation.use_allocation
-        initialize_allocation!(p, config)
+        initialize_allocation!(p, db, config)
     end
     return p
 end
diff --git a/python/ribasim/ribasim/config.py b/python/ribasim/ribasim/config.py
index b207aa25b..a74eee106 100644
--- a/python/ribasim/ribasim/config.py
+++
b/python/ribasim/ribasim/config.py @@ -55,6 +55,20 @@ from ribasim.utils import _concat, _pascal_to_snake +class DefaultSourcePriority(ChildModel): + """ + Specify per source node type what its default source priority is. + + flow_boundary and level_boundary nodes are combined into the single category 'boundary'. + """ + + user_demand: int = 1000 + boundary: int = 2000 + level_demand: int = 3000 + flow_demand: int = 4000 + subnetwork_inlet: int = 5000 + + class Allocation(ChildModel): """ Defines the allocation optimization algorithm options. @@ -70,6 +84,7 @@ class Allocation(ChildModel): timestep: float = 86400.0 use_allocation: bool = False + default_source_priority: DefaultSourcePriority = DefaultSourcePriority() class Results(ChildModel): @@ -173,12 +188,15 @@ class Node(pydantic.BaseModel): An optional name of the node. subnetwork_id : int Optionally adds this node to a subnetwork, which is input for the allocation algorithm. + source_priority : int + Optionally adds a source priority to this node, which is input for the allocation algorithm. 
""" node_id: NonNegativeInt | None = None geometry: Point name: str = "" subnetwork_id: int | None = None + source_priority: int | None = None model_config = ConfigDict(arbitrary_types_allowed=True, extra="allow") @@ -203,6 +221,9 @@ def into_geodataframe(self, node_type: str, node_id: int) -> GeoDataFrame: "node_type": pd.Series([node_type], dtype=str), "name": pd.Series([self.name], dtype=str), "subnetwork_id": pd.Series([self.subnetwork_id], dtype=pd.Int32Dtype()), + "source_priority": pd.Series( + [self.source_priority], dtype=pd.Int32Dtype() + ), **extra, }, geometry=[self.geometry], diff --git a/python/ribasim_testmodels/ribasim_testmodels/allocation.py b/python/ribasim_testmodels/ribasim_testmodels/allocation.py index bd978c898..4b09a9787 100644 --- a/python/ribasim_testmodels/ribasim_testmodels/allocation.py +++ b/python/ribasim_testmodels/ribasim_testmodels/allocation.py @@ -881,7 +881,7 @@ def linear_resistance_demand_model(): ) model.flow_demand.add( - Node(4, Point(1, 1), subnetwork_id=2), + Node(4, Point(1, 1), subnetwork_id=2, source_priority=1), [flow_demand.Static(demand_priority=[1], demand=2.0)], )