From 57ad0e19d4f24d328c7637d4f35b7d4026d3da9c Mon Sep 17 00:00:00 2001
From: Maarten Pronk
Date: Thu, 30 Jan 2025 14:44:38 +0100
Subject: [PATCH] Rename Edge to Link (#2023)

*everywhere*

This is an automatic rename, followed by slowly staging the changes.
Main exceptions found so far:

- UGrid uses edge_dimension and similar names
- Networkx has a Graph edges attribute and methods
- Graphs.jl has an edges method and keywords

@visr The only breaking change is that output arrow tables now have `link_id` instead of `edge_id`. I don't think there's a way around that.
---
 core/ext/RibasimMakieExt.jl | 12 +-
 core/regression_test/regression_test.jl | 6 +-
 core/src/Ribasim.jl | 2 +-
 core/src/allocation_init.jl | 154 ++++----
 core/src/allocation_optim.jl | 188 +++++-----
 core/src/callback.jl | 78 ++--
 core/src/concentration.jl | 42 +--
 core/src/graph.jl | 108 +++---
 core/src/model.jl | 6 +-
 core/src/parameter.jl | 134 +++---
 core/src/read.jl | 60 +--
 core/src/solve.jl | 70 ++--
 core/src/util.jl | 104 +++---
 core/src/validation.jl | 42 +--
 core/src/write.jl | 26 +-
 core/test/allocation_test.jl | 18 +-
 core/test/control_test.jl | 20 +-
 core/test/main_test.jl | 2 +-
 core/test/run_models_test.jl | 6 +-
 core/test/validation_test.jl | 44 +--
 docs/concept/allocation.qmd | 32 +-
 docs/concept/core.qmd | 8 +-
 docs/concept/equations.qmd | 4 +-
 docs/concept/modelconcept.qmd | 6 +-
 docs/dev/addnode.qmd | 2 +-
 docs/dev/allocation.qmd | 60 +--
 docs/dev/qgis_test_plan.qmd | 20 +-
 docs/dev/scripts/plot_trace.jl | 4 +-
 docs/guide/examples.ipynb | 174 ++++-----
 docs/guide/qgis.qmd | 14 +-
 docs/known_issues.qmd | 4 +-
 docs/reference/node/discrete-control.qmd | 2 +-
 docs/reference/node/flow-boundary.qmd | 2 +-
 docs/reference/node/level-demand.qmd | 2 +-
 docs/reference/node/outlet.qmd | 2 +-
 docs/reference/node/pid-control.qmd | 4 +-
 docs/reference/node/pump.qmd | 2 +-
 docs/reference/node/user-demand.qmd | 2 +-
 docs/reference/usage.qmd | 54 +--
 docs/reference/validation.qmd | 2 +-
 docs/tutorial/irrigation-demand.ipynb | 26 +-
 docs/tutorial/natural-flow.ipynb | 24 +-
 docs/tutorial/reservoir.ipynb | 26 +-
 python/ribasim/ribasim/__init__.py | 6 +-
 python/ribasim/ribasim/config.py | 2 +-
 python/ribasim/ribasim/delwaq/generate.py | 106 +++---
 python/ribasim/ribasim/delwaq/util.py | 32 +-
 python/ribasim/ribasim/geometry/__init__.py | 4 +-
 .../ribasim/geometry/{edge.py => link.py} | 116 +++---
 python/ribasim/ribasim/input_base.py | 4 +-
 python/ribasim/ribasim/migrations.py | 18 +-
 python/ribasim/ribasim/model.py | 121 +++---
 .../styles/{EdgeStyle.qml => LinkStyle.qml} | 28 +-
 python/ribasim/ribasim/styles/README.md | 2 +-
 python/ribasim/ribasim/utils.py | 12 +-
 python/ribasim/ribasim/validation.py | 4 +-
 python/ribasim/tests/test_input_base.py | 4 +-
 python/ribasim/tests/test_io.py | 62 ++--
 .../tests/{test_edge.py => test_link.py} | 30 +-
 python/ribasim/tests/test_migrations.py | 5 +-
 python/ribasim/tests/test_model.py | 24 +-
 python/ribasim/tests/test_validation.py | 30 +-
 .../ribasim_testmodels/__init__.py | 4 +-
 .../ribasim_testmodels/allocation.py | 348 +++++++++---------
 .../ribasim_testmodels/backwater.py | 8 +-
 .../ribasim_testmodels/basic.py | 48 +--
 .../ribasim_testmodels/continuous_control.py | 16 +-
 .../ribasim_testmodels/discrete_control.py | 86 ++---
 .../ribasim_testmodels/doc_example.py | 32 +-
 .../ribasim_testmodels/equations.py | 24 +-
 .../ribasim_testmodels/invalid.py | 34 +-
 .../ribasim_testmodels/pid_control.py | 26 +-
 .../ribasim_testmodels/time.py | 4 +-
 .../ribasim_testmodels/trivial.py | 4 +-
.../ribasim_testmodels/two_basin.py | 6 +- ribasim_qgis/core/geopackage.py | 2 +- ribasim_qgis/core/nodes.py | 16 +- ribasim_qgis/core/topology.py | 84 ++--- ribasim_qgis/tests/ui/test_load_plugin.py | 4 +- ribasim_qgis/widgets/dataset_widget.py | 52 +-- ribasim_qgis/widgets/nodes_widget.py | 2 +- ribasim_qgis/widgets/ribasim_widget.py | 4 +- utils/templates/validation.py.jinja | 4 +- 83 files changed, 1518 insertions(+), 1498 deletions(-) rename python/ribasim/ribasim/geometry/{edge.py => link.py} (67%) rename python/ribasim/ribasim/styles/{EdgeStyle.qml => LinkStyle.qml} (98%) rename python/ribasim/tests/{test_edge.py => test_link.py} (58%) diff --git a/core/ext/RibasimMakieExt.jl b/core/ext/RibasimMakieExt.jl index 7c21330ae..418e9455a 100644 --- a/core/ext/RibasimMakieExt.jl +++ b/core/ext/RibasimMakieExt.jl @@ -23,10 +23,10 @@ function Ribasim.plot_basin_data(model::Model) f end -function Ribasim.plot_flow!(model::Model, ax::Axis, edge_metadata::Ribasim.EdgeMetadata) +function Ribasim.plot_flow!(model::Model, ax::Axis, link_metadata::Ribasim.LinkMetadata) flow_data = DataFrame(Ribasim.flow_table(model)) - flow_data = filter(:edge_id => ==(edge_metadata.id), flow_data) - label = "$(edge_metadata.edge[1]) → $(edge_metadata.edge[2])" + flow_data = filter(:link_id => ==(link_metadata.id), flow_data) + label = "$(link_metadata.link[1]) → $(link_metadata.link[2])" scatterlines!(ax, flow_data.time, flow_data.flow_rate; label) return nothing end @@ -34,12 +34,12 @@ end function Ribasim.plot_flow(model::Model; skip_conservative_out = true) f = Figure() ax = Axis(f[1, 1]; xlabel = "time", ylabel = "flow rate [m³s⁻¹]") - for edge_metadata in values(model.integrator.p.graph.edge_data) + for link_metadata in values(model.integrator.p.graph.edge_data) if skip_conservative_out && - edge_metadata.edge[1].type in Ribasim.conservative_nodetypes + link_metadata.link[1].type in Ribasim.conservative_nodetypes continue end - Ribasim.plot_flow!(model, ax, edge_metadata) + Ribasim.plot_flow!(model, ax, link_metadata) end axislegend(ax) f diff --git a/core/regression_test/regression_test.jl b/core/regression_test/regression_test.jl index cb5b4bb91..8f2b2aa66 100644 --- a/core/regression_test/regression_test.jl +++ b/core/regression_test/regression_test.jl @@ -96,7 +96,7 @@ end # Testbench for flow.arrow @test flow.time == flow_bench.time - @test flow.edge_id == flow_bench.edge_id + @test flow.link_id == flow_bench.edge_id @test flow.from_node_id == flow_bench.from_node_id @test flow.to_node_id == flow_bench.to_node_id @test all(q -> abs(q) < 0.01, flow.flow_rate - flow_bench.flow_rate) @@ -167,7 +167,7 @@ end # Testbench for flow.arrow @test flow.time == flow_bench.time - @test flow.edge_id == flow_bench.edge_id + @test flow.link_id == flow_bench.edge_id @test flow.from_node_id == flow_bench.from_node_id @test flow.to_node_id == flow_bench.to_node_id @test all(q -> abs(q) < 0.01, flow.flow_rate - flow_bench.flow_rate) @@ -234,7 +234,7 @@ end # Testbench for flow.arrow @test flow.time == flow_bench.time - @test flow.edge_id == flow_bench.edge_id + @test flow.link_id == flow_bench.edge_id @test flow.from_node_id == flow_bench.from_node_id @test flow.to_node_id == flow_bench.to_node_id diff --git a/core/src/Ribasim.jl b/core/src/Ribasim.jl index a8bba63f0..ef06382dc 100644 --- a/core/src/Ribasim.jl +++ b/core/src/Ribasim.jl @@ -115,7 +115,7 @@ using DiffEqCallbacks: SavedValues, SavingCallback -# The network defined by the Node and Edge table is converted to a graph internally. 
+# The network defined by the Node and Link table is converted to a graph internally. using Graphs: DiGraph, Edge, edges, inneighbors, nv, outneighbors, induced_subgraph, is_connected # Convenience functionality built on top of Graphs. Used to store e.g. node and edge metadata diff --git a/core/src/allocation_init.jl b/core/src/allocation_init.jl index d80660861..b4585022f 100644 --- a/core/src/allocation_init.jl +++ b/core/src/allocation_init.jl @@ -1,21 +1,21 @@ -"""Find the edges from the main network to a subnetwork.""" +"""Find the links from the main network to a subnetwork.""" function find_subnetwork_connections!(p::Parameters)::Nothing (; allocation, graph, allocation) = p n_priorities = length(allocation.priorities) (; subnetwork_demands, subnetwork_allocateds) = allocation - # Find edges (node_id, outflow_id) where the source node has subnetwork id 1 and the + # Find links (node_id, outflow_id) where the source node has subnetwork id 1 and the # destination node subnetwork id ≠1 for node_id in graph[].node_ids[1] for outflow_id in outflow_ids(graph, node_id) if (graph[outflow_id].subnetwork_id != 1) - main_network_source_edges = + main_network_source_links = get_main_network_connections(p, graph[outflow_id].subnetwork_id) - edge = (node_id, outflow_id) - push!(main_network_source_edges, edge) + link = (node_id, outflow_id) + push!(main_network_source_links, link) # Allocate memory for the demands and priorities - # from the subnetwork via this edge - subnetwork_demands[edge] = zeros(n_priorities) - subnetwork_allocateds[edge] = zeros(n_priorities) + # from the subnetwork via this link + subnetwork_demands[link] = zeros(n_priorities) + subnetwork_allocateds[link] = zeros(n_priorities) end end end @@ -38,9 +38,9 @@ function get_main_network_connections( end """ -Get the fixed capacity (∈[0,∞]) of the edges in the subnetwork in a JuMP.Containers.SparseAxisArray, +Get the fixed capacity (∈[0,∞]) of the links in the subnetwork in a JuMP.Containers.SparseAxisArray, which is a type of sparse arrays that in this case takes NodeID in stead of Int as indices. -E.g. capacity[(node_a, node_b)] gives the capacity of edge (node_a, node_b). +E.g. capacity[(node_a, node_b)] gives the capacity of link (node_a, node_b). """ function get_subnetwork_capacity( p::Parameters, @@ -52,42 +52,42 @@ function get_subnetwork_capacity( dict = Dict{Tuple{NodeID, NodeID}, Float64}() capacity = JuMP.Containers.SparseAxisArray(dict) - for edge_metadata in values(graph.edge_data) - # Only flow edges are used for allocation - if edge_metadata.type != EdgeType.flow + for link_metadata in values(graph.edge_data) + # Only flow links are used for allocation + if link_metadata.type != LinkType.flow continue end - # If this edge is part of this subnetwork - # edges between the main network and a subnetwork are added in add_subnetwork_connections! - if edge_metadata.edge ⊆ node_ids_subnetwork - id_src, id_dst = edge_metadata.edge + # If this link is part of this subnetwork + # links between the main network and a subnetwork are added in add_subnetwork_connections! 
+ if link_metadata.link ⊆ node_ids_subnetwork + id_src, id_dst = link_metadata.link - capacity_edge = Inf + capacity_link = Inf - # Find flow constraints for this edge + # Find flow constraints for this link if is_flow_constraining(id_src.type) node_src = getfield(p, graph[id_src].type) capacity_node_src = node_src.max_flow_rate[id_src.idx] - capacity_edge = min(capacity_edge, capacity_node_src) + capacity_link = min(capacity_link, capacity_node_src) end if is_flow_constraining(id_dst.type) node_dst = getfield(p, graph[id_dst].type) capacity_node_dst = node_dst.max_flow_rate[id_dst.idx] - capacity_edge = min(capacity_edge, capacity_node_dst) + capacity_link = min(capacity_link, capacity_node_dst) end # Set the capacity - capacity[edge_metadata.edge] = capacity_edge + capacity[link_metadata.link] = capacity_link - # If allowed by the nodes from this edge, - # allow allocation flow in opposite direction of the edge + # If allowed by the nodes from this link, + # allow allocation flow in opposite direction of the link if !( is_flow_direction_constraining(id_src.type) || is_flow_direction_constraining(id_dst.type) ) - capacity[reverse(edge_metadata.edge)] = capacity_edge + capacity[reverse(link_metadata.link)] = capacity_link end end end @@ -99,7 +99,7 @@ const boundary_source_nodetypes = Set{NodeType.T}([NodeType.LevelBoundary, NodeType.FlowBoundary]) """ -Add the edges connecting the main network work to a subnetwork to both the main network +Add the links connecting the main network work to a subnetwork to both the main network and subnetwork allocation network (defined by their capacity objects). """ function add_subnetwork_connections!( @@ -127,9 +127,9 @@ function add_subnetwork_connections!( end """ -Get the capacity of all edges in the subnetwork in a JuMP +Get the capacity of all links in the subnetwork in a JuMP dictionary wrapper. The keys of this dictionary define -the which edges are used in the allocation optimization problem. +the which links are used in the allocation optimization problem. """ function get_capacity( p::Parameters, @@ -143,15 +143,15 @@ end """ Add the flow variables F to the allocation problem. -The variable indices are (edge_source_id, edge_dst_id). +The variable indices are (link_source_id, link_dst_id). Non-negativivity constraints are also immediately added to the flow variables. """ function add_variables_flow!( problem::JuMP.Model, capacity::JuMP.Containers.SparseAxisArray{Float64, 2, Tuple{NodeID, NodeID}}, )::Nothing - edges = keys(capacity.data) - problem[:F] = JuMP.@variable(problem, F[edge = edges] >= 0.0) + links = keys(capacity.data) + problem[:F] = JuMP.@variable(problem, F[link = links] >= 0.0) return nothing end @@ -208,10 +208,10 @@ end """ Add the flow capacity constraints to the allocation problem. Only finite capacities get a constraint. -The constraint indices are (edge_source_id, edge_dst_id). +The constraint indices are (link_source_id, link_dst_id). 
Constraint: -flow over edge <= edge capacity +flow over link <= link capacity """ function add_constraints_capacity!( problem::JuMP.Model, @@ -219,32 +219,32 @@ function add_constraints_capacity!( p::Parameters, subnetwork_id::Int32, )::Nothing - main_network_source_edges = get_main_network_connections(p, subnetwork_id) + main_network_source_links = get_main_network_connections(p, subnetwork_id) F = problem[:F] - # Find the edges within the subnetwork with finite capacity - edge_ids_finite_capacity = Tuple{NodeID, NodeID}[] - for (edge, c) in capacity.data - if !isinf(c) && edge ∉ main_network_source_edges - push!(edge_ids_finite_capacity, edge) + # Find the links within the subnetwork with finite capacity + link_ids_finite_capacity = Tuple{NodeID, NodeID}[] + for (link, c) in capacity.data + if !isinf(c) && link ∉ main_network_source_links + push!(link_ids_finite_capacity, link) end end problem[:capacity] = JuMP.@constraint( problem, - [edge = edge_ids_finite_capacity], - F[edge] <= capacity[edge...], + [link = link_ids_finite_capacity], + F[link] <= capacity[link...], base_name = "capacity" ) return nothing end """ -Add capacity constraints to the outflow edge of UserDemand nodes. +Add capacity constraints to the outflow link of UserDemand nodes. The constraint indices are the UserDemand node IDs. Constraint: -flow over UserDemand edge outflow edge <= cumulative return flow from previous priorities +flow over UserDemand link outflow link <= cumulative return flow from previous priorities """ function add_constraints_user_source!( problem::JuMP.Model, @@ -270,25 +270,25 @@ end """ Add the boundary source constraints to the allocation problem. The actual threshold values will be set before each allocation solve. -The constraint indices are (edge_source_id, edge_dst_id). +The constraint indices are (link_source_id, link_dst_id). Constraint: -flow over source edge <= source flow in physical layer +flow over source link <= source flow in physical layer """ function add_constraints_boundary_source!( problem::JuMP.Model, p::Parameters, subnetwork_id::Int32, )::Nothing - # Source edges (without the basins) - edges_source = - [edge for edge in source_edges_subnetwork(p, subnetwork_id) if edge[1] != edge[2]] + # Source links (without the basins) + links_source = + [link for link in source_links_subnetwork(p, subnetwork_id) if link[1] != link[2]] F = problem[:F] problem[:source_boundary] = JuMP.@constraint( problem, - [edge_id = edges_source], - F[edge_id] <= 0.0, + [link_id = links_source], + F[link_id] <= 0.0, base_name = "source_boundary" ) return nothing @@ -297,10 +297,10 @@ end """ Add main network source constraints to the allocation problem. The actual threshold values will be set before each allocation solve. -The constraint indices are (edge_source_id, edge_dst_id). +The constraint indices are (link_source_id, link_dst_id). 
Constraint: -flow over main network to subnetwork connection edge <= either 0 or allocated amount from the main network +flow over main network to subnetwork connection link <= either 0 or allocated amount from the main network """ function add_constraints_main_network_source!( problem::JuMP.Model, @@ -310,12 +310,12 @@ function add_constraints_main_network_source!( F = problem[:F] (; main_network_connections, subnetwork_ids) = p.allocation subnetwork_id = searchsortedfirst(subnetwork_ids, subnetwork_id) - edges_source = main_network_connections[subnetwork_id] + links_source = main_network_connections[subnetwork_id] problem[:source_main_network] = JuMP.@constraint( problem, - [edge_id = edges_source], - F[edge_id] <= 0.0, + [link_id = links_source], + F[link_id] <= 0.0, base_name = "source_main_network" ) return nothing @@ -344,7 +344,7 @@ function add_constraints_conservation_node!( inflows = Dict{NodeID, Set{JuMP.VariableRef}}() outflows = Dict{NodeID, Set{JuMP.VariableRef}}() - edges_allocation = only(F.axes) + links_allocation = only(F.axes) for node_id in node_ids @@ -362,15 +362,15 @@ function add_constraints_conservation_node!( inflows[node_id] = inflows_node outflows[node_id] = outflows_node - # Find in- and outflow allocation edges of this node + # Find in- and outflow allocation links of this node for neighbor_id in inoutflow_ids(graph, node_id) - edge_in = (neighbor_id, node_id) - if edge_in in edges_allocation - push!(inflows_node, F[edge_in]) + link_in = (neighbor_id, node_id) + if link_in in links_allocation + push!(inflows_node, F[link_in]) end - edge_out = (node_id, neighbor_id) - if edge_out in edges_allocation - push!(outflows_node, F[edge_out]) + link_out = (node_id, neighbor_id) + if link_out in links_allocation + push!(outflows_node, F[link_out]) end end @@ -495,42 +495,42 @@ function get_sources_in_order( # User return flow for node_id in sort(only(problem[:source_user].axes)) - edge = user_demand.outflow_edge[node_id.idx].edge - sources[edge] = AllocationSource(; edge, type = AllocationSourceType.user_return) + link = user_demand.outflow_link[node_id.idx].link + sources[link] = AllocationSource(; link, type = AllocationSourceType.user_return) end # Boundary node sources - for edge in sort( + for link in sort( only(problem[:source_boundary].axes); - by = edge -> (edge[1].value, edge[2].value), + by = link -> (link[1].value, link[2].value), ) - sources[edge] = AllocationSource(; edge, type = AllocationSourceType.boundary_node) + sources[link] = AllocationSource(; link, type = AllocationSourceType.boundary_node) end # Basins with level demand for node_id in basin.node_id if (graph[node_id].subnetwork_id == subnetwork_id) && has_external_demand(graph, node_id, :level_demand)[1] - edge = (node_id, node_id) - sources[edge] = AllocationSource(; edge, type = AllocationSourceType.basin) + link = (node_id, node_id) + sources[link] = AllocationSource(; link, type = AllocationSourceType.basin) end end # Main network to subnetwork connections - for edge in sort( + for link in sort( collect(keys(allocation.subnetwork_demands)); - by = edge -> (edge[1].value, edge[2].value), + by = link -> (link[1].value, link[2].value), ) - if graph[edge[2]].subnetwork_id == subnetwork_id - sources[edge] = - AllocationSource(; edge, type = AllocationSourceType.main_to_sub) + if graph[link[2]].subnetwork_id == subnetwork_id + sources[link] = + AllocationSource(; link, type = AllocationSourceType.main_to_sub) end end # Buffers for node_id in sort(only(problem[:F_flow_buffer_out].axes)) - edge = 
(node_id, node_id) - sources[edge] = AllocationSource(; edge, type = AllocationSourceType.buffer) + link = (node_id, node_id) + sources[link] = AllocationSource(; link, type = AllocationSourceType.buffer) end sources diff --git a/core/src/allocation_optim.jl b/core/src/allocation_optim.jl index 6e2404862..bfc1acb36 100644 --- a/core/src/allocation_optim.jl +++ b/core/src/allocation_optim.jl @@ -57,14 +57,14 @@ function set_objective_priority!( end # Terms for UserDemand nodes and FlowDemand nodes - for edge in keys(capacity.data) - to_node_id = edge[2] + for link in keys(capacity.data) + to_node_id = link[2] if to_node_id.type == NodeType.UserDemand # UserDemand user_demand_idx = to_node_id.idx d = demand_reduced[user_demand_idx, priority_idx] - F_ud = F[edge] + F_ud = F[link] add_objective_term!(ex, d, F_ud) else has_demand, demand_node_id = @@ -117,24 +117,24 @@ function assign_allocations!( subnetwork_ids, main_network_connections, ) = allocation - main_network_source_edges = get_main_network_connections(p, subnetwork_id) - for edge in keys(capacity.data) - # If this edge does not exist in the physical model then it comes from a - # bidirectional edge, and thus does not have directly allocating flow - if !haskey(graph, edge...) + main_network_source_links = get_main_network_connections(p, subnetwork_id) + for link in keys(capacity.data) + # If this link does not exist in the physical model then it comes from a + # bidirectional link, and thus does not have directly allocating flow + if !haskey(graph, link...) continue end - # If this edge is a source edge from the main network to a subnetwork, - # and demands are being collected, add its flow to the demand of this edge + # If this link is a source link from the main network to a subnetwork, + # and demands are being collected, add its flow to the demand of this link if optimization_type == OptimizationType.collect_demands - if edge in main_network_source_edges - allocated = flow[edge] - subnetwork_demands[edge][priority_idx] += allocated + if link in main_network_source_links + allocated = flow[link] + subnetwork_demands[link][priority_idx] += allocated end elseif optimization_type == OptimizationType.allocate - user_demand_node_id = edge[2] + user_demand_node_id = link[2] if user_demand_node_id.type == NodeType.UserDemand - allocated = flow[edge] + allocated = flow[link] user_demand.allocated[user_demand_node_id.idx, priority_idx] = allocated end end @@ -143,13 +143,13 @@ function assign_allocations!( # Write the flows to the subnetworks as allocated flows # in the allocation object if is_main_network(subnetwork_id) - for (subnetwork_id, main_network_source_edges) in + for (subnetwork_id, main_network_source_links) in zip(subnetwork_ids, main_network_connections) if is_main_network(subnetwork_id) continue end - for edge_id in main_network_source_edges - subnetwork_allocateds[edge_id][priority_idx] = flow[edge_id] + for link_id in main_network_source_links + subnetwork_allocateds[link_id][priority_idx] = flow[link_id] end end end @@ -173,9 +173,9 @@ function set_initial_capacities_inlet!( (; subnetwork_id) = allocation_model (; subnetwork_allocateds) = allocation - main_network_source_edges = get_main_network_connections(p, subnetwork_id) + main_network_source_links = get_main_network_connections(p, subnetwork_id) - for edge_id in main_network_source_edges + for link_id in main_network_source_links source_capacity = if optimization_type == OptimizationType.internal_sources # Set the source capacity to 0 if optimization is being done for 
the internal subnetwork sources 0.0 @@ -183,10 +183,10 @@ function set_initial_capacities_inlet!( # Set the source capacity to effectively unlimited if subnetwork demands are being collected Inf elseif optimization_type == OptimizationType.allocate - # Set the source capacity to the sum over priorities of the values allocated to the subnetwork over this edge - sum(subnetwork_allocateds[edge_id]) + # Set the source capacity to the sum over priorities of the values allocated to the subnetwork over this link + sum(subnetwork_allocateds[link_id]) end - source = sources[edge_id] + source = sources[link_id] @assert source.type == AllocationSourceType.main_to_sub source.capacity = source_capacity end @@ -206,9 +206,9 @@ function set_initial_capacities_source!( mean_input_flows_subnetwork_ = mean_input_flows_subnetwork(p, subnetwork_id) - for edge in keys(mean_input_flows_subnetwork_) - source = sources[edge] - source.capacity = mean_input_flows_subnetwork_[edge] + for link in keys(mean_input_flows_subnetwork_) + source = sources[link] + source.capacity = mean_input_flows_subnetwork_[link] end return nothing end @@ -217,7 +217,7 @@ end Reduce the capacity of a source by the amount of flow taken from them in the latest optimization. """ function reduce_source_capacity!(problem::JuMP.Model, source::AllocationSource)::Nothing - (; edge) = source + (; link) = source used_capacity = if source.type in ( @@ -225,11 +225,11 @@ function reduce_source_capacity!(problem::JuMP.Model, source::AllocationSource): AllocationSourceType.main_to_sub, AllocationSourceType.user_return, ) - JuMP.value(problem[:F][edge]) + JuMP.value(problem[:F][link]) elseif source.type == AllocationSourceType.basin - JuMP.value(problem[:F_basin_out][edge[1]]) + JuMP.value(problem[:F_basin_out][link[1]]) elseif source.type == AllocationSourceType.buffer - JuMP.value(problem[:F_flow_buffer_out][edge[1]]) + JuMP.value(problem[:F_flow_buffer_out][link[1]]) else error("Unknown source type") end @@ -251,15 +251,15 @@ function increase_source_capacities!( (; user_demand) = p for source in values(sources) - (; edge) = source + (; link) = source additional_capacity = if source.type == AllocationSourceType.user_return - id_user_demand = edge[1] - inflow_edge = user_demand.inflow_edge[id_user_demand.idx].edge + id_user_demand = link[1] + inflow_link = user_demand.inflow_link[id_user_demand.idx].link user_demand.return_factor[id_user_demand.idx](t) * - JuMP.value(problem[:F][inflow_edge]) + JuMP.value(problem[:F][inflow_link]) elseif source.type == AllocationSourceType.buffer - id_connector_node = edge[1] + id_connector_node = link[1] JuMP.value(problem[:F_flow_buffer_in][id_connector_node]) else continue @@ -271,26 +271,26 @@ function increase_source_capacities!( end """ -Set the capacities of the allocation flow edges as determined by -the smallest max_flow_rate of a node on this edge +Set the capacities of the allocation flow links as determined by +the smallest max_flow_rate of a node on this link """ -function set_initial_capacities_edge!( +function set_initial_capacities_link!( allocation_model::AllocationModel, p::Parameters, )::Nothing (; problem, capacity, subnetwork_id) = allocation_model constraints_capacity = problem[:capacity] - main_network_source_edges = get_main_network_connections(p, subnetwork_id) + main_network_source_links = get_main_network_connections(p, subnetwork_id) - for (edge_id, c) in capacity.data + for (link_id, c) in capacity.data - # These edges have no capacity constraints: + # These links have no capacity 
constraints: # - With infinite capacity # - Being a source from the main network to a subnetwork - if isinf(c) || edge_id ∈ main_network_source_edges + if isinf(c) || link_id ∈ main_network_source_links continue end - JuMP.set_normalized_rhs(constraints_capacity[edge_id], c) + JuMP.set_normalized_rhs(constraints_capacity[link_id], c) end return nothing @@ -298,21 +298,21 @@ end """ Before an allocation solve, subtract the flow used by allocation for the previous priority -from the edge capacities. +from the link capacities. """ -function reduce_edge_capacities!(allocation_model::AllocationModel)::Nothing +function reduce_link_capacities!(allocation_model::AllocationModel)::Nothing (; problem) = allocation_model constraints_capacity = problem[:capacity] F = problem[:F] - for edge_id in only(constraints_capacity.axes) + for link_id in only(constraints_capacity.axes) # Before an allocation solve, subtract the flow used by allocation for the previous priority - # from the edge capacities + # from the link capacities JuMP.set_normalized_rhs( - constraints_capacity[edge_id], + constraints_capacity[link_id], max( 0.0, - JuMP.normalized_rhs(constraints_capacity[edge_id]) - JuMP.value(F[edge_id]), + JuMP.normalized_rhs(constraints_capacity[link_id]) - JuMP.value(F[link_id]), ), ) end @@ -338,7 +338,7 @@ function get_basin_data( @assert node_id.type == NodeType.Basin influx = mean_input_flows_subnetwork(p, subnetwork_id)[(node_id, node_id)] storage_basin = basin.current_properties.current_storage[parent(u)][node_id.idx] - control_inneighbors = inneighbor_labels_type(graph, node_id, EdgeType.control) + control_inneighbors = inneighbor_labels_type(graph, node_id, LinkType.control) if isempty(control_inneighbors) level_demand_idx = 0 else @@ -489,7 +489,7 @@ function set_initial_capacities_returnflow!( constraints_outflow = problem[:source_user] for node_id in only(constraints_outflow.axes) - source = sources[user_demand.outflow_edge[node_id.idx].edge] + source = sources[user_demand.outflow_link[node_id.idx].link] @assert source.type == AllocationSourceType.user_return source.capacity = 0.0 end @@ -594,7 +594,7 @@ function reduce_demands!( end node_with_demand_id = - only(outneighbor_labels_type(graph, node_id, EdgeType.control)) + only(outneighbor_labels_type(graph, node_id, LinkType.control)) flow_demand.demand[node_id.idx] = max( 0.0, @@ -697,7 +697,7 @@ function save_demands_and_allocations!( end """ -Save the allocation flows per basin and physical edge. +Save the allocation flows per basin and physical link. """ function save_allocation_flows!( p::Parameters, @@ -710,13 +710,13 @@ function save_allocation_flows!( (; allocation, graph) = p (; record_flow) = allocation - edges_allocation = keys(flow.data) + links_allocation = keys(flow.data) skip = false - # Loop over all tuples of 2 consecutive edges so that they can be processed - # simultaneously if they represent the same edge in both directions - for (edge_1, edge_2) in IterTools.partition(edges_allocation, 2, 1) + # Loop over all tuples of 2 consecutive links so that they can be processed + # simultaneously if they represent the same link in both directions + for (link_1, link_2) in IterTools.partition(links_allocation, 2, 1) if skip skip = false continue @@ -724,31 +724,31 @@ function save_allocation_flows!( flow_rate = 0.0 - if haskey(graph, edge_1...) - flow_rate += flow[edge_1] + if haskey(graph, link_1...) + flow_rate += flow[link_1] sign_2 = -1.0 - edge_metadata = graph[edge_1...] + link_metadata = graph[link_1...] 
else - edge_1_reverse = reverse(edge_1) - flow_rate -= flow[edge_1_reverse] + link_1_reverse = reverse(link_1) + flow_rate -= flow[link_1_reverse] sign_2 = 1.0 - edge_metadata = graph[edge_1_reverse...] + link_metadata = graph[link_1_reverse...] end - # Check whether the next edge is the current one reversed - # and the edge does not have a UserDemand end - if edge_2 == reverse(edge_1) && - !(edge_1[1].type == NodeType.UserDemand || edge_1[2].type == NodeType.UserDemand) - # If so, these edges are both processed in this iteration - flow_rate += sign_2 * flow[edge_2] + # Check whether the next link is the current one reversed + # and the link does not have a UserDemand end + if link_2 == reverse(link_1) && + !(link_1[1].type == NodeType.UserDemand || link_1[2].type == NodeType.UserDemand) + # If so, these links are both processed in this iteration + flow_rate += sign_2 * flow[link_2] skip = true end - id_from = edge_metadata.edge[1] - id_to = edge_metadata.edge[2] + id_from = link_metadata.link[1] + id_to = link_metadata.link[2] push!(record_flow.time, t) - push!(record_flow.edge_id, edge_metadata.id) + push!(record_flow.link_id, link_metadata.id) push!(record_flow.from_node_type, string(id_from.type)) push!(record_flow.from_node_id, Int32(id_from)) push!(record_flow.to_node_type, string(id_to.type)) @@ -765,7 +765,7 @@ function save_allocation_flows!( has_external_demand(graph, node_id, :level_demand)[1] flow_rate = sources[(node_id, node_id)].basin_flow_rate push!(record_flow.time, t) - push!(record_flow.edge_id, 0) + push!(record_flow.link_id, 0) push!(record_flow.from_node_type, string(NodeType.Basin)) push!(record_flow.from_node_id, node_id) push!(record_flow.to_node_type, string(NodeType.Basin)) @@ -794,7 +794,7 @@ function allocate_to_users_from_connected_basin!( # Check whether the upstream basin has a level demand # and thus can act as a source - upstream_basin_id = user_demand.inflow_edge[node_id.idx].edge[1] + upstream_basin_id = user_demand.inflow_link[node_id.idx].link[1] if has_external_demand(graph, upstream_basin_id, :level_demand)[1] # The demand of the UserDemand node at the current priority @@ -837,7 +837,7 @@ function set_source_capacity!( constraints_source_buffer = problem[:flow_buffer_outflow] for source in values(sources) - (; edge) = source + (; link) = source capacity_effective = if source == source_current if optimization_type == OptimizationType.collect_demands && @@ -851,15 +851,15 @@ function set_source_capacity!( end constraint = if source.type == AllocationSourceType.boundary_node - constraints_source_boundary[edge] + constraints_source_boundary[link] elseif source.type == AllocationSourceType.main_to_sub - constraints_source_main_network[edge] + constraints_source_main_network[link] elseif source.type == AllocationSourceType.basin - constraints_source_basin[edge[1]] + constraints_source_basin[link[1]] elseif source.type == AllocationSourceType.user_return - constraints_source_user_out[edge[1]] + constraints_source_user_out[link[1]] elseif source.type == AllocationSourceType.buffer - constraints_source_buffer[edge[1]] + constraints_source_buffer[link[1]] end JuMP.set_normalized_rhs(constraint, capacity_effective) @@ -921,14 +921,14 @@ function optimize_per_source!( end # Add the values of the flows at this priority - for edge in only(problem[:F].axes) - flow[edge] += max(JuMP.value(problem[:F][edge]), 0.0) + for link in only(problem[:F].axes) + flow[link] += max(JuMP.value(problem[:F][link]), 0.0) end # Adjust capacities for the optimization for the next 
source increase_source_capacities!(allocation_model, p, t) reduce_source_capacity!(problem, source) - reduce_edge_capacities!(allocation_model) + reduce_link_capacities!(allocation_model) # Adjust demands for next optimization (in case of internal_sources -> collect_demands) for parameter in propertynames(p) @@ -939,9 +939,9 @@ function optimize_per_source!( end # Add to the basin cumulative flow rate - for (edge, source) in sources + for (link, source) in sources if source.type == AllocationSourceType.basin - node_id = edge[1] + node_id = link[1] source.basin_flow_rate += JuMP.value(F_basin_out[node_id]) - JuMP.value(F_basin_in[node_id]) end @@ -982,8 +982,8 @@ function optimize_priority!( (; priorities) = allocation # Start the values of the flows at this priority at 0.0 - for edge in keys(flow.data) - flow[edge] = 0.0 + for link in keys(flow.data) + flow[link] = 0.0 end # Start the allocated amounts to basins at this priority at 0.0 @@ -1010,7 +1010,7 @@ function optimize_priority!( # Save the demands and allocated flows for all nodes that have these save_demands_and_allocations!(p, allocation_model, t, priority_idx) - # Save the flows over all edges in the subnetwork + # Save the flows over all links in the subnetwork save_allocation_flows!( p, t, @@ -1031,7 +1031,7 @@ function set_initial_values!( t::Float64, )::Nothing set_initial_capacities_source!(allocation_model, p) - set_initial_capacities_edge!(allocation_model, p) + set_initial_capacities_link!(allocation_model, p) set_initial_capacities_basin!(allocation_model, u, p, t) set_initial_capacities_buffer!(allocation_model) set_initial_capacities_returnflow!(allocation_model, p) @@ -1047,7 +1047,7 @@ function set_initial_values!( end """ -Set the capacities of all edges that denote a source to 0.0. +Set the capacities of all links that denote a source to 0.0. 
""" function empty_sources!(allocation_model::AllocationModel, allocation::Allocation)::Nothing (; problem) = allocation_model @@ -1057,8 +1057,8 @@ function empty_sources!(allocation_model::AllocationModel, allocation::Allocatio [:source_boundary, :source_user, :basin_outflow, :flow_buffer_outflow] constraint_set = problem[constraint_set_name] for key in only(constraint_set.axes) - # Do not set the capacity to 0.0 if the edge - # is a main to subnetwork connection edge + # Do not set the capacity to 0.0 if the link + # is a main to subnetwork connection link if key ∉ keys(subnetwork_demands) JuMP.set_normalized_rhs(constraint_set[key], 0.0) end @@ -1094,11 +1094,11 @@ function collect_demands!( ## Collect demand optimization_type = OptimizationType.collect_demands - main_network_source_edges = get_main_network_connections(p, subnetwork_id) + main_network_source_links = get_main_network_connections(p, subnetwork_id) # Reset the subnetwork demands to 0.0 for main_network_connection in keys(subnetwork_demands) - if main_network_connection in main_network_source_edges + if main_network_connection in main_network_source_links subnetwork_demands[main_network_connection] .= 0.0 end end diff --git a/core/src/callback.jl b/core/src/callback.jl index f73792abb..44303b0d1 100644 --- a/core/src/callback.jl +++ b/core/src/callback.jl @@ -133,14 +133,14 @@ function update_cumulative_flows!(u, t, integrator)::Nothing end # Exact boundary flow over time step - for (id, flow_rate, active, edge) in zip( + for (id, flow_rate, active, link) in zip( flow_boundary.node_id, flow_boundary.flow_rate, flow_boundary.active, - flow_boundary.outflow_edges, + flow_boundary.outflow_links, ) if active - outflow_id = edge[1].edge[2] + outflow_id = link[1].link[2] volume = integral(flow_rate, tprev, t) flow_boundary.cumulative_flow[id.idx] += volume flow_boundary.cumulative_flow_saveat[id.idx] += volume @@ -150,24 +150,24 @@ function update_cumulative_flows!(u, t, integrator)::Nothing # Update realized flows for allocation input for subnetwork_id in allocation.subnetwork_ids mean_input_flows_subnetwork_ = mean_input_flows_subnetwork(p, subnetwork_id) - for edge in keys(mean_input_flows_subnetwork_) - mean_input_flows_subnetwork_[edge] += flow_update_on_edge(integrator, edge) + for link in keys(mean_input_flows_subnetwork_) + mean_input_flows_subnetwork_[link] += flow_update_on_link(integrator, link) end end # Update realized flows for allocation output - for edge in keys(allocation.mean_realized_flows) - allocation.mean_realized_flows[edge] += flow_update_on_edge(integrator, edge) - if edge[1] == edge[2] - basin_id = edge[1] + for link in keys(allocation.mean_realized_flows) + allocation.mean_realized_flows[link] += flow_update_on_link(integrator, link) + if link[1] == link[2] + basin_id = link[1] @assert basin_id.type == NodeType.Basin for inflow_id in basin.inflow_ids[basin_id.idx] - allocation.mean_realized_flows[edge] += - flow_update_on_edge(integrator, (inflow_id, basin_id)) + allocation.mean_realized_flows[link] += + flow_update_on_link(integrator, (inflow_id, basin_id)) end for outflow_id in basin.outflow_ids[basin_id.idx] - allocation.mean_realized_flows[edge] -= - flow_update_on_edge(integrator, (basin_id, outflow_id)) + allocation.mean_realized_flows[link] -= + flow_update_on_link(integrator, (basin_id, outflow_id)) end end end @@ -198,14 +198,14 @@ function update_concentrations!(u, t, integrator)::Nothing end # Exact boundary flow over time step - for (id, flow_rate, active, edge) in zip( + for (id, 
flow_rate, active, link) in zip( flow_boundary.node_id, flow_boundary.flow_rate, flow_boundary.active, - flow_boundary.outflow_edges, + flow_boundary.outflow_links, ) if active - outflow_id = edge[1].edge[2] + outflow_id = link[1].link[2] volume = integral(flow_rate, tprev, t) @views mass[outflow_id.idx, :] .+= flow_boundary.concentration[id.idx, :] .* volume @@ -253,18 +253,18 @@ function update_concentrations!(u, t, integrator)::Nothing end """ -Given an edge (from_id, to_id), compute the cumulative flow over that -edge over the latest timestep. If from_id and to_id are both the same Basin, +Given an link (from_id, to_id), compute the cumulative flow over that +link over the latest timestep. If from_id and to_id are both the same Basin, the function returns the sum of the Basin forcings. """ -function flow_update_on_edge( +function flow_update_on_link( integrator::DEIntegrator, - edge_src::Tuple{NodeID, NodeID}, + link_src::Tuple{NodeID, NodeID}, )::Float64 (; u, uprev, p, t, tprev, dt) = integrator (; basin, flow_boundary) = p (; vertical_flux) = basin - from_id, to_id = edge_src + from_id, to_id = link_src if from_id == to_id @assert from_id.type == to_id.type == NodeType.Basin idx = from_id.idx @@ -279,7 +279,7 @@ function flow_update_on_edge( 0.0 end else - flow_idx = get_state_index(u, edge_src) + flow_idx = get_state_index(u, link_src) u[flow_idx] - uprev[flow_idx] end end @@ -298,13 +298,13 @@ function save_basin_state(u, t, integrator) end """ -Save all cumulative forcings and flows over edges over the latest timestep, +Save all cumulative forcings and flows over links over the latest timestep, Both computed by the solver and integrated exactly. Also computes the total horizontal inflow and outflow per Basin. """ function save_flow(u, t, integrator) (; p) = integrator - (; basin, state_inflow_edge, state_outflow_edge, flow_boundary, u_prev_saveat) = p + (; basin, state_inflow_link, state_outflow_link, flow_boundary, u_prev_saveat) = p Δt = get_Δt(integrator) flow_mean = (u - u_prev_saveat) / Δt @@ -315,9 +315,9 @@ function save_flow(u, t, integrator) outflow_mean = zeros(length(basin.node_id)) # Flow contributions from horizontal flow states - for (flow, inflow_edge, outflow_edge) in - zip(flow_mean, state_inflow_edge, state_outflow_edge) - inflow_id = inflow_edge.edge[1] + for (flow, inflow_link, outflow_link) in + zip(flow_mean, state_inflow_link, state_outflow_link) + inflow_id = inflow_link.link[1] if inflow_id.type == NodeType.Basin if flow > 0 outflow_mean[inflow_id.idx] += flow @@ -326,7 +326,7 @@ function save_flow(u, t, integrator) end end - outflow_id = outflow_edge.edge[2] + outflow_id = outflow_link.link[2] if outflow_id.type == NodeType.Basin if flow > 0 inflow_mean[outflow_id.idx] += flow @@ -340,10 +340,10 @@ function save_flow(u, t, integrator) flow_boundary_mean = copy(flow_boundary.cumulative_flow_saveat) ./ Δt flow_boundary.cumulative_flow_saveat .= 0.0 - for (outflow_edges, id) in zip(flow_boundary.outflow_edges, flow_boundary.node_id) + for (outflow_links, id) in zip(flow_boundary.outflow_links, flow_boundary.node_id) flow = flow_boundary_mean[id.idx] - for outflow_edge in outflow_edges - outflow_id = outflow_edge.edge[2] + for outflow_link in outflow_links + outflow_id = outflow_link.link[2] if outflow_id.type == NodeType.Basin inflow_mean[outflow_id.idx] += flow end @@ -807,14 +807,14 @@ function update_allocation!(integrator)::Nothing # Divide by the allocation Δt to get the mean input flows from the cumulative flows (; Δt_allocation) = 
allocation_models[1] for mean_input_flows_subnetwork in values(mean_input_flows) - for edge in keys(mean_input_flows_subnetwork) - mean_input_flows_subnetwork[edge] /= Δt_allocation + for link in keys(mean_input_flows_subnetwork) + mean_input_flows_subnetwork[link] /= Δt_allocation end end # Divide by the allocation Δt to get the mean realized flows from the cumulative flows - for edge in keys(mean_realized_flows) - mean_realized_flows[edge] /= Δt_allocation + for link in keys(mean_realized_flows) + mean_realized_flows[link] /= Δt_allocation end # If a main network is present, collect demands of subnetworks @@ -833,12 +833,12 @@ function update_allocation!(integrator)::Nothing # Reset the mean flows for mean_flows in mean_input_flows - for edge in keys(mean_flows) - mean_flows[edge] = 0.0 + for link in keys(mean_flows) + mean_flows[link] = 0.0 end end - for edge in keys(mean_realized_flows) - mean_realized_flows[edge] = 0.0 + for link in keys(mean_realized_flows) + mean_realized_flows[link] = 0.0 end end diff --git a/core/src/concentration.jl b/core/src/concentration.jl index 2ab3043a7..4cea74033 100644 --- a/core/src/concentration.jl +++ b/core/src/concentration.jl @@ -6,13 +6,13 @@ function mass_updates_user_demand!(integrator::DEIntegrator)::Nothing (; basin, user_demand) = integrator.p (; concentration_state, mass) = basin.concentration_data - @views for (inflow_edge, outflow_edge) in - zip(user_demand.inflow_edge, user_demand.outflow_edge) - from_node = inflow_edge.edge[1] - to_node = outflow_edge.edge[2] - userdemand_idx = outflow_edge.edge[1].idx + @views for (inflow_link, outflow_link) in + zip(user_demand.inflow_link, user_demand.outflow_link) + from_node = inflow_link.link[1] + to_node = outflow_link.link[2] + userdemand_idx = outflow_link.link[1].idx if from_node.type == NodeType.Basin - flow = flow_update_on_edge(integrator, inflow_edge.edge) + flow = flow_update_on_link(integrator, inflow_link.link) if flow < 0 mass[from_node.idx, :] .-= concentration_state[to_node.idx, :] .* flow mass[from_node.idx, :] .-= @@ -20,7 +20,7 @@ function mass_updates_user_demand!(integrator::DEIntegrator)::Nothing end end if to_node.type == NodeType.Basin - flow = flow_update_on_edge(integrator, outflow_edge.edge) + flow = flow_update_on_link(integrator, outflow_link.link) if flow > 0 mass[to_node.idx, :] .+= concentration_state[from_node.idx, :] .* flow mass[to_node.idx, :] .+= @@ -35,14 +35,14 @@ end Process all mass inflows to basins """ function mass_inflows_basin!(integrator::DEIntegrator)::Nothing - (; basin, state_inflow_edge, state_outflow_edge, level_boundary) = integrator.p + (; basin, state_inflow_link, state_outflow_link, level_boundary) = integrator.p (; cumulative_in, concentration_state, mass) = basin.concentration_data - for (inflow_edge, outflow_edge) in zip(state_inflow_edge, state_outflow_edge) - from_node = inflow_edge.edge[1] - to_node = outflow_edge.edge[2] + for (inflow_link, outflow_link) in zip(state_inflow_link, state_outflow_link) + from_node = inflow_link.link[1] + to_node = outflow_link.link[2] @views if from_node.type == NodeType.Basin - flow = flow_update_on_edge(integrator, inflow_edge.edge) + flow = flow_update_on_link(integrator, inflow_link.link) if flow < 0 cumulative_in[from_node.idx] -= flow if to_node.type == NodeType.Basin @@ -55,7 +55,7 @@ function mass_inflows_basin!(integrator::DEIntegrator)::Nothing user_demand.concentration[to_node.idx, :] .* flow elseif to_node.type == NodeType.Terminal && to_node.value == 0 # UserDemand inflow is discoupled from its 
outflow, - # and the unset flow edge defaults to Terminal #0 + # and the unset flow link defaults to Terminal #0 nothing else @warn "Unsupported outflow from $(to_node.type) #$(to_node.value) to $(from_node.type) #$(from_node.value) with flow $flow" @@ -64,7 +64,7 @@ function mass_inflows_basin!(integrator::DEIntegrator)::Nothing end if to_node.type == NodeType.Basin - flow = flow_update_on_edge(integrator, outflow_edge.edge) + flow = flow_update_on_link(integrator, outflow_link.link) if flow > 0 cumulative_in[to_node.idx] += flow @views if from_node.type == NodeType.Basin @@ -77,7 +77,7 @@ function mass_inflows_basin!(integrator::DEIntegrator)::Nothing user_demand.concentration[from_node.idx, :] .* flow elseif from_node.type == NodeType.Terminal && from_node.value == 0 # UserDemand outflow is discoupled from its inflow, - # and the unset flow edge defaults to Terminal #0 + # and the unset flow link defaults to Terminal #0 nothing else @warn "Unsupported outflow from $(from_node.type) #$(from_node.value) to $(to_node.type) #$(to_node.value) with flow $flow" @@ -92,20 +92,20 @@ end Process all mass outflows from Basins """ function mass_outflows_basin!(integrator::DEIntegrator)::Nothing - (; state_inflow_edge, state_outflow_edge, basin) = integrator.p + (; state_inflow_link, state_outflow_link, basin) = integrator.p (; mass, concentration_state) = basin.concentration_data - @views for (inflow_edge, outflow_edge) in zip(state_inflow_edge, state_outflow_edge) - from_node = inflow_edge.edge[1] - to_node = outflow_edge.edge[2] + @views for (inflow_link, outflow_link) in zip(state_inflow_link, state_outflow_link) + from_node = inflow_link.link[1] + to_node = outflow_link.link[2] if from_node.type == NodeType.Basin - flow = flow_update_on_edge(integrator, inflow_edge.edge) + flow = flow_update_on_link(integrator, inflow_link.link) if flow > 0 mass[from_node.idx, :] .-= concentration_state[from_node.idx, :] .* flow end end if to_node.type == NodeType.Basin - flow = flow_update_on_edge(integrator, outflow_edge.edge) + flow = flow_update_on_link(integrator, outflow_link.link) if flow < 0 mass[to_node.idx, :] .+= concentration_state[to_node.idx, :] .* flow end diff --git a/core/src/graph.jl b/core/src/graph.jl index f3ea0780b..596d58b1d 100644 --- a/core/src/graph.jl +++ b/core/src/graph.jl @@ -2,8 +2,8 @@ Return a directed metagraph with data of nodes (NodeMetadata): [`NodeMetadata`](@ref) -and data of edges (EdgeMetadata): -[`EdgeMetadata`](@ref) +and data of links (LinkMetadata): +[`LinkMetadata`](@ref) """ function create_graph(db::DB, config::Config)::MetaGraph node_table = get_node_ids(db) @@ -11,33 +11,33 @@ function create_graph(db::DB, config::Config)::MetaGraph db, "SELECT node_id, node_type, subnetwork_id FROM Node ORDER BY node_type, node_id", ) - edge_rows = execute( + link_rows = execute( db, """ SELECT - Edge.edge_id, + Link.link_id, FromNode.node_id AS from_node_id, FromNode.node_type AS from_node_type, ToNode.node_id AS to_node_id, ToNode.node_type AS to_node_type, - Edge.edge_type - FROM Edge - LEFT JOIN Node AS FromNode ON FromNode.node_id = Edge.from_node_id - LEFT JOIN Node AS ToNode ON ToNode.node_id = Edge.to_node_id + Link.link_type + FROM Link + LEFT JOIN Node AS FromNode ON FromNode.node_id = Link.from_node_id + LEFT JOIN Node AS ToNode ON ToNode.node_id = Link.to_node_id """, ) # Node IDs per subnetwork node_ids = Dict{Int32, Set{NodeID}}() - # The metadata of the flow edges in the order in which they are in the input + # The metadata of the flow links in the order in which 
they are in the input # and will be in the output - flow_edges = EdgeMetadata[] - # Dictionary from flow edge to index in flow vector + flow_links = LinkMetadata[] + # Dictionary from flow link to index in flow vector graph = MetaGraph( DiGraph(); label_type = NodeID, vertex_data_type = NodeMetadata, - edge_data_type = EdgeMetadata, + edge_data_type = LinkMetadata, graph_data = nothing, ) for row in node_rows @@ -56,36 +56,36 @@ function create_graph(db::DB, config::Config)::MetaGraph end errors = false - for (; edge_id, from_node_type, from_node_id, to_node_type, to_node_id, edge_type) in - edge_rows + for (; link_id, from_node_type, from_node_id, to_node_type, to_node_id, link_type) in + link_rows try # hasfield does not work - edge_type = getfield(EdgeType, Symbol(edge_type)) + link_type = getfield(LinkType, Symbol(link_type)) catch - error("Invalid edge type $edge_type.") + error("Invalid link type $link_type.") end id_src = NodeID(from_node_type, from_node_id, node_table) id_dst = NodeID(to_node_type, to_node_id, node_table) - edge_metadata = - EdgeMetadata(; id = edge_id, type = edge_type, edge = (id_src, id_dst)) - if edge_type == EdgeType.flow - push!(flow_edges, edge_metadata) + link_metadata = + LinkMetadata(; id = link_id, type = link_type, link = (id_src, id_dst)) + if link_type == LinkType.flow + push!(flow_links, link_metadata) end if haskey(graph, id_src, id_dst) errors = true - @error "Duplicate edge" id_src id_dst + @error "Duplicate link" id_src id_dst end - graph[id_src, id_dst] = edge_metadata + graph[id_src, id_dst] = link_metadata end if errors - error("Invalid edges found") + error("Invalid links found") end if incomplete_subnetwork(graph, node_ids) error("Incomplete connectivity in subnetwork") end - graph_data = (; node_ids, flow_edges, config.solver.saveat) + graph_data = (; node_ids, flow_links, config.solver.saveat) @reset graph.graph_data = graph_data return graph @@ -94,28 +94,28 @@ end abstract type AbstractNeighbors end """ -Iterate over incoming neighbors of a given label in a MetaGraph, only for edges of edge_type +Iterate over incoming neighbors of a given label in a MetaGraph, only for links of link_type """ struct InNeighbors{T} <: AbstractNeighbors graph::T label::NodeID - edge_type::EdgeType.T + link_type::LinkType.T end """ -Iterate over outgoing neighbors of a given label in a MetaGraph, only for edges of edge_type +Iterate over outgoing neighbors of a given label in a MetaGraph, only for links of link_type """ struct OutNeighbors{T} <: AbstractNeighbors graph::T label::NodeID - edge_type::EdgeType.T + link_type::LinkType.T end Base.IteratorSize(::Type{<:AbstractNeighbors}) = Base.SizeUnknown() Base.eltype(::Type{<:AbstractNeighbors}) = NodeID function Base.iterate(iter::InNeighbors, state = 1) - (; graph, label, edge_type) = iter + (; graph, label, link_type) = iter code = code_for(graph, label) local label_in while true @@ -123,7 +123,7 @@ function Base.iterate(iter::InNeighbors, state = 1) x === nothing && return nothing code_in, state = x label_in = label_for(graph, code_in) - if graph[label_in, label].type == edge_type + if graph[label_in, label].type == link_type break end end @@ -131,7 +131,7 @@ function Base.iterate(iter::InNeighbors, state = 1) end function Base.iterate(iter::OutNeighbors, state = 1) - (; graph, label, edge_type) = iter + (; graph, label, link_type) = iter code = code_for(graph, label) local label_out while true @@ -139,7 +139,7 @@ function Base.iterate(iter::OutNeighbors, state = 1) x === nothing && return nothing 
code_out, state = x label_out = label_for(graph, code_out) - if graph[label, label_out].type == edge_type + if graph[label, label_out].type == link_type break end end @@ -148,73 +148,73 @@ end """ Get the inneighbor node IDs of the given node ID (label) -over the given edge type in the graph. +over the given link type in the graph. """ function inneighbor_labels_type( graph::MetaGraph, label::NodeID, - edge_type::EdgeType.T, + link_type::LinkType.T, )::InNeighbors - return InNeighbors(graph, label, edge_type) + return InNeighbors(graph, label, link_type) end """ Get the outneighbor node IDs of the given node ID (label) -over the given edge type in the graph. +over the given link type in the graph. """ function outneighbor_labels_type( graph::MetaGraph, label::NodeID, - edge_type::EdgeType.T, + link_type::LinkType.T, )::OutNeighbors - return OutNeighbors(graph, label, edge_type) + return OutNeighbors(graph, label, link_type) end """ Get the in- and outneighbor node IDs of the given node ID (label) -over the given edge type in the graph. +over the given link type in the graph. """ function all_neighbor_labels_type( graph::MetaGraph, label::NodeID, - edge_type::EdgeType.T, + link_type::LinkType.T, )::Iterators.Flatten return Iterators.flatten(( - outneighbor_labels_type(graph, label, edge_type), - inneighbor_labels_type(graph, label, edge_type), + outneighbor_labels_type(graph, label, link_type), + inneighbor_labels_type(graph, label, link_type), )) end """ -Get the outneighbors over flow edges. +Get the outneighbors over flow links. """ function outflow_ids(graph::MetaGraph, id::NodeID)::OutNeighbors - return outneighbor_labels_type(graph, id, EdgeType.flow) + return outneighbor_labels_type(graph, id, LinkType.flow) end """ -Get the inneighbors over flow edges. +Get the inneighbors over flow links. """ function inflow_ids(graph::MetaGraph, id::NodeID)::InNeighbors - return inneighbor_labels_type(graph, id, EdgeType.flow) + return inneighbor_labels_type(graph, id, LinkType.flow) end """ -Get the in- and outneighbors over flow edges. +Get the in- and outneighbors over flow links. """ function inoutflow_ids(graph::MetaGraph, id::NodeID)::Iterators.Flatten - return all_neighbor_labels_type(graph, id, EdgeType.flow) + return all_neighbor_labels_type(graph, id, LinkType.flow) end """ -Get the unique outneighbor over a flow edge. +Get the unique outneighbor over a flow link. """ function outflow_id(graph::MetaGraph, id::NodeID)::NodeID return only(outflow_ids(graph, id)) end """ -Get the unique inneighbor over a flow edge. +Get the unique inneighbor over a flow link. """ function inflow_id(graph::MetaGraph, id::NodeID)::NodeID return only(inflow_ids(graph, id)) @@ -222,7 +222,7 @@ end """ Get the specific q from the input vector `flow` which has the same components as -the state vector, given an edge (inflow_id, outflow_id). +the state vector, given an link (inflow_id, outflow_id). `flow` can be either instantaneous or integrated/averaged. Instantaneous FlowBoundary flows can be obtained from the parameters, but integrated/averaged FlowBoundary flows must be provided via `boundary_flow`. """ @@ -230,11 +230,11 @@ function get_flow( flow::ComponentVector, p::Parameters, t::Number, - edge::Tuple{NodeID, NodeID}; + link::Tuple{NodeID, NodeID}; boundary_flow = nothing, ) (; flow_boundary) = p - from_id = edge[1] + from_id = link[1] if from_id.type == NodeType.FlowBoundary if boundary_flow === nothing flow_boundary.active[from_id.idx] ? 
flow_boundary.flow_rate[from_id.idx](t) : @@ -243,7 +243,7 @@ function get_flow( boundary_flow[from_id.idx] end else - flow[get_state_index(flow, edge)] + flow[get_state_index(flow, link)] end end diff --git a/core/src/model.jl b/core/src/model.jl index f029aa244..6c24af338 100644 --- a/core/src/model.jl +++ b/core/src/model.jl @@ -51,8 +51,8 @@ function Model(config::Config)::Model if !valid_nodes(db) error("Invalid nodes found.") end - if !valid_edge_types(db) - error("Invalid edge types found.") + if !valid_link_types(db) + error("Invalid link types found.") end local parameters, tstops @@ -105,7 +105,7 @@ function Model(config::Config)::Model u0 = build_state_vector(parameters) du0 = zero(u0) - parameters = set_state_flow_edges(parameters, u0) + parameters = set_state_flow_links(parameters, u0) parameters = build_flow_to_storage(parameters, u0) @reset parameters.u_prev_saveat = zero(u0) diff --git a/core/src/parameter.jl b/core/src/parameter.jl index 0ecaec28a..308773417 100644 --- a/core/src/parameter.jl +++ b/core/src/parameter.jl @@ -13,8 +13,8 @@ const SolverStats = @NamedTuple{ rejected_timesteps::Int, } -# EdgeType.flow and NodeType.FlowBoundary -@enumx EdgeType flow control none +# LinkType.flow and NodeType.FlowBoundary +@enumx LinkType flow control none @eval @enumx NodeType $(config.nodetypes...) @enumx ContinuousControlType None Continuous PID @enumx Substance Continuity = 1 Initial = 2 LevelBoundary = 3 FlowBoundary = 4 UserDemand = @@ -144,15 +144,15 @@ cache(len::Int)::Cache = LazyBufferCache(Returns(len); initializer! = set_zero!) """ Data structure for a single source within an allocation subnetwork. -edge: The outflow edge of the source -type: The type of source (edge, basin, main_to_sub, user_return, buffer) +link: The outflow link of the source +type: The type of source (link, basin, main_to_sub, user_return, buffer) capacity: The initial capacity of the source as determined by the physical layer capacity_reduced: The capacity adjusted by passed optimizations basin_flow_rate: The total outflow rate of a basin when optimized over all sources for one priority. Ignored when the source is not a basin. """ @kwdef mutable struct AllocationSource - const edge::Tuple{NodeID, NodeID} + const link::Tuple{NodeID, NodeID} const type::AllocationSourceType.T capacity::Float64 = 0.0 capacity_reduced::Float64 = 0.0 @@ -160,16 +160,16 @@ basin_flow_rate: The total outflow rate of a basin when optimized over all sourc end function Base.show(io::IO, source::AllocationSource) - (; edge, type) = source - print(io, "AllocationSource of type $type at edge $edge") + (; link, type) = source + print(io, "AllocationSource of type $type at link $link") end """ Store information for a subnetwork used for allocation. 
subnetwork_id: The ID of this allocation network -capacity: The capacity per edge of the allocation network, as constrained by nodes that have a max_flow_rate -flow: The flows over all the edges in the subnetwork for a certain priority (used for allocation_flow output) +capacity: The capacity per link of the allocation network, as constrained by nodes that have a max_flow_rate +flow: The flows over all the links in the subnetwork for a certain priority (used for allocation_flow output) sources: source data in preferred order of optimization problem: The JuMP.jl model for solving the allocation problem Δt_allocation: The time interval between consecutive allocation solves @@ -190,10 +190,10 @@ allocation_models: The allocation models for the main network and subnetworks co subnetwork_ids main_network_connections: (from_id, to_id) from the main network to the subnetwork per subnetwork priorities: All used priority values. -subnetwork_demands: The demand of an edge from the main network to a subnetwork -subnetwork_allocateds: The allocated flow of an edge from the main network to a subnetwork -mean_input_flows: Per subnetwork, flows averaged over Δt_allocation over edges that are allocation sources -mean_realized_flows: Flows averaged over Δt_allocation over edges that realize a demand +subnetwork_demands: The demand of an link from the main network to a subnetwork +subnetwork_allocateds: The allocated flow of an link from the main network to a subnetwork +mean_input_flows: Per subnetwork, flows averaged over Δt_allocation over links that are allocation sources +mean_realized_flows: Flows averaged over Δt_allocation over links that realize a demand record_demand: A record of demands and allocated flows for nodes that have these record_flow: A record of all flows computed by allocation optimization, eventually saved to output file @@ -229,7 +229,7 @@ record_flow: A record of all flows computed by allocation optimization, eventual ) record_flow::@NamedTuple{ time::Vector{Float64}, - edge_id::Vector{Int32}, + link_id::Vector{Int32}, from_node_type::Vector{String}, from_node_id::Vector{Int32}, to_node_type::Vector{String}, @@ -240,7 +240,7 @@ record_flow: A record of all flows computed by allocation optimization, eventual optimization_type::Vector{String}, } = (; time = Float64[], - edge_id = Int32[], + link_id = Int32[], from_node_type = String[], from_node_id = Int32[], to_node_type = String[], @@ -265,18 +265,18 @@ subnetwork_id: Allocation network ID (0 if not in subnetwork) end """ -Type for storing metadata of edges in the graph: -id: ID of the edge (only used for labeling flow output) -type: type of the edge -edge: (from node ID, to node ID) +Type for storing metadata of links in the graph: +id: ID of the link (only used for labeling flow output) +type: type of the link +link: (from node ID, to node ID) """ -@kwdef struct EdgeMetadata +@kwdef struct LinkMetadata id::Int32 - type::EdgeType.T - edge::Tuple{NodeID, NodeID} + type::LinkType.T + link::Tuple{NodeID, NodeID} end -Base.length(::EdgeMetadata) = 1 +Base.length(::LinkMetadata) = 1 """ The update of a parameter given by a value and a reference to the target @@ -304,7 +304,7 @@ end """ In-memory storage of saved mean flows for writing to results. 
-- `flow`: The mean flows on all edges and state-dependent forcings +- `flow`: The mean flows on all links and state-dependent forcings - `inflow`: The sum of the mean flows coming into each Basin - `outflow`: The sum of the mean flows going out of each Basin - `flow_boundary`: The exact integrated mean flows of flow boundaries @@ -464,9 +464,9 @@ Rating curve from level to flow rate. The rating curve is a lookup table with li interpolation in between. Relations can be updated in time. node_id: node ID of the TabulatedRatingCurve node -inflow_edge: incoming flow edge metadata +inflow_link: incoming flow link metadata The ID of the destination node is always the ID of the TabulatedRatingCurve node -outflow_edge: outgoing flow edge metadata +outflow_link: outgoing flow link metadata The ID of the source node is always the ID of the TabulatedRatingCurve node active: whether this node is active and thus contributes flows max_downstream_level: The downstream level above which the TabulatedRatingCurve flow goes to zero @@ -476,8 +476,8 @@ control_mapping: dictionary from (node_id, control_state) to Q(h) and/or active """ @kwdef struct TabulatedRatingCurve <: AbstractParameterNode node_id::Vector{NodeID} - inflow_edge::Vector{EdgeMetadata} - outflow_edge::Vector{EdgeMetadata} + inflow_link::Vector{LinkMetadata} + outflow_link::Vector{LinkMetadata} active::Vector{Bool} max_downstream_level::Vector{Float64} = fill(Inf, length(node_id)) interpolations::Vector{ScalarInterpolation} @@ -487,9 +487,9 @@ end """ node_id: node ID of the LinearResistance node -inflow_edge: incoming flow edge metadata +inflow_link: incoming flow link metadata The ID of the destination node is always the ID of the LinearResistance node -outflow_edge: outgoing flow edge metadata +outflow_link: outgoing flow link metadata The ID of the source node is always the ID of the LinearResistance node active: whether this node is active and thus contributes flows resistance: the resistance to flow; `Q_unlimited = Δh/resistance` @@ -498,8 +498,8 @@ control_mapping: dictionary from (node_id, control_state) to resistance and/or a """ @kwdef struct LinearResistance <: AbstractParameterNode node_id::Vector{NodeID} - inflow_edge::Vector{EdgeMetadata} - outflow_edge::Vector{EdgeMetadata} + inflow_link::Vector{LinkMetadata} + outflow_link::Vector{LinkMetadata} active::Vector{Bool} resistance::Vector{Float64} max_flow_rate::Vector{Float64} @@ -510,9 +510,9 @@ end This is a simple Manning-Gauckler reach connection. node_id: node ID of the ManningResistance node -inflow_edge: incoming flow edge metadata +inflow_link: incoming flow link metadata The ID of the destination node is always the ID of the ManningResistance node -outflow_edge: outgoing flow edge metadata +outflow_link: outgoing flow link metadata The ID of the source node is always the ID of the ManningResistance node length: reach length manning_n: roughness; Manning's n in (SI units). 
@@ -546,8 +546,8 @@ Requirements: """ @kwdef struct ManningResistance <: AbstractParameterNode node_id::Vector{NodeID} - inflow_edge::Vector{EdgeMetadata} - outflow_edge::Vector{EdgeMetadata} + inflow_link::Vector{LinkMetadata} + outflow_link::Vector{LinkMetadata} active::Vector{Bool} length::Vector{Float64} manning_n::Vector{Float64} @@ -575,7 +575,7 @@ end """ node_id: node ID of the FlowBoundary node -outflow_edges: The outgoing flow edge metadata +outflow_links: The outgoing flow link metadata active: whether this node is active and thus contributes flow cumulative_flow: The exactly integrated cumulative boundary flow since the start of the simulation cumulative_flow_saveat: The exactly integrated cumulative boundary flow since the last saveat @@ -585,7 +585,7 @@ concentration_time: Data source for concentration updates """ @kwdef struct FlowBoundary{C} <: AbstractParameterNode node_id::Vector{NodeID} - outflow_edges::Vector{Vector{EdgeMetadata}} + outflow_links::Vector{Vector{LinkMetadata}} active::Vector{Bool} cumulative_flow::Vector{Float64} = zeros(length(node_id)) cumulative_flow_saveat::Vector{Float64} = zeros(length(node_id)) @@ -596,9 +596,9 @@ end """ node_id: node ID of the Pump node -inflow_edge: incoming flow edge metadata +inflow_link: incoming flow link metadata The ID of the destination node is always the ID of the Pump node -outflow_edge: outgoing flow edge metadata +outflow_link: outgoing flow link metadata The ID of the source node is always the ID of the Pump node active: whether this node is active and thus contributes flow flow_rate: target flow rate @@ -611,8 +611,8 @@ continuous_control_type: one of None, ContinuousControl, PidControl """ @kwdef struct Pump <: AbstractParameterNode node_id::Vector{NodeID} - inflow_edge::Vector{EdgeMetadata} = [] - outflow_edge::Vector{EdgeMetadata} = [] + inflow_link::Vector{LinkMetadata} = [] + outflow_link::Vector{LinkMetadata} = [] active::Vector{Bool} = fill(true, length(node_id)) flow_rate::Cache = cache(length(node_id)) min_flow_rate::Vector{Float64} = zeros(length(node_id)) @@ -625,8 +625,8 @@ continuous_control_type: one of None, ContinuousControl, PidControl function Pump( node_id, - inflow_edge, - outflow_edge, + inflow_link, + outflow_link, active, flow_rate, min_flow_rate, @@ -639,8 +639,8 @@ continuous_control_type: one of None, ContinuousControl, PidControl if valid_flow_rates(node_id, flow_rate[Float64[]], control_mapping) return new( node_id, - inflow_edge, - outflow_edge, + inflow_link, + outflow_link, active, flow_rate, min_flow_rate, @@ -658,9 +658,9 @@ end """ node_id: node ID of the Outlet node -inflow_edge: incoming flow edge metadata. +inflow_link: incoming flow link metadata. The ID of the destination node is always the ID of the Outlet node -outflow_edge: outgoing flow edge metadata. +outflow_link: outgoing flow link metadata. 
The ID of the source node is always the ID of the Outlet node active: whether this node is active and thus contributes flow flow_rate: target flow rate @@ -673,8 +673,8 @@ continuous_control_type: one of None, ContinuousControl, PidControl """ @kwdef struct Outlet <: AbstractParameterNode node_id::Vector{NodeID} - inflow_edge::Vector{EdgeMetadata} = [] - outflow_edge::Vector{EdgeMetadata} = [] + inflow_link::Vector{LinkMetadata} = [] + outflow_link::Vector{LinkMetadata} = [] active::Vector{Bool} = fill(true, length(node_id)) flow_rate::Cache = cache(length(node_id)) min_flow_rate::Vector{Float64} = zeros(length(node_id)) @@ -687,8 +687,8 @@ continuous_control_type: one of None, ContinuousControl, PidControl function Outlet( node_id, - inflow_edge, - outflow_edge, + inflow_link, + outflow_link, active, flow_rate, min_flow_rate, @@ -701,8 +701,8 @@ continuous_control_type: one of None, ContinuousControl, PidControl if valid_flow_rates(node_id, flow_rate[Float64[]], control_mapping) return new( node_id, - inflow_edge, - outflow_edge, + inflow_link, + outflow_link, active, flow_rate, min_flow_rate, @@ -841,9 +841,9 @@ end """ node_id: node ID of the UserDemand node -inflow_edge: incoming flow edge +inflow_link: incoming flow link The ID of the destination node is always the ID of the UserDemand node -outflow_edge: outgoing flow edge metadata +outflow_link: outgoing flow link metadata The ID of the source node is always the ID of the UserDemand node active: whether this node is active and thus demands water demand: water flux demand of UserDemand per priority (node_idx, priority_idx) @@ -861,8 +861,8 @@ concentration_time: Data source for concentration updates """ @kwdef struct UserDemand{C} <: AbstractDemandNode node_id::Vector{NodeID} - inflow_edge::Vector{EdgeMetadata} = [] - outflow_edge::Vector{EdgeMetadata} = [] + inflow_link::Vector{LinkMetadata} = [] + outflow_link::Vector{LinkMetadata} = [] active::Vector{Bool} = fill(true, length(node_id)) demand::Matrix{Float64} demand_reduced::Matrix{Float64} @@ -933,10 +933,10 @@ end The metadata of the graph (the fields of the NamedTuple) can be accessed e.g. using graph[].flow. node_ids: mapping subnetwork ID -> node IDs in that subnetwork -edges_source: mapping subnetwork ID -> metadata of allocation - source edges in that subnetwork -flow_edges: The metadata of all flow edges - of the flow over that edge +links_source: mapping subnetwork ID -> metadata of allocation + source links in that subnetwork +flow_links: The metadata of all flow links + of the flow over that link saveat: The time interval between saves of output data (storage, flow, ...) 
""" const ModelGraph = MetaGraph{ @@ -944,10 +944,10 @@ const ModelGraph = MetaGraph{ DiGraph{Int64}, NodeID, NodeMetadata, - EdgeMetadata, + LinkMetadata, @NamedTuple{ node_ids::Dict{Int32, Set{NodeID}}, - flow_edges::Vector{EdgeMetadata}, + flow_links::Vector{LinkMetadata}, saveat::Float64, }, MetaGraphsNext.var"#11#13", @@ -974,9 +974,9 @@ const ModelGraph = MetaGraph{ const level_demand::LevelDemand const flow_demand::FlowDemand const subgrid::Subgrid - # Per state the in- and outflow edges associated with that state (if they exist) - const state_inflow_edge::C9 = ComponentVector() - const state_outflow_edge::C10 = ComponentVector() + # Per state the in- and outflow links associated with that state (if they exist) + const state_inflow_link::C9 = ComponentVector() + const state_outflow_link::C10 = ComponentVector() all_nodes_active::Bool = false tprev::Float64 = 0.0 # Sparse matrix for combining flows into storages diff --git a/core/src/read.jl b/core/src/read.jl index 7e0b7712a..92c48f2c0 100644 --- a/core/src/read.jl +++ b/core/src/read.jl @@ -295,8 +295,8 @@ function LinearResistance(db::DB, config::Config, graph::MetaGraph)::LinearResis return LinearResistance(; node_id, - inflow_edge = inflow_edge.(Ref(graph), node_id), - outflow_edge = outflow_edge.(Ref(graph), node_id), + inflow_link = inflow_link.(Ref(graph), node_id), + outflow_link = outflow_link.(Ref(graph), node_id), parsed_parameters.active, parsed_parameters.resistance, parsed_parameters.max_flow_rate, @@ -419,8 +419,8 @@ function TabulatedRatingCurve( return TabulatedRatingCurve(; node_id = node_ids, - inflow_edge = inflow_edge.(Ref(graph), node_ids), - outflow_edge = outflow_edge.(Ref(graph), node_ids), + inflow_link = inflow_link.(Ref(graph), node_ids), + outflow_link = outflow_link.(Ref(graph), node_ids), active, max_downstream_level, interpolations, @@ -448,8 +448,8 @@ function ManningResistance( return ManningResistance(; node_id, - inflow_edge = inflow_edge.(Ref(graph), node_id), - outflow_edge = outflow_edge.(Ref(graph), node_id), + inflow_link = inflow_link.(Ref(graph), node_id), + outflow_link = outflow_link.(Ref(graph), node_id), parsed_parameters.active, parsed_parameters.length, parsed_parameters.manning_n, @@ -533,7 +533,7 @@ function FlowBoundary(db::DB, config::Config, graph::MetaGraph)::FlowBoundary return FlowBoundary(; node_id = node_ids, - outflow_edges = outflow_edges.(Ref(graph), node_ids), + outflow_links = outflow_links.(Ref(graph), node_ids), parsed_parameters.active, parsed_parameters.flow_rate, concentration, @@ -564,8 +564,8 @@ function Pump(db::DB, config::Config, graph::MetaGraph)::Pump return Pump(; node_id, - inflow_edge = inflow_edge.(Ref(graph), node_id), - outflow_edge = outflow_edge.(Ref(graph), node_id), + inflow_link = inflow_link.(Ref(graph), node_id), + outflow_link = outflow_link.(Ref(graph), node_id), parsed_parameters.active, flow_rate, parsed_parameters.min_flow_rate, @@ -604,8 +604,8 @@ function Outlet(db::DB, config::Config, graph::MetaGraph)::Outlet return Outlet(; node_id, - inflow_edge = inflow_edge.(Ref(graph), node_id), - outflow_edge = outflow_edge.(Ref(graph), node_id), + inflow_link = inflow_link.(Ref(graph), node_id), + outflow_link = outflow_link.(Ref(graph), node_id), parsed_parameters.active, flow_rate, parsed_parameters.min_flow_rate, @@ -900,7 +900,7 @@ function DiscreteControl(db::DB, config::Config, graph::MetaGraph)::DiscreteCont end controlled_nodes = - collect.(outneighbor_labels_type.(Ref(graph), node_id, EdgeType.control)) + 
collect.(outneighbor_labels_type.(Ref(graph), node_id, LinkType.control)) return DiscreteControl(; node_id, @@ -1020,7 +1020,7 @@ function PidControl(db::DB, config::Config, graph::MetaGraph)::PidControl controlled_basins = Set{NodeID}() for id in node_ids - controlled_node = only(outneighbor_labels_type(graph, id, EdgeType.control)) + controlled_node = only(outneighbor_labels_type(graph, id, LinkType.control)) for id_inout in inoutflow_ids(graph, controlled_node) if id_inout.type == NodeType.Basin push!(controlled_basins, id_inout) @@ -1208,8 +1208,8 @@ function UserDemand(db::DB, config::Config, graph::MetaGraph)::UserDemand return UserDemand(; node_id = node_ids, - inflow_edge = inflow_edge.(Ref(graph), node_ids), - outflow_edge = outflow_edge.(Ref(graph), node_ids), + inflow_link = inflow_link.(Ref(graph), node_ids), + outflow_link = outflow_link.(Ref(graph), node_ids), active, demand, demand_reduced, @@ -1456,16 +1456,16 @@ function Allocation(db::DB, config::Config, graph::MetaGraph)::Allocation push!(mean_input_flows, Dict{Tuple{NodeID, NodeID}, Float64}()) end - # Find edges which serve as sources in allocation - for edge_metadata in values(graph.edge_data) - (; edge) = edge_metadata - id_source, _ = edge + # Find links which serve as sources in allocation + for link_metadata in values(graph.edge_data) + (; link) = link_metadata + id_source, _ = link if id_source.type in boundary_source_nodetypes (; subnetwork_id) = graph[id_source] # Check whether the source node is part of a subnetwork if subnetwork_id ≠ 0 subnetwork_idx = searchsortedfirst(subnetwork_ids, subnetwork_id) - mean_input_flows[subnetwork_idx][edge] = 0.0 + mean_input_flows[subnetwork_idx][link] = 0.0 end end end @@ -1479,21 +1479,21 @@ function Allocation(db::DB, config::Config, graph::MetaGraph)::Allocation end end - # Find edges that realize a demand - for edge_metadata in values(graph.edge_data) - (; type, edge) = edge_metadata + # Find links that realize a demand + for link_metadata in values(graph.edge_data) + (; type, link) = link_metadata - src_id, dst_id = edge + src_id, dst_id = link user_demand_inflow = - (type == EdgeType.flow) && (dst_id.type == NodeType.UserDemand) + (type == LinkType.flow) && (dst_id.type == NodeType.UserDemand) level_demand_inflow = - (type == EdgeType.control) && (src_id.type == NodeType.LevelDemand) + (type == LinkType.control) && (src_id.type == NodeType.LevelDemand) flow_demand_inflow = - (type == EdgeType.flow) && + (type == LinkType.flow) && has_external_demand(graph, dst_id, :flow_demand)[1] if user_demand_inflow || flow_demand_inflow - mean_realized_flows[edge] = 0.0 + mean_realized_flows[link] = 0.0 elseif level_demand_inflow mean_realized_flows[(dst_id, dst_id)] = 0.0 end @@ -1511,8 +1511,8 @@ function Parameters(db::DB, config::Config)::Parameters graph = create_graph(db, config) allocation = Allocation(db, config, graph) - if !valid_edges(graph) - error("Invalid edge(s) found.") + if !valid_links(graph) + error("Invalid link(s) found.") end if !valid_n_neighbors(graph) error("Invalid number of connections for certain node types.") diff --git a/core/src/solve.jl b/core/src/solve.jl index 274e8cdcf..ca42c7e5c 100644 --- a/core/src/solve.jl +++ b/core/src/solve.jl @@ -171,9 +171,9 @@ function formulate_storage!( t::Number, flow_boundary::FlowBoundary, ) - for (flow_rate, outflow_edges, active, cumulative_flow) in zip( + for (flow_rate, outflow_links, active, cumulative_flow) in zip( flow_boundary.flow_rate, - flow_boundary.outflow_edges, + flow_boundary.outflow_links, 
flow_boundary.active, flow_boundary.cumulative_flow, ) @@ -181,8 +181,8 @@ function formulate_storage!( if active volume += integral(flow_rate, tprev, t) end - for outflow_edge in outflow_edges - outflow_id = outflow_edge.edge[2] + for outflow_link in outflow_links + outflow_id = outflow_link.link[2] if outflow_id.type == NodeType.Basin current_storage[outflow_id.idx] += volume end @@ -331,10 +331,10 @@ function formulate_flow!( )::Nothing (; allocation) = p all_nodes_active = p.all_nodes_active[] - for (id, inflow_edge, outflow_edge, active, allocated, return_factor, min_level) in zip( + for (id, inflow_link, outflow_link, active, allocated, return_factor, min_level) in zip( user_demand.node_id, - user_demand.inflow_edge, - user_demand.outflow_edge, + user_demand.inflow_link, + user_demand.outflow_link, user_demand.active, eachrow(user_demand.allocated), user_demand.return_factor, @@ -358,7 +358,7 @@ function formulate_flow!( end # Smoothly let abstraction go to 0 as the source basin dries out - inflow_id = inflow_edge.edge[1] + inflow_id = inflow_link.link[1] factor_basin = get_low_storage_factor(current_low_storage_factor, inflow_id) q *= factor_basin @@ -385,11 +385,11 @@ function formulate_flow!( all_nodes_active = p.all_nodes_active[] (; node_id, active, resistance, max_flow_rate) = linear_resistance for id in node_id - inflow_edge = linear_resistance.inflow_edge[id.idx] - outflow_edge = linear_resistance.outflow_edge[id.idx] + inflow_link = linear_resistance.inflow_link[id.idx] + outflow_link = linear_resistance.outflow_link[id.idx] - inflow_id = inflow_edge.edge[1] - outflow_id = outflow_edge.edge[2] + inflow_id = inflow_link.link[1] + outflow_id = outflow_link.link[2] if (active[id.idx] || all_nodes_active) h_a = get_level(p, inflow_id, t, current_level) @@ -421,10 +421,10 @@ function formulate_flow!( tabulated_rating_curve for id in node_id - inflow_edge = tabulated_rating_curve.inflow_edge[id.idx] - outflow_edge = tabulated_rating_curve.outflow_edge[id.idx] - inflow_id = inflow_edge.edge[1] - outflow_id = outflow_edge.edge[2] + inflow_link = tabulated_rating_curve.inflow_link[id.idx] + outflow_link = tabulated_rating_curve.outflow_link[id.idx] + inflow_id = inflow_link.link[1] + outflow_id = outflow_link.link[2] max_downstream_level = tabulated_rating_curve.max_downstream_level[id.idx] h_a = get_level(p, inflow_id, t, current_level) @@ -506,11 +506,11 @@ function formulate_flow!( ) = manning_resistance all_nodes_active = p.all_nodes_active[] for id in node_id - inflow_edge = manning_resistance.inflow_edge[id.idx] - outflow_edge = manning_resistance.outflow_edge[id.idx] + inflow_link = manning_resistance.inflow_link[id.idx] + outflow_link = manning_resistance.outflow_link[id.idx] - inflow_id = inflow_edge.edge[1] - outflow_id = outflow_edge.edge[2] + inflow_id = inflow_link.link[1] + outflow_id = outflow_link.link[2] if !(active[id.idx] || all_nodes_active) continue @@ -568,8 +568,8 @@ function formulate_flow!( all_nodes_active = p.all_nodes_active[] for ( id, - inflow_edge, - outflow_edge, + inflow_link, + outflow_link, active, flow_rate, min_flow_rate, @@ -579,8 +579,8 @@ function formulate_flow!( continuous_control_type, ) in zip( pump.node_id, - pump.inflow_edge, - pump.outflow_edge, + pump.inflow_link, + pump.outflow_link, pump.active, pump.flow_rate[parent(du)], pump.min_flow_rate, @@ -594,8 +594,8 @@ function formulate_flow!( continue end - inflow_id = inflow_edge.edge[1] - outflow_id = outflow_edge.edge[2] + inflow_id = inflow_link.link[1] + outflow_id = 
outflow_link.link[2] src_level = get_level(p, inflow_id, t, current_level) dst_level = get_level(p, outflow_id, t, current_level) @@ -623,8 +623,8 @@ function formulate_flow!( all_nodes_active = p.all_nodes_active[] for ( id, - inflow_edge, - outflow_edge, + inflow_link, + outflow_link, active, flow_rate, min_flow_rate, @@ -634,8 +634,8 @@ function formulate_flow!( max_downstream_level, ) in zip( outlet.node_id, - outlet.inflow_edge, - outlet.outflow_edge, + outlet.inflow_link, + outlet.outflow_link, outlet.active, outlet.flow_rate[parent(du)], outlet.min_flow_rate, @@ -649,8 +649,8 @@ function formulate_flow!( continue end - inflow_id = inflow_edge.edge[1] - outflow_id = outflow_edge.edge[2] + inflow_id = inflow_link.link[1] + outflow_id = outflow_link.link[2] src_level = get_level(p, inflow_id, t, current_level) dst_level = get_level(p, outflow_id, t, current_level) @@ -807,10 +807,10 @@ function limit_flow!( end # UserDemand inflow bounds depend on multiple aspects of the simulation - for (id, active, inflow_edge, demand_from_timeseries) in zip( + for (id, active, inflow_link, demand_from_timeseries) in zip( user_demand.node_id, user_demand.active, - user_demand.inflow_edge, + user_demand.inflow_link, user_demand.demand_from_timeseries, ) min_flow_rate, max_flow_rate = if demand_from_timeseries @@ -819,7 +819,7 @@ function limit_flow!( else # The lower bound is estimated as the lowest inflow given the minimum values # of the reduction factors involved (with a margin) - inflow_id = inflow_edge.edge[1] + inflow_id = inflow_link.link[1] factor_basin_min = min_low_storage_factor(current_storage, basin.storage_prev, inflow_id) factor_level_min = min_low_user_demand_level_factor( diff --git a/core/src/util.jl b/core/src/util.jl index 03b1b636d..c1dc33277 100644 --- a/core/src/util.jl +++ b/core/src/util.jl @@ -335,7 +335,7 @@ function is_flow_constraining(type::NodeType.T)::Bool type in (NodeType.LinearResistance, NodeType.Pump, NodeType.Outlet) end -"""Whether the given node is flow direction constraining (only in direction of edges).""" +"""Whether the given node is flow direction constraining (only in direction of links).""" function is_flow_direction_constraining(type::NodeType.T)::Bool type in ( NodeType.Pump, @@ -388,7 +388,7 @@ end function get_external_priority_idx(p::Parameters, node_id::NodeID)::Int (; graph, level_demand, flow_demand, allocation) = p - inneighbor_control_ids = inneighbor_labels_type(graph, node_id, EdgeType.control) + inneighbor_control_ids = inneighbor_labels_type(graph, node_id, LinkType.control) if isempty(inneighbor_control_ids) return 0 end @@ -436,7 +436,7 @@ function set_continuous_control_type!( errors = false for id in node_id - id_controlled = only(outneighbor_labels_type(graph, id, EdgeType.control)) + id_controlled = only(outneighbor_labels_type(graph, id, LinkType.control)) if id_controlled.type == NodeType.Pump pump.continuous_control_type[id_controlled.idx] = continuous_control_type elseif id_controlled.type == NodeType.Outlet @@ -454,7 +454,7 @@ function has_external_demand( node_id::NodeID, node_type::Symbol, )::Tuple{Bool, Union{NodeID, Nothing}} - control_inneighbors = inneighbor_labels_type(graph, node_id, EdgeType.control) + control_inneighbors = inneighbor_labels_type(graph, node_id, LinkType.control) for id in control_inneighbors if graph[id].type == node_type return true, id @@ -496,9 +496,9 @@ function get_Δt(integrator)::Float64 end end -inflow_edge(graph, node_id)::EdgeMetadata = graph[inflow_id(graph, node_id), node_id] 
-outflow_edge(graph, node_id)::EdgeMetadata = graph[node_id, outflow_id(graph, node_id)] -outflow_edges(graph, node_id)::Vector{EdgeMetadata} = +inflow_link(graph, node_id)::LinkMetadata = graph[inflow_id(graph, node_id), node_id] +outflow_link(graph, node_id)::LinkMetadata = graph[node_id, outflow_id(graph, node_id)] +outflow_links(graph, node_id)::Vector{LinkMetadata} = [graph[node_id, outflow_id] for outflow_id in outflow_ids(graph, node_id)] """ @@ -518,26 +518,26 @@ function set_initial_allocation_mean_flows!(integrator)::Nothing water_balance!(du, u, p, t) for mean_input_flows_subnetwork in values(mean_input_flows) - for edge in keys(mean_input_flows_subnetwork) - if edge[1] == edge[2] - q = get_influx(du, edge[1], p) + for link in keys(mean_input_flows_subnetwork) + if link[1] == link[2] + q = get_influx(du, link[1], p) else - q = get_flow(du, p, t, edge) + q = get_flow(du, p, t, link) end # Multiply by Δt_allocation as averaging divides by this factor # in update_allocation! - mean_input_flows_subnetwork[edge] = q * Δt_allocation + mean_input_flows_subnetwork[link] = q * Δt_allocation end end # Mean realized demands for basins are calculated as Δstorage/Δt # This sets the realized demands as -storage_old - for edge in keys(mean_realized_flows) - if edge[1] == edge[2] - mean_realized_flows[edge] = -u[edge[1].idx] + for link in keys(mean_realized_flows) + if link[1] == link[2] + mean_realized_flows[link] = -u[link[1].idx] else - q = get_flow(du, p, t, edge) - mean_realized_flows[edge] = q * Δt_allocation + q = get_flow(du, p, t, link) + mean_realized_flows[link] = q * Δt_allocation end end @@ -673,7 +673,7 @@ function set_continuously_controlled_variable_refs!(p::Parameters)::Nothing (pid_control, fill("flow_rate", length(pid_control.node_id))), ) for (id, controlled_variable) in zip(node.node_id, controlled_variable) - controlled_node_id = only(outneighbor_labels_type(graph, id, EdgeType.control)) + controlled_node_id = only(outneighbor_labels_type(graph, id, LinkType.control)) ref, error = get_variable_ref(p, controlled_node_id, controlled_variable; listen = false) push!(node.target_ref, ref) @@ -870,7 +870,7 @@ end function build_state_vector(p::Parameters) # It is assumed that the horizontal flow states come first in - # p.state_inflow_edge and p.state_outflow_edge + # p.state_inflow_link and p.state_outflow_link return ComponentVector{Float64}(; tabulated_rating_curve = zeros(length(p.tabulated_rating_curve.node_id)), pump = zeros(length(p.pump.node_id)), @@ -911,13 +911,13 @@ function build_flow_to_storage(p::Parameters, u::ComponentVector)::Parameters flow_to_storage_node_outflow = flow_to_storage_node_inflow end - for (inflow_edge, outflow_edge) in zip(node.inflow_edge, node.outflow_edge) - inflow_id, node_id = inflow_edge.edge + for (inflow_link, outflow_link) in zip(node.inflow_link, node.outflow_link) + inflow_id, node_id = inflow_link.link if inflow_id.type == NodeType.Basin flow_to_storage_node_inflow[inflow_id.idx, node_id.idx] = -1.0 end - outflow_id = outflow_edge.edge[2] + outflow_id = outflow_link.link[2] if outflow_id.type == NodeType.Basin flow_to_storage_node_outflow[outflow_id.idx, node_id.idx] = 1.0 end @@ -936,70 +936,70 @@ function build_flow_to_storage(p::Parameters, u::ComponentVector)::Parameters end """ -Create vectors state_inflow_edge and state_outflow_edge which give for each state -in the state vector in order the metadata of the edge that is associated with that state. 
+Create vectors state_inflow_link and state_outflow_link which give for each state +in the state vector in order the metadata of the link that is associated with that state. Only for horizontal flows, which are assumed to come first in the state vector. """ -function set_state_flow_edges(p::Parameters, u0::ComponentVector)::Parameters +function set_state_flow_links(p::Parameters, u0::ComponentVector)::Parameters (; user_demand, graph) = p components = Symbol[] - state_inflow_edges = Vector{EdgeMetadata}[] - state_outflow_edges = Vector{EdgeMetadata}[] + state_inflow_links = Vector{LinkMetadata}[] + state_outflow_links = Vector{LinkMetadata}[] - placeholder_edge = - EdgeMetadata(0, EdgeType.flow, (NodeID(:Terminal, 0, 0), NodeID(:Terminal, 0, 0))) + placeholder_link = + LinkMetadata(0, LinkType.flow, (NodeID(:Terminal, 0, 0), NodeID(:Terminal, 0, 0))) for node_name in keys(u0) if hasfield(Parameters, node_name) node::AbstractParameterNode = getfield(p, node_name) push!(components, node_name) - state_inflow_edges_component = EdgeMetadata[] - state_outflow_edges_component = EdgeMetadata[] + state_inflow_links_component = LinkMetadata[] + state_outflow_links_component = LinkMetadata[] for id in node.node_id inflow_ids_ = collect(inflow_ids(p.graph, id)) outflow_ids_ = collect(outflow_ids(p.graph, id)) - inflow_edge = if length(inflow_ids_) == 0 - placeholder_edge + inflow_link = if length(inflow_ids_) == 0 + placeholder_link elseif length(inflow_ids_) == 1 inflow_id = only(inflow_ids_) graph[inflow_id, id] else error("Multiple inflows not supported") end - push!(state_inflow_edges_component, inflow_edge) + push!(state_inflow_links_component, inflow_link) - outflow_edge = if length(outflow_ids_) == 0 - placeholder_edge + outflow_link = if length(outflow_ids_) == 0 + placeholder_link elseif length(outflow_ids_) == 1 outflow_id = only(outflow_ids_) graph[id, outflow_id] else error("Multiple outflows not supported") end - push!(state_outflow_edges_component, outflow_edge) + push!(state_outflow_links_component, outflow_link) end - push!(state_inflow_edges, state_inflow_edges_component) - push!(state_outflow_edges, state_outflow_edges_component) + push!(state_inflow_links, state_inflow_links_component) + push!(state_outflow_links, state_outflow_links_component) elseif startswith(String(node_name), "user_demand") push!(components, node_name) - placeholder_edges = fill(placeholder_edge, length(user_demand.node_id)) + placeholder_links = fill(placeholder_link, length(user_demand.node_id)) if node_name == :user_demand_inflow - push!(state_inflow_edges, user_demand.inflow_edge) - push!(state_outflow_edges, placeholder_edges) + push!(state_inflow_links, user_demand.inflow_link) + push!(state_outflow_links, placeholder_links) elseif node_name == :user_demand_outflow - push!(state_inflow_edges, placeholder_edges) - push!(state_outflow_edges, user_demand.outflow_edge) + push!(state_inflow_links, placeholder_links) + push!(state_outflow_links, user_demand.outflow_link) end end end - state_inflow_edge = ComponentVector(NamedTuple(zip(components, state_inflow_edges))) - state_outflow_edge = ComponentVector(NamedTuple(zip(components, state_outflow_edges))) + state_inflow_link = ComponentVector(NamedTuple(zip(components, state_inflow_links))) + state_outflow_link = ComponentVector(NamedTuple(zip(components, state_outflow_links))) - @reset p.state_inflow_edge = state_inflow_edge - @reset p.state_outflow_edge = state_outflow_edge + @reset p.state_inflow_link = state_inflow_link + @reset p.state_outflow_link = 
state_outflow_link return p end @@ -1047,9 +1047,9 @@ function get_state_index( return nothing end -function get_state_index(u::ComponentVector, edge::Tuple{NodeID, NodeID})::Int - idx = get_state_index(edge[2], u) - isnothing(idx) ? get_state_index(edge[1], u; inflow = false) : idx +function get_state_index(u::ComponentVector, link::Tuple{NodeID, NodeID})::Int + idx = get_state_index(link[2], u) + isnothing(idx) ? get_state_index(link[1], u; inflow = false) : idx end """ @@ -1118,7 +1118,7 @@ function mean_input_flows_subnetwork(p::Parameters, subnetwork_id::Int32) return mean_input_flows[subnetwork_idx] end -source_edges_subnetwork(p::Parameters, subnetwork_id::Int32) = +source_links_subnetwork(p::Parameters, subnetwork_id::Int32) = keys(mean_input_flows_subnetwork(p, subnetwork_id)) """ diff --git a/core/src/validation.jl b/core/src/validation.jl index 2ac800f0b..dd0d5f559 100644 --- a/core/src/validation.jl +++ b/core/src/validation.jl @@ -205,18 +205,18 @@ function valid_nodes(db::DB)::Bool end function database_warning(db::DB)::Nothing - cols = SQLite.columns(db, "Edge") + cols = SQLite.columns(db, "Link") if "subnetwork_id" in cols.name - @warn "The 'subnetwork_id' column in the 'Edge' table is deprecated since ribasim v2025.1." + @warn "The 'subnetwork_id' column in the 'Link' table is deprecated since ribasim v2025.1." end return nothing end """ Test for each node given its node type whether the nodes that -# are downstream ('down-edge') of this node are of an allowed type +# are downstream ('down-link') of this node are of an allowed type """ -function valid_edges(graph::MetaGraph)::Bool +function valid_links(graph::MetaGraph)::Bool errors = false for e in edges(graph) id_src = label_for(graph, e.src) @@ -323,7 +323,7 @@ function valid_pid_connectivity( end controlled_id = - only(outneighbor_labels_type(graph, pid_control_id, EdgeType.control)) + only(outneighbor_labels_type(graph, pid_control_id, LinkType.control)) @assert controlled_id.type in [NodeType.Pump, NodeType.Outlet] id_inflow = inflow_id(graph, controlled_id) @@ -486,30 +486,30 @@ function valid_n_neighbors(node_name::Symbol, graph::MetaGraph)::Bool # return !errors for node_id in labels(graph) node_id.type == node_type || continue - for (bounds, edge_type) in - zip((bounds_flow, bounds_control), (EdgeType.flow, EdgeType.control)) + for (bounds, link_type) in + zip((bounds_flow, bounds_control), (LinkType.flow, LinkType.control)) n_inneighbors = - count(x -> true, inneighbor_labels_type(graph, node_id, edge_type)) + count(x -> true, inneighbor_labels_type(graph, node_id, link_type)) n_outneighbors = - count(x -> true, outneighbor_labels_type(graph, node_id, edge_type)) + count(x -> true, outneighbor_labels_type(graph, node_id, link_type)) if n_inneighbors < bounds.in_min - @error "$node_id must have at least $(bounds.in_min) $edge_type inneighbor(s) (got $n_inneighbors)." + @error "$node_id must have at least $(bounds.in_min) $link_type inneighbor(s) (got $n_inneighbors)." errors = true end if n_inneighbors > bounds.in_max - @error "$node_id can have at most $(bounds.in_max) $edge_type inneighbor(s) (got $n_inneighbors)." + @error "$node_id can have at most $(bounds.in_max) $link_type inneighbor(s) (got $n_inneighbors)." errors = true end if n_outneighbors < bounds.out_min - @error "$node_id must have at least $(bounds.out_min) $edge_type outneighbor(s) (got $n_outneighbors)." + @error "$node_id must have at least $(bounds.out_min) $link_type outneighbor(s) (got $n_outneighbors)." 
errors = true end if n_outneighbors > bounds.out_max - @error "$node_id can have at most $(bounds.out_max) $edge_type outneighbor(s) (got $n_outneighbors)." + @error "$node_id can have at most $(bounds.out_max) $link_type outneighbor(s) (got $n_outneighbors)." errors = true end end @@ -517,18 +517,18 @@ function valid_n_neighbors(node_name::Symbol, graph::MetaGraph)::Bool return !errors end -"Check that only supported edge types are declared." -function valid_edge_types(db::DB)::Bool - edge_rows = execute( +"Check that only supported link types are declared." +function valid_link_types(db::DB)::Bool + link_rows = execute( db, - "SELECT edge_id, from_node_id, to_node_id, edge_type FROM Edge ORDER BY edge_id", + "SELECT link_id, from_node_id, to_node_id, link_type FROM Link ORDER BY link_id", ) errors = false - for (; edge_id, from_node_id, to_node_id, edge_type) in edge_rows - if edge_type ∉ ["flow", "control"] + for (; link_id, from_node_id, to_node_id, link_type) in link_rows + if link_type ∉ ["flow", "control"] errors = true - @error "Invalid edge type '$edge_type' for edge #$edge_id from node #$from_node_id to node #$to_node_id." + @error "Invalid link type '$link_type' for link #$link_id from node #$from_node_id to node #$to_node_id." end end return !errors @@ -576,7 +576,7 @@ function valid_discrete_control(p::Parameters, config::Config)::Bool # Check whether these control states are defined for the # control outneighbors - for id_outneighbor in outneighbor_labels_type(graph, id, EdgeType.control) + for id_outneighbor in outneighbor_labels_type(graph, id, LinkType.control) # Node object for the outneighbor node type node = getfield(p, graph[id_outneighbor].type) diff --git a/core/src/write.jl b/core/src/write.jl index 91e2e2775..7d7405183 100644 --- a/core/src/write.jl +++ b/core/src/write.jl @@ -208,7 +208,7 @@ function flow_table( model::Model, )::@NamedTuple{ time::Vector{DateTime}, - edge_id::Vector{Union{Int32, Missing}}, + link_id::Vector{Union{Int32, Missing}}, from_node_id::Vector{Int32}, to_node_id::Vector{Int32}, flow_rate::Vector{Float64}, @@ -217,30 +217,30 @@ function flow_table( (; t, saveval) = saved.flow (; p) = integrator (; graph) = p - (; flow_edges) = graph[] + (; flow_links) = graph[] from_node_id = Int32[] to_node_id = Int32[] - unique_edge_ids_flow = Union{Int32, Missing}[] + unique_link_ids_flow = Union{Int32, Missing}[] - flow_edge_ids = [flow_edge.edge for flow_edge in flow_edges] + flow_link_ids = [flow_link.link for flow_link in flow_links] - for (from_id, to_id) in flow_edge_ids + for (from_id, to_id) in flow_link_ids push!(from_node_id, from_id.value) push!(to_node_id, to_id.value) - push!(unique_edge_ids_flow, graph[from_id, to_id].id) + push!(unique_link_ids_flow, graph[from_id, to_id].id) end - nflow = length(unique_edge_ids_flow) + nflow = length(unique_link_ids_flow) ntsteps = length(t) flow_rate = zeros(nflow * ntsteps) - for (i, edge) in enumerate(flow_edge_ids) + for (i, link) in enumerate(flow_link_ids) for (j, cvec) in enumerate(saveval) (; flow, flow_boundary) = cvec flow_rate[i + (j - 1) * nflow] = - get_flow(flow, p, 0.0, edge; boundary_flow = flow_boundary) + get_flow(flow, p, 0.0, link; boundary_flow = flow_boundary) end end @@ -250,11 +250,11 @@ function flow_table( t_starts[1] = 0.0 end time = repeat(datetime_since.(t_starts, config.starttime); inner = nflow) - edge_id = repeat(unique_edge_ids_flow; outer = ntsteps) + link_id = repeat(unique_link_ids_flow; outer = ntsteps) from_node_id = repeat(from_node_id; outer = ntsteps) to_node_id = 
repeat(to_node_id; outer = ntsteps) - return (; time, edge_id, from_node_id, to_node_id, flow_rate) + return (; time, link_id, from_node_id, to_node_id, flow_rate) end "Create a concentration result table from the saved data" @@ -336,7 +336,7 @@ function allocation_flow_table( model::Model, )::@NamedTuple{ time::Vector{DateTime}, - edge_id::Vector{Int32}, + link_id::Vector{Int32}, from_node_type::Vector{String}, from_node_id::Vector{Int32}, to_node_type::Vector{String}, @@ -353,7 +353,7 @@ function allocation_flow_table( return (; time, - record_flow.edge_id, + record_flow.link_id, record_flow.from_node_type, record_flow.from_node_id, record_flow.to_node_type, diff --git a/core/test/allocation_test.jl b/core/test/allocation_test.jl index 05cb76ce3..fa249d652 100644 --- a/core/test/allocation_test.jl +++ b/core/test/allocation_test.jl @@ -158,13 +158,13 @@ end # Main network objective function F = problem[:F] objective = JuMP.objective_function(problem) - objective_edges = keys(objective.terms) + objective_links = keys(objective.terms) F_1 = F[(NodeID(:Basin, 2, p), NodeID(:Pump, 11, p))] F_2 = F[(NodeID(:Basin, 6, p), NodeID(:Pump, 24, p))] F_3 = F[(NodeID(:Basin, 10, p), NodeID(:Pump, 38, p))] - @test JuMP.UnorderedPair(F_1, F_1) ∈ objective_edges - @test JuMP.UnorderedPair(F_2, F_2) ∈ objective_edges - @test JuMP.UnorderedPair(F_3, F_3) ∈ objective_edges + @test JuMP.UnorderedPair(F_1, F_1) ∈ objective_links + @test JuMP.UnorderedPair(F_2, F_2) ∈ objective_links + @test JuMP.UnorderedPair(F_3, F_3) ∈ objective_links # Running full allocation algorithm (; Δt_allocation) = allocation_models[1] @@ -179,7 +179,7 @@ end @test subnetwork_allocateds[NodeID(:Basin, 10, p), NodeID(:Pump, 38, p)] ≈ [0.001, 0.00024888, 0.0] rtol = 1e-3 - # Test for existence of edges in allocation flow record + # Test for existence of links in allocation flow record allocation_flow = DataFrame(record_flow) transform!( allocation_flow, @@ -187,9 +187,9 @@ end ByRow( (a, b, c, d) -> haskey(graph, NodeID(Symbol(a), b, p), NodeID(Symbol(c), d, p)), - ) => :edge_exists, + ) => :link_exists, ) - @test all(allocation_flow.edge_exists) + @test all(allocation_flow.link_exists) @test user_demand.allocated[2, :] ≈ [4.0, 0.0, 0.0] atol = 1e-3 @test user_demand.allocated[7, :] ≈ [0.0, 0.0, 0.0] atol = 1e-3 @@ -352,7 +352,7 @@ end # Realized user demand flow_table = DataFrame(Ribasim.flow_table(model)) - flow_table_user_3 = flow_table[flow_table.edge_id .== 2, :] + flow_table_user_3 = flow_table[flow_table.link_id .== 2, :] itp_user_3 = LinearInterpolation( flow_table_user_3.flow_rate, Ribasim.seconds_since.(flow_table_user_3.time, model.config.starttime), @@ -504,7 +504,7 @@ end @test Tables.schema(allocation_flow) == Tables.Schema( ( :time, - :edge_id, + :link_id, :from_node_type, :from_node_id, :to_node_type, diff --git a/core/test/control_test.jl b/core/test/control_test.jl index ab28e5101..933190e10 100644 --- a/core/test/control_test.jl +++ b/core/test/control_test.jl @@ -280,7 +280,7 @@ end model = Ribasim.run(toml_path) flow_data = DataFrame(Ribasim.flow_table(model)) - function get_edge_flow(from_node_id, to_node_id) + function get_link_flow(from_node_id, to_node_id) data = filter( [:from_node_id, :to_node_id] => (a, b) -> (a == from_node_id) && (b == to_node_id), @@ -289,11 +289,11 @@ end return data.flow_rate end - inflow = get_edge_flow(2, 3) - @test get_edge_flow(3, 4) ≈ max.(0.6 .* inflow, 0) rtol = 1e-4 - @test get_edge_flow(4, 6) ≈ max.(0.6 .* inflow, 0) rtol = 1e-4 - @test get_edge_flow(3, 5) ≈ max.(0.4 .* 
inflow, 0) rtol = 1e-4 - @test get_edge_flow(5, 7) ≈ max.(0.4 .* inflow, 0) rtol = 1e-4 + inflow = get_link_flow(2, 3) + @test get_link_flow(3, 4) ≈ max.(0.6 .* inflow, 0) rtol = 1e-4 + @test get_link_flow(4, 6) ≈ max.(0.6 .* inflow, 0) rtol = 1e-4 + @test get_link_flow(3, 5) ≈ max.(0.4 .* inflow, 0) rtol = 1e-4 + @test get_link_flow(5, 7) ≈ max.(0.4 .* inflow, 0) rtol = 1e-4 end @testitem "Concentration discrete control" begin @@ -306,13 +306,13 @@ end @test ispath(toml_path) model = Ribasim.run(toml_path) flow_data = DataFrame(Ribasim.flow_table(model)) - flow_edge_0 = filter(:edge_id => id -> id == 0, flow_data) - t = Ribasim.seconds_since.(flow_edge_0.time, model.config.starttime) + flow_link_0 = filter(:link_id => id -> id == 0, flow_data) + t = Ribasim.seconds_since.(flow_link_0.time, model.config.starttime) itp = model.integrator.p.basin.concentration_data.concentration_external[1]["concentration_external.kryptonite"] concentration = itp.(t) threshold = 0.5 above_threshold = concentration .> threshold - @test all(isapprox.(flow_edge_0.flow_rate[above_threshold], 1e-3, rtol = 1e-2)) - @test all(isapprox.(flow_edge_0.flow_rate[.!above_threshold], 0.0, atol = 1e-5)) + @test all(isapprox.(flow_link_0.flow_rate[above_threshold], 1e-3, rtol = 1e-2)) + @test all(isapprox.(flow_link_0.flow_rate[.!above_threshold], 0.0, atol = 1e-5)) end diff --git a/core/test/main_test.jl b/core/test/main_test.jl index 95dfac40e..ad391dcec 100644 --- a/core/test/main_test.jl +++ b/core/test/main_test.jl @@ -31,7 +31,7 @@ end import TOML using Ribasim: Config, results_path - model_path = normpath(@__DIR__, "../../generated_testmodels/invalid_edge_types/") + model_path = normpath(@__DIR__, "../../generated_testmodels/invalid_link_types/") toml_path = normpath(model_path, "ribasim.toml") @test ispath(toml_path) diff --git a/core/test/run_models_test.jl b/core/test/run_models_test.jl index cbf4ca8bd..62719dd83 100644 --- a/core/test/run_models_test.jl +++ b/core/test/run_models_test.jl @@ -38,7 +38,7 @@ @testset "Schema" begin @test Tables.schema(flow) == Tables.Schema( - (:time, :edge_id, :from_node_id, :to_node_id, :flow_rate), + (:time, :link_id, :from_node_id, :to_node_id, :flow_rate), (DateTime, Union{Int32, Missing}, Int32, Int32, Float64), ) @test Tables.schema(basin) == Tables.Schema( @@ -86,7 +86,7 @@ @testset "Results size" begin nsaved = length(tsaves(model)) @test nsaved > 10 - # t0 has no flow, 2 flow edges + # t0 has no flow, 2 flow links @test nrow(flow) == (nsaved - 1) * 2 @test nrow(basin) == nsaved - 1 @test nrow(subgrid) == nsaved * length(p.subgrid.level) @@ -94,7 +94,7 @@ @testset "Results values" begin @test flow.time[1] == DateTime(2020) - @test coalesce.(flow.edge_id[1:2], -1) == [100, 101] + @test coalesce.(flow.link_id[1:2], -1) == [100, 101] @test flow.from_node_id[1:2] == [6, 0] @test flow.to_node_id[1:2] == [0, 2147483647] diff --git a/core/test/validation_test.jl b/core/test/validation_test.jl index 295d59542..e0d7a30db 100644 --- a/core/test/validation_test.jl +++ b/core/test/validation_test.jl @@ -72,13 +72,13 @@ end using Graphs: DiGraph using Logging using MetaGraphsNext: MetaGraph - using Ribasim: NodeID, NodeMetadata, EdgeMetadata, EdgeType + using Ribasim: NodeID, NodeMetadata, LinkMetadata, LinkType graph = MetaGraph( DiGraph(); label_type = NodeID, vertex_data_type = NodeMetadata, - edge_data_type = EdgeMetadata, + edge_data_type = LinkMetadata, graph_data = nothing, ) @@ -88,14 +88,14 @@ end graph[NodeID(:Basin, 4, 1)] = NodeMetadata(:pump, 9) graph[NodeID(:Pump, 6, 1)] = 
NodeMetadata(:pump, 9) - function set_edge_metadata!(id_1, id_2, edge_type) - graph[id_1, id_2] = EdgeMetadata(; id = 0, type = edge_type, edge = (id_1, id_2)) + function set_link_metadata!(id_1, id_2, link_type) + graph[id_1, id_2] = LinkMetadata(; id = 0, type = link_type, link = (id_1, id_2)) return nothing end - set_edge_metadata!(NodeID(:Basin, 2, 1), NodeID(:Pump, 1, 1), EdgeType.flow) - set_edge_metadata!(NodeID(:Basin, 3, 1), NodeID(:Pump, 1, 1), EdgeType.flow) - set_edge_metadata!(NodeID(:Pump, 6, 1), NodeID(:Basin, 2, 1), EdgeType.flow) + set_link_metadata!(NodeID(:Basin, 2, 1), NodeID(:Pump, 1, 1), LinkType.flow) + set_link_metadata!(NodeID(:Basin, 3, 1), NodeID(:Pump, 1, 1), LinkType.flow) + set_link_metadata!(NodeID(:Pump, 6, 1), NodeID(:Basin, 2, 1), LinkType.flow) logger = TestLogger() with_logger(logger) do @@ -124,7 +124,7 @@ end using Graphs: DiGraph using Logging using MetaGraphsNext: MetaGraph - using Ribasim: NodeID, NodeMetadata, EdgeMetadata, NodeID, EdgeType + using Ribasim: NodeID, NodeMetadata, LinkMetadata, NodeID, LinkType pid_control_node_id = NodeID.(:PidControl, [1, 6], 1) pid_control_listen_node_id = [NodeID(:Terminal, 3, 1), NodeID(:Basin, 5, 1)] @@ -133,7 +133,7 @@ end DiGraph(); label_type = NodeID, vertex_data_type = NodeMetadata, - edge_data_type = EdgeMetadata, + edge_data_type = LinkMetadata, graph_data = nothing, ) @@ -145,18 +145,18 @@ end graph[NodeID(:Basin, 5, 1)] = NodeMetadata(:basin, 0) graph[NodeID(:Basin, 7, 1)] = NodeMetadata(:basin, 0) - function set_edge_metadata!(id_1, id_2, edge_type) - graph[id_1, id_2] = EdgeMetadata(; id = 0, type = edge_type, edge = (id_1, id_2)) + function set_link_metadata!(id_1, id_2, link_type) + graph[id_1, id_2] = LinkMetadata(; id = 0, type = link_type, link = (id_1, id_2)) return nothing end - set_edge_metadata!(NodeID(:Terminal, 3, 1), NodeID(:Pump, 4, 1), EdgeType.flow) - set_edge_metadata!(NodeID(:Basin, 7, 1), NodeID(:Pump, 2, 1), EdgeType.flow) - set_edge_metadata!(NodeID(:Pump, 2, 1), NodeID(:Basin, 7, 1), EdgeType.flow) - set_edge_metadata!(NodeID(:Pump, 4, 1), NodeID(:Basin, 7, 1), EdgeType.flow) + set_link_metadata!(NodeID(:Terminal, 3, 1), NodeID(:Pump, 4, 1), LinkType.flow) + set_link_metadata!(NodeID(:Basin, 7, 1), NodeID(:Pump, 2, 1), LinkType.flow) + set_link_metadata!(NodeID(:Pump, 2, 1), NodeID(:Basin, 7, 1), LinkType.flow) + set_link_metadata!(NodeID(:Pump, 4, 1), NodeID(:Basin, 7, 1), LinkType.flow) - set_edge_metadata!(NodeID(:PidControl, 1, 1), NodeID(:Pump, 4, 1), EdgeType.control) - set_edge_metadata!(NodeID(:PidControl, 6, 1), NodeID(:Pump, 2, 1), EdgeType.control) + set_link_metadata!(NodeID(:PidControl, 1, 1), NodeID(:Pump, 4, 1), LinkType.control) + set_link_metadata!(NodeID(:PidControl, 6, 1), NodeID(:Pump, 2, 1), LinkType.control) logger = TestLogger() with_logger(logger) do @@ -258,12 +258,12 @@ end "Pump #1 flow rates must be non-negative, found -1.0 for control state 'foo'." 
end -@testitem "Edge type validation" begin +@testitem "Link type validation" begin import SQLite using Logging toml_path = - normpath(@__DIR__, "../../generated_testmodels/invalid_edge_types/ribasim.toml") + normpath(@__DIR__, "../../generated_testmodels/invalid_link_types/ribasim.toml") @test ispath(toml_path) cfg = Ribasim.Config(toml_path) @@ -271,17 +271,17 @@ end db = SQLite.DB(db_path) logger = TestLogger() with_logger(logger) do - @test !Ribasim.valid_edge_types(db) + @test !Ribasim.valid_link_types(db) end close(db) @test length(logger.logs) == 2 @test logger.logs[1].level == Error @test logger.logs[1].message == - "Invalid edge type 'foo' for edge #1 from node #1 to node #2." + "Invalid link type 'foo' for link #1 from node #1 to node #2." @test logger.logs[2].level == Error @test logger.logs[2].message == - "Invalid edge type 'bar' for edge #2 from node #2 to node #3." + "Invalid link type 'bar' for link #2 from node #2 to node #3." end @testitem "Subgrid validation" begin diff --git a/docs/concept/allocation.qmd b/docs/concept/allocation.qmd index 91318fd84..394390496 100644 --- a/docs/concept/allocation.qmd +++ b/docs/concept/allocation.qmd @@ -40,15 +40,15 @@ The allocation problem is solved per subnetwork, which is given by a subset $S \ ### Source flows -Sources are indicated by a set of edges in the subnetwork +Sources are indicated by a set of links in the subnetwork $$ E_S^\text{source} \subset E, $$ -which are automatically inferred as all edges that point out of LevelBoundary or FlowBoundary nodes. That is, if $(i,j) \in E_S^\text{source}$, then the average over the last allocation interval $\Delta t_{\text{alloc}}$ of the of the flow over this edge +which are automatically inferred as all links that point out of LevelBoundary or FlowBoundary nodes. That is, if $(i,j) \in E_S^\text{source}$, then the average over the last allocation interval $\Delta t_{\text{alloc}}$ of the of the flow over this link $$ \frac{1}{\Delta t_{\text{alloc}}}\int_{t - \Delta t_{\text{alloc}}}^tQ_{ij}(t') dt' $$ - is treated as a source flow in the allocation problem. These edges are either coming from a boundary/source node (e.g. a level or flow boundary) or connect the main network to a subnetwork. For the definition of $Q_{ij}$ see [the formal model description](/concept/equations.qmd#formal-model-description). + is treated as a source flow in the allocation problem. These links are either coming from a boundary/source node (e.g. a level or flow boundary) or connect the main network to a subnetwork. For the definition of $Q_{ij}$ see [the formal model description](/concept/equations.qmd#formal-model-description). ### User demands @@ -77,7 +77,7 @@ for all $i \in FD_S$. Here $d^{p_{\text{df}}}$ is given by the original flow dem ### Vertical fluxes and local storage -Apart from the source flows denoted by edges, there are other sources of water in the subnetwork, associated with the Basins in the subnetwork $B_S = B \cap S$. First, there is the average over the last allocation interval $\Delta t_{\text{alloc}}$ of the vertical fluxes (precipitation, evaporation, infiltration and drainage) for each Basin: +Apart from the source flows denoted by links, there are other sources of water in the subnetwork, associated with the Basins in the subnetwork $B_S = B \cap S$. 
First, there is the average over the last allocation interval $\Delta t_{\text{alloc}}$ of the vertical fluxes (precipitation, evaporation, infiltration and drainage) for each Basin:
$$
\phi_i(t) = \frac{1}{\Delta t_{\text{alloc}}}\int_{t - \Delta t_{\text{alloc}}}^t \left[Q_{P,i}(t') - Q_{E,i}(t') + Q_{\text{drn},i}(t') - Q_{\text{inf},i}(t') \right] dt', \quad \forall i \in B_S.
$$
@@ -102,29 +102,29 @@ for all $i \in B_S$. Note that the basin demand has only a single priority, so f
Nodes in the Ribasim model that have a `max_flow_rate`, i.e. Pump, Outlet and LinearResistance, put a constraint on the flow through that node. Some nodes only allow flow in one direction, like Pump, Outlet and TabulatedRatingCurve.

#### UserDemand return flows
-UserDemand nodes dictate proportional relationships between flows over edges in the subnetwork. The return factor is given by $0 \le r_i(t) \le 1, i \in U_S$.
+UserDemand nodes dictate proportional relationships between flows over links in the subnetwork. The return factor is given by $0 \le r_i(t) \le 1, i \in U_S$.

## The subnetwork

-The subnetwork consists of a set of nodes $S \subset V$ and edges
+The subnetwork consists of a set of nodes $S \subset V$ and links
$$
E_S = (S \times S) \cup E_S^\text{source},
$$
-i.e. the edges that lie within the subnetwork together with the source edges (which can be partially outside the subnetwork).
+i.e. the links that lie within the subnetwork together with the source links (which can be partially outside the subnetwork).

The nodes in $S$ together with the connected nodes outside the subnetwork are called the extended subnetwork.

### Capacities

-Each edge in the subnetwork has an associated capacity. These capacities are collected in the sparse capacity matrix $C_S \in \overline{\mathbb{R}}_{\ge 0}^{n\times n}$ where $n$ is the number of nodes in the extended subnetwork. An edge capacity is infinite if there is nothing in the model constraining the capacity.
+Each link in the subnetwork has an associated capacity. These capacities are collected in the sparse capacity matrix $C_S \in \overline{\mathbb{R}}_{\ge 0}^{n\times n}$ where $n$ is the number of nodes in the extended subnetwork. A link capacity is infinite if there is nothing in the model constraining the capacity.

The capacities are determined in different ways:

-- If an edge does not exist in the allocation network, i.e. $(i,j) \notin E_S$ for certain $1 \le i,j\le n'$, then $(C_S)_{i,j} = 0$;
-- The capacity of the edge $e \in E_S$ is given by the smallest `max_flow_rate` of the nodes along the equivalent edges in the subnetwork. If there are no nodes with a `max_flow_rate`, the edge capacity is infinite;
-- If the edge is a source, the capacity of the edge is given by the flow rate of that source;
+- If a link does not exist in the allocation network, i.e. $(i,j) \notin E_S$ for certain $1 \le i,j\le n'$, then $(C_S)_{i,j} = 0$;
+- The capacity of the link $e \in E_S$ is given by the smallest `max_flow_rate` of the nodes along the equivalent links in the subnetwork. If there are no nodes with a `max_flow_rate`, the link capacity is infinite;
+- If the link is a source, the capacity of the link is given by the flow rate of that source;

-There are also capacities for special edges:
+There are also capacities for special links:

- $C^{LD}_S \in \mathbb{R}^b_{\ge 0}$ where $b = \# B_S$ is the number of basins, for the flow supplied by basins based on level demand (this capacity is 0 for basins that have no level demand).
- $C^{FD}_S \in \mathbb{R}^c_{\ge 0}$ where $c = \# FD_S$ is the number of nodes with a flow demand, for the flow supplied by flow buffers at these nodes with a flow demand. @@ -138,7 +138,7 @@ The optimization problem for a subnetwork consists of a quadratic objective func There are several types of variable whose value has to be determined to solve the allocation problem: -- The flows $F \in \mathbb{R}_{\ge 0}^{n\times n}$ over the edges in the allocation network; +- The flows $F \in \mathbb{R}_{\ge 0}^{n\times n}$ over the links in the allocation network; - The flows $F^\text{basin out}_{i}, F^\text{basin in}_{i} \geq 0$ for all $i \in B_S$ supplied and consumed by the basins with a level demand respectively; - The flows $F^\text{buffer out}_{i}, F^\text{buffer in}_{i} \ge 0$ for all $i \in FD_S \cup FF_S$ supplied and consumed by the flow buffers of nodes with a flow demand. @@ -196,7 +196,7 @@ In here, we have the following special flows: In the above, the placement of the basin and buffer flows might seem counter-intuitive. Think of the storage or buffer as a separate node connected to the node with the demand. ::: -- Capacity: the flows over the edges are bounded by the edge capacity: +- Capacity: the flows over the links are bounded by the link capacity: $$ F_{ij} \le \left(C_S\right)_{ij}, \quad \forall(i,j) \in E_S. $$ {#eq-capacityconstraint} @@ -209,7 +209,7 @@ $$ :::{.callout-note} There are several things to note about the source constraints: - The sources are not all used at once. There is an optimization for each source in a subnetwork, where only one source has nonzero capacity. -- When performing subnetwork demand collection, these capacities are set to $\infty$ for edges which connect the main network to a subnetwork. +- When performing subnetwork demand collection, these capacities are set to $\infty$ for links which connect the main network to a subnetwork. ::: Similar constraints hold for the flow out of basins, flow demand buffers and user demand outflow sources: @@ -224,7 +224,7 @@ $$ $$ F_{ij} \le (C^{UD}_S)_i, \quad \forall i \in U_S, \quad V_S^{\text{out}}(i) = \{j\}. $$ -Here we use that each UserDemand node in the allocation network has a unique outflow edge. The user outflow source capacities are increased after each optimization solve by the return fraction: +Here we use that each UserDemand node in the allocation network has a unique outflow link. The user outflow source capacities are increased after each optimization solve by the return fraction: $$ r_i(t) \cdot F_{ki}, \quad V_S^{\text{in}}(i) = \{k\}. $$ diff --git a/docs/concept/core.qmd b/docs/concept/core.qmd index 5dbf510b5..afcc63aeb 100644 --- a/docs/concept/core.qmd +++ b/docs/concept/core.qmd @@ -27,7 +27,7 @@ The figure below gives a more detailed description of the simulation loop in the - Allocation optimization; activated when the allocation timestep has been passed; - Control actions; activated when some discrete control callback is triggered; -- Water balance; computing the flows over flow edges happens each timestep; +- Water balance; computing the flows over flow links happens each timestep; - Time integration step; done by the integrator from `OrdinaryDiffEq.jl`. 
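For example, once a model has been run, the per-link flows saved at each `saveat` can be inspected from the flow results; the output column is now called `link_id`. This is a minimal sketch: the TOML path and the link ID `0` are placeholders, not values from a real model.

```julia
using DataFrames
using Ribasim

model = Ribasim.run("ribasim.toml")  # placeholder path to a model configuration
flow_data = DataFrame(Ribasim.flow_table(model))
# Time series of the flow over a single link; rows are labeled by `link_id`.
flow_link_0 = filter(:link_id => isequal(0), flow_data)
```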
```{mermaid} @@ -65,7 +65,7 @@ sequenceDiagram activate Sim State-->>Sim: Input Param-->>Sim: Input - Sim->>Sim: Compute flows over edges per node type + Sim->>Sim: Compute flows over links per node type Sim-->>Param: Set flows deactivate Sim end @@ -132,9 +132,9 @@ Ribasim can calculate concentrations of conservative tracers (i.e. substances th It does so by calculating the mass transports by flows for each timestep, in the `update_cumulative_flows!` callback. Specifically, for each Basin at each timestep it calculates: -- all mass inflows ($flow * source\_concentration$) given the edge inflows +- all mass inflows ($flow * source\_concentration$) given the link inflows - update the concentrations in the Basin based on the added storage ($previous storage + inflows$) -- all mass outflows ($flow * basin\_concentration\_state$) give the edge outflows +- all mass outflows ($flow * basin\_concentration\_state$) give the link outflows - update the concentrations in the Basin based on the current storage We thus keep track of both mass and concentration of substances for each Basin. diff --git a/docs/concept/equations.qmd b/docs/concept/equations.qmd index 3f08ba269..d7e8ffde4 100644 --- a/docs/concept/equations.qmd +++ b/docs/concept/equations.qmd @@ -10,7 +10,7 @@ $$ which is a system of coupled first order differential equations. -The model is given by a directed graph, consisting of a set of node IDs (vertices) $V$ and edges $E$, consisting of ordered pairs of node IDs. +The model is given by a directed graph, consisting of a set of node IDs (vertices) $V$ and links $E$, consisting of ordered pairs of node IDs. We denote the subset of the nodes given by the Basins $B \subset V$, and the subset of nodes that prescribe flow $N \subset V$. The states $\mathbf{u}$ of the model are given by cumulative flows since the start of the simulation as prescribed by the nodes $N$: @@ -91,7 +91,7 @@ in a Basin over a time period $\Delta t$ and compares that to the total inflows - $\text{total inflow}$: the precipitation, drainage and horizontal flows into the Basin; - $\text{total outflow}$: the evaporation, infiltration and horizontal flows out of the Basin. -Whether a flow is an inflow or an outflow depends on whether the flow contributes to or takes from the Basin storage, which means that this is independent of the edge direction. This is determined for each solver timestep individually. +Whether a flow is an inflow or an outflow depends on whether the flow contributes to or takes from the Basin storage, which means that this is independent of the link direction. This is determined for each solver timestep individually. Then from this we compute the errors: diff --git a/docs/concept/modelconcept.qmd b/docs/concept/modelconcept.qmd index c9e46bea9..4da050d52 100644 --- a/docs/concept/modelconcept.qmd +++ b/docs/concept/modelconcept.qmd @@ -75,7 +75,7 @@ Internally a directed graph is used. The direction is defined to be the positive flow direction, and is generally set in the dominant flow direction. The basins are the nodes of the network graph. Basin states and properties such storage volume and wetted area are associated with the nodes (A, B, C, D), as are most forcing data such as precipitation, evaporation, or water demand. -Basin connection properties and interbasin flows are associated with the edges (the +Basin connection properties and interbasin flows are associated with the links (the lines between A, B, C, and D) instead. 
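As a minimal sketch of this convention with the renamed Python API (the `model` object and the node IDs used here are assumed for illustration, they are not part of this patch), interbasin connections are added as links drawn in the intended positive flow direction:

```python
# Sketch only: assumes a `model` in which Basin 1, LinearResistance 2 and
# Basin 3 have already been added. Each link is drawn from the upstream to
# the downstream node, which defines the positive flow direction.
model.link.add(model.basin[1], model.linear_resistance[2])
model.link.add(model.linear_resistance[2], model.basin[3])
```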
Multiple basins may exist within the same spatial polygon, representing different aspects of the surface water system (perennial ditches, ephemeral ditches, or even surface ponding). @@ -120,8 +120,8 @@ or over a fixed structure. situations or situations where head difference over a structure determines its actual flow capacity. - [ManningResistance](/reference/node/manning-resistance.qmd): bi-directional flow based on head difference and resistance using Manning-Gauckler formula. Same usage as LinearResistance, providing a better hydrological meaning to the resistance parameterization. -- [Pump](/reference/node/pump.qmd): one-directional structure with a set flow rate. Node type typically used in combination with control to force water over the edge. -- [Outlet](/reference/node/outlet.qmd): one-directional gravity structure with a set flow rate. Node type typically used in combination with control to force water over the edge, even if +- [Pump](/reference/node/pump.qmd): one-directional structure with a set flow rate. Node type typically used in combination with control to force water over the link. +- [Outlet](/reference/node/outlet.qmd): one-directional gravity structure with a set flow rate. Node type typically used in combination with control to force water over the link, even if their is a mismatch in actual hydraulic capacity. The node type has an automated mechanism to stop the flow when the head difference is zero. The control layer can activate or deactivate nodes, set flow rates for the Pump and Outlet, or choose different parameterizations for TabulatedRatingCurve, LinearResistance or ManningResistance. diff --git a/docs/dev/addnode.qmd b/docs/dev/addnode.qmd index e64d28b21..bbe6cfc56 100644 --- a/docs/dev/addnode.qmd +++ b/docs/dev/addnode.qmd @@ -162,7 +162,7 @@ class NewNodeTypeStatic: The new node type might have associated restrictions for a model with the new node type so that it behaves properly. Basic node ID and node type validation happens in `Model.validate_model` in `python/ribasim/ribasim/model.py`, which automatically considers all node types in the `node_types` module. -Connectivity validation happens in `valid_edges` and `valid_n_flow_neighbors` in `core/src/solve.jl`. Connectivity rules are specified in `core/src/validation.jl`. Allowed upstream and downstream neighbor types for `new_node_type` (the snake case version of `NewNodeType`) are specified as follows: +Connectivity validation happens in `valid_links` and `valid_n_flow_neighbors` in `core/src/solve.jl`. Connectivity rules are specified in `core/src/validation.jl`. Allowed upstream and downstream neighbor types for `new_node_type` (the snake case version of `NewNodeType`) are specified as follows: ```julia # set allowed downstream types diff --git a/docs/dev/allocation.qmd b/docs/dev/allocation.qmd index 4b1cb1595..67c2d2757 100644 --- a/docs/dev/allocation.qmd +++ b/docs/dev/allocation.qmd @@ -51,17 +51,17 @@ When working with optimization problems using JuMP, there are three fundamental - Optimization variables: These are the [variables](https://jump.dev/JuMP.jl/stable/api/JuMP/#@constraints) that are optimized in the allocation problem formulation. They are defined using the [`@variable`](https://jump.dev/JuMP.jl/stable/api/JuMP/#@variables) macro. 
-For example, to specify the flow rates in all the edges in the allocation network as variables:
+For example, to specify the flow rates in all the links in the allocation network as variables:
 ```julia
-problem[:F] = JuMP.@variable(problem, F[edge = edges] >= 0.0)
+problem[:F] = JuMP.@variable(problem, F[link = links] >= 0.0)
 ```
 More details about setting up variables in allocation can be found in the section [below](#sec-optimization-problem).
 - Constraints: These are the constraints that the optimization variables must satisfy. They are defined using the [`@constraint`](https://jump.dev/JuMP.jl/stable/api/JuMP/#@constraint) macro.
-The definition of the edge capacity constraints is shown in section [below](#sec-constraints-and-capacities).
+The definition of the link capacity constraints is shown in section [below](#sec-constraints-and-capacities).
 `add_constraints_*` functions are used to [add constraints](#sec-initial-constraints) to the optimization problem. The [initial value of the constraints](#sec-constraints-and-capacities) is set in the function `set_initial_values_*`. During the iteration, the constraints are updated based on the current state of the allocation network.
@@ -72,11 +72,11 @@ It is defined using the [`@objective`](https://jump.dev/JuMP.jl/stable/api/JuMP/
 The functions `JuMP.normalized_rhs` and `JuMP.set_normalized_rhs` are used to read and write the constant right hand side of constraints respectively.
-For example, to update the capacity of one of the edges, `JuMP.normalized_rhs` moves all the constants to the right-hand sides and all variables to the left-hand side and `JuMP.set_normalized_rhs` sets the new right-hand-side value.
+For example, to update the capacity of one of the links, `JuMP.normalized_rhs` moves all the constants to the right-hand sides and all variables to the left-hand side and `JuMP.set_normalized_rhs` sets the new right-hand-side value.
 ```julia
 JuMP.set_normalized_rhs(
-    constraints_capacity[edge_id],
-    JuMP.normalized_rhs(constraints_capacity[edge_id]) - JuMP.value(F[edge_id]),
+    constraints_capacity[link_id],
+    JuMP.normalized_rhs(constraints_capacity[link_id]) - JuMP.value(F[link_id]),
 )
 ```
@@ -88,29 +88,29 @@ Initialization of the allocation data structures happens in `allocation_init.jl`
 For each subnetwork, an allocation problem is formulated, which is stored in the `allocation_models` field mentioned above.
 ## Data processing
-### Deriving edge capacities
-Edge capacities are important constraints in the optimization problem. They set the limit for the flows between the nodes. Therefore, the capacities of all the flow edges in the subnetworks are obtained. The capacity of an edge is given by the smallest `max_flow_rate` of the nodes connected to the edges if these nodes have such a value. The capacities are stored in a `SparseArray` object from `JuMP.jl` called `capacities`, indexed by a tuple of node IDs.
+### Deriving link capacities
+Link capacities are important constraints in the optimization problem. They set the limit for the flows between the nodes. Therefore, the capacities of all the flow links in the subnetworks are obtained. The capacity of a link is given by the smallest `max_flow_rate` of the nodes connected to the link, if these nodes have such a value. The capacities are stored in a `SparseArray` object from `JuMP.jl` called `capacities`, indexed by a tuple of node IDs.
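To make the rule concrete, a small illustrative sketch is given below in Python (the core implementation is the Julia function `get_capacity`; the helper name and node IDs here are hypothetical): the capacity of a link is the smallest `max_flow_rate` of the nodes it connects, and infinite when neither node constrains the flow.

```python
import math


def link_capacity(max_flow_rates: dict[int, float], link: tuple[int, int]) -> float:
    # max_flow_rates maps node IDs to their max_flow_rate, for nodes that have one
    rates = [max_flow_rates[node_id] for node_id in link if node_id in max_flow_rates]
    return min(rates) if rates else math.inf


# Sparse mapping from (from_node_id, to_node_id) to capacity, analogous to `capacities`
capacities = {(5, 6): link_capacity({5: 2.5}, (5, 6))}  # -> {(5, 6): 2.5}
```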
-The function `get_capacity` obtains the capacities of the edges within a subnetwork given a subnetwork ID and the Ribasim model parameters `p`, if the sources of the subnetwork are valid (checked in function `valid_sources`).
+The function `get_capacity` obtains the capacities of the links within a subnetwork given a subnetwork ID and the Ribasim model parameters `p`, if the sources of the subnetwork are valid (checked in function `valid_sources`).
 ### Handling the connection between the main network and subnetworks
-The function `find_subnetwork_connetions` finds the edges that connected the main network to a subnetwork. `subnetwork_demands` and `subnetwork_allocateds` will be created, which stores demands and allocated values for subnetworks as a whole.
-`main_network_connections` is a vector of edges that connect a subnetwork with the main network.
+The function `find_subnetwork_connetions` finds the links that connect the main network to a subnetwork. `subnetwork_demands` and `subnetwork_allocateds` will be created, which store demands and allocated values for subnetworks as a whole.
+`main_network_connections` is a vector of links that connect a subnetwork with the main network.
 ## The optimization problem {#sec-optimization-problem}
 ### Setting up the optimization variables
 There are three types of variables in the optimization problems:
-- flows between the edges in the allocation model
+- flows over the links in the allocation model
 - flows in and out of a basin with a level demand
 - flows in and out of nodes that have a buffer, which are nodes that have a flow demand
-The function `add_variables_flow!` is used to add the variable of flows between the edges. The variables are obtained from the capacity array.
+The function `add_variables_flow!` is used to add the variables for the flows over the links. The variables are obtained from the capacity array.
 And variables named by `F($startnode, $endnode)` are created.
 ```julia
-edges = keys(capacity.data)
-problem[:F] = JuMP.@variable(problem, F[edge = edges] >= 0.0)
+links = keys(capacity.data)
+problem[:F] = JuMP.@variable(problem, F[link = links] >= 0.0)
 ```
 In the function `add_variables_basin`, variables that represent flows of those basins that are connected with level demand are defined.
@@ -130,7 +130,7 @@ problem[:F_basin_out] =
     JuMP.@variable(problem, F_basin_out[node_id = node_ids_basin,] >= 0.0)
 ```
-The last set of optimization variables is the flow edges in and out of the buffer of nodes with a flow demand. It is defined in a similar way to the second set of variables.
+The last set of optimization variables consists of the flows in and out of the buffers of nodes with a flow demand. It is defined in a similar way to the second set of variables.
 ### Setting up initial optimization constraints {#sec-initial-constraints}
 All the variables are greater and equal to 0. This is set when the variables are added to the optimization problem.
@@ -163,15 +163,15 @@ The quadratic terms are defined with the `add_objective_term` function.
 Function `set_objective_priority` sets the objective function based on the main network for a given priority with the following steps:
 - First, it treats the subnetworks as user demand nodes and adds the quadratic terms of the main network.
-- Then it loops over all the edges in allocation.
-- Based on the type of the node that the edge is pointing to (user demand or flow demand), it adds the corresponding quadratic terms.
-- Finally, it does the same to the edges that start from a level demand node.
+- Then it loops over all the links in allocation. +- Based on the type of the node that the link is pointing to (user demand or flow demand), it adds the corresponding quadratic terms. +- Finally, it does the same to the links that start from a level demand node. ### Setting the constraints and capacities In the function `set_initial_values`, the following capacities and demands are initialized: - Source capacities come from the physical layer -- Edge capacities derived from the maximum capacities between the connected nodes +- Link capacities derived from the maximum capacities between the connected nodes - Basin capacities come from the disk of water above the max level set by a level demand node - Buffer capacities start at 0 - User demands fractional return flow starts at 0 @@ -181,7 +181,7 @@ As shown below, these functions set the capacities to the corresponding initial ```julia set_initial_capacities_source!(allocation_model, p) -set_initial_capacities_edge!(allocation_model, p) +set_initial_capacities_link!(allocation_model, p) set_initial_capacities_basin!(allocation_model, p, u, t) set_initial_capacities_buffer!(allocation_model) set_initial_capacities_returnflow!(allocation_model) @@ -195,15 +195,15 @@ These capacities determine the constraints of the optimization problem. Take `set_initial_capacities_source` as an example, the right-hand-side values of the `source_constraints` are set to the `source_capacity`. ```julia -for edge_metadata in values(graph.edge_data) - (; edge) = edge_metadata - if graph[edge...].subnetwork_id_source == subnetwork_id - # If it is a source edge for this allocation problem - if edge ∉ main_network_source_edges +for link_metadata in values(graph.edge_data) + (; link) = link_metadata + if graph[link...].subnetwork_id_source == subnetwork_id + # If it is a source link for this allocation problem + if link ∉ main_network_source_links # Reset the source to the averaged flow over the last allocation period - source_capacity = mean_input_flows[edge][] + source_capacity = mean_input_flows[link][] JuMP.set_normalized_rhs( - source_constraints[edge], + source_constraints[link], # It is assumed that the allocation procedure does not have to be differentiated. source_capacity, ) @@ -230,7 +230,7 @@ Lastly, capacities and demands are updated, as shown below: ```julia adjust_capacities_source!(allocation_model) -adjust_capacities_edge!(allocation_model) +adjust_capacities_link!(allocation_model) adjust_capacities_basin!(allocation_model) adjust_capacities_buffer!(allocation_model) adjust_capacities_returnflow!(allocation_model, p) @@ -244,7 +244,7 @@ end ``` ## Output data -The function `save_demands_and_allocations` saves the demand and the allocated value per demand node. And the function `save_allocation_flows` saves the optimized flows over the edges in the subnetwork. +The function `save_demands_and_allocations` saves the demand and the allocated value per demand node. And the function `save_allocation_flows` saves the optimized flows over the links in the subnetwork. These values are saved in the `record_demand` and `record_flow` fields of the `Allocation` struct and only written to the output file at the end of the simulation. 
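As a hedged post-processing sketch (the results path and the chosen `link_id` are examples, not fixed values), the allocation flows written at the end of the simulation can be inspected with pandas; note that after this rename the identifier column is `link_id` instead of `edge_id`:

```python
import pandas as pd

# Read the allocated flows per optimization time from the Arrow output
df_alloc = pd.read_feather(
    "results/allocation_flow.arrow", dtype_backend="pyarrow"
)
# Inspect the optimized flows over one particular link (link_id 7 is hypothetical)
print(df_alloc[df_alloc["link_id"] == 7])
```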
## Communicating to the physical layer diff --git a/docs/dev/qgis_test_plan.qmd b/docs/dev/qgis_test_plan.qmd index 5f4cee10d..9c7d68066 100644 --- a/docs/dev/qgis_test_plan.qmd +++ b/docs/dev/qgis_test_plan.qmd @@ -48,8 +48,8 @@ Before starting with data, perform the following tests to see if the plugin does - Press the "New" button in the Model tab: _file navigation window pops up_. - Fill in a name ("test1"). - Press OK: _A TOML and database file are created on the given location_. -- _The layers tab shows a group with a "Node" and "Edge" layer, the canvas is empty._ -- _The Ribasim panel shows a Node and Edge layer, the file path is set in the text field, all nodes buttons are enabled_. +- _The layers tab shows a group with a "Node" and "Link" layer, the canvas is empty._ +- _The Ribasim panel shows a Node and Link layer, the file path is set in the text field, all nodes buttons are enabled_. - Cleanup: Delete the created files from disk. ## New model in same folder @@ -120,7 +120,7 @@ Intended behavior: The same model is loaded twice, but there is only a connectio - Fill in a name ("test1"). - Press OK: _A TOML and database file are created on the given location_. - Press "Add to QGIS": _Nothing happens_. -- Press the "Edge" layer in the Model tab: _"Add to QGIS" button is disabled_. +- Press the "Link" layer in the Model tab: _"Add to QGIS" button is disabled_. ❌ [Failing](https://github.com/Deltares/Ribasim/issues/1683) @@ -131,10 +131,10 @@ Intended behavior: The same model is loaded twice, but there is only a connectio - Press the "New" button in the Model tab: _file navigation window pops up_. - Fill in a name ("test1"). - Press OK: _A TOML and database file are created on the given location_. -- Press the "Edge" layer in the Model tab, press "Remove from Dataset": _Popup shows "Deleting: Edge"_. -- Press "Yes": "Edge" layer is removed from Layers panel and from Model tab. +- Press the "Link" layer in the Model tab, press "Remove from Dataset": _Popup shows "Deleting: Link"_. +- Press "Yes": "Link" layer is removed from Layers panel and from Model tab. -Unsure if this is wanted behavior, it is now impossible to load the model afterwards because it no longer contains an Edge layer. +Unsure if this is wanted behavior, it is now impossible to load the model afterwards because it no longer contains an Link layer. Perhaps better if we only allow removal of the optional tables. See this [bug report](https://github.com/Deltares/Ribasim/issues/1684). # Nodes tab button interaction tests @@ -183,7 +183,7 @@ Unexpected behavior: The default type of the nodes is NULL, and therefore undefi Should be enforced and validated. See [issue](https://github.com/Deltares/Ribasim/issues/1688#issuecomment-2265315907). -## Add edges to Edge layer +## Add links to Link layer - Open QGIS and ensure that the Ribasim plugin is installed and enabled. - Open the application via the Ribasim button on the QGIS toolbar: _Ribasim panel opens_. @@ -196,7 +196,7 @@ See [issue](https://github.com/Deltares/Ribasim/issues/1688#issuecomment-2265315 - Add a node id of 1, press OK: _The first Node appears on the map_. - Click in the canvas again: _Popup appears with Feature Attributes to fill in_. - Add a node id of 2, press OK: _The second Node appears on the map_. -- Select the Edge layer in the Layers tab: _edit buttons in the toolbar become enabled_. +- Select the Link layer in the Layers tab: _edit buttons in the toolbar become enabled_. 
- Enable snapping under View > Toolbars > Snapping Toolbars: _Magnet button is enabled and active_. - Press the Add Line Feature button: _Mouse becomes a crosshair_. - Snap a line between the two nodes, click the two nodes and then right click to finish: _Popup shows with input, most fields are set to NULL_. @@ -211,8 +211,8 @@ See [issue](https://github.com/Deltares/Ribasim/issues/1688#issuecomment-2265315 - Press the "Open" button in the Model tab: _file navigation window pops up_. - Choose an existing model from the `generated_testmodels` folder. - Press OK: _The model layers appear in the layer panel and on the map_. -- Select the node layer, and make a subselection of nodes on the map: _Nodes are highlighted in yellow, including their edges_. -- Open the Edge attribute table: _The highlighted rows are those with a from/to node\_id that was selected_. +- Select the node layer, and make a subselection of nodes on the map: _Nodes are highlighted in yellow, including their links_. +- Open the Link attribute table: _The highlighted rows are those with a from/to node\_id that was selected_. - Open any non-spatial attribute table: _The highlighted rows are those with an node\_id that was selected_. diff --git a/docs/dev/scripts/plot_trace.jl b/docs/dev/scripts/plot_trace.jl index bd966b2bb..58745ad9a 100644 --- a/docs/dev/scripts/plot_trace.jl +++ b/docs/dev/scripts/plot_trace.jl @@ -107,7 +107,7 @@ function set_coordinates!(graph, nodes_per_depth, max_depth, plot_non_Ribasim) end end -function plot_edges!(ax, graph, max_depth, nodes_per_depth; n_points = 25) +function plot_links!(ax, graph, max_depth, nodes_per_depth; n_points = 25) for depth in 0:(max_depth - 1) nodes_at_depth = nodes_per_depth[depth] n_nodes = length(nodes_at_depth) @@ -194,7 +194,7 @@ function plot_graph( delete!(theme, :resolution) # Needed because of a refactor in Makie going from resolution to size f = Figure(; size = size) ax = Axis(f[1, 1]; xlabel = "depth →", xticks = 0:max_depth) - plot_edges!(ax, graph, max_depth, nodes_per_depth) + plot_links!(ax, graph, max_depth, nodes_per_depth) plot_labels!(ax, graph, max_depth, color_dict) hideydecorations!(ax) hidespines!(ax) diff --git a/docs/guide/examples.ipynb b/docs/guide/examples.ipynb index 3e09e5986..2e739f081 100644 --- a/docs/guide/examples.ipynb +++ b/docs/guide/examples.ipynb @@ -299,7 +299,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "Setup the edges:\n" + "Setup the links:\n" ] }, { @@ -308,34 +308,34 @@ "metadata": {}, "outputs": [], "source": [ - "model.edge.add(basin1, manning_resistance2)\n", - "model.edge.add(manning_resistance2, basin3)\n", - "model.edge.add(\n", + "model.link.add(basin1, manning_resistance2)\n", + "model.link.add(manning_resistance2, basin3)\n", + "model.link.add(\n", " basin3,\n", " tabulated_rating_curve8,\n", ")\n", - "model.edge.add(\n", + "model.link.add(\n", " basin3,\n", " tabulated_rating_curve5,\n", ")\n", - "model.edge.add(\n", + "model.link.add(\n", " basin3,\n", " tabulated_rating_curve4,\n", ")\n", - "model.edge.add(tabulated_rating_curve5, basin6)\n", - "model.edge.add(tabulated_rating_curve8, basin9)\n", - "model.edge.add(\n", + "model.link.add(tabulated_rating_curve5, basin6)\n", + "model.link.add(tabulated_rating_curve8, basin9)\n", + "model.link.add(\n", " tabulated_rating_curve4,\n", " terminal14,\n", ")\n", - "model.edge.add(basin6, pump7)\n", - "model.edge.add(pump7, basin9)\n", - "model.edge.add(basin9, linear_resistance10)\n", - "model.edge.add(level_boundary11, linear_resistance12)\n", - 
"model.edge.add(linear_resistance12, basin3)\n", - "model.edge.add(flow_boundary15, basin6)\n", - "model.edge.add(flow_boundary16, basin1)\n", - "model.edge.add(linear_resistance10, level_boundary17)" + "model.link.add(basin6, pump7)\n", + "model.link.add(pump7, basin9)\n", + "model.link.add(basin9, linear_resistance10)\n", + "model.link.add(level_boundary11, linear_resistance12)\n", + "model.link.add(linear_resistance12, basin3)\n", + "model.link.add(flow_boundary15, basin6)\n", + "model.link.add(flow_boundary16, basin1)\n", + "model.link.add(linear_resistance10, level_boundary17)" ] }, { @@ -431,10 +431,10 @@ "outputs": [], "source": [ "df_flow = pd.read_feather(datadir / \"basic/results/flow.arrow\", dtype_backend=\"pyarrow\")\n", - "df_flow[\"edge\"] = list(zip(df_flow.from_node_id, df_flow.to_node_id))\n", + "df_flow[\"link\"] = list(zip(df_flow.from_node_id, df_flow.to_node_id))\n", "df_flow[\"flow_m3d\"] = df_flow.flow_rate * 86400\n", - "ax = df_flow.pivot_table(index=\"time\", columns=\"edge\", values=\"flow_m3d\").plot()\n", - "ax.legend(bbox_to_anchor=(1.3, 1), title=\"Edge\")\n", + "ax = df_flow.pivot_table(index=\"time\", columns=\"link\", values=\"flow_m3d\").plot()\n", + "ax.legend(bbox_to_anchor=(1.3, 1), title=\"Link\")\n", "ax.set_ylabel(\"flow [m³day⁻¹]\");" ] }, @@ -628,7 +628,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "Setup edges:\n" + "Setup links:\n" ] }, { @@ -637,14 +637,14 @@ "metadata": {}, "outputs": [], "source": [ - "model.edge.add(model.basin[1], model.pump[3])\n", - "model.edge.add(model.pump[3], model.level_boundary[4])\n", - "model.edge.add(model.level_boundary[4], model.pump[2])\n", - "model.edge.add(model.pump[2], model.basin[1])\n", - "model.edge.add(model.basin[1], model.tabulated_rating_curve[5])\n", - "model.edge.add(model.tabulated_rating_curve[5], model.terminal[6])\n", - "model.edge.add(model.discrete_control[7], model.pump[2])\n", - "model.edge.add(model.discrete_control[7], model.pump[3])" + "model.link.add(model.basin[1], model.pump[3])\n", + "model.link.add(model.pump[3], model.level_boundary[4])\n", + "model.link.add(model.level_boundary[4], model.pump[2])\n", + "model.link.add(model.pump[2], model.basin[1])\n", + "model.link.add(model.basin[1], model.tabulated_rating_curve[5])\n", + "model.link.add(model.tabulated_rating_curve[5], model.terminal[6])\n", + "model.link.add(model.discrete_control[7], model.pump[2])\n", + "model.link.add(model.discrete_control[7], model.pump[3])" ] }, { @@ -669,7 +669,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "Listen edges are plotted with a dashed line since they are not present in the \"Edge / static\" schema but only in the \"Control / condition\" schema.\n" + "Listen links are plotted with a dashed line since they are not present in the \"Link / static\" schema but only in the \"Control / condition\" schema.\n" ] }, { @@ -913,7 +913,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "Setup the edges:\n" + "Setup the links:\n" ] }, { @@ -922,13 +922,13 @@ "metadata": {}, "outputs": [], "source": [ - "model.edge.add(model.flow_boundary[1], model.basin[2])\n", - "model.edge.add(model.basin[2], model.pump[3])\n", - "model.edge.add(model.pump[3], model.level_boundary[4])\n", - "model.edge.add(model.level_boundary[4], model.outlet[6])\n", - "model.edge.add(model.outlet[6], model.basin[2])\n", - "model.edge.add(model.pid_control[5], model.pump[3])\n", - "model.edge.add(model.pid_control[7], model.outlet[6])" + "model.link.add(model.flow_boundary[1], model.basin[2])\n", + 
"model.link.add(model.basin[2], model.pump[3])\n", + "model.link.add(model.pump[3], model.level_boundary[4])\n", + "model.link.add(model.level_boundary[4], model.outlet[6])\n", + "model.link.add(model.outlet[6], model.basin[2])\n", + "model.link.add(model.pid_control[5], model.pump[3])\n", + "model.link.add(model.pid_control[7], model.outlet[6])" ] }, { @@ -1194,7 +1194,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "Setup the edges:\n" + "Setup the links:\n" ] }, { @@ -1203,15 +1203,15 @@ "metadata": {}, "outputs": [], "source": [ - "model.edge.add(model.flow_boundary[1], model.basin[2])\n", - "model.edge.add(model.basin[2], model.user_demand[3])\n", - "model.edge.add(model.basin[2], model.linear_resistance[4])\n", - "model.edge.add(model.linear_resistance[4], model.basin[5])\n", - "model.edge.add(model.basin[5], model.user_demand[6])\n", - "model.edge.add(model.basin[5], model.tabulated_rating_curve[7])\n", - "model.edge.add(model.user_demand[3], model.basin[2])\n", - "model.edge.add(model.user_demand[6], model.basin[5])\n", - "model.edge.add(model.tabulated_rating_curve[7], model.terminal[8])" + "model.link.add(model.flow_boundary[1], model.basin[2])\n", + "model.link.add(model.basin[2], model.user_demand[3])\n", + "model.link.add(model.basin[2], model.linear_resistance[4])\n", + "model.link.add(model.linear_resistance[4], model.basin[5])\n", + "model.link.add(model.basin[5], model.user_demand[6])\n", + "model.link.add(model.basin[5], model.tabulated_rating_curve[7])\n", + "model.link.add(model.user_demand[3], model.basin[2])\n", + "model.link.add(model.user_demand[6], model.basin[5])\n", + "model.link.add(model.tabulated_rating_curve[7], model.terminal[8])" ] }, { @@ -1490,7 +1490,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "Setup the edges:\n" + "Setup the links:\n" ] }, { @@ -1499,11 +1499,11 @@ "metadata": {}, "outputs": [], "source": [ - "model.edge.add(model.flow_boundary[1], model.basin[2])\n", - "model.edge.add(model.basin[2], model.user_demand[3])\n", - "model.edge.add(model.level_demand[4], model.basin[2])\n", - "model.edge.add(model.user_demand[3], model.basin[5])\n", - "model.edge.add(model.level_demand[4], model.basin[5])" + "model.link.add(model.flow_boundary[1], model.basin[2])\n", + "model.link.add(model.basin[2], model.user_demand[3])\n", + "model.link.add(model.level_demand[4], model.basin[2])\n", + "model.link.add(model.user_demand[3], model.basin[5])\n", + "model.link.add(model.level_demand[4], model.basin[5])" ] }, { @@ -1875,7 +1875,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "Setup the edges:" + "Setup the links:" ] }, { @@ -1884,22 +1884,22 @@ "metadata": {}, "outputs": [], "source": [ - "model.edge.add(model.basin[1], model.outlet[2])\n", - "model.edge.add(model.pid_control[3], model.outlet[2])\n", - "model.edge.add(model.outlet[2], model.basin[4])\n", - "model.edge.add(model.basin[4], model.outlet[5])\n", - "model.edge.add(model.outlet[5], model.basin[6])\n", - "model.edge.add(model.basin[6], model.outlet[7])\n", - "model.edge.add(model.outlet[7], model.basin[8])\n", - "model.edge.add(model.basin[8], model.outlet[9])\n", - "model.edge.add(model.outlet[9], model.basin[10])\n", - "model.edge.add(model.basin[10], model.outlet[11])\n", - "model.edge.add(model.outlet[11], model.basin[12])\n", - "model.edge.add(model.basin[12], model.pump[13])\n", - "model.edge.add(model.pump[13], model.basin[1])\n", - "model.edge.add(model.pid_control[14], model.outlet[5])\n", - "model.edge.add(model.pid_control[15], model.outlet[7])\n", 
- "model.edge.add(model.pid_control[16], model.outlet[9])" + "model.link.add(model.basin[1], model.outlet[2])\n", + "model.link.add(model.pid_control[3], model.outlet[2])\n", + "model.link.add(model.outlet[2], model.basin[4])\n", + "model.link.add(model.basin[4], model.outlet[5])\n", + "model.link.add(model.outlet[5], model.basin[6])\n", + "model.link.add(model.basin[6], model.outlet[7])\n", + "model.link.add(model.outlet[7], model.basin[8])\n", + "model.link.add(model.basin[8], model.outlet[9])\n", + "model.link.add(model.outlet[9], model.basin[10])\n", + "model.link.add(model.basin[10], model.outlet[11])\n", + "model.link.add(model.outlet[11], model.basin[12])\n", + "model.link.add(model.basin[12], model.pump[13])\n", + "model.link.add(model.pump[13], model.basin[1])\n", + "model.link.add(model.pid_control[14], model.outlet[5])\n", + "model.link.add(model.pid_control[15], model.outlet[7])\n", + "model.link.add(model.pid_control[16], model.outlet[9])" ] }, { @@ -1971,10 +1971,10 @@ "source": [ "datadir_flow = datadir / \"local_pidcontrolled_cascade/results/flow.arrow\"\n", "df_flow = pd.read_feather(datadir_flow, dtype_backend=\"pyarrow\")\n", - "df_flow[\"edge\"] = list(zip(df_flow.from_node_id, df_flow.to_node_id))\n", + "df_flow[\"link\"] = list(zip(df_flow.from_node_id, df_flow.to_node_id))\n", "df_flow[\"flow_m3d\"] = df_flow.flow_rate * 86400\n", "\n", - "df_pivot = df_flow.pivot_table(index=\"time\", columns=\"edge\", values=\"flow_m3d\")" + "df_pivot = df_flow.pivot_table(index=\"time\", columns=\"link\", values=\"flow_m3d\")" ] }, { @@ -2209,8 +2209,8 @@ "source": [ "This defines:\n", "\n", - "- A `ContinuousControl` node with ID 1, which listens to the flow rate of the `LinearResistance` node with ID 1, puts that trough the function $f(x) = \\max(0, 0.6x)$, and assigns the result to the flow rate of the node this `ContinuousControl` node is controlling, which is defined by a (control) edge;\n", - "- A `ContinuousControl` node with ID 2, which listens to the flow rate of the `LinearResistance` node with ID 1, puts that through the function $f(x) = \\max(0, 0.4x)$, and assigns the result to the flow rate of the node this `ContinuousControl` node is controlling, which is defined by a (control) edge." + "- A `ContinuousControl` node with ID 1, which listens to the flow rate of the `LinearResistance` node with ID 1, puts that trough the function $f(x) = \\max(0, 0.6x)$, and assigns the result to the flow rate of the node this `ContinuousControl` node is controlling, which is defined by a (control) link;\n", + "- A `ContinuousControl` node with ID 2, which listens to the flow rate of the `LinearResistance` node with ID 1, puts that through the function $f(x) = \\max(0, 0.4x)$, and assigns the result to the flow rate of the node this `ContinuousControl` node is controlling, which is defined by a (control) link." 
] }, { @@ -2219,16 +2219,16 @@ "metadata": {}, "outputs": [], "source": [ - "model.edge.add(model.level_boundary[1], model.linear_resistance[2])\n", - "model.edge.add(model.linear_resistance[2], model.basin[3])\n", - "model.edge.add(model.basin[3], model.outlet[4])\n", - "model.edge.add(model.basin[3], model.outlet[5])\n", - "model.edge.add(model.outlet[4], model.terminal[6])\n", - "model.edge.add(model.outlet[5], model.terminal[7])\n", + "model.link.add(model.level_boundary[1], model.linear_resistance[2])\n", + "model.link.add(model.linear_resistance[2], model.basin[3])\n", + "model.link.add(model.basin[3], model.outlet[4])\n", + "model.link.add(model.basin[3], model.outlet[5])\n", + "model.link.add(model.outlet[4], model.terminal[6])\n", + "model.link.add(model.outlet[5], model.terminal[7])\n", "\n", "# Define which node is controlled by each continuous control node\n", - "model.edge.add(model.continuous_control[8], model.outlet[4])\n", - "model.edge.add(model.continuous_control[9], model.outlet[5])" + "model.link.add(model.continuous_control[8], model.outlet[4])\n", + "model.link.add(model.continuous_control[9], model.outlet[5])" ] }, { @@ -2311,7 +2311,7 @@ "fig, ax = plt.subplots()\n", "\n", "\n", - "def plot_edge_flow(from_node_type, from_node_id, to_node_type, to_node_id):\n", + "def plot_link_flow(from_node_type, from_node_id, to_node_type, to_node_id):\n", " df_flow_filtered = df_flow[\n", " (df_flow[\"from_node_id\"] == from_node_id)\n", " & (df_flow[\"to_node_id\"] == to_node_id)\n", @@ -2324,9 +2324,9 @@ " )\n", "\n", "\n", - "plot_edge_flow(\"LinearResistance\", 1, \"Basin\", 1)\n", - "plot_edge_flow(\"Basin\", 1, \"Outlet\", 1)\n", - "plot_edge_flow(\"Basin\", 1, \"Outlet\", 2)\n", + "plot_link_flow(\"LinearResistance\", 1, \"Basin\", 1)\n", + "plot_link_flow(\"Basin\", 1, \"Outlet\", 1)\n", + "plot_link_flow(\"Basin\", 1, \"Outlet\", 2)\n", "ax.set_ylabel(\"flow [m³s⁻¹]\");" ] } diff --git a/docs/guide/qgis.qmd b/docs/guide/qgis.qmd index 41faf4929..512ecb1e1 100644 --- a/docs/guide/qgis.qmd +++ b/docs/guide/qgis.qmd @@ -69,9 +69,9 @@ snapping mode by clicking the magnet and set the snapping distance to 25 pixels. ![](https://user-images.githubusercontent.com/4471859/224939328-8359272a-30bb-4eb1-ab6c-968318ac3997.png){fig-align="left"} -### Creating connecting edges +### Creating connecting links -Select the Edge layer and turn on the edit mode. +Select the Link layer and turn on the edit mode. ![](https://user-images.githubusercontent.com/4471859/224939342-c6939331-a60d-4526-a350-3cddb122c62d.png){fig-align="left"} @@ -81,11 +81,11 @@ Select "Add line feature". Create a connection by left clicking a source node and right clicking the destination node. -Create an edge +Create an link -A form where one can change the edge attributes will pop up. Once done with editing, click ok. +A form where one can change the link attributes will pop up. Once done with editing, click ok. -Edge form +Link form Now leave the edit mode and save the results to the layer. Your model is now ready to run. See @@ -100,7 +100,7 @@ Now leave the edit mode and save the results to the layer. Your model is now rea # Inspect a (large) model {#sec-inspect} For larger models the node tables can grow quite large. To facilitate inspection, -the tables are linked via the `node_id` field to the Node table, and react to the selection of the Node layer. That is, on selection of certain nodes---either via the map or the attribute table---the selection is also made in all related tables. 
This is also the case for the Edge layer. +the tables are linked via the `node_id` field to the Node table, and react to the selection of the Node layer. That is, on selection of certain nodes---either via the map or the attribute table---the selection is also made in all related tables. This is also the case for the Link layer. It helps to set the attribute table of a table of interest to show selected features only (using the dropdown button on the bottom left). Selection change @@ -116,7 +116,7 @@ Click the "Time Series" button of the iMOD plugin. Select the layer that you wish to plot. From the "Node" layer you can plot level or storage on Basin nodes. -From the "Edge" layer you can plot flow over flow edges. +From the "Link" layer you can plot flow over flow links. Note that before switching between these, you typically have to click "Clear" to clear the selection. If you run a simulation with the model open in QGIS, you have to close and re-open the "iMOD Time Series Plot" panel for the new results to be loaded. diff --git a/docs/known_issues.qmd b/docs/known_issues.qmd index 0783d358d..547d221fb 100644 --- a/docs/known_issues.qmd +++ b/docs/known_issues.qmd @@ -9,7 +9,7 @@ Besides the issues that need to be fixed, there are also considerations that had - The QGIS plugin does not have a dynamic relation between its own plugin and the layers that are loaded in the QGIS project. That means that deleting a layer from the layers panel does not automatically remove it from the GeoPackage, as the layers are clones. -- Tables and geometries are not linked. If you remove an edge or a node, the tables containing information about those objects will remain. +- Tables and geometries are not linked. If you remove an link or a node, the tables containing information about those objects will remain. It is up to the user to clean up all tables. -- The QGIS plugin does not update edges when nodes are moved. +- The QGIS plugin does not update links when nodes are moved. Snapping is only used to grab the information of the node the user points to. diff --git a/docs/reference/node/discrete-control.qmd b/docs/reference/node/discrete-control.qmd index 040bb0b22..ad8596478 100644 --- a/docs/reference/node/discrete-control.qmd +++ b/docs/reference/node/discrete-control.qmd @@ -88,7 +88,7 @@ DiscreteControl is applied in the Julia core as follows: - When a condition changes, the corresponding DiscreteControl node ID is retrieved (node_id in the condition schema above). - The truth value of all the conditions this DiscreteControl node listens to are retrieved, **in the sorted order as specified in the condition schema**. This is then converted into a string of "T" for true and "F" for false. This string we call the truth state.* - The table below determines for the given DiscreteControl node ID and truth state what the corresponding control state is. -- For all the nodes this DiscreteControl node affects (as given by the "control" edges in [Edges / static](/reference/usage.qmd#edge)), their parameters are set to those parameters in `NodeType / static` corresponding to the determined control state. +- For all the nodes this DiscreteControl node affects (as given by the "control" links in [Links / static](/reference/usage.qmd#link)), their parameters are set to those parameters in `NodeType / static` corresponding to the determined control state. *. 
There is also a second truth state created in which for the last condition that changed it is specified whether it was an upcrossing ("U") or downcrossing ("D") of the threshold (greater than) value. If a control state is specified for a truth state that is crossing-specific, this takes precedence over the control state for the truth state that contains only "T" and "F". diff --git a/docs/reference/node/flow-boundary.qmd b/docs/reference/node/flow-boundary.qmd index b74c4eb38..964cbb7c4 100644 --- a/docs/reference/node/flow-boundary.qmd +++ b/docs/reference/node/flow-boundary.qmd @@ -4,7 +4,7 @@ title: "FlowBoundary" A FlowBoundary adds water to the model at a specified flow rate. It can be used as a boundary condition like a measured upstream flow rate, or lateral inflow. -We require that an edge connected to a FlowBoundary is always outgoing, and points towards a Basin. +We require that an link connected to a FlowBoundary is always outgoing, and points towards a Basin. # Tables diff --git a/docs/reference/node/level-demand.qmd b/docs/reference/node/level-demand.qmd index d942fa425..3179c1ff9 100644 --- a/docs/reference/node/level-demand.qmd +++ b/docs/reference/node/level-demand.qmd @@ -4,7 +4,7 @@ title: "LevelDemand" A `LevelDemand` node associates a minimum and a maximum level with connected Basins to be used by the allocation algorithm. -Since this connection conveys information rather than flow, an outgoing control edge must be used. +Since this connection conveys information rather than flow, an outgoing control link must be used. Below the minimum level the Basin has a demand, above the maximum level the Basin has a surplus and acts as a source. The source can be used by all nodes with demands in order of priority. diff --git a/docs/reference/node/outlet.qmd b/docs/reference/node/outlet.qmd index b2b17cfc3..b2848fd0f 100644 --- a/docs/reference/node/outlet.qmd +++ b/docs/reference/node/outlet.qmd @@ -5,7 +5,7 @@ title: "Outlet" The Outlet lets water flow downstream with a prescribed flow rate. It is similar to the Pump, except that water only flows down, by gravity. -When PID controlled, the Outlet must point towards the controlled Basin in terms of edges. +When PID controlled, the Outlet must point towards the controlled Basin in terms of links. # Tables diff --git a/docs/reference/node/pid-control.qmd b/docs/reference/node/pid-control.qmd index 9b369d877..fea787e69 100644 --- a/docs/reference/node/pid-control.qmd +++ b/docs/reference/node/pid-control.qmd @@ -7,7 +7,7 @@ See also [PID controller](https://en.wikipedia.org/wiki/PID_controller). When a PidControl node is made inactive, the node under its control retains the last flow rate value, and the error integral is reset to 0. -In the future controlling the flow on a particular edge could be supported. +In the future controlling the flow on a particular link could be supported. # Tables @@ -107,5 +107,5 @@ Note that to compute this, $\hat{f}_\text{PID}$ has to be known first, meaning t Note by @eq-error that the error is positive if the setpoint is larger than the Basin level and negative if the setpoint is smaller than the Basin level. -We enforce the convention that when a Pump is controlled, its edge points away from the Basin, and when an Outlet is controlled, its edge points towards the Basin, so that the main flow direction along these edges is positive. 
+We enforce the convention that when a Pump is controlled, its link points away from the Basin, and when an Outlet is controlled, its link points towards the Basin, so that the main flow direction along these links is positive. Therefore, positive flows of the Pump and Outlet have opposite effects on the Basin, and thus the parameters $K_p,K_i,K_d$ of the Pump and Outlet must have opposite signs to achieve the same goal. diff --git a/docs/reference/node/pump.qmd b/docs/reference/node/pump.qmd index ac7c80f71..e28223083 100644 --- a/docs/reference/node/pump.qmd +++ b/docs/reference/node/pump.qmd @@ -6,7 +6,7 @@ Pump water from a source node to a destination node. The set flow rate will be pumped unless the intake storage is less than $10~m^3$, in which case the flow rate will be linearly reduced to $0~m^3/s$. The intake must be either a Basin or LevelBoundary. -When PID controlled, the pump must point away from the controlled basin in terms of edges. +When PID controlled, the pump must point away from the controlled basin in terms of links. # Tables diff --git a/docs/reference/node/user-demand.qmd b/docs/reference/node/user-demand.qmd index 9057317c3..4066f2c5a 100644 --- a/docs/reference/node/user-demand.qmd +++ b/docs/reference/node/user-demand.qmd @@ -12,7 +12,7 @@ This parameter is only used when allocation is active and is optional when alloc When the connected Basin is almost empty or reaches the minimum level at which the UserDemand can extract water (`min_level`), it will stop extraction. -UserDemands need an outgoing flow edge along which they can send their return flow, this can also be to the same Basin from which it extracts water. +UserDemands need an outgoing flow link along which they can send their return flow, this can also be to the same Basin from which it extracts water. The amount of return flow is always a fraction of the inflow into the UserDemand. The difference is consumed by the UserDemand. diff --git a/docs/reference/usage.qmd b/docs/reference/usage.qmd index d9f24c625..1cf96def3 100644 --- a/docs/reference/usage.qmd +++ b/docs/reference/usage.qmd @@ -100,12 +100,12 @@ concentration | Bool | Whether to enable tracer calculations or not. # GeoPackage database and Arrow tables {#sec-geopackage} The input and output tables described below all share that they are tabular files. The Node -and Edge tables always have to be in the [GeoPackage](https://www.geopackage.org/) database file, and +and Link tables always have to be in the [GeoPackage](https://www.geopackage.org/) database file, and results are always written to [Apache Arrow](https://arrow.apache.org/) files, sometimes also known as Feather files. All other tables can either be in the database or in separate Arrow files that are listed in the TOML as described above. -For visualization, the Node and Edge tables typically have associated geometries. GeoPackage +For visualization, the Node and Link tables typically have associated geometries. GeoPackage was used since it provides a standardized way to store tables with (and without) geometry columns in a SQLite database. If, like Ribasim, you can ignore the geometries, a GeoPackage is easy to read using SQLite libraries, which are commonly available. Furthermore GeoPackage @@ -159,8 +159,8 @@ unique among all nodes, and the type must be one of the available node types lis Nodes are components that are connected together to form a larger system. The Basin is a central node type that stores water. The other node types influence the flow between Basins -in some way. 
Counter intuitively, even systems you may think of as edges, such as a canal, -are nodes in Ribasim. This is because edges only define direct instantaneous couplings +in some way. Counter intuitively, even systems you may think of as links, such as a canal, +are nodes in Ribasim. This is because links only define direct instantaneous couplings between nodes, and never have storage of their own. column | type | restriction @@ -174,22 +174,22 @@ subnetwork_id | Int32 | (optional) Adding a point geometry to the node table can be helpful to examine models in [QGIS](https://qgis.org/en/site/), as it will show the location of the nodes on the map. The geometry is not used by Ribasim. -# Edge {#sec-edge} +# Link {#sec-link} -Edges define connections between nodes. The only thing that defines an edge is the nodes it connects, and in what direction. -There are currently 2 possible edge types: +Links define connections between nodes. The only thing that defines an link is the nodes it connects, and in what direction. +There are currently 2 possible link types: -1. "flow": Flows between nodes are stored on edges. The effect of - the edge direction depends on the node type, Node types that have a notion of an upstream - and downstream side use the incoming edge as the upstream side, and the outgoing edge as the - downstream side. This means that edges should generally be drawn in the main flow direction. - But for instance between two `LinearResistances` the edge direction does not affect - anything, other than the sign of the flow on the edge. The sign of the flow follows the edge - direction; a positive flow flows along the edge direction, a negative flow in the opposite +1. "flow": Flows between nodes are stored on links. The effect of + the link direction depends on the node type, Node types that have a notion of an upstream + and downstream side use the incoming link as the upstream side, and the outgoing link as the + downstream side. This means that links should generally be drawn in the main flow direction. + But for instance between two `LinearResistances` the link direction does not affect + anything, other than the sign of the flow on the link. The sign of the flow follows the link + direction; a positive flow flows along the link direction, a negative flow in the opposite way. -2. "control": The control edges define which nodes are controlled by a particular control node. - Control edges should always point away from the control node. - The edges between the control node and the nodes it *listens* to are *not* present in `Edge`, these are defined in [`DiscreteControl / condition`](node/discrete-control.qmd) +2. "control": The control links define which nodes are controlled by a particular control node. + Control links should always point away from the control node. + The links between the control node and the nodes it *listens* to are *not* present in `Link`, these are defined in [`DiscreteControl / condition`](node/discrete-control.qmd) column | type | restriction --------------| ----------------------------- | ----------- @@ -197,7 +197,7 @@ from_node_type| String | - from_node_id | Int32 | - to_node_type | String | - to_node_id | Int32 | - -edge_type | String | must be "flow" or "control" +link_type | String | must be "flow" or "control" geom | LineString or MultiLineString | (optional) name | String | (optional, does not have to be unique) @@ -216,10 +216,10 @@ The Basin table contains: - The initial condition is written to the file, but the final state is not. 
It will be placed in a separate output state file in the future. - The `inflow_rate` and `outflow_rate` are the sum of the flows from other nodes into and out of the Basin respectively. - The actual flows determine in which term they are counted, not the edge direction. + The actual flows determine in which term they are counted, not the link direction. - The `storage_rate` is the net mean flow that is needed to achieve the storage change between timesteps. -- The `inflow_rate` consists of the sum of all modelled flows into the basin: `inflow_rate` (horizontal flows into the basin, independent of edge direction) + `precipitation` + `drainage`. -- The `outflow_rate` consists of the sum of all modelled flows out of the basin: `outflow_rate` (horizontal flows out of the basin, independent of edge direction) + `evaporation` + `infiltration`. +- The `inflow_rate` consists of the sum of all modelled flows into the basin: `inflow_rate` (horizontal flows into the basin, independent of link direction) + `precipitation` + `drainage`. +- The `outflow_rate` consists of the sum of all modelled flows out of the basin: `outflow_rate` (horizontal flows out of the basin, independent of link direction) + `evaporation` + `infiltration`. - The `balance_error` is the difference between the `storage_rate` on one side and the `inflow_rate` and `outflow_rate` on the other side: `storage_rate` - (`inflow_rate` - `outflow_rate`). It can be used to check if the numerical error when solving the water balance is sufficiently small. - The `relative_error` is the fraction of the `balance_error` over the mean of the `total_inflow` and `total_outflow`. @@ -246,21 +246,21 @@ The table is sorted by time, and per time it is sorted by `node_id`. ## Flow - `flow.arrow` -The flow table contains calculated mean flows over the `saveat` intervals for every flow edge in the model. +The flow table contains calculated mean flows over the `saveat` intervals for every flow link in the model. In the time column the start of the period is indicated. column | type | unit -------------- | --------------------- | ---- time | DateTime | - -edge_id | Int32 | - +link_id | Int32 | - from_node_type | String | - from_node_id | Int32 | - to_node_type | String | - to_node_id | Int32 | - flow_rate | Float64 | $\text{m}^3/\text{s}$ -The table is sorted by time, and per time the same `edge_id` order is used, though not sorted. -The `edge_id` value is the same as the `fid` written to the Edge table, and can be used to directly look up the Edge geometry. +The table is sorted by time, and per time the same `link_id` order is used, though not sorted. +The `link_id` value is the same as the `fid` written to the Link table, and can be used to directly look up the Link geometry. Flows from the "from" to the "to" node have a positive sign, and if the flow is reversed it will be negative. ## State - `basin_state.arrow` {#sec-state} @@ -315,14 +315,14 @@ Currently the stored demand and abstraction rate are those at the allocation tim ## Allocation flow - `allocation_flow.arrow` -The allocation flow table contains results of the optimized allocation flow on every edge in the model that is part of a subnetwork, for each time an optimization problem is solved (see also [here](/concept/allocation.qmd#the-high-level-algorithm)). 
+The allocation flow table contains results of the optimized allocation flow on every link in the model that is part of a subnetwork, for each time an optimization problem is solved (see also [here](/concept/allocation.qmd#the-high-level-algorithm)). If in the model a main network and subnetwork(s) are specified, there are 2 different types of optimization for the subnetwork: collecting its total demand per priority (for allocating flow from the main network to the subnetwork), and allocating flow within the subnetwork. The column `collect_demands` provides the distinction between these two optimization types. column | type ----------------| ----- time | DateTime -edge_id | Int32 +link_id | Int32 from_node_type | String from_node_id | Int32 to_node_type | String diff --git a/docs/reference/validation.qmd b/docs/reference/validation.qmd index e92ba6e36..ec4a03fdc 100644 --- a/docs/reference/validation.qmd +++ b/docs/reference/validation.qmd @@ -41,7 +41,7 @@ markdown_table(df) # Neighbor amounts -The table below shows for each node type between which bounds the amount of in- and outneighbors must be, for both flow and control edges. +The table below shows for each node type between which bounds the amount of in- and outneighbors must be, for both flow and control links. ```{julia} # | code-fold: true diff --git a/docs/tutorial/irrigation-demand.ipynb b/docs/tutorial/irrigation-demand.ipynb index 18010d1ae..43c3277b2 100644 --- a/docs/tutorial/irrigation-demand.ipynb +++ b/docs/tutorial/irrigation-demand.ipynb @@ -227,7 +227,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "### Add edges" + "### Add links" ] }, { @@ -236,14 +236,14 @@ "metadata": {}, "outputs": [], "source": [ - "model.edge.add(main, diversion_basin, name=\"main\")\n", - "model.edge.add(minor, confluence, name=\"minor\")\n", - "model.edge.add(diversion_basin, irrigation, name=\"irrigation\")\n", - "model.edge.add(irrigation, confluence)\n", - "model.edge.add(diversion_basin, diversion_weir, name=\"not diverted\")\n", - "model.edge.add(diversion_weir, confluence)\n", - "model.edge.add(confluence, weir)\n", - "model.edge.add(weir, sea, name=\"sea\")" + "model.link.add(main, diversion_basin, name=\"main\")\n", + "model.link.add(minor, confluence, name=\"minor\")\n", + "model.link.add(diversion_basin, irrigation, name=\"irrigation\")\n", + "model.link.add(irrigation, confluence)\n", + "model.link.add(diversion_basin, diversion_weir, name=\"not diverted\")\n", + "model.link.add(diversion_weir, confluence)\n", + "model.link.add(confluence, weir)\n", + "model.link.add(weir, sea, name=\"sea\")" ] }, { @@ -398,8 +398,8 @@ "df_flow = pd.read_feather(\n", " base_dir / \"Crystal-2/results/flow.arrow\", dtype_backend=\"pyarrow\"\n", ")\n", - "# Add the edge names and then remove unnamed edges\n", - "df_flow[\"name\"] = model.edge.df[\"name\"].loc[df_flow[\"edge_id\"]].to_numpy()\n", + "# Add the link names and then remove unnamed links\n", + "df_flow[\"name\"] = model.link.df[\"name\"].loc[df_flow[\"link_id\"]].to_numpy()\n", "df_flow = df_flow[df_flow[\"name\"].astype(bool)]\n", "\n", "# Plot the flow data, interactive plot with Plotly\n", @@ -408,7 +408,7 @@ ").reset_index()\n", "fig = px.line(pivot_flow, x=\"time\", y=pivot_flow.columns[1:], title=\"Flow [m3/s]\")\n", "\n", - "fig.update_layout(legend_title_text=\"Edge\")\n", + "fig.update_layout(legend_title_text=\"Link\")\n", "fig.show()" ] }, @@ -416,7 +416,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "Try toggling the edges on and off by clicking on them 
in the edges." + "Try toggling the links on and off by clicking on them in the links." ] } ], diff --git a/docs/tutorial/natural-flow.ipynb b/docs/tutorial/natural-flow.ipynb index cfb47db89..4eef774b3 100644 --- a/docs/tutorial/natural-flow.ipynb +++ b/docs/tutorial/natural-flow.ipynb @@ -279,8 +279,8 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "### Defining edges\n", - "Implement the connections (edges) between the nodes." + "### Defining links\n", + "Implement the connections (links) between the nodes." ] }, { @@ -289,10 +289,10 @@ "metadata": {}, "outputs": [], "source": [ - "model.edge.add(main, confluence, name=\"main\")\n", - "model.edge.add(minor, confluence, name=\"minor\")\n", - "model.edge.add(confluence, weir)\n", - "model.edge.add(weir, sea, name=\"sea\")" + "model.link.add(main, confluence, name=\"main\")\n", + "model.link.add(minor, confluence, name=\"minor\")\n", + "model.link.add(confluence, weir)\n", + "model.link.add(weir, sea, name=\"sea\")" ] }, { @@ -390,7 +390,7 @@ "metadata": {}, "source": [ "### Post-processing results\n", - "Read the Arrow files and plot the simulated flows from different edges and the levels and storages at our confluence point:" + "Read the Arrow files and plot the simulated flows from different links and the levels and storages at our confluence point:" ] }, { @@ -453,8 +453,8 @@ "df_flow = pd.read_feather(\n", " base_dir / \"Crystal-1/results/flow.arrow\", dtype_backend=\"pyarrow\"\n", ")\n", - "# Add the edge names and then remove unnamed edges\n", - "df_flow[\"name\"] = model.edge.df[\"name\"].loc[df_flow[\"edge_id\"]].to_numpy()\n", + "# Add the link names and then remove unnamed links\n", + "df_flow[\"name\"] = model.link.df[\"name\"].loc[df_flow[\"link_id\"]].to_numpy()\n", "df_flow = df_flow[df_flow[\"name\"].astype(bool)]\n", "\n", "# Create a pivot table\n", @@ -472,7 +472,7 @@ "# Set labels and title\n", "ax.set_xlabel(\"Time\")\n", "ax.set_ylabel(\"Flow [m³/s]\")\n", - "ax.legend(bbox_to_anchor=(1.15, 1), title=\"Edge\")\n", + "ax.legend(bbox_to_anchor=(1.15, 1), title=\"Link\")\n", "plt.title(\"Flow\")\n", "plt.grid(True)\n", "plt.show()" @@ -482,9 +482,9 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "The figure above shows the discharges in $\\text{m}^3/\\text{s}$ on each edge.\n", + "The figure above shows the discharges in $\\text{m}^3/\\text{s}$ on each link.\n", "\n", - "Edge (3,4) represents the flow from the confluence to the TabulatedRatingCurve and edge (4,5) represents the flow from the TabulatedRatingCurve to the Terminal.\n", + "Link (3,4) represents the flow from the confluence to the TabulatedRatingCurve and link (4,5) represents the flow from the TabulatedRatingCurve to the Terminal.\n", "Both show the same discharge over time.\n", "Which is expected in a natural flow environment, as what is coming into the confluence must come out." 
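As a quick cross-check (an illustrative sketch, not part of the original notebook), the conservation statement above can be verified directly from `flow.arrow`. The node IDs 3 (confluence), 4 (TabulatedRatingCurve) and 5 (Terminal), the `base_dir` variable and the `from_node_id`/`to_node_id` columns are assumed from the tutorial setup; adjust them if your model numbers the nodes differently.

```python
import pandas as pd

# Re-read the raw flow results; df_flow above was filtered to named links only.
raw_flow = pd.read_feather(
    base_dir / "Crystal-1/results/flow.arrow", dtype_backend="pyarrow"
)

# Flow into the TabulatedRatingCurve (node 3 -> 4) ...
inflow = raw_flow.loc[
    (raw_flow.from_node_id == 3) & (raw_flow.to_node_id == 4), ["time", "flow_rate"]
].set_index("time")
# ... and flow out of it (node 4 -> 5) should match at every saved time step.
outflow = raw_flow.loc[
    (raw_flow.from_node_id == 4) & (raw_flow.to_node_id == 5), ["time", "flow_rate"]
].set_index("time")

print((inflow - outflow).abs().max())  # expected to be (numerically) zero
```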
] diff --git a/docs/tutorial/reservoir.ipynb b/docs/tutorial/reservoir.ipynb index 03c1673ed..ab3c4e6a2 100644 --- a/docs/tutorial/reservoir.ipynb +++ b/docs/tutorial/reservoir.ipynb @@ -210,16 +210,16 @@ "metadata": {}, "outputs": [], "source": [ - "model.edge.add(main, reservoir, name=\"main\")\n", - "model.edge.add(minor, confluence, name=\"minor\")\n", - "model.edge.add(reservoir, irrigation, name=\"irrigation\")\n", - "model.edge.add(irrigation, confluence)\n", - "model.edge.add(reservoir, city, name=\"city\")\n", - "model.edge.add(city, confluence, name=\"city returnflow\")\n", - "model.edge.add(reservoir, diversion_weir, name=\"not diverted\")\n", - "model.edge.add(diversion_weir, confluence)\n", - "model.edge.add(confluence, weir)\n", - "model.edge.add(weir, sea, name=\"sea\")" + "model.link.add(main, reservoir, name=\"main\")\n", + "model.link.add(minor, confluence, name=\"minor\")\n", + "model.link.add(reservoir, irrigation, name=\"irrigation\")\n", + "model.link.add(irrigation, confluence)\n", + "model.link.add(reservoir, city, name=\"city\")\n", + "model.link.add(city, confluence, name=\"city returnflow\")\n", + "model.link.add(reservoir, diversion_weir, name=\"not diverted\")\n", + "model.link.add(diversion_weir, confluence)\n", + "model.link.add(confluence, weir)\n", + "model.link.add(weir, sea, name=\"sea\")" ] }, { @@ -334,8 +334,8 @@ "df_flow = pd.read_feather(\n", " base_dir / \"Crystal-3/results/flow.arrow\", dtype_backend=\"pyarrow\"\n", ")\n", - "# Add the edge names and then remove unnamed edges\n", - "df_flow[\"name\"] = model.edge.df[\"name\"].loc[df_flow[\"edge_id\"]].to_numpy()\n", + "# Add the link names and then remove unnamed links\n", + "df_flow[\"name\"] = model.link.df[\"name\"].loc[df_flow[\"link_id\"]].to_numpy()\n", "df_flow = df_flow[df_flow[\"name\"].astype(bool)]\n", "\n", "# Plot the flow data, interactive plot with Plotly\n", @@ -344,7 +344,7 @@ ").reset_index()\n", "fig = px.line(pivot_flow, x=\"time\", y=pivot_flow.columns[1:], title=\"Flow [m3/s]\")\n", "\n", - "fig.update_layout(legend_title_text=\"Edge\")\n", + "fig.update_layout(legend_title_text=\"Link\")\n", "fig.show()" ] } diff --git a/python/ribasim/ribasim/__init__.py b/python/ribasim/ribasim/__init__.py index 9f3c41138..98d785bf0 100644 --- a/python/ribasim/ribasim/__init__.py +++ b/python/ribasim/ribasim/__init__.py @@ -1,9 +1,9 @@ __version__ = "2025.1.0" # Keep synced write_schema_version in ribasim_qgis/core/geopackage.py -__schema_version__ = 3 +__schema_version__ = 4 from ribasim.config import Allocation, Logging, Node, Solver -from ribasim.geometry.edge import EdgeTable +from ribasim.geometry.link import LinkTable from ribasim.model import Model -__all__ = ["EdgeTable", "Allocation", "Logging", "Model", "Solver", "Node"] +__all__ = ["LinkTable", "Allocation", "Logging", "Model", "Solver", "Node"] diff --git a/python/ribasim/ribasim/config.py b/python/ribasim/ribasim/config.py index d86bf8ee6..92f9cc523 100644 --- a/python/ribasim/ribasim/config.py +++ b/python/ribasim/ribasim/config.py @@ -11,7 +11,7 @@ from shapely.geometry import Point from ribasim.geometry import BasinAreaSchema, NodeTable -from ribasim.geometry.edge import NodeData +from ribasim.geometry.link import NodeData from ribasim.input_base import ChildModel, NodeModel, SpatialTableModel, TableModel # These schemas are autogenerated diff --git a/python/ribasim/ribasim/delwaq/generate.py b/python/ribasim/ribasim/delwaq/generate.py index 309f7a67b..482a7345c 100644 --- a/python/ribasim/ribasim/delwaq/generate.py +++ 
b/python/ribasim/ribasim/delwaq/generate.py @@ -40,7 +40,7 @@ autoescape=True, loader=jinja2.FileSystemLoader(delwaq_dir / "template") ) -# Add evaporation edges, so mass balance is correct +# Add evaporation links, so mass balance is correct # To simulate salt increase due to evaporation, set to False USE_EVAP = True @@ -90,12 +90,12 @@ def _make_boundary(data, boundary_type): return boundary, substances -def _setup_graph(nodes, edge, evaporate_mass=True): +def _setup_graph(nodes, link, evaporate_mass=True): G = nx.DiGraph() assert nodes.df is not None for row in nodes.df.itertuples(): - if row.node_type not in ribasim.geometry.edge.SPATIALCONTROLNODETYPES: + if row.node_type not in ribasim.geometry.link.SPATIALCONTROLNODETYPES: G.add_node( row.Index, type=row.node_type, @@ -104,9 +104,9 @@ def _setup_graph(nodes, edge, evaporate_mass=True): y=row.geometry.y, pos=(row.geometry.x, row.geometry.y), ) - assert edge.df is not None - for row in edge.df.itertuples(): - if row.edge_type == "flow": + assert link.df is not None + for row in link.df.itertuples(): + if row.link_type == "flow": G.add_edge( row.from_node_id, row.to_node_id, @@ -116,7 +116,7 @@ def _setup_graph(nodes, edge, evaporate_mass=True): # Simplify network, only keeping Basins and Boundaries. # We find an unwanted node, remove it, - # and merge the flow edges to/from the node. + # and merge the flow links to/from the node. remove_nodes = [] for node_id, out in G.succ.items(): if G.nodes[node_id]["type"] not in [ @@ -133,15 +133,15 @@ def _setup_graph(nodes, edge, evaporate_mass=True): for outneighbor_id in out.keys(): if outneighbor_id in remove_nodes: - logger.debug("Not making edge to removed node.") + logger.debug("Not making link to removed node.") continue - edge = (inneighbor_id, outneighbor_id) - edge_id = G.get_edge_data(node_id, outneighbor_id)["id"][0] - if G.has_edge(*edge): - data = G.get_edge_data(*edge) - data["id"].append(edge_id) + link = (inneighbor_id, outneighbor_id) + link_id = G.get_edge_data(node_id, outneighbor_id)["id"][0] + if G.has_edge(*link): + data = G.get_edge_data(*link) + data["id"].append(link_id) else: - G.add_edge(*edge, id=[edge_id]) + G.add_edge(*link, id=[link_id]) iso = nx.number_of_isolates(G) if iso > 0: @@ -154,9 +154,9 @@ def _setup_graph(nodes, edge, evaporate_mass=True): # Due to the simplification, we can end up with cycles of length 2. # This happens when a UserDemand is connected to and from a Basin, # but can also happen in other cases (rivers with a outlet and pump), - # for which we do nothing. We merge these UserDemand cycles edges to - # a single edge, and later merge the flows. - merge_edges = [] + # for which we do nothing. We merge these UserDemand cycles links to + # a single link, and later merge the flows. 
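(Aside: the cycle-merging step below relies on `networkx.simple_cycles` to find these length-2 round trips, e.g. a UserDemand that both abstracts from and returns to the same Basin. The toy graph below is an illustrative sketch with made-up labels, not code from this patch.)

```python
import networkx as nx

G = nx.DiGraph()
G.add_edge("Basin #1", "UserDemand #2", id=[5])   # abstraction link
G.add_edge("UserDemand #2", "Basin #1", id=[6])   # return-flow link

# Length-2 cycles are exactly the round trips that get merged into a single link.
two_cycles = [loop for loop in nx.simple_cycles(G) if len(loop) == 2]
print(two_cycles)  # e.g. [['Basin #1', 'UserDemand #2']]
```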
+    merge_links = []
     for loop in nx.simple_cycles(G):
         if len(loop) == 2:
             if (
@@ -165,23 +165,23 @@ def _setup_graph(nodes, edge, evaporate_mass=True):
             ):
                 logger.debug("Found cycle that is not a UserDemand.")
             else:
-                edge_ids = G.edges[loop]["id"]
-                G.edges[reversed(loop)]["id"].extend(edge_ids)
-                merge_edges.extend(edge_ids)
-                G.remove_edge(*loop)
+                link_ids = G.edges[loop]["id"]
+                G.edges[reversed(loop)]["id"].extend(link_ids)
+                merge_links.extend(link_ids)
+                G.remove_edge(*loop)

-    # Remove boundary to boundary edges
-    remove_double_edges = []
+    # Remove boundary to boundary links
+    remove_double_links = []
     for x in G.edges(data=True):
         a, b, d = x
         if G.nodes[a]["type"] == "Terminal" and G.nodes[b]["type"] == "UserDemand":
-            logger.debug("Removing edge between Terminal and UserDemand")
-            remove_double_edges.append(a)
+            logger.debug("Removing link between Terminal and UserDemand")
+            remove_double_links.append(a)
         elif G.nodes[a]["type"] == "UserDemand" and G.nodes[b]["type"] == "Terminal":
-            remove_double_edges.append(b)
-            logger.debug("Removing edge between UserDemand and Terminal")
+            remove_double_links.append(b)
+            logger.debug("Removing link between UserDemand and Terminal")

-    for node_id in remove_double_edges:
+    for node_id in remove_double_links:
         G.remove_node(node_id)

     # Relabel the nodes as consecutive integers for Delwaq
@@ -221,7 +221,7 @@ def _setup_graph(nodes, edge, evaporate_mass=True):
             G.add_edge(
                 boundary_id,
                 node_id,
-                key=edge_id,
+                key=link_id,
                 id=[-1],
                 boundary=(node["id"], "drainage"),
             )
@@ -236,7 +236,7 @@ def _setup_graph(nodes, edge, evaporate_mass=True):
             G.add_edge(
                 boundary_id,
                 node_id,
-                key=edge_id,
+                key=link_id,
                 id=[-1],
                 boundary=(node["id"], "precipitation"),
             )
@@ -252,20 +252,20 @@ def _setup_graph(nodes, edge, evaporate_mass=True):
             G.add_edge(
                 node_id,
                 boundary_id,
-                key=edge_id,
+                key=link_id,
                 id=[-1],
                 boundary=(node["id"], "evaporation"),
             )

-    # Setup edge mapping
-    edge_mapping = {}
+    # Setup link mapping
+    link_mapping = {}
     for i, (a, b, d) in enumerate(G.edges(data=True)):
-        for edge_id in d["id"]:
-            edge_mapping[edge_id] = i
+        for link_id in d["id"]:
+            link_mapping[link_id] = i

     assert len(basin_mapping) == basin_id

-    return G, merge_edges, node_mapping, edge_mapping, basin_mapping
+    return G, merge_links, node_mapping, link_mapping, basin_mapping


 def _setup_boundaries(model):
@@ -315,8 +315,8 @@ def generate(
     output_path.mkdir(exist_ok=True)

     # Setup flow network
-    G, merge_edges, node_mapping, edge_mapping, basin_mapping = _setup_graph(
-        model.node_table(), model.edge, evaporate_mass=evaporate_mass
+    G, merge_links, node_mapping, link_mapping, basin_mapping = _setup_graph(
+        model.node_table(), model.link, evaporate_mass=evaporate_mass
     )

     # Plot
@@ -338,7 +338,7 @@ def generate(
     # Write topology to delwaq pointer file
     pointer = pd.DataFrame(G.edges(), columns=["from_node_id", "to_node_id"])
     write_pointer(output_path / "ribasim.poi", pointer)
-    pointer["riba_edge_id"] = [e[2] for e in G.edges.data("id")]
+    pointer["riba_link_id"] = [e[2] for e in G.edges.data("id")]
     pointer["riba_from_node_id"] = pointer["from_node_id"].map(
         {v: k for k, v in node_mapping.items()}
     )
@@ -369,24 +369,24 @@ def generate(
     flows.time = (flows.time - flows.time[0]).dt.total_seconds().astype("int32")
     basins.time = (basins.time - basins.time[0]).dt.total_seconds().astype("int32")

-    # Invert flows for half-edge of cycles so later summing is correct
-    m = flows.edge_id.isin(merge_edges)
+    # Invert flows for half-link of cycles so later summing is correct
+    m = 
flows.link_id.isin(merge_links) flows.loc[m, "flow_rate"] = flows.loc[m, "flow_rate"] * -1 - # Map edge_id to the new edge_id and merge any duplicate flows - flows["riba_edge_id"] = flows["edge_id"] - flows["edge_id"] = flows["edge_id"].map(edge_mapping) - flows.dropna(subset=["edge_id"], inplace=True) - flows["edge_id"] = flows["edge_id"].astype("int32") + # Map link_id to the new link_id and merge any duplicate flows + flows["riba_link_id"] = flows["link_id"] + flows["link_id"] = flows["link_id"].map(link_mapping) + flows.dropna(subset=["link_id"], inplace=True) + flows["link_id"] = flows["link_id"].astype("int32") nflows = flows.copy() - nflows = flows.groupby(["time", "edge_id"]).sum().reset_index() + nflows = flows.groupby(["time", "link_id"]).sum().reset_index() nflows.drop( columns=["from_node_id", "to_node_id"], inplace=True, ) # Add basin boundaries to flows - for edge_id, (a, b, (node_id, boundary_type)) in enumerate( + for link_id, (a, b, (node_id, boundary_type)) in enumerate( G.edges(data="boundary", default=(None, None)) ): if boundary_type is None: @@ -394,14 +394,14 @@ def generate( df = basins[basins.node_id == node_id][["time", boundary_type]].rename( columns={boundary_type: "flow_rate"} ) - df["edge_id"] = edge_id + df["link_id"] = link_id nflows = _concat([nflows, df], ignore_index=True) # Save flows to Delwaq format - nflows.sort_values(by=["time", "edge_id"], inplace=True) + nflows.sort_values(by=["time", "link_id"], inplace=True) nflows.to_csv(output_path / "flows.csv", index=False) # not needed nflows.drop( - columns=["edge_id", "riba_edge_id"], + columns=["link_id", "riba_link_id"], inplace=True, ) write_flows(output_path / "ribasim.flo", nflows, timestep) @@ -483,7 +483,7 @@ def generate( initial_concentrations = icdf.to_string(header=False, index=False) # Write boundary list, ordered by bid to map the unique boundary names - # to the edges described in the pointer file. + # to the links described in the pointer file. bnd = pointer.copy() bnd["bid"] = np.minimum(bnd["from_node_id"], bnd["to_node_id"]) bnd = bnd[bnd["bid"] < 0] @@ -525,7 +525,7 @@ def generate( ) ) - # Return the graph with original edges and the substances + # Return the graph with original links and the substances # so we can parse the results back to the original model return G, substances diff --git a/python/ribasim/ribasim/delwaq/util.py b/python/ribasim/ribasim/delwaq/util.py index 8db0a0c5c..0a3b63ad9 100644 --- a/python/ribasim/ribasim/delwaq/util.py +++ b/python/ribasim/ribasim/delwaq/util.py @@ -30,7 +30,7 @@ def strfdelta(tdelta) -> str: def write_pointer(fn: Path | str, data: pd.DataFrame) -> None: """Write pointer file for Delwaq. - The format is a matrix of int32 of edges + The format is a matrix of int32 of links with 4 columns: from_node_id, to_node_id, 0, 0 This saves as column major order for Fortran compatibility. @@ -45,9 +45,9 @@ def write_pointer(fn: Path | str, data: pd.DataFrame) -> None: def write_lengths(fn: Path | str, data: npt.NDArray[np.float32]) -> None: """Write lengths file for Delwaq. - The format is an int defining time/edges (?) - Followed by a matrix of float32 of 2, n_edges - Defining the length of the half-edges. + The format is an int defining time/links (?) + Followed by a matrix of float32 of 2, n_links + Defining the length of the half-links. This saves as column major order for Fortran compatibility. @@ -84,7 +84,7 @@ def write_flows(fn: Path | str, data: pd.DataFrame, timestep: timedelta) -> None """Write flows file for Delwaq. 
The format is an int defining the time - followed by the flow for each edge + followed by the flow for each link The order should be the same as the nodes in the pointer. This saves as column major order for Fortran compatibility. @@ -104,22 +104,22 @@ def write_flows(fn: Path | str, data: pd.DataFrame, timestep: timedelta) -> None def ugrid(G) -> xugrid.UgridDataset: # TODO Deduplicate with ribasim.Model.to_xugrid - edge_df = pd.DataFrame(G.edges(), columns=["from_node_id", "to_node_id"]) + link_df = pd.DataFrame(G.edges(), columns=["from_node_id", "to_node_id"]) node_df = pd.DataFrame(G.nodes(), columns=["node_id"]) node_df["x"] = [i[1] for i in G.nodes(data="x")] node_df["y"] = [i[1] for i in G.nodes(data="y")] node_df = node_df[node_df.node_id > 0].reset_index(drop=True) node_df.set_index("node_id", drop=False, inplace=True) node_df.sort_index(inplace=True) - edge_df = edge_df[ - edge_df.from_node_id.isin(node_df.node_id) - & edge_df.to_node_id.isin(node_df.node_id) + link_df = link_df[ + link_df.from_node_id.isin(node_df.node_id) + & link_df.to_node_id.isin(node_df.node_id) ].reset_index(drop=True) node_id = node_df.node_id.to_numpy() - edge_id = edge_df.index.to_numpy() - from_node_id = edge_df.from_node_id.to_numpy() - to_node_id = edge_df.to_node_id.to_numpy() + link_id = link_df.index.to_numpy() + from_node_id = link_df.from_node_id.to_numpy() + to_node_id = link_df.to_node_id.to_numpy() # from node_id to the node_dim index node_lookup = pd.Series( @@ -141,14 +141,14 @@ def ugrid(G) -> xugrid.UgridDataset: name="ribasim", ) - edge_dim = grid.edge_dimension + link_dim = grid.edge_dimension node_dim = grid.node_dimension uds = xugrid.UgridDataset(None, grid) uds = uds.assign_coords(node_id=(node_dim, node_id)) - uds = uds.assign_coords(edge_id=(edge_dim, edge_id)) - uds = uds.assign_coords(from_node_id=(edge_dim, from_node_id)) - uds = uds.assign_coords(to_node_id=(edge_dim, to_node_id)) + uds = uds.assign_coords(link_id=(link_dim, link_id)) + uds = uds.assign_coords(from_node_id=(link_dim, from_node_id)) + uds = uds.assign_coords(to_node_id=(link_dim, to_node_id)) return uds diff --git a/python/ribasim/ribasim/geometry/__init__.py b/python/ribasim/ribasim/geometry/__init__.py index 90889b3c4..3a1ce913f 100644 --- a/python/ribasim/ribasim/geometry/__init__.py +++ b/python/ribasim/ribasim/geometry/__init__.py @@ -1,5 +1,5 @@ from ribasim.geometry.area import BasinAreaSchema -from ribasim.geometry.edge import EdgeTable +from ribasim.geometry.link import LinkTable from ribasim.geometry.node import NodeTable -__all__ = ["BasinAreaSchema", "EdgeTable", "NodeTable"] +__all__ = ["BasinAreaSchema", "LinkTable", "NodeTable"] diff --git a/python/ribasim/ribasim/geometry/edge.py b/python/ribasim/ribasim/geometry/link.py similarity index 67% rename from python/ribasim/ribasim/geometry/edge.py rename to python/ribasim/ribasim/geometry/link.py index 127d69487..3016e1bed 100644 --- a/python/ribasim/ribasim/geometry/edge.py +++ b/python/ribasim/ribasim/geometry/link.py @@ -1,3 +1,4 @@ +from pathlib import Path from typing import NamedTuple import matplotlib.pyplot as plt @@ -13,18 +14,19 @@ from pydantic import NonNegativeInt, PrivateAttr, model_validator from shapely.geometry import LineString, MultiLineString, Point +from ribasim.db_utils import _get_db_schema_version from ribasim.input_base import SpatialTableModel from ribasim.utils import UsedIDs, _concat from ribasim.validation import ( can_connect, - control_edge_neighbor_amount, - flow_edge_neighbor_amount, + control_link_neighbor_amount, + 
flow_link_neighbor_amount, node_type_connectivity, ) from .base import _GeoBaseSchema -__all__ = ("EdgeTable",) +__all__ = ("LinkTable",) SPATIALCONTROLNODETYPES = { "ContinuousControl", @@ -41,44 +43,52 @@ class NodeData(NamedTuple): geometry: Point -class EdgeSchema(_GeoBaseSchema): - edge_id: Index[Int32] = pa.Field(default=0, ge=0, check_name=True) +class LinkSchema(_GeoBaseSchema): + link_id: Index[Int32] = pa.Field(default=0, ge=0, check_name=True) name: Series[str] = pa.Field(default="") from_node_id: Series[Int32] = pa.Field(default=0) to_node_id: Series[Int32] = pa.Field(default=0) - edge_type: Series[str] = pa.Field(default="flow") + link_type: Series[str] = pa.Field(default="flow") geometry: GeoSeries[LineString] = pa.Field(default=None, nullable=True) @classmethod def _index_name(self) -> str: - return "edge_id" + return "link_id" -class EdgeTable(SpatialTableModel[EdgeSchema]): +class LinkTable(SpatialTableModel[LinkSchema]): """Defines the connections between nodes.""" - _used_edge_ids: UsedIDs = PrivateAttr(default_factory=UsedIDs) + _used_link_ids: UsedIDs = PrivateAttr(default_factory=UsedIDs) @model_validator(mode="after") - def _update_used_ids(self) -> "EdgeTable": + def _update_used_ids(self) -> "LinkTable": if self.df is not None and len(self.df.index) > 0: - self._used_edge_ids.node_ids.update(self.df.index) - self._used_edge_ids.max_node_id = self.df.index.max() + self._used_link_ids.node_ids.update(self.df.index) + self._used_link_ids.max_node_id = self.df.index.max() return self + @classmethod + def _from_db(cls, path: Path, table: str) -> pd.DataFrame | None: + schema_version = _get_db_schema_version(path) + # The table name was changed from "Edge" to "Link" in schema_version 4. + if schema_version < 4: + table = "Edge" + return super()._from_db(path, table) + def add( self, from_node: NodeData, to_node: NodeData, geometry: LineString | MultiLineString | None = None, name: str = "", - edge_id: NonNegativeInt | None = None, + link_id: NonNegativeInt | None = None, **kwargs, ): """ - Add an edge between nodes. + Add an link between nodes. - The type of the edge (flow or control) is automatically inferred from the type of the `from_node`. + The type of the link (flow or control) is automatically inferred from the type of the `from_node`. Parameters ---------- @@ -89,9 +99,9 @@ def add( geometry : LineString | MultiLineString | None The geometry of a line. If not supplied, it creates a straight line between the nodes. name : str - An optional name for the edge. - edge_id : int - An optional non-negative edge ID. If not supplied, it will be automatically generated. + An optional name for the link. + link_id : int + An optional non-negative link ID. If not supplied, it will be automatically generated. **kwargs : Dict """ if not can_connect(from_node.node_type, to_node.node_type): @@ -104,79 +114,79 @@ def add( if geometry is None else [geometry] ) - edge_type = ( + link_type = ( "control" if from_node.node_type in SPATIALCONTROLNODETYPES else "flow" ) - self._validate_edge(to_node, from_node, edge_type) + self._validate_link(to_node, from_node, link_type) assert self.df is not None - if edge_id is None: - edge_id = self._used_edge_ids.new_id() - elif edge_id in self._used_edge_ids: + if link_id is None: + link_id = self._used_link_ids.new_id() + elif link_id in self._used_link_ids: raise ValueError( - f"Edge IDs have to be unique, but {edge_id} already exists." + f"Link IDs have to be unique, but {link_id} already exists." 
) - table_to_append = GeoDataFrame[EdgeSchema]( + table_to_append = GeoDataFrame[LinkSchema]( data={ "from_node_id": [from_node.node_id], "to_node_id": [to_node.node_id], - "edge_type": [edge_type], + "link_type": [link_type], "name": [name], **kwargs, }, geometry=geometry_to_append, crs=self.df.crs, - index=pd.Index([edge_id], name="edge_id"), + index=pd.Index([link_id], name="link_id"), ) - self.df = GeoDataFrame[EdgeSchema](_concat([self.df, table_to_append])) + self.df = GeoDataFrame[LinkSchema](_concat([self.df, table_to_append])) if self.df.duplicated(subset=["from_node_id", "to_node_id"]).any(): raise ValueError( - f"Edges have to be unique, but edge with from_node_id {from_node.node_id} to_node_id {to_node.node_id} already exists." + f"Links have to be unique, but link with from_node_id {from_node.node_id} to_node_id {to_node.node_id} already exists." ) - self._used_edge_ids.add(edge_id) + self._used_link_ids.add(link_id) - def _validate_edge(self, to_node: NodeData, from_node: NodeData, edge_type: str): + def _validate_link(self, to_node: NodeData, from_node: NodeData, link_type: str): assert self.df is not None in_neighbor: int = self.df.loc[ (self.df["to_node_id"] == to_node.node_id) - & (self.df["edge_type"] == edge_type) + & (self.df["link_type"] == link_type) ].shape[0] out_neighbor: int = self.df.loc[ (self.df["from_node_id"] == from_node.node_id) - & (self.df["edge_type"] == edge_type) + & (self.df["link_type"] == link_type) ].shape[0] # validation on neighbor amount - max_in_flow: int = flow_edge_neighbor_amount[to_node.node_type][1] - max_out_flow: int = flow_edge_neighbor_amount[from_node.node_type][3] - max_in_control: int = control_edge_neighbor_amount[to_node.node_type][1] - max_out_control: int = control_edge_neighbor_amount[from_node.node_type][3] - if edge_type == "flow": + max_in_flow: int = flow_link_neighbor_amount[to_node.node_type][1] + max_out_flow: int = flow_link_neighbor_amount[from_node.node_type][3] + max_in_control: int = control_link_neighbor_amount[to_node.node_type][1] + max_out_control: int = control_link_neighbor_amount[from_node.node_type][3] + if link_type == "flow": if in_neighbor >= max_in_flow: raise ValueError( - f"Node {to_node.node_id} can have at most {max_in_flow} flow edge inneighbor(s) (got {in_neighbor})" + f"Node {to_node.node_id} can have at most {max_in_flow} flow link inneighbor(s) (got {in_neighbor})" ) if out_neighbor >= max_out_flow: raise ValueError( - f"Node {from_node.node_id} can have at most {max_out_flow} flow edge outneighbor(s) (got {out_neighbor})" + f"Node {from_node.node_id} can have at most {max_out_flow} flow link outneighbor(s) (got {out_neighbor})" ) - elif edge_type == "control": + elif link_type == "control": if in_neighbor >= max_in_control: raise ValueError( - f"Node {to_node.node_id} can have at most {max_in_control} control edge inneighbor(s) (got {in_neighbor})" + f"Node {to_node.node_id} can have at most {max_in_control} control link inneighbor(s) (got {in_neighbor})" ) if out_neighbor >= max_out_control: raise ValueError( - f"Node {from_node.node_id} can have at most {max_out_control} control edge outneighbor(s) (got {out_neighbor})" + f"Node {from_node.node_id} can have at most {max_out_control} control link outneighbor(s) (got {out_neighbor})" ) - def _get_where_edge_type(self, edge_type: str) -> NDArray[np.bool_]: + def _get_where_link_type(self, link_type: str) -> NDArray[np.bool_]: assert self.df is not None - return (self.df.edge_type == edge_type).to_numpy() + return (self.df.link_type == 
link_type).to_numpy() def plot(self, **kwargs) -> Axes: - """Plot the edges of the model. + """Plot the links of the model. Parameters ---------- @@ -200,14 +210,14 @@ def plot(self, **kwargs) -> Axes: if color_flow is None: color_flow = "#3690c0" # lightblue kwargs_flow["color"] = color_flow - kwargs_flow["label"] = "Flow edge" + kwargs_flow["label"] = "Flow link" if color_control is None: color_control = "grey" kwargs_control["color"] = color_control - kwargs_control["label"] = "Control edge" + kwargs_control["label"] = "Control link" - where_flow = self._get_where_edge_type("flow") - where_control = self._get_where_edge_type("control") + where_flow = self._get_where_link_type("flow") + where_control = self._get_where_link_type("control") if not self.df[where_flow].empty: self.df[where_flow].plot(**kwargs_flow) @@ -218,9 +228,9 @@ def plot(self, **kwargs) -> Axes: # Determine the angle for every caret marker and where to place it. coords, index = shapely.get_coordinates(self.df.geometry, return_index=True) keep = np.diff(index) == 0 - edge_coords = np.stack((coords[:-1, :], coords[1:, :]), axis=1)[keep] - x, y = np.mean(edge_coords, axis=1).T - dx, dy = np.diff(edge_coords, axis=1)[:, 0, :].T + link_coords = np.stack((coords[:-1, :], coords[1:, :]), axis=1)[keep] + x, y = np.mean(link_coords, axis=1).T + dx, dy = np.diff(link_coords, axis=1)[:, 0, :].T angle = np.degrees(np.arctan2(dy, dx)) - 90 # Set the color of the marker to match the line. diff --git a/python/ribasim/ribasim/input_base.py b/python/ribasim/ribasim/input_base.py index 8d8d1c259..7bf4e4b5a 100644 --- a/python/ribasim/ribasim/input_base.py +++ b/python/ribasim/ribasim/input_base.py @@ -366,9 +366,9 @@ class SpatialTableModel(TableModel[TableT], Generic[TableT]): df: GeoDataFrame[TableT] | None = Field(default=None, exclude=True, repr=False) def sort(self): - # Only sort the index (node_id / edge_id) since this needs to be sorted in a GeoPackage. + # Only sort the index (node_id / link_id) since this needs to be sorted in a GeoPackage. # Under most circumstances, this retains the input order, - # making the edge_id as stable as possible; useful for post-processing. + # making the link_id as stable as possible; useful for post-processing. self.df.sort_index(inplace=True) @classmethod diff --git a/python/ribasim/ribasim/migrations.py b/python/ribasim/ribasim/migrations.py index d122b7e69..f818f10d6 100644 --- a/python/ribasim/ribasim/migrations.py +++ b/python/ribasim/ribasim/migrations.py @@ -16,20 +16,26 @@ def nodeschema_migration(gdf: GeoDataFrame, schema_version: int) -> GeoDataFrame return gdf -def edgeschema_migration(gdf: GeoDataFrame, schema_version: int) -> GeoDataFrame: +def linkschema_migration(gdf: GeoDataFrame, schema_version: int) -> GeoDataFrame: if schema_version == 0: - warnings.warn("Migrating outdated Edge table.", UserWarning) + warnings.warn("Migrating outdated Link table.", UserWarning) gdf.drop(columns="from_node_type", inplace=True, errors="ignore") if schema_version == 0: - warnings.warn("Migrating outdated Edge table.", UserWarning) + warnings.warn("Migrating outdated Link table.", UserWarning) gdf.drop(columns="to_node_type", inplace=True, errors="ignore") if "edge_id" in gdf.columns and schema_version == 0: - warnings.warn("Migrating outdated Edge table.", UserWarning) - assert gdf["edge_id"].is_unique, "Edge IDs have to be unique." + warnings.warn("Migrating outdated Link table.", UserWarning) + assert gdf["edge_id"].is_unique, "Link IDs have to be unique." 
gdf.set_index("edge_id", inplace=True) if schema_version < 3 and "subnetwork_id" in gdf.columns: - warnings.warn("Migrating outdated Edge table.", UserWarning) + warnings.warn("Migrating outdated Link table.", UserWarning) gdf.drop(columns="subnetwork_id", inplace=True, errors="ignore") + if schema_version < 4 and gdf.index.name == "edge_id": + warnings.warn("Migrating outdated Link table.", UserWarning) + gdf.index.rename("link_id", inplace=True) + if schema_version < 4 and "edge_type" in gdf.columns: + warnings.warn("Migrating outdated Link table.", UserWarning) + gdf.rename(columns={"edge_type": "link_type"}, inplace=True) return gdf diff --git a/python/ribasim/ribasim/model.py b/python/ribasim/ribasim/model.py index 548368244..d5b3e178d 100644 --- a/python/ribasim/ribasim/model.py +++ b/python/ribasim/ribasim/model.py @@ -45,7 +45,7 @@ UserDemand, ) from ribasim.db_utils import _set_db_schema_version -from ribasim.geometry.edge import EdgeSchema, EdgeTable +from ribasim.geometry.link import LinkSchema, LinkTable from ribasim.geometry.node import NodeTable from ribasim.input_base import ( ChildModel, @@ -58,12 +58,12 @@ MissingOptionalModule, UsedIDs, _concat, - _edge_lookup, + _link_lookup, _node_lookup, _node_lookup_numpy, _time_in_ns, ) -from ribasim.validation import control_edge_neighbor_amount, flow_edge_neighbor_amount +from ribasim.validation import control_link_neighbor_amount, flow_link_neighbor_amount try: import xugrid @@ -107,7 +107,7 @@ class Model(FileModel): terminal: Terminal = Field(default_factory=Terminal) user_demand: UserDemand = Field(default_factory=UserDemand) - edge: EdgeTable = Field(default_factory=EdgeTable) + link: LinkTable = Field(default_factory=LinkTable) use_validation: bool = Field(default=True, exclude=True) _used_node_ids: UsedIDs = PrivateAttr(default_factory=UsedIDs) @@ -123,10 +123,10 @@ def _set_node_parent(self) -> "Model": return self @model_validator(mode="after") - def _ensure_edge_table_is_present(self) -> "Model": - if self.edge.df is None: - self.edge.df = GeoDataFrame[EdgeSchema](index=pd.Index([], name="edge_id")) - self.edge.df = self.edge.df.set_geometry("geometry", crs=self.crs) + def _ensure_link_table_is_present(self) -> "Model": + if self.link.df is None: + self.link.df = GeoDataFrame[LinkSchema](index=pd.Index([], name="link_id")) + self.link.df = self.link.df.set_geometry("geometry", crs=self.crs) return self @model_validator(mode="after") @@ -150,6 +150,7 @@ def model_post_init(self, __context: Any) -> None: # By overriding `BaseModel.model_post_init` we can set them explicitly, # and enforce that they are always written. self.model_fields_set.update({"input_dir", "results_dir"}) + self.edge = self.link # Backwards compatible alias for link def __repr__(self) -> str: """Generate a succinct overview of the Model content. 
@@ -160,8 +161,8 @@ def __repr__(self) -> str: INDENT = " " for field in self._fields(): attr = getattr(self, field) - if isinstance(attr, EdgeTable): - content.append(f"{INDENT}{field}=Edge(...),") + if isinstance(attr, LinkTable): + content.append(f"{INDENT}{field}=Link(...),") else: if isinstance(attr, MultiNodeModel) and attr.node.df is None: # Skip unused node types @@ -204,7 +205,7 @@ def _save(self, directory: DirectoryPath, input_dir: DirectoryPath): db_path.unlink(missing_ok=True) context_file_writing.get()["database"] = db_path - self.edge._save(directory, input_dir) + self.link._save(directory, input_dir) node = self.node_table() assert node.df is not None @@ -235,7 +236,7 @@ def to_crs(self, crs: str) -> None: def _apply_crs_function(self, function_name: str, crs: str) -> None: """Apply `function_name`, with `crs` as the first and only argument to all spatial tables.""" - getattr(self.edge.df, function_name)(crs, inplace=True) + getattr(self.link.df, function_name)(crs, inplace=True) for sub in self._nodes(): if sub.node.df is not None: getattr(sub.node.df, function_name)(crs, inplace=True) @@ -317,12 +318,12 @@ def write(self, filepath: str | PathLike[str]) -> Path: return fn def _validate_model(self): - df_edge = self.edge.df + df_link = self.link.df df_chunks = [node.node.df for node in self._nodes()] df_node = _concat(df_chunks) - df_graph = df_edge - # Join df_edge with df_node to get to_node_type + df_graph = df_link + # Join df_link with df_node to get to_node_type df_graph = df_graph.join( df_node[["node_type"]], on="from_node_id", how="left", rsuffix="_from" ) @@ -334,26 +335,26 @@ def _validate_model(self): df_graph = df_graph.rename(columns={"node_type": "to_node_type"}) if not self._has_valid_neighbor_amount( - df_graph, flow_edge_neighbor_amount, "flow", df_node["node_type"] + df_graph, flow_link_neighbor_amount, "flow", df_node["node_type"] ): raise ValueError("Minimum flow inneighbor or outneighbor unsatisfied") if not self._has_valid_neighbor_amount( - df_graph, control_edge_neighbor_amount, "control", df_node["node_type"] + df_graph, control_link_neighbor_amount, "control", df_node["node_type"] ): raise ValueError("Minimum control inneighbor or outneighbor unsatisfied") def _has_valid_neighbor_amount( self, df_graph: pd.DataFrame, - edge_amount: dict[str, list[int]], - edge_type: str, + link_amount: dict[str, list[int]], + link_type: str, nodes, ) -> bool: - """Check if the neighbor amount of the two nodes connected by the given edge meet the minimum requirements.""" + """Check if the neighbor amount of the two nodes connected by the given link meet the minimum requirements.""" is_valid = True - # filter graph by edge type - df_graph = df_graph.loc[df_graph["edge_type"] == edge_type] + # filter graph by link type + df_graph = df_graph.loc[df_graph["link_type"] == link_type] # count occurrence of "from_node" which reflects the number of outneighbors from_node_count = ( @@ -376,10 +377,10 @@ def _has_valid_neighbor_amount( # loop over all the "from_node" and check if they have enough outneighbor for _, row in from_node_info.iterrows(): # from node's outneighbor - if row["from_node_count"] < edge_amount[row["from_node_type"]][2]: + if row["from_node_count"] < link_amount[row["from_node_type"]][2]: is_valid = False logging.error( - f"Node {row['from_node_id']} must have at least {edge_amount[row['from_node_type']][2]} outneighbor(s) (got {row['from_node_count']})" + f"Node {row['from_node_id']} must have at least {link_amount[row['from_node_type']][2]} 
outneighbor(s) (got {row['from_node_count']})" ) # count occurrence of "to_node" which reflects the number of inneighbors @@ -400,10 +401,10 @@ def _has_valid_neighbor_amount( # loop over all the "to_node" and check if they have enough inneighbor for _, row in to_node_info.iterrows(): - if row["to_node_count"] < edge_amount[row["to_node_type"]][0]: + if row["to_node_count"] < link_amount[row["to_node_type"]][0]: is_valid = False logging.error( - f"Node {row['to_node_id']} must have at least {edge_amount[row['to_node_type']][0]} inneighbor(s) (got {row['to_node_count']})" + f"Node {row['to_node_id']} must have at least {link_amount[row['to_node_type']][0]} inneighbor(s) (got {row['to_node_count']})" ) return is_valid @@ -413,7 +414,7 @@ def _add_source_sink_node( ) -> pd.DataFrame: """Loop over node table. - Add the nodes whose id are missing in the from_node and to_node column in the edge table because they are not the upstream or downstrem of other nodes. + Add the nodes whose id are missing in the from_node and to_node column in the link table because they are not the upstream or downstrem of other nodes. Specify that their occurrence in from_node table or to_node table is 0. """ @@ -459,24 +460,24 @@ def _reset_contextvar(self) -> "Model": return self def plot_control_listen(self, ax): - """Plot the implicit listen edges of the model.""" - df_listen_edge = pd.DataFrame( + """Plot the implicit listen links of the model.""" + df_listen_link = pd.DataFrame( data={ "control_node_id": pd.Series([], dtype="int32[pyarrow]"), "listen_node_id": pd.Series([], dtype="int32[pyarrow]"), } ) - # Listen edges from PidControl + # Listen links from PidControl for table in (self.pid_control.static.df, self.pid_control.time.df): if table is None: continue to_add = table[["node_id", "listen_node_id"]].drop_duplicates() to_add.columns = ["control_node_id", "listen_node_id"] - df_listen_edge = _concat([df_listen_edge, to_add]) + df_listen_link = _concat([df_listen_link, to_add]) - # Listen edges from ContinuousControl and DiscreteControl + # Listen links from ContinuousControl and DiscreteControl for table, name in ( (self.continuous_control.variable.df, "ContinuousControl"), (self.discrete_control.variable.df, "DiscreteControl"), @@ -489,25 +490,25 @@ def plot_control_listen(self, ax): "control_node_id", "listen_node_id", ] - df_listen_edge = _concat([df_listen_edge, to_add]) + df_listen_link = _concat([df_listen_link, to_add]) # Collect geometry data node = self.node_table().df - control_nodes_geometry = df_listen_edge.merge( + control_nodes_geometry = df_listen_link.merge( node, left_on=["control_node_id"], right_on=["node_id"], how="left", )["geometry"] - listen_nodes_geometry = df_listen_edge.merge( + listen_nodes_geometry = df_listen_link.merge( node, left_on=["listen_node_id"], right_on=["node_id"], how="left", )["geometry"] - # Plot listen edges + # Plot listen links for i, (point_listen, point_control) in enumerate( zip(listen_nodes_geometry, control_nodes_geometry) ): @@ -516,7 +517,7 @@ def plot_control_listen(self, ax): [point_listen.y, point_control.y], color="gray", ls="--", - label="Listen edge" if i == 0 else None, + label="Listen link" if i == 0 else None, ) return @@ -526,7 +527,7 @@ def plot( indicate_subnetworks: bool = True, aspect_ratio_bound: float = 0.33, ) -> Any: - """Plot the nodes, edges and allocation networks of the model. + """Plot the nodes, links and allocation networks of the model. 
Parameters ---------- @@ -548,7 +549,7 @@ def plot( ax.axis("off") node = self.node_table() - self.edge.plot(ax=ax, zorder=2) + self.link.plot(ax=ax, zorder=2) self.plot_control_listen(ax) node.plot(ax=ax, zorder=3) @@ -600,15 +601,15 @@ def to_xugrid(self, add_flow: bool = False, add_allocation: bool = False): node_df = self.node_table().df assert node_df is not None - assert self.edge.df is not None - edge_df = self.edge.df.copy() + assert self.link.df is not None + link_df = self.link.df.copy() # We assume only the flow network is of interest. - edge_df = edge_df[edge_df.edge_type == "flow"] + link_df = link_df[link_df.link_type == "flow"] node_id = node_df.index.to_numpy() - edge_id = edge_df.index.to_numpy() - from_node_id = edge_df.from_node_id.to_numpy() - to_node_id = edge_df.to_node_id.to_numpy() + link_id = link_df.index.to_numpy() + from_node_id = link_df.from_node_id.to_numpy() + to_node_id = link_df.to_node_id.to_numpy() node_lookup = _node_lookup_numpy(node_id) grid = xugrid.Ugrid1d( @@ -626,14 +627,14 @@ def to_xugrid(self, add_flow: bool = False, add_allocation: bool = False): crs=node_df.crs, ) - edge_dim = grid.edge_dimension + link_dim = grid.edge_dimension node_dim = grid.node_dimension uds = xugrid.UgridDataset(None, grid) uds = uds.assign_coords(node_id=(node_dim, node_id)) - uds = uds.assign_coords(edge_id=(edge_dim, edge_id)) - uds = uds.assign_coords(from_node_id=(edge_dim, from_node_id)) - uds = uds.assign_coords(to_node_id=(edge_dim, to_node_id)) + uds = uds.assign_coords(link_id=(link_dim, link_id)) + uds = uds.assign_coords(from_node_id=(link_dim, from_node_id)) + uds = uds.assign_coords(to_node_id=(link_dim, to_node_id)) if add_flow: uds = self._add_flow(uds, node_lookup) @@ -667,15 +668,15 @@ def _add_flow(self, uds, node_lookup): _time_in_ns(flow_df) # add the xugrid dimension indices to the dataframes - edge_dim = uds.grid.edge_dimension + link_dim = uds.grid.edge_dimension node_dim = uds.grid.node_dimension node_lookup = _node_lookup(uds) - edge_lookup = _edge_lookup(uds) - flow_df[edge_dim] = edge_lookup[flow_df["edge_id"]].to_numpy() + link_lookup = _link_lookup(uds) + flow_df[link_dim] = link_lookup[flow_df["link_id"]].to_numpy() basin_df[node_dim] = node_lookup[basin_df["node_id"]].to_numpy() # add flow results to the UgridDataset - flow_da = flow_df.set_index(["time", edge_dim])["flow_rate"].to_xarray() + flow_da = flow_df.set_index(["time", link_dim])["flow_rate"].to_xarray() uds[flow_da.name] = flow_da # add basin results to the UgridDataset @@ -701,22 +702,22 @@ def _add_allocation(self, uds): alloc_flow_df = pd.read_feather( alloc_flow_path, - columns=["time", "edge_id", "flow_rate", "optimization_type", "priority"], + columns=["time", "link_id", "flow_rate", "optimization_type", "priority"], dtype_backend="pyarrow", ) _time_in_ns(alloc_flow_df) - # add the xugrid edge dimension index to the dataframe - edge_dim = uds.grid.edge_dimension - edge_lookup = _edge_lookup(uds) - alloc_flow_df[edge_dim] = edge_lookup[alloc_flow_df["edge_id"]].to_numpy() + # add the xugrid link dimension index to the dataframe + link_dim = uds.grid.edge_dimension + link_lookup = _link_lookup(uds) + alloc_flow_df[link_dim] = link_lookup[alloc_flow_df["link_id"]].to_numpy() # "flow_rate_allocated" is the sum of all allocated flow rates over the priorities allocate_df = alloc_flow_df.loc[ alloc_flow_df["optimization_type"] == "allocate" ] uds["flow_rate_allocated"] = ( - allocate_df.groupby(["time", edge_dim])["flow_rate"].sum().to_xarray() + allocate_df.groupby(["time", 
link_dim])["flow_rate"].sum().to_xarray() ) # also add the individual priorities and optimization types @@ -725,7 +726,7 @@ def _add_allocation(self, uds): ["optimization_type", "priority"] ): varname = f"{optimization_type}_priority_{priority}" - da = group.set_index(["time", edge_dim])["flow_rate"].to_xarray() + da = group.set_index(["time", link_dim])["flow_rate"].to_xarray() uds[varname] = da return uds diff --git a/python/ribasim/ribasim/styles/EdgeStyle.qml b/python/ribasim/ribasim/styles/LinkStyle.qml similarity index 98% rename from python/ribasim/ribasim/styles/EdgeStyle.qml rename to python/ribasim/ribasim/styles/LinkStyle.qml index 5b9bee800..5fb3ff7d8 100644 --- a/python/ribasim/ribasim/styles/EdgeStyle.qml +++ b/python/ribasim/ribasim/styles/LinkStyle.qml @@ -144,7 +144,7 @@ - + @@ -555,7 +555,7 @@ - + @@ -679,7 +679,7 @@