From e754acacfe28a365a78c0fffc56aa023a879b230 Mon Sep 17 00:00:00 2001 From: Daniel Tollenaar Date: Thu, 5 Sep 2024 15:47:19 +0200 Subject: [PATCH] Dommel basin profile (#142) With this branch we can create de dommel model version 2024.8.4 from 2024.6.3 All code used is updated to function with current ribasim-version (main) --------- Co-authored-by: Martijn Visser --- notebooks/de_dommel/01_fix_model_network.py | 82 ++-- notebooks/de_dommel/02_fix_edges.py | 9 +- notebooks/de_dommel/03_fix_basin_area.py | 9 +- notebooks/de_dommel/04_parameterize_model.py | 437 ++++++++++++++++++ notebooks/de_dommel/rare_edges.gpkg | Bin 0 -> 106496 bytes pixi.lock | 8 +- src/ribasim_nl/ribasim_nl/model.py | 87 +++- .../ribasim_nl/network_validator.py | 10 +- src/ribasim_nl/ribasim_nl/structure_node.py | 84 ++++ 9 files changed, 630 insertions(+), 96 deletions(-) create mode 100644 notebooks/de_dommel/04_parameterize_model.py create mode 100644 notebooks/de_dommel/rare_edges.gpkg create mode 100644 src/ribasim_nl/ribasim_nl/structure_node.py diff --git a/notebooks/de_dommel/01_fix_model_network.py b/notebooks/de_dommel/01_fix_model_network.py index d549f20..1c87870 100644 --- a/notebooks/de_dommel/01_fix_model_network.py +++ b/notebooks/de_dommel/01_fix_model_network.py @@ -1,11 +1,10 @@ # %% -import sqlite3 import geopandas as gpd -import pandas as pd from ribasim import Node from ribasim.nodes import basin, level_boundary, manning_resistance, outlet from ribasim_nl import CloudStorage, Model, NetworkValidator +from shapely.geometry import Point cloud = CloudStorage() @@ -24,26 +23,6 @@ basin.State(level=[0]), ] -# %% remove urban_runoff -# Connect to the SQLite database - -conn = sqlite3.connect(database_gpkg) - -# get table into DataFrame -table = "Basin / static" -df = pd.read_sql_query(f"SELECT * FROM '{table}'", conn) - -# drop urban runoff column if exists -if "urban_runoff" in df.columns: - df.drop(columns="urban_runoff", inplace=True) - - # Write the DataFrame back to the SQLite table - df.to_sql(table, conn, if_exists="replace", index=False) - -# # Close the connection -conn.close() - - # %% read model model = Model.read(ribasim_toml) @@ -52,10 +31,7 @@ # %% verwijder duplicated edges # see: https://github.com/Deltares/Ribasim-NL/issues/102#issuecomment-2288780504 -model.edge.df = model.edge.df[~((model.edge.df.from_node_type == "Basin") & (model.edge.df.to_node_type == "Basin"))] - # see: https://github.com/Deltares/Ribasim-NL/issues/102#issuecomment-2291081244 -duplicated_fids = network_validator.edge_duplicated().index.to_list() model.edge.df = model.edge.df.drop_duplicates(subset=["from_node_id", "to_node_id"]) if not network_validator.edge_duplicated().empty: @@ -64,20 +40,16 @@ # %% toevoegen bovenstroomse knopen # see: https://github.com/Deltares/Ribasim-NL/issues/102#issuecomment-2291091067 -edge_mask = model.edge.df.index.isin(duplicated_fids) node_id = model.next_node_id -edge_fid = next(i for i in duplicated_fids if i in model.edge.df.index) -model.edge.df.loc[edge_mask, ["from_node_type"]] = "Basin" -model.edge.df.loc[edge_mask, ["from_node_id"]] = node_id +edge_id = model.edge.df.loc[model.edge.df.to_node_id == 251].index[0] +model.edge.df.loc[edge_id, ["from_node_id"]] = node_id -node = Node(node_id, model.edge.df.at[edge_fid, "geometry"].boundary.geoms[0]) +node = Node(node_id, model.edge.df.at[edge_id, "geometry"].boundary.geoms[0]) model.basin.area.df.loc[model.basin.area.df.node_id == 1009, ["node_id"]] = node_id area = 
basin.Area(geometry=model.basin.area[node_id].geometry.to_list()) model.basin.add(node, basin_data + [area]) # see: https://github.com/Deltares/Ribasim-NL/issues/102#issuecomment-2291111647 - - for row in network_validator.edge_incorrect_connectivity().itertuples(): # drop edge from model model.remove_edge(row.from_node_id, row.to_node_id, remove_disconnected_nodes=False) @@ -91,28 +63,31 @@ if row.to_node_id == 2: geometry = row.geometry.interpolate(0.99, normalized=True) name = "" + meta_object_type = "openwater" if row.to_node_id == 14: gdf = gpd.read_file( - cloud.joinpath("DeDommel", "verwerkt", "1_ontvangen_data", "Geodata", "data_Q42018.gpkg"), - layer="DuikerSifonHevel", + cloud.joinpath("DeDommel", "verwerkt", "4_ribasim", "hydamo.gpkg"), + layer="duikersifonhevel", engine="pyogrio", fid_as_index=True, ) - kdu = gdf.loc[5818] + kdu = gdf.loc[250] geometry = kdu.geometry.interpolate(0.5, normalized=True) - name = kdu.objectId + geometry = Point(geometry.x, geometry.y) + name = kdu.CODE + meta_object_type = "duikersifonhevel" # add manning-node - manning_node_id = model.next_node_id - manning_data = manning_resistance.Static(length=[100], manning_n=[0.04], profile_width=[10], profile_slope=[1]) - model.manning_resistance.add( - Node(node_id=manning_node_id, geometry=geometry, name=name), - [manning_data], + outlet_node_id = model.next_node_id + outlet_data = outlet.Static(flow_rate=[100]) + model.outlet.add( + Node(node_id=outlet_node_id, geometry=geometry, name=name, meta_object_type=meta_object_type), + [outlet_data], ) # add edges - model.edge.add(model.basin[row.from_node_id], model.manning_resistance[manning_node_id]) - model.edge.add(model.manning_resistance[manning_node_id], model.level_boundary[row.to_node_id]) + model.edge.add(model.basin[row.from_node_id], model.outlet[outlet_node_id]) + model.edge.add(model.outlet[outlet_node_id], model.level_boundary[row.to_node_id]) if not network_validator.edge_incorrect_connectivity().empty: @@ -122,11 +97,11 @@ # see: https://github.com/Deltares/Ribasim-NL/issues/102#issuecomment-2291271525 for row in network_validator.node_internal_basin().itertuples(): - if row.node_id not in model.basin.area.df.node_id.to_numpy(): # remove or change to level-boundary - edge_select_df = model.edge.df[model.edge.df.to_node_id == row.node_id] + if row.Index not in model.basin.area.df.node_id.to_numpy(): # remove or change to level-boundary + edge_select_df = model.edge.df[model.edge.df.to_node_id == row.Index] if len(edge_select_df) == 1: - if edge_select_df.iloc[0]["from_node_type"] == "FlowBoundary": - model.remove_node(row.node_id) + if model.node_table().df.at[edge_select_df.iloc[0]["from_node_id"], "node_type"] == "FlowBoundary": + model.remove_node(row.Index) model.remove_node(edge_select_df.iloc[0]["from_node_id"]) model.edge.df.drop(index=edge_select_df.index[0], inplace=True) @@ -166,7 +141,7 @@ geometry = gdf.loc[2751].geometry.interpolate(0.5, normalized=True) node_id = model.next_node_id -data = manning_resistance.Static(length=[100], manning_n=[0.04], profile_width=[10], profile_slope=[1]) +manning_data = manning_resistance.Static(length=[100], manning_n=[0.04], profile_width=[10], profile_slope=[1]) model.manning_resistance.add(Node(node_id=node_id, geometry=geometry), [manning_data]) @@ -192,6 +167,7 @@ # # see: https://github.com/Deltares/Ribasim-NL/issues/132 model.basin.area.df.loc[model.basin.area.df.duplicated("node_id"), ["node_id"]] = -1 model.basin.area.df.reset_index(drop=True, inplace=True) +model.basin.area.df.index.name = 
"fid" model.fix_unassigned_basin_area() model.fix_unassigned_basin_area(method="closest", distance=100) model.fix_unassigned_basin_area() @@ -199,10 +175,8 @@ model.basin.area.df = model.basin.area.df[~model.basin.area.df.node_id.isin(model.unassigned_basin_area.node_id)] # # %% write model -ribasim_toml = ribasim_toml.parents[1].joinpath("DeDommel", ribasim_toml.name) -model.write(ribasim_toml) +model.edge.df.reset_index(drop=True, inplace=True) +model.edge.df.index.name = "edge_id" +ribasim_toml = ribasim_toml = cloud.joinpath("DeDommel", "modellen", "DeDommel_fix_model_network", "model.toml") -# %% upload model - -# cloud.upload_model("DeDommel", "DeDommel") -# %% +model.write(ribasim_toml) diff --git a/notebooks/de_dommel/02_fix_edges.py b/notebooks/de_dommel/02_fix_edges.py index fc37cf7..3667b14 100644 --- a/notebooks/de_dommel/02_fix_edges.py +++ b/notebooks/de_dommel/02_fix_edges.py @@ -6,7 +6,7 @@ # %% load model -ribasim_toml = cloud.joinpath("DeDommel", "modellen", "DeDommel", "model.toml") +ribasim_toml = cloud.joinpath("DeDommel", "modellen", "DeDommel_fix_model_network", "model.toml") model = Model.read(ribasim_toml) # %% network from HydroObjects @@ -15,13 +15,13 @@ network = Network.from_network_gpkg(network_gpkg) else: network = Network.from_lines_gpkg( - cloud.joinpath("DeDommel", "verwerkt", "2_voorbewerking", "hydamo.gpkg"), layer="hydroobject" + cloud.joinpath("DeDommel", "verwerkt", "4_ribasim", "hydamo.gpkg"), layer="hydroobject" ) network.to_file(network_gpkg) # %% edges follow HydroObjects model.reset_edge_geometry() -node_df = model.node_table().df.set_index("node_id") +node_df = model.node_table().df data = [] for row in model.edge.df.itertuples(): try: @@ -58,4 +58,5 @@ gpd.GeoDataFrame(data, crs=28992).to_file("rare_edges.gpkg") # %% write model -# model.write(ribasim_toml) +ribasim_toml = cloud.joinpath("DeDommel", "modellen", "DeDommel_fix_edges", "model.toml") +model.write(ribasim_toml) diff --git a/notebooks/de_dommel/03_fix_basin_area.py b/notebooks/de_dommel/03_fix_basin_area.py index 504362f..7e8dd0c 100644 --- a/notebooks/de_dommel/03_fix_basin_area.py +++ b/notebooks/de_dommel/03_fix_basin_area.py @@ -8,7 +8,7 @@ # %% load model -ribasim_toml = cloud.joinpath("DeDommel", "modellen", "DeDommel", "model.toml") +ribasim_toml = cloud.joinpath("DeDommel", "modellen", "DeDommel_fix_edges", "model.toml") model = Model.read(ribasim_toml) @@ -23,7 +23,7 @@ dissolved_area_gdf.to_file(cloud.joinpath("DeDommel", "verwerkt", "water_area.gpkg")) # %% -basin_df = model.basin.node.df.set_index("node_id") +basin_df = model.basin.node.df basin_area_df = gpd.read_file( cloud.joinpath("DeDommel", "verwerkt", "basin_area.gpkg"), engine="pyogrio", fid_as_index=True ) @@ -51,7 +51,7 @@ area_df = selected_areas[selected_areas.contains(row.geometry)] if not area_df.empty: name = area_df.iloc[0].NAAM - model.basin.node.df.loc[model.basin.node[row.Index].index[0], ["name"]] = name + model.basin.node.df.loc[row.Index, ["name"]] = name # assign name to edges if defined model.edge.df.loc[edges_mask, ["name"]] = name @@ -75,7 +75,8 @@ area_df = gpd.GeoDataFrame(data, crs=model.basin.node.df.crs) area_df = area_df[~area_df.is_empty] +area_df.index.name = "fid" model.basin.area.df = area_df # %% - +ribasim_toml = cloud.joinpath("DeDommel", "modellen", "DeDommel_fix_areas", "model.toml") model.write(ribasim_toml) diff --git a/notebooks/de_dommel/04_parameterize_model.py b/notebooks/de_dommel/04_parameterize_model.py new file mode 100644 index 0000000..82675ae --- /dev/null +++ 
b/notebooks/de_dommel/04_parameterize_model.py @@ -0,0 +1,437 @@ +# %% +import geopandas as gpd +import pandas as pd +from ribasim.nodes import manning_resistance, pump +from ribasim_nl import CloudStorage, Model +from ribasim_nl.structure_node import get_outlet, get_tabulated_rating_curve +from shapely.geometry import MultiLineString + +PROFIEL_ID_COLUMN = "PROFIELLIJNID" +PROFIEL_LINE_ID_COLUMN = "profiel_id" +PROFIEL_HOOGTE_COLUMN = "HOOGTE" +PROFIEL_BREEDTE_COLUMN = "breedte" +STUW_TARGET_LEVEL_COLUMN = "WS_STREEFPEILLAAG" +STUW_CREST_LEVEL_COLUMN = "LAAGSTEDOORSTROOMHOOGTE" +STUW_CODE_COLUMN = "WS_DOMMELID" +STUW_WIDTH_COLUMN = "KRUINBREEDTE" +STUW_NAME_COLUMN = "NAAM" + +KDU_INVERT_LEVEL_US_COLUMN = "WS_BODEMHOOGTEBOV" +KDU_INVERT_LEVEL_DS_COLUMN = "WS_BODEMHOOGTEBEN" +KDU_WIDTH_COLUMN = "BREEDTEOPENING" +KDU_HEIGHT_COLUMN = "HOOGTEOPENING" +KDU_SHAPE_COLUMN = "VORMKOKER2" +KDU_SHAPE_MAP = { + "rond": "round", + "rechthoekig": "rectangle", + "eivorming": "ellipse", + "heulprofiel": "ellipse", + "muilprofiel": "ellipse", + "ellipsvormig": "ellipse", +} + +KGM_CAPACITY_COLUMN = "MAXIMALECAPACITEIT" +KGM_NAME_COLUMN = "NAAM" +KGM_CODE_COLUMN = "WS_DOMMELID" + + +cloud = CloudStorage() + + +# %% Voorbereiden profielen uit HyDAMO +ribasim_toml = cloud.joinpath("DeDommel", "modellen", "DeDommel_fix_areas", "model.toml") +model = Model.read(ribasim_toml) +model.tabulated_rating_curve.static.df = None +model.manning_resistance.static.df = None +model.outlet.static.df = None + + +profile_gpkg = cloud.joinpath("DeDommel", "verwerkt", "profile.gpkg") +hydamo_gpkg = cloud.joinpath("DeDommel", "verwerkt", "4_ribasim", "hydamo.gpkg") +stuw_df = gpd.read_file(hydamo_gpkg, layer="stuw", engine="pyogrio") +stuw_df.loc[stuw_df.CODE.isna(), ["CODE"]] = stuw_df[stuw_df.CODE.isna()].NAAM +stuw_df.loc[stuw_df.CODE.isna(), ["CODE"]] = stuw_df[stuw_df.CODE.isna()].WS_DOMMELID +stuw_df.set_index("CODE", inplace=True) + +kdu_df = gpd.read_file(hydamo_gpkg, layer="duikersifonhevel", engine="pyogrio").set_index("CODE") + +kgm_df = gpd.read_file(hydamo_gpkg, layer="gemaal", engine="pyogrio").set_index("CODE") + +basin_area_df = gpd.read_file(cloud.joinpath("DeDommel", "verwerkt", "basin_area.gpkg"), engine="pyogrio").set_index( + "node_id" +) + +if not profile_gpkg.exists(): + profielpunt_gdf = gpd.read_file( + hydamo_gpkg, + layer="profielpunt", + engine="pyogrio", + fid_as_index=True, + ) + + profiellijn_gdf = gpd.read_file( + hydamo_gpkg, + layer="profiellijn", + engine="pyogrio", + fid_as_index=True, + ).set_index("GLOBALID") + + hydroobject_gdf = gpd.read_file( + hydamo_gpkg, + layer="hydroobject", + engine="pyogrio", + fid_as_index=True, + ) + + area_df = gpd.read_file( + cloud.joinpath("DeDommel", "verwerkt", "watervlakken", "LWW_2023_A_water_vlak_V.shp"), + engine="pyogrio", + fid_as_index=True, + ) + + data = [] + for profiel_id, df in profielpunt_gdf.groupby(PROFIEL_ID_COLUMN): + if not df.empty: + if profiel_id in profiellijn_gdf.index: + lowest_point = df.at[df[PROFIEL_HOOGTE_COLUMN].idxmin(), "geometry"] + containing_area_df = area_df[area_df.contains(lowest_point)] + if (not containing_area_df.empty) | (len(containing_area_df) > 1): + area_poly = containing_area_df.iloc[0].geometry + print(profiel_id) + profiel_geom = profiellijn_gdf.at[profiel_id, "geometry"] + breedte = profiellijn_gdf.at[profiel_id, PROFIEL_BREEDTE_COLUMN] + profiel_geom = profiel_geom.intersection(area_poly) + if isinstance(profiel_geom, MultiLineString): + geoms = [i for i in profiel_geom.geoms if 
hydroobject_gdf.intersects(i).any()] + else: + geoms = [profiel_geom] + + bodemhoogte = df[PROFIEL_HOOGTE_COLUMN].min() + insteekhoogte = df[PROFIEL_HOOGTE_COLUMN].max() + waterlijnhoogte = df[df.within(area_poly)][PROFIEL_HOOGTE_COLUMN].max() + + data += [ + { + "profiel_id": profiel_id, + "bodemhoogte": bodemhoogte, + "insteekhoogte": insteekhoogte, + "waterlijnhoogte": waterlijnhoogte, + "breedte": breedte, + "geometry": geom, + } + for geom in geoms + ] + + profile_df = gpd.GeoDataFrame(data, crs=profielpunt_gdf.crs) + profile_df = profile_df[~profile_df.is_empty] + profile_df.drop_duplicates("profiel_id", inplace=True) + profile_df.to_file(profile_gpkg, engine="pyogrio") +else: + profile_df = gpd.read_file(profile_gpkg, engine="pyogrio", fid_as_index=True) + profile_df.drop_duplicates("profiel_id", inplace=True) + + +# %% Basin / Profile +# of all profiles within basin/area we take the one with the lowest level +def get_area_and_profile(node_id): + area_geometry = None + + # try to get a sensible area_geometry from basin-area + if node_id in model.basin.area.df.node_id.to_list(): + area_geometry = model.basin.area[node_id].set_index("node_id").at[node_id, "geometry"] + if area_geometry.area > 1000: + selected_profiles_df = profile_df[profile_df.intersects(area_geometry)] + else: + area_geometry = None + + # if we didn't get an area (of sufficient size) we get it from profiles and edges + if area_geometry is None: + edges_select_df = model.edge.df[(model.edge.df.from_node_id == node_id) | (model.edge.df.to_node_id == node_id)] + selected_profiles_df = profile_df[profile_df.intersects(edges_select_df.union_all())] + if selected_profiles_df.empty: + width = 2 + else: + width = selected_profiles_df.length.mean() + area_geometry = edges_select_df.buffer(width / 2).union_all() + + # select profile + if not selected_profiles_df.empty: # we select the profile with the lowest level + profile = selected_profiles_df.loc[selected_profiles_df["bodemhoogte"].idxmin()] + + else: # we select closest profile + print(f"basin without intersecting profile {row.Index}") + profile = profile_df.loc[profile_df.distance(row.geometry).idxmin()] + + return area_geometry, profile + + +# %% update basin / profile +basin_profile_df = model.basin.profile.df[model.basin.profile.df.node_id == -999] +for row in model.basin.node.df.itertuples(): + area_geometry, profile = get_area_and_profile(row.Index) + + level = [profile.bodemhoogte, profile.insteekhoogte] + area = [1, round(max(area_geometry.area, 999))] + + # remove profile from basin + model.basin.profile.df = model.basin.profile.df[model.basin.profile.df.node_id != row.Index] + + # add profile to basin + basin_profile_df = pd.concat( + [ + basin_profile_df, + pd.DataFrame({"node_id": [row.Index] * len(level), "level": level, "area": area}), + ] + ) + + model.basin.node.df.loc[row.Index, ["meta_profile_id"]] = profile[PROFIEL_LINE_ID_COLUMN] + + +basin_profile_df.reset_index(inplace=True, drop=True) +basin_profile_df.index.name = "fid" +model.basin.profile.df = basin_profile_df + +# set basin state +state_df = model.basin.profile.df.groupby("node_id").max()["level"].reset_index() +state_df.index.name = "fid" +model.basin.state.df = state_df + +# %% +# Stuwen als tabulated_rating_cuves +for row in model.node_table().df[model.node_table().df.meta_object_type == "stuw"].itertuples(): + node_id = row.Index + + # get weir + if row.name in stuw_df.index: + kst = stuw_df.loc[row.name] + elif stuw_df.distance(row.geometry).min() < 1: + kst = 
stuw_df.loc[stuw_df.distance(row.geometry).idxmin()] + else: + raise ValueError(f"Geen stuw gevonden voor node_id {node_id}") + + if isinstance(kst, gpd.GeoDataFrame): + kst = kst.iloc[0] + name = kst[STUW_NAME_COLUMN] + code = kst[STUW_CODE_COLUMN] + if pd.isna(name): + name = code + + # get upstream, if at flowboundary downstream profile + basin_node_id = model.upstream_node_id(node_id) + if not model.node_table().df.at[basin_node_id, "node_type"] == "Basin": + basin_node_id = model.downstream_node_id(node_id) + + profile = profile_df.set_index("profiel_id").loc[model.node_table().df.at[basin_node_id, "meta_profile_id"]] + + # get level + + # from target-level + crest_level = kst[STUW_TARGET_LEVEL_COLUMN] + + # if NA crest-level + if pd.isna(crest_level): + crest_level = kst[STUW_CREST_LEVEL_COLUMN] + + # if NA upstream min basin-level + 10cm + if pd.isna(crest_level): + crest_level = profile.waterlijnhoogte + if pd.isna(crest_level): + crest_level = profile[["bodemhoogte", "waterlijnhoogte"]].mean() + + # if crest_level < upstream bottom-level we raise it to just above the upstream bottom-level + if crest_level < profile.bodemhoogte: + print(f"crest-level lower than upstream basin {node_id}") + crest_level = profile.bodemhoogte + 0.1 + + # get width + crest_width = kst[STUW_WIDTH_COLUMN] + + # if NA or implausible we overwrite with 0.5 of profile width + if pd.isna(crest_width) | (crest_width > profile.geometry.length): # cannot be > profile width + crest_width = 0.5 * profile.geometry.length # assumption is 1/2 profile_width + + # get data + data = [get_tabulated_rating_curve(crest_level=crest_level, width=crest_width)] + + model.update_node( + node_id, + node_type="TabulatedRatingCurve", + data=data, + node_properties={"name": name, "meta_code_waterbeheerder": code}, + ) + +# %% Duikers als tabulated_rating_curves +for row in model.node_table().df[model.node_table().df.meta_object_type == "duikersifonhevel"].itertuples(): + node_id = row.Index + + # get culvert + if row.name in kdu_df.index: + kdu = kdu_df.loc[row.name] + elif kdu_df.distance(row.geometry).min() < 1: + kdu = kdu_df.loc[kdu_df.distance(row.geometry).idxmin()] + else: + raise ValueError(f"Geen duiker gevonden voor node_id {node_id}") + + # get upstream, if at flowboundary downstream profile + basin_node_id = model.upstream_node_id(node_id) + if not model.node_table().df.at[basin_node_id, "node_type"] == "Basin": + basin_node_id = model.downstream_node_id(node_id) + + profile = profile_df.set_index("profiel_id").loc[model.node_table().df.at[basin_node_id, "meta_profile_id"]] + + # get level + + # from invert-levels + crest_level = kdu[[KDU_INVERT_LEVEL_US_COLUMN, KDU_INVERT_LEVEL_DS_COLUMN]].dropna().max() + + # if NA upstream min basin-level + 10cm + if pd.isna(crest_level): + crest_level = profile.waterlijnhoogte + if pd.isna(crest_level): + crest_level = profile[["bodemhoogte", "waterlijnhoogte"]].mean() + + # if crest_level < upstream bottom-level we raise it to just above the upstream bottom-level + if crest_level < profile.bodemhoogte: + print(f"crest-level lower than upstream basin {node_id}") + crest_level = profile.bodemhoogte + 0.1 + + # get width + width = kdu[KDU_WIDTH_COLUMN] + + # if NA or implausible we overwrite with 1/3 of profile width + if pd.isna(width) | (width > profile.geometry.length): # cannot be > profile width + width = profile.geometry.length / 3 # assumption is 1/3 profile_width + + # get height + height = kdu[KDU_HEIGHT_COLUMN] + + if pd.isna(height): + height = width + + # get shape + shape = kdu[KDU_SHAPE_COLUMN] + + if
pd.isna(shape): + shape = "rectangle" + else: + shape = KDU_SHAPE_MAP[shape] + + # update model + data = get_tabulated_rating_curve( + crest_level=crest_level, + width=width, + height=height, + shape=shape, + levels=[0, 0.1, 0.5], + ) + + model.update_node( + node_id, + node_type="TabulatedRatingCurve", + data=[data], + node_properties={"meta_code_waterbeheerder": row.name}, + ) + +# %% gemalen als pump +for row in model.node_table().df[model.node_table().df.meta_object_type == "gemaal"].itertuples(): + node_id = row.Index + + # get upstream profile + basin_node_id = model.upstream_node_id(node_id) + profile = profile_df.set_index("profiel_id").loc[model.node_table().df.at[basin_node_id, "meta_profile_id"]] + min_upstream_level = profile.waterlijnhoogte + + kgm = kgm_df.loc[row.name] + + # set name and code column + name = kgm[KGM_NAME_COLUMN] + code = kgm[KGM_CODE_COLUMN] + + if pd.isna(name): + name = code + + # get flow_rate + flow_rate = kgm[KGM_CAPACITY_COLUMN] + + if pd.isna(flow_rate): + flow_rate = round( + basin_area_df.at[model.upstream_node_id(node_id), "geometry"].area * 0.015 / 86400, 2 + ) # 15mm/day of upstream areay + + data = pump.Static(flow_rate=[flow_rate], min_upstream_level=min_upstream_level) + + model.update_node( + node_id, + node_type="Pump", + data=[data], + node_properties={"name": name, "meta_code_waterbeheerder": code}, + ) + +# %% update open water +for row in model.node_table().df[model.node_table().df.node_type == "ManningResistance"].itertuples(): + node_id = row.Index + + # get depth + basin_node_id = model.upstream_node_id(node_id) + profile = profile_df.set_index("profiel_id").loc[model.node_table().df.at[basin_node_id, "meta_profile_id"]] + depth = profile.insteekhoogte - profile.bodemhoogte + + # compute profile_width from slope + profile_slope = 0.5 + profile_width = profile.geometry.length - ((depth / profile_slope) * 2) + + # if width < 1/3 * profile.geometry.length (width at invert), we compute profile_slope from profile_width + if profile_width < profile.geometry.length / 3: + profile_width = profile.geometry.length / 3 + profile_slope = depth / profile_width + + # get length + length = round( + model.edge.df[(model.edge.df.from_node_id == node_id) | (model.edge.df.to_node_id == node_id)].length.sum() + ) + + # # update node + data = manning_resistance.Static( + length=[round(length)], + profile_width=[round(profile_width, 2)], + profile_slope=[round(profile_slope, 2)], + manning_n=[0.04], + ) + model.update_node(node_id, node_type="ManningResistance", data=[data]) + + +# %% update outlets +for row in model.node_table().df[model.node_table().df.node_type == "Outlet"].itertuples(): + node_id = row.Index + + # get upstream, if at flowboundary downstream profile + basin_node_id = model.upstream_node_id(node_id) + if not model.node_table().df.at[basin_node_id, "node_type"] == "Basin": + basin_node_id = model.downstream_node_id(node_id) + + profile = profile_df.set_index("profiel_id").loc[model.node_table().df.at[basin_node_id, "meta_profile_id"]] + + height = round(profile.insteekhoogte - profile.bodemhoogte, 2) + width = round(profile.geometry.length, 2) + + try: + crest_level = round(model.upstream_profile(node_id).level.min() + 0.1, 2) + except ValueError: + crest_level = round(model.downstream_profile(node_id).level.min() + 0.1, 2) + + data = get_outlet(crest_level=crest_level, width=width, height=height, max_velocity=0.5) + + model.update_node(node_id, node_type="Outlet", data=[data]) + +# %% clean boundaries +model.flow_boundary.static.df = 
model.flow_boundary.static.df[ + model.flow_boundary.static.df.node_id.isin( + model.node_table().df[model.node_table().df.node_type == "FlowBoundary"].index + ) +] +model.flow_boundary.static.df.loc[:, "flow_rate"] = 0 +# %% write model +ribasim_toml = cloud.joinpath("DeDommel", "modellen", "DeDommel_parameterized", "model.toml") +model.write(ribasim_toml) + +# %% diff --git a/notebooks/de_dommel/rare_edges.gpkg b/notebooks/de_dommel/rare_edges.gpkg new file mode 100644 index 0000000000000000000000000000000000000000..141490d65b79acdb953d6866ff644372633e3683 GIT binary patch literal 106496 zcmeI53w#^Zndmio`aOBeiSsyCFp?TM@gt7ykPs&J#3QifM3xgw8iZnv?IC(N(iq#} zu{chkk1h8WE+x=i3VUG}?iSkZ50a2e0^#!9?UR;Ww%shHyDhXpFNM7!5GeP|XOp>?7%%@0{|!^mq)FI0vXZDcofhglR1^|0Xx>JFLnc@dUCEYrrJ zaQ8Nxp(7YVJ!~i27C~Vq*u(mmjosl$0C|;qp>zs~v7HGXc};cjdeJpPIuR53Eb33E zvSKDCq{OvIZKd2i)E`Tw(jwZ)qXeJLB5^2|LiJuh+QzZCroIrnZDpSPI#ln!2(h8| zy_eFisx25}^;8)5q|?c0g1?4OL`R0>xEe-Ce-fgp1Px*D=5~pHdI05Q&Jn0m$Fl*d zt3n~1b@@1T((6#23UghB0@E=Am}lvcIlZb+YcM}$f6P^^pf}C{-(*{v%UxSb-P9`g z1*4xC-WiO$e4kjoq@Q@OUpy6Fn zQB0!E;b0dN?n9TceR>7~T^I8QblC=Qwq!D@qa}@Q!H$rz1V0M1oA7WFYGZp8+;}(= z`IMN|q4xVXlsnx$wUk9j#rd)9?gXAmqp=Y&Ex(T%%@nOS##Br$EpfSderMS{*-9eJjt=^zq7X|&GyFg_JDx>8RnXs2>U3{!nz4m3d9#%zdz9_7 zxLoe$X6oi;xehYTY(zj?!5gcUDQ6)XJ>h`}EZg;Q3x+AXg->&yL{S{dzCy4F$LK;)y=j zi}U1hAD84)`G(gC&!}q~y{SKX?!FW(cDi>oPwPvXQB;qJHWc)Ul3>x97p06oMh(Ri z3mh)@s#Vm)_MBs;2Q%1O%77^m)7`sjT3mX)YQNJ*xh5A|U2dBbJVP60^+~wnq%(0mRTZ4R zDYbA=5Y6=Z9UsP1q3{S4z{}^ zKhYDw`?cOKy}T#Tvl$;ubO+nFdp6?p3O<4ATUOV50&5zZn;To3SJpQ*1sYmg@pD5< zeM3ut>5XvR;b5e1yGN>nC*WzWt6S~av7RVe^H#Qc?+FIYxP`D1OwK{ZPiqCw4 z-J$KC?wtu?_Xr>D5%~;{cjU3*A)()c>&b+fE;ho3w|np*k06f3`Dl6|n#QMELdp|p zYQPlM)UT;)k{@gGGW6qpbOv9d!=@7ALQE20+1S#&rm;nRwC0It`(yZcY#`PzrZZ9- zTk)f=IbUWVmdNr^9&fjW)SxGTEm2ojmsg4+k;0BKB#W$G(gH!$?YGLhwCECd}`UQoR+p&2A{=Y z+C<66TH)FYloOgp$>h9y2Z~qV?zT^uShB672equ0su$-)pf00e*l5C8%|00;m9An<7rh*720ORCG) zyYR!BiQ)TY`1l~7Rd1fj=3*M^8dlfVwbs@(Md};Z)-|kcY+Z??u3#)DS}wfvX_{Hk zQ@QZHy6-afJf00cfa1j=pI#kng7u>b$u7#11=1b_e#00KY&2mk>f00e*l5C8%|Kt~{d|F7Kt z|F(sGTPFf9fdCKy0zd!=00AHX1b_e#00KY&2mpcenm`3*tEO_t|9&|CKd;RRl?MVq z00;m9AOHk_01yBIKmZ5;0U$61fkNm1CoJ@dDa0Wf2mk>f00e*l5C8%|00;m9AOHk_ z01)_W6R4(K)$^;ZMMb#-f9dx>JA)w>|B`Yrgn!{gIsbplLcjIdZVJ>62mk>f00e*l z5C8%|00;m9AOHk_01%iVfjRc-`MU4^ldc88{r@v$2Vy_~2mk>f00e*l5C8%|00;m9 zAOHl;D+1>G|1Go5s|G-2fdCKy0zd!=00AHX1b_e#00KY&2mpaIBp{vt!~Xvap@BFM z00KY&2mk>f00e*l5C8%|00;nq^NfJ>`+upP-|I4I{5Hy-*omw2|G^9cEGQ=5{G_ z_a>w&lr^F5khvT$!Z{Gjv~eihy$u)85e%Uowv%m(pfD5cVSUWT?ri9@jzs`vWQHjc$L z_l4MPEAuM74%PcFLTspgZ~H6->+Bg}6;fF~BO0-t8CInX4Qnve!-ga0h*2r?Q^a^F z?W)>>y|a2MjC<1QWHiBF!za$xH%%pI2vyHa51^dPIRZUXX%kSz0uE>Kjtci&Jy2-a;Lkema+(`I6s!%oe)Gm8XFPQ^82Wm*PsNwWcJYZ%w{M^=%Gh zGufyRHybA@q@rUcxKix6IW{&{NEXMMW%E%EiplIJ#?oLWxoq4#X_C(wNNKOxIN9Nt zD8v%c3_lRfj%Sfi74&zMI^ElwX53FPd2?Tt>`}JQ;&Qp0o2i?Z<@&4i{mZeyN#;`lIMNMI`8*n=X$P@mL^v|g7qN9in$`%Z=dN{dzCy4F$LK;`LIl6Xwa|E-J~V@(r&Oo>9*~!yFp4OK%qo^JcZ7ApyCBdRIFG`t1j2en37C2n)Rja6p?K#Ix4`#5nl<`Wg zfIh-$c1V4)&MwmoJOg}89Lexl%Sb9WIkdBznBGoh=J7L5r@MF6w7B$o)qbasa!oF_ zy4+qbwSP3%w!C(cH#T#dNRr;srfCq~$;6^kSIU)8BEhcQ42OuI4{tICL?OxhyfuBbHObnVc%+6~ zThq0+X5b2Mz?({sdj0-@ek2bd{RkdN>IQC{q#Lje^MbiEWTfsO%^YVlx@v=KtKI4D z@=j}EVgQS#h7>CZWbVIGiu-wqbPxMR0 zv)%3$D8snn!k)aGF4p#b!37H2 zsu{68%iW(|_h)SXr>`11N0xo_XXK~Ke&^i$G_Q;){Z!pqeOi~y6t-$5&Wx>^N$02Y zX5^#AK<9o0qq z$<$SNt)X05tW-TD9ib)N7{CIq`BXdE7K!g`46G)&i0j_ z0U_=|>ydN@puA>6?aCCaX2nyobUhA9@zV6%6Nv`1Y;sr}mnUjP%I{Z?G03jxNsR;HJ63mNKuNrSd zt}3>=XkU-&=>;pK`eu&Q%;+oKg1{#T3L*EU_|cU)u=178^PKEN=~9=QVW^4fX;$LW 
z%D6CbDW#HNmah>P_LZn4IPDr-+6en6=p`<9TN^b|ldBCJ4#MH!)MuewG z!4Qj2C4!-j0P+N-;~)>&+u{i6DuPGe!{avt8GIF0>VSCDuNM;2g<_@J%U7hd{MnJ6 zDP9~+XLiX2NY|bG4!p6a=+n~neja_gVr%&uhmMoh)`3KNbX|zKSh&Ef>MO6j_|EJxig|U(>7S!X>s*qXL!5#Di7cH_f6Gwt&r;wFtedPI*wd*RG*oXhT zXg_||T15m&f-!^rXO7d)p(?v+qCp zXi(QGB1i;C1|n$xf&CR~Qd70mtm^$|lB?fEcIKvMFFRD#WM_{4+q@k=9(6NEntpJ) z^~*X{MgfwM2-=@@lu)xZRZGmOd`H_RznHKyqsP9r=$W*gdH$PE-(%n4X8!mg6?nEo zr^+ZmG7>?%=vZcXPE)nmtcuEf<(9-+8}nk|hYt;}w=sJUeC^4le_X&EdueN;o6@N= z3XqIM&>nYAP&aC-s?4h1UG`%7!UC#}hQD!d?{un2Mj~k6iB;X8saj-Kb@Pt~UMeo2 z>h!G-TBmIl$w&n4F|6u(P1QoPs*ykY=q`5wRn3bwEelWADw2^1+OKj>SPpBdD$S~{ z-_UgBsn;#cA9frGKJZ%$v$5#gi@$cin>qgX_dfH&gF0Io1xQ9BXur}v*K$x(afAef_PE{{eU9fBxZn9Um@WEPd|LKey^yWfUM8iJAh^nFa zswyfsUzOeV;NIs8sA@m{zQt6%nTRc2NH?aROM{$M&) zBqI^Dy=`CKM>Gku&5e3-Ys$CrE*nFbTd=sWPU1)9Y)RYZ^okPO6fwi@exS3*-on^k?Z^Y>TGd6#0y zv}av$^VU086qpx@AQ2!L$UJHN$hy{XT2obFRz((ABqv#DkrHyPQkQN-fMg(o)(@?% zI#uOnRUeFtd*9n@W4`{@bx#cMw=wNsO}tkAr3K7C+~|AzR{X4~G76B4M9}J?*16JJ ztIEu(c4J#D?Xok*CCPp~I=y^_&ex3sBqI^D&bEHjF;`PnYF0(o2mj$e_WL*PvuO*8 z*M6{mmXvIY(|~R%BKs%|Z5N8)IB2Jof$#zkANE zYn4%eWYh%bQa9RjYZ%wms=n+`{r3o-iXE$GuaRbXtpu_SF$$Pe&7uBGr^-3CD&vyG zq{=8@QZ<{pL8rAn~f>Gz<0wj{Ji0+UvGBcg~jvh$18W@XUzkNAQ2!L zh@jSspj70D{>qGW8HBFRR z)4N|{uIgGuF=R@;?vB*0$MA$mrdF*{M1Ulaj3mR=mP0Nt)`Yjrb8DpU|D)))E%*-} zKmZ5;0U!VbfB+Bx0zd!=00AHX1c1Qjgg^yltEO`IH29_ce+@pgr@ui5DvrbXKb-&P zZV-U;f8`Doa;#6T!jt2Day4GN@_+A7!|Ces} zBggp0I|<0KzBbqz1xQ9B2hI!@7BGyu(D?Xv;V-p@88*{`v4CSAPFQR$>6%r+Hb#` z+$b;j%&LAlt7Z7(H>}Lxe*f6~qif00e*l5C8%| z00;nqPnf_}l(Tw%?e6N_4g1m;$;v-VpnQ*~{CiPDqBy*E)vD3a(UmxPIM%-_Hps8U zKNqknJIwdjH`J|K&X3_AAIjq26o|_CMl<*ar)txw#CQp=F-8B21^>YV2mk>f00e*l z5C8%|00;m9AOHk_01!A22`qQuiwDKiUptV_|NV5Mg+5K6r2jy_Nxw$_iatTVO#hVr zA^igVH2nm9gnpF%F8#OkU(@%}e?i|t-%5Xt{t`V&-$0MkBAuqMrg{1*dON+9-a>cM z9rQ+eJ>7zfga;4+0zd!=00AHX1b_e#00KY&2mpcenSjl2vlQEWQs9+>3#Fh&3NDa> z3KmL1r4+cOV1X3Omx6gxFjorZNWp9=m?Z_Y z6jVq-xfGO1L8%m!NI|g_6iIz>&Ud(_P8)Z4 z-cIiDJxkSi8@Ko`Z^8fn!5u&4=Z>GAt;X9q*NP3CtK)xhM?)d*XzEop-oY(A?BN#v zxSKm%GR7UgkWu5EO8ifnaQs8Mc$X6Y;$Iu&iG`XLvcHPB29{eHqc$8A(OSvVM+qos)7 z*9dob(eJp!n|7)3bFr=dPcB}w*F0SI-Q4mYe}v0^ggf}mQB{6EwqZNB^eT!w^s9@x z!}DEg`~nKeX)p$3qONLt#d=%Fu!98W$sK!@f+djp)y4tv-fkABJ&1(EYY}?1V zg>T%<9eNx4P|4TT_(eFrOBcUKjj!Ss{-jYC|F9arm|OVMxGw&BHGT=U?Z0!&Z}4-E zovOh$Ca1DXv271<)ernD_dkzKU>pBfm0!j!KJ@q4#&>X!y*U@#n3RvOO-^vLT~Bex zUrp)kQjKjsjD6??_MJJ{_G{I0Je=pxpXNM&bs=|jD8n7yRi(x+=bXPi&N)x7;*PDX z;f{5_uf|`%l|Fe3SNejBd$zuZdv@(=HNJ+U%MN2-Im|uz9)o>_^rH*8#ndmbZyn_x zX?sQIS6-!my*Nklhf^!ma(tXS`T^(Wf6IOEg{9p0zW*&X-p{#x*K_WxOS$hIcml^0 z4r=n}|4G{aTV{R!+X*cM0zd!=00AHX1b_e#00KY&2mk>f@c%ObY5z}Khb;8N6^~Sm zl)qlSwe0z_l_ftb?k#F|-t36mZ?cWzWOx99b0V;}%LVk+I z`2;WW(E&atj%4^~e|jV(MsuH8$-Qa|vrL3Vk#Ml1gAF6k3=8leW>W;qx3irr&fXnD zo?Mw8w2|!yhEVV3cByRni#5`N8C;7L{=N(`i+b1y%Bvb$Z+Kse>d`ii4YMeb9_2GW zF}5?oM^mvR@5i!AoHwV@>%R!Gq4vFH7Z%1Lq_UXJIpFvc1rQv;P!Ai9eDXL}oE46V z?KQ3{wz_EVcGarNx0~d!MWydOQGF#BYjla}a*>8l;Ro1+A(T$US9&B(9u(@1pipmT zr|KYTu1(?YF6=*IhUaxYDp$^N9PgnvJwib!DTU%k9&ug{s%15W7vOBVt1B$S1#5+8*qYTizW;J&|ZI%O;1#ab3FK ze>PBXFY{^o13B$Vf0(RYE+g81{R|K_o*kmbPY@Ay51EmKPI8q`5LBM`kZu}AVRmyT z)5fA;DAGNp3vOjPd)XevCwF#-6^G`VUf4XnynM@kpud7zPxoQS<>d%f&{r?L(XXe(QS?An_x`NU@)_Za?8(=5n(vHBm1wa|%v`(cxGI zubrk#TSz8uW@9Hi#bBluviJ zVQfqah19tGdfdOh%IV&+g0jfpUYFgSz+Xlejg5$D`F&Jgg==3}myOmNA{I=HE^@h7 zte_?px`t%K+xdAE4T(s%Tt^Z1@<^aRoft``g-)sAeX pd.DataFrame: def node_properties_to_table(table, node_properties, node_id): - if isinstance(node_id, int): - node_id = [node_id] # update DataFrame table_node_df = getattr(table, "node").df for column, value in node_properties.items(): - 
table_node_df.loc[table_node_df.node_id.isin(node_id), [column]] = value + table_node_df.loc[node_id, [column]] = value class BasinResults(BaseModel): @@ -50,7 +48,7 @@ def basin_results(self): @property def next_node_id(self): - return self.node_table().df.node_id.max() + 1 + return self.node_table().df.index.max() + 1 def find_node_id(self, ds_node_id=None, us_node_id=None, **kwargs) -> int: """Find a node_id by it's properties""" @@ -87,15 +85,43 @@ def find_node_id(self, ds_node_id=None, us_node_id=None, **kwargs) -> int: @property def unassigned_basin_area(self): """Get unassigned basin area""" - return self.basin.area.df[~self.basin.area.df.node_id.isin(self.basin.node.df.node_id)] + return self.basin.area.df[~self.basin.area.df.node_id.isin(self.basin.node.df.index)] @property def basin_node_without_area(self): """Get basin node without area""" - return self.basin.node.df[~self.basin.node.df.node_id.isin(self.basin.area.df.node_id)] + return self.basin.node.df[~self.basin.node.df.index.isin(self.basin.area.df.node_id)] + + def upstream_node_id(self, node_id: int): + """Get upstream node_id(s)""" + return self.edge.df.set_index("to_node_id").loc[node_id].from_node_id + + def upstream_profile(self, node_id: int): + """Get upstream basin-profile""" + upstream_node_id = self.upstream_node_id(node_id) + + node_type = self.node_table().df.loc[upstream_node_id].node_type + if node_type != "Basin": + raise ValueError(f"Upstream node_type is not a Basin, but {node_type}") + else: + return self.basin.profile[upstream_node_id] + + def downstream_node_id(self, node_id: int): + """Get downstream node_id(s)""" + return self.edge.df.set_index("from_node_id").loc[node_id].to_node_id + + def downstream_profile(self, node_id: int): + """Get downstream basin-profile""" + downstream_node_id = self.downstream_node_id(node_id) + + node_type = self.node_table().df.loc[downstream_node_id].node_type + if node_type != "Basin": + raise ValueError(f"Downstream node_type is not a Basin, but {node_type}") + else: + return self.basin.profile[downstream_node_id] def get_node_type(self, node_id: int): - return self.node_table().df.set_index("node_id").at[node_id, "node_type"] + return self.node_table().df.at[node_id, "node_type"] def get_node(self, node_id: int): """Return model-node by node_id""" @@ -113,7 +139,10 @@ def remove_node(self, node_id: int, remove_edges: bool = False): for attr in table.model_fields.keys(): df = getattr(table, attr).df if df is not None: - getattr(table, attr).df = df[df.node_id != node_id] + if "node_id" in df.columns: + getattr(table, attr).df = df[df.node_id != node_id] + else: + getattr(table, attr).df = df[df.index != node_id] if remove_edges and (self.edge.df is not None): for row in self.edge.df[self.edge.df.from_node_id == node_id].itertuples(): @@ -126,22 +155,28 @@ def remove_node(self, node_id: int, remove_edges: bool = False): ) def update_node(self, node_id, node_type, data, node_properties: dict = {}): - """Update a node type and/or data""" - # get existing network node_type - existing_node_type = self.node_table().df.set_index("node_id").at[node_id, "node_type"] + existing_node_type = self.node_table().df.at[node_id, "node_type"] # read existing table table = getattr(self, pascal_to_snake_case(existing_node_type)) # save node, so we can add it later - node_dict = table.node.df[table.node.df["node_id"] == node_id].iloc[0].to_dict() + node_dict = table.node.df.loc[node_id].to_dict() node_dict.pop("node_type") + node_dict["node_id"] = node_id # remove node from all tables for
attr in table.model_fields.keys(): df = getattr(table, attr).df if df is not None: - getattr(table, attr).df = df[df.node_id != node_id] + if "node_id" in df.columns: + getattr(table, attr).df = df[df.node_id != node_id] + else: + getattr(table, attr).df = df[df.index != node_id] + + # remove from used node-ids so we can add it again in the same table + if node_id in table._parent._used_node_ids: + table._parent._used_node_ids.node_ids.remove(node_id) # add to table table = getattr(self, pascal_to_snake_case(node_type)) @@ -155,10 +190,6 @@ def update_node(self, node_id, node_type, data, node_properties: dict = {}): node_properties = {**node_properties, **node_dict} node_properties_to_table(table, node_properties, node_id) - # change type in edge table - self.edge.df.loc[self.edge.df["from_node_id"] == node_id, ["from_node_type"]] = node_type - self.edge.df.loc[self.edge.df["to_node_id"] == node_id, ["to_node_type"]] = node_type - def add_control_node( self, to_node_id: int | list, @@ -226,10 +257,6 @@ def reverse_edge(self, from_node_id: int, to_node_id: int): self.edge.df.loc[edge_id, ["from_node_id"]] = edge_data["to_node_id"] self.edge.df.loc[edge_id, ["to_node_id"]] = edge_data["from_node_id"] - # revert node types - self.edge.df.loc[edge_id, ["from_node_type"]] = edge_data["to_node_type"] - self.edge.df.loc[edge_id, ["to_node_type"]] = edge_data["from_node_type"] - # revert geometry self.edge.df.loc[edge_id, ["geometry"]] = edge_data["geometry"].reverse() @@ -282,7 +309,7 @@ def fix_unassigned_basin_area(self, method: str = "within", distance: float = 10 """ if self.basin.node.df is not None: if self.basin.area.df is not None: - basin_area_df = self.basin.area.df[~self.basin.area.df.node_id.isin(self.basin.node.df.node_id)] + basin_area_df = self.basin.area.df[~self.basin.area.df.node_id.isin(self.basin.node.df.index)] for row in basin_area_df.itertuples(): if method == "within": @@ -299,18 +326,18 @@ def fix_unassigned_basin_area(self, method: str = "within", distance: float = 10 ValueError(f"Supported methods are 'within' or 'closest', got '{method}'.") # check if basin_nodes within area are not yet assigned an area - basin_df = basin_df[~basin_df.node_id.isin(self.basin.area.df.node_id)] + basin_df = basin_df[~basin_df.index.isin(self.basin.area.df.node_id)] # if we have one node left we are done if len(basin_df) == 1: - self.basin.area.df.loc[row.Index, ["node_id"]] = basin_df.iloc[0].node_id + self.basin.area.df.loc[row.Index, ["node_id"]] = basin_df.index[0] else: raise ValueError("Assign Basin Area to your model first") else: raise ValueError("Assign a Basin Node to your model first") def reset_edge_geometry(self, edge_ids: list | None = None): - node_df = self.node_table().df.set_index("node_id") + node_df = self.node_table().df if edge_ids is not None: df = self.edge.df[self.edge.df.index.isin(edge_ids)] else: @@ -319,3 +346,13 @@ def reset_edge_geometry(self, edge_ids: list | None = None): for row in df.itertuples(): geometry = LineString([node_df.at[row.from_node_id, "geometry"], node_df.at[row.to_node_id, "geometry"]]) self.edge.df.loc[row.Index, ["geometry"]] = geometry + + @property + def edge_from_node_type(self): + node_df = self.node_table().df + return self.edge.df.from_node_id.apply(lambda x: node_df.at[x, "node_type"] if x in node_df.index else None) + + @property + def edge_to_node_type(self): + node_df = self.node_table().df + return self.edge.df.to_node_id.apply(lambda x: node_df.at[x, "node_type"] if x in node_df.index else None) diff --git 
a/src/ribasim_nl/ribasim_nl/network_validator.py b/src/ribasim_nl/ribasim_nl/network_validator.py index ba95378..72472c5 100644 --- a/src/ribasim_nl/ribasim_nl/network_validator.py +++ b/src/ribasim_nl/ribasim_nl/network_validator.py @@ -26,7 +26,7 @@ def check_node_connectivity(row, node_df, tolerance=1.0) -> bool: def check_internal_basin(row, edge_df) -> bool: if row.node_type == "Basin": - return row.node_id not in edge_df.from_node_id.to_numpy() + return row.name not in edge_df.from_node_id.to_numpy() else: return False @@ -81,7 +81,7 @@ def edge_missing_nodes(self): def edge_incorrect_from_node(self): """Check if the `from_node_type` in edge-table in matches the `node_type` of the corresponding node in the node-table""" - node_df = self.node_df.set_index("node_id") + node_df = self.node_df mask = ~self.edge_df.apply( lambda row: node_df.at[row["from_node_id"], "node_type"] == row["from_node_type"] if row["from_node_id"] in node_df.index @@ -92,7 +92,7 @@ def edge_incorrect_from_node(self): def edge_incorrect_to_node(self): """Check if the `to_node_type` in edge-table in matches the `node_type` of the corresponding node in the node-table""" - node_df = self.node_df.set_index("node_id") + node_df = self.node_df mask = ~self.edge_df.apply( lambda row: node_df.at[row["to_node_id"], "node_type"] == row["to_node_type"] if row["to_node_id"] in node_df.index @@ -103,7 +103,7 @@ def edge_incorrect_to_node(self): def edge_incorrect_connectivity(self): """Check if the geometries of the `from_node_id` and `to_node_id` are on the start and end vertices of the edge-geometry within tolerance (default=1m)""" - node_df = self.node_df.set_index("node_id") + node_df = self.node_df mask = self.edge_df.apply( lambda row: check_node_connectivity(row=row, node_df=node_df, tolerance=self.tolerance), axis=1 ) @@ -112,5 +112,5 @@ def edge_incorrect_connectivity(self): def edge_incorrect_type_connectivity(self, from_node_type="ManningResistance", to_node_type="LevelBoundary"): """Check edges that contain wrong connectivity""" - mask = (self.edge_df.from_node_type == from_node_type) & (self.edge_df.to_node_type == to_node_type) + mask = (self.model.edge_from_node_type == from_node_type) & (self.model.edge_to_node_type == to_node_type) return self.edge_df[mask] diff --git a/src/ribasim_nl/ribasim_nl/structure_node.py b/src/ribasim_nl/ribasim_nl/structure_node.py new file mode 100644 index 0000000..0e628ef --- /dev/null +++ b/src/ribasim_nl/ribasim_nl/structure_node.py @@ -0,0 +1,84 @@ +from math import pi + +from ribasim.nodes import outlet, tabulated_rating_curve + + +def calculate_area(width: float, shape: str, height: float | None = None): + """Calculate flow-area of a cross-section""" + # shapes that only use width + if shape == "round": + return pi * (width / 2) ** 2 + + # shapes that need height + elif height is None: + raise ValueError(f"for shape {shape} height cannot be None") + elif shape in ["rectangle"]: + return width * height + elif shape in ["ellipse"]: + return pi * (width / 2) * (height / 2) + + # shapes not implemented + else: + raise ValueError(f"shape {shape} not implemented") + + +def calculate_velocity( + level: float, + crest_level, +): + """Calculate velocity over a weir-type structure.""" + if crest_level > level: + return 0 + else: + return ((2 / 3) * 9.81 * (level - crest_level)) ** (1 / 2) + + +def calculate_flow_rate( + level: float, + crest_level: float, + width: float, + height: float | None = None, + loss_coefficient: float = 0.63, + shape: str = "rectangle", +): + velocity = 
calculate_velocity(level=level, crest_level=crest_level) + area = width * ((2 / 3) * (level - crest_level)) + if height is not None: + area = min(area, calculate_area(width=width, shape=shape, height=height)) + + return round(loss_coefficient * area * velocity, 2) + + +def get_outlet( + crest_level: float, width: float, shape: str = "rectangle", height: float | None = None, max_velocity: float = 1 +) -> outlet.Static: + """Return an outlet curve from structure-data""" + area = calculate_area(width=width, shape=shape, height=height) + flow_rate = round(area * max_velocity, 2) + + return outlet.Static(flow_rate=[flow_rate], min_upstream_level=crest_level) + + +def get_tabulated_rating_curve( + crest_level, + width, + loss_coefficient: float = 0.63, + height: float | None = None, + shape: str = "rectangle", + levels: list[float] = [0, 0.05, 0.1, 0.25, 0.5], +) -> tabulated_rating_curve.Static: + """Return a tabulated-rating curve from structure-data""" + level = [round(crest_level, 2) + i for i in levels] + flow_rate = [ + calculate_flow_rate( + level=i, + crest_level=crest_level, + width=width, + height=height, + shape=shape, + loss_coefficient=loss_coefficient, + ) + for i in level + ] + + return tabulated_rating_curve.Static(level=level, flow_rate=flow_rate)
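
Reviewer note (not part of the patch): a minimal usage sketch of the new structure_node helpers, assuming only the functions added above. The node dimensions and levels are made up for illustration, and the quoted flow rates are hand-computed from the weir relation used in calculate_flow_rate (Q = loss_coefficient * A * sqrt((2/3) * g * h)), so treat them as approximate.

# sketch: rating curve for a hypothetical weir with its crest at 10.0 m NAP and a 2.5 m wide crest
from ribasim_nl.structure_node import get_outlet, get_tabulated_rating_curve

trc_data = get_tabulated_rating_curve(crest_level=10.0, width=2.5)
# default levels are [0, 0.05, 0.1, 0.25, 0.5] m above the crest, giving
# flow_rate of roughly [0.0, 0.03, 0.08, 0.34, 0.95] m3/s

# sketch: outlet for a hypothetical 1.5 m x 1.0 m rectangular culvert, capped at 0.5 m/s,
# so flow_rate = area * max_velocity = 1.5 * 1.0 * 0.5 = 0.75 m3/s and min_upstream_level = crest_level
outlet_data = get_outlet(crest_level=9.8, width=1.5, height=1.0, shape="rectangle", max_velocity=0.5)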
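A second sketch on the Model refactor this patch relies on (node ids moved to the table index, from_node_type/to_node_type dropped from the edge table): the derived edge_from_node_type / edge_to_node_type properties give the same filtering the validator previously did on stored columns. Assuming a model written by the notebooks above:

from ribasim_nl import CloudStorage, Model

cloud = CloudStorage()
model = Model.read(cloud.joinpath("DeDommel", "modellen", "DeDommel_parameterized", "model.toml"))

# node types are now looked up via the node-table index instead of being stored per edge
mask = (model.edge_from_node_type == "ManningResistance") & (model.edge_to_node_type == "LevelBoundary")
print(model.edge.df[mask])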