From 2ff8c42ca0ac11913e4c971a67a59e07cc036ae7 Mon Sep 17 00:00:00 2001 From: dbhart Date: Thu, 31 Aug 2023 12:15:38 -0400 Subject: [PATCH 01/21] Adding results files to fresh branch --- wntr/sim/results.py | 272 ++++++++++++++++++++ wntr/tests/test_multiple_simulations.py | 315 +++++++++++++++--------- 2 files changed, 469 insertions(+), 118 deletions(-) diff --git a/wntr/sim/results.py b/wntr/sim/results.py index 12a863988..ced561d2f 100644 --- a/wntr/sim/results.py +++ b/wntr/sim/results.py @@ -1,5 +1,6 @@ import datetime import enum +import numpy as np class ResultsStatus(enum.IntEnum): @@ -10,6 +11,41 @@ class ResultsStatus(enum.IntEnum): class SimulationResults(object): """ Water network simulation results class. + + A small number of mathematical and statistical functions are also provided. + These functions are applied to all dataframes within the results object (or + between two results objects) by name, elementwise. + + Assuming ``A`` and ``B`` are both results objects that have the same time + indices for the results and which describe the same water network physical + model (i.e., have the same nodes and links), then the following functions + are defined: + + ================== ============================================================ + Example function Description + ------------------ ------------------------------------------------------------ + ``C = A + B`` Add the values from A and B for each property + ``C = A - B`` Subtract the property values in B from A + ``C = A / B`` Divide the property values in A by the values in B + ``C = A / n`` Divide the property values in A by n [int]; + note that this only makes sense if calculating an average + ``C = pow(A, p)`` Raise the property values in A to the p-th power; + note the syntax ``C = pow(A, p, mod)`` can also be used + ``C = abs(A)`` Take the absolute value of the property values in A + ``C = -A`` Take the negative of all property values in A + ``C = +A`` Take the positive of all property values in A + ``C = A.max()`` Get the maximum value for each property for node/link + ``C = A.min()`` Get the minimum value for each property for node/link + ``n = A.len()`` Get the number of timesteps within A + ``C = A.sqrt()`` Take the element-wise square root for all properties + ``C = A.sum()`` Take the sum of each property across time for each node/link + ================== ============================================================ + + As an example, to calculate the relative difference between the results of two + simulations, one could do: ``rel_dif = abs(A - B) / A`` (warning - this will operate + on link statuses as well, which may result in meaningless results for that + parameter). 
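To make the table above concrete, here is a rough sketch of how these operators might be exercised; the input file path and variable names are placeholders, and the solver options simply mirror the tests added later in this patch:

    import wntr

    wn = wntr.network.WaterNetworkModel("Net3.inp")   # placeholder path
    sim = wntr.sim.WNTRSimulator(wn)
    res1 = sim.run_sim(solver_options={"TOL": 1e-8})

    wn.reset_initial_values()
    res2 = sim.run_sim(solver_options={"TOL": 1e-8})

    # largest absolute discrepancy per node/link across all timesteps
    worst = abs(res1 - res2).max()
    print(worst.node["pressure"].sort_values().tail())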
+ """ def __init__(self): @@ -19,3 +55,239 @@ def __init__(self): self.network_name = None self.link = None self.node = None + + def __add__(self, other): + if not isinstance(other, SimulationResults): + raise ValueError("operating on a results object requires both be SimulationResults") + new = SimulationResults() + new.link = dict() + new.node = dict() + new.network_name = "{}[{}] + {}[{}]".format( + self.network_name, self.timestamp, other.network_name, other.timestamp + ) + for key in self.link.keys(): + if key in other.link: + new.link[key] = self.link[key] + other.link[key] + for key in self.node.keys(): + if key in other.node: + new.node[key] = self.node[key] + other.node[key] + return new + + def __sub__(self, other): + if not isinstance(other, SimulationResults): + raise ValueError("operating on a results object requires both be SimulationResults") + new = SimulationResults() + new.link = dict() + new.node = dict() + new.network_name = "{}[{}] - {}[{}]".format( + self.network_name, self.timestamp, other.network_name, other.timestamp + ) + for key in self.link.keys(): + if key in other.link: + new.link[key] = self.link[key] - other.link[key] + for key in self.node.keys(): + if key in other.node: + new.node[key] = self.node[key] - other.node[key] + return new + + def __abs__(self): + new = SimulationResults() + new.link = dict() + new.node = dict() + new.network_name = "|{}[{}]|".format(self.network_name, self.timestamp) + for key in self.link.keys(): + new.link[key] = abs(self.link[key]) + for key in self.node.keys(): + new.node[key] = abs(self.node[key]) + return new + + def __neg__(self): + new = SimulationResults() + new.link = dict() + new.node = dict() + new.network_name = "-{}[{}]".format(self.network_name, self.timestamp) + for key in self.link.keys(): + new.link[key] = -self.link[key] + for key in self.node.keys(): + new.node[key] = -self.node[key] + return new + + def min(self): + """Min operates on each axis separately, therefore it needs to be a function. + The built-in ``min`` function will not work.""" + new = SimulationResults() + new.link = dict() + new.node = dict() + new.network_name = "min({}[{}])".format(self.network_name, self.timestamp) + for key in self.link.keys(): + new.link[key] = self.link[key].min(axis=0) + for key in self.node.keys(): + new.node[key] = self.node[key].min(axis=0) + return new + + def max(self): + """Max operates on each axis separately, therefore it needs to be a function. + The built-in ``max`` function will not work.""" + new = SimulationResults() + new.link = dict() + new.node = dict() + new.network_name = "max({}[{}])".format(self.network_name, self.timestamp) + for key in self.link.keys(): + new.link[key] = self.link[key].max(axis=0) + for key in self.node.keys(): + new.node[key] = self.node[key].max(axis=0) + return new + + def len(self): + """This is not an iterator, but there is still a meaning to calling its length. 
+ However, this means that ``len`` must be a function called from the object, not + the builtin function.""" + for key in self.link.keys(): + return len(self.link[key]) + + def sqrt(self): + """Element-wise square root of all values.""" + new = SimulationResults() + new.link = dict() + new.node = dict() + new.network_name = "sqrt({}[{}])".format(self.network_name, self.timestamp) + for key in self.link.keys(): + new.link[key] = np.sqrt(self.link[key]) + for key in self.node.keys(): + new.node[key] = np.sqrt(self.node[key]) + return new + + def sum(self): + """Sum across time for each node/link for each property.""" + new = SimulationResults() + new.link = dict() + new.node = dict() + new.network_name = "sum({}[{}])".format(self.network_name, self.timestamp) + for key in self.link.keys(): + new.link[key] = self.link[key].sum(axis=0) + for key in self.node.keys(): + new.node[key] = self.node[key].sum(axis=0) + return new + + def __pos__(self): + new = SimulationResults() + new.link = dict() + new.node = dict() + new.network_name = "+{}[{}]".format(self.network_name, self.timestamp) + for key in self.link.keys(): + new.link[key] = +self.link[key] + for key in self.node.keys(): + new.node[key] = +self.node[key] + return new + + def __truediv__(self, other): + new = SimulationResults() + new.link = dict() + new.node = dict() + if isinstance(other, SimulationResults): + new.network_name = "{}[{}] / {}[{}]".format( + self.network_name, self.timestamp, other.network_name, other.timestamp + ) + for key in self.link.keys(): + if key in other.link: + new.link[key] = self.link[key] / other.link[key] + for key in self.node.keys(): + if key in other.node: + new.node[key] = self.node[key] / other.node[key] + return new + elif isinstance(other, int): + new.network_name = "{}[{}] / {}".format(self.network_name, self.timestamp, other) + for key in self.link.keys(): + new.link[key] = self.link[key] / other + for key in self.node.keys(): + new.node[key] = self.node[key] / other + return new + else: + raise ValueError( + "operating on a results object requires divisor be a SimulationResults or a float" + ) + + def __pow__(self, exp, mod=None): + new = SimulationResults() + new.link = dict() + new.node = dict() + new.network_name = "{}[{}] ** {}".format(self.network_name, self.timestamp, exp) + for key in self.link.keys(): + new.link[key] = pow(self.link[key], exp, mod) + for key in self.node.keys(): + new.node[key] = pow(self.node[key], exp, mod) + return new + + return new + + def _adjust_time(self, ts: int): + """ + Adjust the time index for the results object by `ts`. + + Parameters + ---------- + ts : int + The number of seconds by which to adjust the result dataframe index + """ + ts = int(ts) + for key in self.link.keys(): + self.link[key].index += ts + for key in self.node.keys(): + self.node[key].index += ts + + def append_results_from(self, other): + """ + Combine two results objects into a single, new result object. + If the times overlap, then the results from the `other` object will take precedence + over the values in the calling object. I.e., given ``A.append_results_from(B)``, + where ``A`` and ``B`` + are both `SimluationResults`, any results from ``A`` that relate to times equal to or + greater than the starting time of results in ``B`` will be dropped. + + .. warning:: + + This operations will be performed "in-place" and will change ``A`` + + + Parameters + ---------- + other : SimulationResults + Results objects from a different, and subsequent, simulation. 
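As a usage sketch (``res_a`` and ``res_b`` are assumed to be results from two consecutive runs of the same model, with the second run picking up 10 hours in; the names and the offset are illustrative, matching the pattern used in the tests below):

    # shift the second result set so its time index starts at 10 hours
    res_b._adjust_time(10 * 3600)

    # splice the runs; any overlapping times in res_a are replaced by res_b
    res_a.append_results_from(res_b)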
+ + Raises + ------ + ValueError + if `other` is the wrong type + + """ + if not isinstance(other, SimulationResults): + raise ValueError("operating on a results object requires both be SimulationResults") + start_time = other.node["head"].index.values[0] + keep = self.node["head"].index.values < start_time + for key in self.link.keys(): + if key in other.link: + t2 = self.link[key].loc[keep].append(other.link[key]) + self.link[key] = t2 + else: + temp = other.link["flowrate"] * np.nan + t2 = self.link[key].loc[keep].append(temp) + self.link[key] = t2 + for key in other.link.keys(): + if key not in self.link.keys(): + temp = self.link["flowrate"] * np.nan + t2 = temp.loc[keep].append(other.link[key]) + self.link[key] = t2 + for key in self.node.keys(): + if key in other.node: + t2 = self.node[key].loc[keep].append(other.node[key]) + self.node[key] = t2 + else: + temp = other.node["head"] * np.nan + t2 = self.node[key].loc[keep].append(temp) + self.node[key] = t2 + for key in other.node.keys(): + if key not in self.node.keys(): + temp = self.node["head"] * np.nan + t2 = temp.loc[keep].append(other.node[key]) + self.node[key] = t2 + diff --git a/wntr/tests/test_multiple_simulations.py b/wntr/tests/test_multiple_simulations.py index 2bb344eaf..62d21d521 100644 --- a/wntr/tests/test_multiple_simulations.py +++ b/wntr/tests/test_multiple_simulations.py @@ -9,6 +9,16 @@ test_datadir = join(testdir, "networks_for_testing") ex_datadir = join(testdir, "..", "..", "examples", "networks") +tolerances = { + "flowrate": 1.0e-5, # 10 mL/s + "velocity": 1.0e-2, # 0.01 m/s + "demand": 1.0e-5, # 10 mL/s + "head": 1.0e-2, # 0.01 m + "pressure": 1.0e-2, # 0.01 m H2O + "headloss": 1.0e-2, # 0.01 + "status": 0.5, # i.e., 0 since this is integer + "setting": 1.0e-2, # 0.01 +} class TestResetInitialValues(unittest.TestCase): @classmethod @@ -16,6 +26,7 @@ def setUpClass(self): import wntr self.wntr = wntr + self.tol = 1e-8 inp_file = join(ex_datadir, "Net3.inp") self.wn = self.wntr.network.WaterNetworkModel(inp_file) @@ -28,54 +39,46 @@ def setUpClass(self): self.wn.reset_initial_values() self.res2 = sim.run_sim(solver_options={"TOL": 1e-8}) + self.res4 = abs(self.res1 - self.res2).max() + @classmethod def tearDownClass(self): pass def test_link_flowrate(self): for link_name, link in self.wn.links(): - for t in self.res1.time: - self.assertAlmostEqual( - self.res1.link["flowrate"].at[t, link_name], - self.res2.link["flowrate"].at[t, link_name], - 7, - ) + self.assertLess( + self.res4.link["flowrate"].at[link_name], + tolerances["flowrate"], + ) def test_link_velocity(self): for link_name, link in self.wn.links(): - for t in self.res1.link["velocity"].index: - self.assertAlmostEqual( - self.res1.link["velocity"].at[t, link_name], - self.res2.link["velocity"].at[t, link_name], - 7, - ) + self.assertLess( + self.res4.link["velocity"].at[link_name], + tolerances["velocity"] + ) def test_node_demand(self): for node_name, node in self.wn.nodes(): - for t in self.res1.node["demand"].index: - self.assertAlmostEqual( - self.res1.node["demand"].at[t, node_name], - self.res2.node["demand"].at[t, node_name], - 7, - ) + self.assertLess( + self.res4.node["demand"].at[node_name], + tolerances["demand"] + ) def test_node_head(self): for node_name, node in self.wn.nodes(): - for t in self.res1.node["head"].index: - self.assertAlmostEqual( - self.res1.node["head"].at[t, node_name], - self.res2.node["head"].at[t, node_name], - 7, - ) + self.assertLess( + self.res4.node["head"].at[node_name], + tolerances["head"] + ) def 
test_node_pressure(self): for node_name, node in self.wn.nodes(): - for t in self.res1.node["pressure"].index: - self.assertAlmostEqual( - self.res1.node["pressure"].at[t, node_name], - self.res2.node["pressure"].at[t, node_name], - 7, - ) + self.assertLess( + self.res4.node["pressure"].at[node_name], + tolerances["pressure"] + ) class TestStopStartSim(unittest.TestCase): @@ -100,17 +103,96 @@ def setUpClass(self): self.wn.options.time.duration = 10 * 3600 sim = self.wntr.sim.WNTRSimulator(self.wn) self.res2 = sim.run_sim(solver_options={'TOL':1e-8}) - self.wn.options.time.duration = 24*3600 + self.wn.set_initial_conditions(self.res2) + self.wn.options.time.pattern_start = self.wn.options.time.pattern_start + 10 * 3600 + self.wn.options.time.duration = 14 * 3600 + sim = self.wntr.sim.WNTRSimulator(self.wn) self.res3 = sim.run_sim(solver_options={'TOL':1e-8}) + self.res3._adjust_time(10*3600) + # node_res = {} + # link_res = {} + # for key in self.res2.node.keys(): + # node_res[key] = pd.concat([self.res2.node[key],self.res3.node[key]],axis=0) + # for key in self.res2.link.keys(): + # link_res[key] = pd.concat([self.res2.link[key],self.res3.link[key]],axis=0) + # self.res2.node = node_res + # self.res2.link = link_res + self.res2.append_results_from(self.res3) + self.res4 = abs(self.res1 - self.res2).max() - node_res = {} - link_res = {} - for key in self.res2.node.keys(): - node_res[key] = pd.concat([self.res2.node[key],self.res3.node[key]],axis=0) - for key in self.res2.link.keys(): - link_res[key] = pd.concat([self.res2.link[key],self.res3.link[key]],axis=0) - self.res2.node = node_res - self.res2.link = link_res + @classmethod + def tearDownClass(self): + pass + + def test_link_flowrate(self): + for link_name, link in self.wn.links(): + print(link_name, self.res1.link["flowrate"].loc[:,link_name].describe(), abs(self.res1 - self.res2).link["flowrate"].loc[:,link_name].describe()) + self.assertLess( + self.res4.link["flowrate"].at[link_name], + tolerances["flowrate"], + ) + + def test_link_velocity(self): + for link_name, link in self.wn.links(): + self.assertLess( + self.res4.link["velocity"].at[link_name], + tolerances["velocity"] + ) + + def test_node_demand(self): + for node_name, node in self.wn.nodes(): + self.assertLess( + self.res4.node["demand"].at[node_name], + tolerances["demand"] + ) + + def test_node_head(self): + for node_name, node in self.wn.nodes(): + self.assertLess( + self.res4.node["head"].at[node_name], + tolerances["head"] + ) + + def test_node_pressure(self): + for node_name, node in self.wn.nodes(): + self.assertLess( + self.res4.node["pressure"].at[node_name], + tolerances["pressure"] + ) + + + +class TestStopStartEpanetSim(unittest.TestCase): + @classmethod + def setUpClass(self): + import wntr + + self.wntr = wntr + + inp_file = join(ex_datadir, "Net3.inp") + + parser = self.wntr.epanet.InpFile() + self.wn = parser.read(inp_file) + self.wn.options.time.hydraulic_timestep = 3600 + self.wn.options.time.duration = 24 * 3600 + sim = self.wntr.sim.EpanetSimulator(self.wn) + self.res1 = sim.run_sim() + + parser = self.wntr.epanet.InpFile() + self.wn2 = parser.read(inp_file) + self.wn2.options.time.hydraulic_timestep = 3600 + self.wn2.options.time.duration = 11 * 3600 + sim = self.wntr.sim.EpanetSimulator(self.wn2) + self.res2 = sim.run_sim() + self.wn2.set_initial_conditions(self.res2) + self.wn2.options.time.pattern_start = self.wn2.options.time.pattern_start + 11 * 3600 + self.wn2.options.time.duration = 13 * 3600 + sim = self.wntr.sim.EpanetSimulator(self.wn2) + 
self.res3 = sim.run_sim() + self.res3._adjust_time(11*3600) + + self.res2.append_results_from(self.res3) + self.res4 = abs(self.res1 - self.res2).max() @classmethod def tearDownClass(self): @@ -118,48 +200,59 @@ def tearDownClass(self): def test_link_flowrate(self): for link_name, link in self.wn.links(): - for t in self.res1.link["flowrate"].index: - self.assertAlmostEqual( - self.res1.link["flowrate"].at[t, link_name], - self.res2.link["flowrate"].at[t, link_name], - 7, - ) + self.assertLess( + self.res4.link["flowrate"].at[link_name], + tolerances["flowrate"], + ) def test_link_velocity(self): for link_name, link in self.wn.links(): - for t in self.res1.link["velocity"].index: - self.assertAlmostEqual( - self.res1.link["velocity"].at[t, link_name], - self.res2.link["velocity"].at[t, link_name], - 7, - ) + self.assertLess( + self.res4.link["velocity"].at[link_name], + tolerances["velocity"] + ) + + def test_link_headloss(self): + for link_name, link in self.wn.links(): + self.assertLess( + self.res4.link["headloss"].at[link_name], + tolerances["headloss"] + ) + + def test_link_status(self): + for link_name, link in self.wn.links(): + self.assertLess( + self.res4.link["status"].at[link_name], + tolerances["status"] + ) + + def test_link_setting(self): + for link_name, link in self.wn.links(): + self.assertLess( + self.res4.link["setting"].at[link_name], + tolerances["setting"] + ) def test_node_demand(self): for node_name, node in self.wn.nodes(): - for t in self.res1.node["demand"].index: - self.assertAlmostEqual( - self.res1.node["demand"].at[t, node_name], - self.res2.node["demand"].at[t, node_name], - 7, - ) + self.assertLess( + self.res4.node["demand"].at[node_name], + tolerances["demand"] + ) def test_node_head(self): for node_name, node in self.wn.nodes(): - for t in self.res1.node["head"].index: - self.assertAlmostEqual( - self.res1.node["head"].at[t, node_name], - self.res2.node["head"].at[t, node_name], - 7, - ) + self.assertLess( + self.res4.node["head"].at[node_name], + tolerances["head"] + ) def test_node_pressure(self): for node_name, node in self.wn.nodes(): - for t in self.res1.node["pressure"].index: - self.assertAlmostEqual( - self.res1.node["pressure"].at[t, node_name], - self.res2.node["pressure"].at[t, node_name], - 7, - ) + self.assertLess( + self.res4.node["pressure"].at[node_name], + tolerances["pressure"] + ) class TestPickle(unittest.TestCase): @@ -179,80 +272,66 @@ def setUpClass(self): self.res1 = sim.run_sim(solver_options={"TOL": 1e-8}) parser = self.wntr.epanet.InpFile() - self.wn = parser.read(inp_file) - self.wn.options.time.hydraulic_timestep = 3600 - self.wn.options.time.duration = 10 * 3600 - sim = self.wntr.sim.WNTRSimulator(self.wn) + self.wn2 = parser.read(inp_file) + self.wn2.options.time.hydraulic_timestep = 3600 + self.wn2.options.time.duration = 10 * 3600 + sim = self.wntr.sim.WNTRSimulator(self.wn2) self.res2 = sim.run_sim(solver_options={"TOL": 1e-8}) + self.wn2.set_initial_conditions(self.res2, 36000) + self.wn2.options.time.pattern_start = self.wn2.options.time.pattern_start + 10 * 3600 f = open("temp.pickle", "wb") - pickle.dump(self.wn, f) + pickle.dump(self.wn2, f) f.close() f = open("temp.pickle", "rb") - wn2 = pickle.load(f) + self.wn2 = pickle.load(f) f.close() - #wn2.set_initial_conditions(self.res2) - wn2.options.time.duration = 24*3600 - sim = self.wntr.sim.WNTRSimulator(wn2) + self.wn2.options.time.duration = 14*3600 + sim = self.wntr.sim.WNTRSimulator(self.wn2) self.res3 = sim.run_sim(solver_options={'TOL':1e-8}) - - node_res = {} 
- link_res = {} - for key in self.res2.node.keys(): - node_res[key] = pd.concat([self.res2.node[key],self.res3.node[key]],axis=0) - for key in self.res2.link.keys(): - link_res[key] = pd.concat([self.res2.link[key],self.res3.link[key]],axis=0) - - self.res2.node = node_res - self.res2.link = link_res + self.res3._adjust_time(10*3600) + + self.res2.append_results_from(self.res3) + self.res4 = abs(self.res1 - self.res2).max() @classmethod def tearDownClass(self): pass + def test_link_flowrate(self): for link_name, link in self.wn.links(): - for t in self.res1.link["flowrate"].index: - self.assertAlmostEqual( - self.res1.link["flowrate"].at[t, link_name], - self.res2.link["flowrate"].at[t, link_name], - 7, - ) + self.assertLess( + self.res4.link["flowrate"].at[link_name], + tolerances["flowrate"], + ) def test_link_velocity(self): for link_name, link in self.wn.links(): - for t in self.res1.link["velocity"].index: - self.assertAlmostEqual( - self.res1.link["velocity"].at[t, link_name], - self.res2.link["velocity"].at[t, link_name], - 7, - ) + self.assertLess( + self.res4.link["velocity"].at[link_name], + tolerances["velocity"] + ) def test_node_demand(self): for node_name, node in self.wn.nodes(): - for t in self.res1.node["demand"].index: - self.assertAlmostEqual( - self.res1.node["demand"].at[t, node_name], - self.res2.node["demand"].at[t, node_name], - 7, - ) + self.assertLess( + self.res4.node["demand"].at[node_name], + tolerances["demand"] + ) def test_node_head(self): for node_name, node in self.wn.nodes(): - for t in self.res1.node["head"].index: - self.assertAlmostEqual( - self.res1.node["head"].at[t, node_name], - self.res2.node["head"].at[t, node_name], - 7, - ) + self.assertLess( + self.res4.node["head"].at[node_name], + tolerances["head"] + ) def test_node_pressure(self): for node_name, node in self.wn.nodes(): - for t in self.res1.node["pressure"].index: - self.assertAlmostEqual( - self.res1.node["pressure"].at[t, node_name], - self.res2.node["pressure"].at[t, node_name], - 7, - ) + self.assertLess( + self.res4.node["pressure"].at[node_name], + tolerances["pressure"] + ) if __name__ == "__main__": From f24f670bc1f3016a0f9ff04966f5a0172c8dce32 Mon Sep 17 00:00:00 2001 From: kbonney Date: Thu, 31 Aug 2023 14:13:52 -0400 Subject: [PATCH 02/21] Updating append method to use pd.concat --- wntr/sim/results.py | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/wntr/sim/results.py b/wntr/sim/results.py index ced561d2f..4328229f7 100644 --- a/wntr/sim/results.py +++ b/wntr/sim/results.py @@ -1,6 +1,7 @@ import datetime import enum import numpy as np +import pandas as pd class ResultsStatus(enum.IntEnum): @@ -266,28 +267,28 @@ def append_results_from(self, other): keep = self.node["head"].index.values < start_time for key in self.link.keys(): if key in other.link: - t2 = self.link[key].loc[keep].append(other.link[key]) + t2 = pd.concat([self.link[key].loc[keep], other.link[key]]) self.link[key] = t2 else: temp = other.link["flowrate"] * np.nan - t2 = self.link[key].loc[keep].append(temp) + t2 = pd.concat([self.link[key].loc[keep], temp]) self.link[key] = t2 for key in other.link.keys(): if key not in self.link.keys(): temp = self.link["flowrate"] * np.nan - t2 = temp.loc[keep].append(other.link[key]) + t2 = pd.concat([temp.loc[keep], other.link[key]]) self.link[key] = t2 for key in self.node.keys(): if key in other.node: - t2 = self.node[key].loc[keep].append(other.node[key]) + t2 = pd.concat([self.node[key].loc[keep], other.node[key]]) self.node[key] = t2 
else: temp = other.node["head"] * np.nan - t2 = self.node[key].loc[keep].append(temp) + t2 = pd.concat([self.node[key].loc[keep], temp]) self.node[key] = t2 for key in other.node.keys(): if key not in self.node.keys(): temp = self.node["head"] * np.nan - t2 = temp.loc[keep].append(other.node[key]) + t2 = pd.concat([temp.loc[keep],other.node[key]]) self.node[key] = t2 From 17554e7704dec54eac69901553b37ac8c4561f1e Mon Sep 17 00:00:00 2001 From: dbhart Date: Thu, 31 Aug 2023 14:32:41 -0400 Subject: [PATCH 03/21] Adding shift method to controls --- wntr/network/controls.py | 42 ++++++++++++++++++++++++++++++++++++++++ 1 file changed, 42 insertions(+) diff --git a/wntr/network/controls.py b/wntr/network/controls.py index 1555e2ddd..c1528a77a 100644 --- a/wntr/network/controls.py +++ b/wntr/network/controls.py @@ -276,6 +276,25 @@ def __init__(self): def _reset(self): pass + def _shift(self, value): + """ + Shift any SimTimeConditions within larger condition rules by value seconds (backward). + + I.e., if a control is scheduled at simulation time 7200 and you shift by 3600, the new + control threshold with be sim time 3600. + + Parameters + ---------- + value : float + seconds to subtract from threshold + + Returns + ------- + bool + is this still a valid control? + """ + return True + @abc.abstractmethod def requires(self): """ @@ -608,6 +627,13 @@ def _compare(self, other): return False return True + def _shift(self, value): + self._threshold -= value + if self._threshold >= 0: + return True + self._threshold = 0 + return False + @property def name(self): if not self._repeat: @@ -1019,6 +1045,11 @@ def _reset(self): self._condition_1._reset() self._condition_2._reset() + def _shift(self, value): + success1 = self._condition_1._shift(value) + success2 = self._condition_2._shift(value) + return success1 or success2 + def _compare(self, other): """ Parameters @@ -1085,6 +1116,11 @@ def _reset(self): self._condition_1._reset() self._condition_2._reset() + def _shift(self, value): + success1 = self._condition_1._shift(value) + success2 = self._condition_2._shift(value) + return success1 or success2 + def _compare(self, other): """ Parameters @@ -2035,6 +2071,9 @@ def epanet_control_type(self): control_type: _ControlType """ return self._control_type + + def _shift(self, step): + return self._condition._shift(step) def requires(self): req = self._condition.requires() @@ -2200,6 +2239,9 @@ def __init__(self, condition, then_action: BaseControlAction, priority=ControlPr else: self._control_type = _ControlType.postsolve + def _shift(self, step): + return self._condition._shift(step) + @classmethod def _time_control(cls, wnm, run_at_time, time_flag, daily_flag, control_action, name=None): """ From 47305688dffb2040ea9d64bccbb3a264dd44399f Mon Sep 17 00:00:00 2001 From: dbhart Date: Thu, 31 Aug 2023 14:33:02 -0400 Subject: [PATCH 04/21] Adding set_initial_conditions method to water network model --- wntr/network/model.py | 132 ++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 132 insertions(+) diff --git a/wntr/network/model.py b/wntr/network/model.py index 938da1421..4a2d7e0b1 100644 --- a/wntr/network/model.py +++ b/wntr/network/model.py @@ -1564,7 +1564,139 @@ def reset_initial_values(self): for name, control in self.controls(): control._reset() + def set_initial_conditions(self, results, ts=None, remove_controls=True, warn=False): + """ + Set the initial conditions of the network based on prior simulation results. 
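A brief sketch of the intended stop-and-restart workflow, assuming ``wntr`` has been imported and ``wn`` is a ``WaterNetworkModel`` (the 10-hour split is illustrative and mirrors the tests added earlier in this patch set):

    wn.options.time.duration = 10 * 3600
    res_first = wntr.sim.WNTRSimulator(wn).run_sim()

    # restart from the saved state; time-based controls are shifted, and any that
    # would now fire before time zero are removed (or only reported, if remove_controls=False)
    wn.set_initial_conditions(res_first)
    wn.options.time.pattern_start = wn.options.time.pattern_start + 10 * 3600
    wn.options.time.duration = 14 * 3600
    res_second = wntr.sim.WNTRSimulator(wn).run_sim()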
+ + Parameters + ---------- + results : SimulationResults + Results from a prior simulation + ts : int, optional + The time value (in seconds) from the results to use to select initial conditions, + by default None (which will use the final values) + remove_controls : bool, optional + If a rule or control has a SimTimeCondition that now occurs prior to simulation start, remove + the control, by default True. + warn : bool + Send a warning to the logger that the rule has been deleted, by default False. + When False, information is sent to the logger at the `info` level. + + Returns + ------- + list + Control names that have been, when `remove_controls is True`, + or need to be, when `remove_controls is False`, + removed from the water network model + + + Raises + ------ + NameError + If both `ts` and `idx` are passed in + IndexError + If `ts` is passed, but no such time exists in the results + ValueError + If the time selected is not a multiple of the pattern timestep + + + """ + if ts is None: + end_time = results.node['demand'].index[-1] + else: + ts = int(ts) + if ts in results.node['demand'].index: + end_time = ts + else: + raise IndexError('There is no time "{}" in the results'.format(ts)) + + # if end_time / self.options.time.pattern_timestep != end_time // self.options.time.pattern_timestep: + # raise ValueError('You must give a time step that is a multiple of the pattern_timestep ({})'.format(self.options.time.pattern_timestep)) + + self.sim_time = 0.0 + self._prev_sim_time = None + + for name, node in self.nodes(Junction): + node._head = None + node._demand = None + node._pressure = None + try: node.initial_quality = float(results.node['quality'].loc[end_time, name]) + except KeyError: pass + node._leak_demand = None + node._leak_status = False + node._is_isolated = False + + for name, node in self.nodes(Tank): + node._head = None + node._demand = None + node._pressure = None + node.init_level = float(results.node['head'].loc[end_time, name] - node.elevation) + try: node.initial_quality = float(results.node['quality'].loc[end_time, name]) + except KeyError: pass + node._prev_head = node.head + node._leak_demand = None + node._leak_status = False + node._is_isolated = False + + for name, node in self.nodes(Reservoir): + node._head = None + node._demand = None + node._pressure = None + try: node.initial_quality = float(results.node['quality'].loc[end_time, name]) + except KeyError: pass + node._leak_demand = None + node._is_isolated = False + + for name, link in self.links(Pipe): + link.initial_status = results.link['status'].loc[end_time, name] + try: link.initial_setting = results.link['setting'].loc[end_time, name] + except KeyError: link.initial_setting = link.setting + link._user_status = link.initial_status + link._internal_status = LinkStatus.Active + link._is_isolated = False + link._flow = None + link._prev_setting = None + + for name, link in self.links(Pump): + link.initial_status = results.link['status'].loc[end_time, name] + try: link.initial_setting = results.link['setting'].loc[end_time, name] + except KeyError: link.initial_setting = link.setting + link._user_status = link.initial_status + link._setting = link.initial_setting + link._internal_status = LinkStatus.Active + link._is_isolated = False + link._flow = None + if isinstance(link, PowerPump): + link.power = link._base_power + link._prev_setting = None + + for name, link in self.links(Valve): + link.initial_status = results.link['status'].loc[end_time, name] + try: link.initial_setting = 
results.link['setting'].loc[end_time, name] + except KeyError: link.initial_setting = link.setting + # print(name, link.initial_status, link.initial_setting) + link._user_status = link.initial_status + link._setting = link.initial_setting + link._internal_status = LinkStatus.Active + link._is_isolated = False + link._flow = None + link._prev_setting = None + + to_delete = [] + for name, control in self.controls(): + control._reset() + still_good = control._shift(end_time) + if not still_good: + to_delete.append(name) + + for name in to_delete: + msg = 'Rule {} {} removed from the network'.format(name, 'has been' if remove_controls else 'needs to be') + if warn: logger.warning(msg) + else: logger.info(msg) + if remove_controls: + self.remove_control(name) + return to_delete class PatternRegistry(Registry): """A registry for patterns.""" From a55e2e921d6216a37fe4fcde4f8e133e81986b0a Mon Sep 17 00:00:00 2001 From: kbonney Date: Mon, 6 Nov 2023 11:27:46 -0500 Subject: [PATCH 05/21] rename append function --- wntr/sim/results.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/wntr/sim/results.py b/wntr/sim/results.py index 4328229f7..70500f466 100644 --- a/wntr/sim/results.py +++ b/wntr/sim/results.py @@ -236,11 +236,11 @@ def _adjust_time(self, ts: int): for key in self.node.keys(): self.node[key].index += ts - def append_results_from(self, other): + def append(self, other): """ Combine two results objects into a single, new result object. If the times overlap, then the results from the `other` object will take precedence - over the values in the calling object. I.e., given ``A.append_results_from(B)``, + over the values in the calling object. I.e., given ``A.append(B)``, where ``A`` and ``B`` are both `SimluationResults`, any results from ``A`` that relate to times equal to or greater than the starting time of results in ``B`` will be dropped. 
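The precedence rule described above comes down to a time-index mask; roughly, with ``A`` and ``B`` as in the docstring and ``pandas`` imported as ``pd`` (a sketch of the mechanism, not additional API):

    start = B.node["head"].index.values[0]       # first timestamp in B
    keep = A.node["head"].index.values < start   # rows of A strictly before B starts
    spliced = pd.concat([A.link["flowrate"].loc[keep], B.link["flowrate"]])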
@@ -262,7 +262,7 @@ def append_results_from(self, other): """ if not isinstance(other, SimulationResults): - raise ValueError("operating on a results object requires both be SimulationResults") + raise ValueError("operating on a results object requires both to be SimulationResults") start_time = other.node["head"].index.values[0] keep = self.node["head"].index.values < start_time for key in self.link.keys(): From ff180f382efe7efe3864f9e6b85c208b3ab518df Mon Sep 17 00:00:00 2001 From: kbonney Date: Mon, 6 Nov 2023 11:28:16 -0500 Subject: [PATCH 06/21] rename append function --- wntr/tests/test_multiple_simulations.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/wntr/tests/test_multiple_simulations.py b/wntr/tests/test_multiple_simulations.py index 62d21d521..c266c38ab 100644 --- a/wntr/tests/test_multiple_simulations.py +++ b/wntr/tests/test_multiple_simulations.py @@ -117,7 +117,7 @@ def setUpClass(self): # link_res[key] = pd.concat([self.res2.link[key],self.res3.link[key]],axis=0) # self.res2.node = node_res # self.res2.link = link_res - self.res2.append_results_from(self.res3) + self.res2.append(self.res3) self.res4 = abs(self.res1 - self.res2).max() @classmethod @@ -191,7 +191,7 @@ def setUpClass(self): self.res3 = sim.run_sim() self.res3._adjust_time(11*3600) - self.res2.append_results_from(self.res3) + self.res2.append(self.res3) self.res4 = abs(self.res1 - self.res2).max() @classmethod @@ -290,7 +290,7 @@ def setUpClass(self): self.res3 = sim.run_sim(solver_options={'TOL':1e-8}) self.res3._adjust_time(10*3600) - self.res2.append_results_from(self.res3) + self.res2.append(self.res3) self.res4 = abs(self.res1 - self.res2).max() @classmethod From 97b97bfb3f5c1b719f1389414bdf0b6c97d157c4 Mon Sep 17 00:00:00 2001 From: kbonney Date: Tue, 14 Nov 2023 09:49:53 -0500 Subject: [PATCH 07/21] overhauling results object by adding baseclass to allow for other results types --- wntr/sim/results.py | 379 ++++++++++++++++++++++---------------------- 1 file changed, 192 insertions(+), 187 deletions(-) diff --git a/wntr/sim/results.py b/wntr/sim/results.py index 70500f466..424fcc5b0 100644 --- a/wntr/sim/results.py +++ b/wntr/sim/results.py @@ -9,9 +9,13 @@ class ResultsStatus(enum.IntEnum): error = 0 -class SimulationResults(object): - """ - Water network simulation results class. +class ResultsBase: + """Base class for results of various simulations carried out by WNTR. + The basic structure of a subclass is an object with one or more attributes + containing dictionaries of dataframes. + Subclasses are differentiated by defining the class attribute `data_attributes` + which determines the names of the attributes which will hold dictionaries of + DataFrames. A small number of mathematical and statistical functions are also provided. 
These functions are applied to all dataframes within the results object (or @@ -21,205 +25,164 @@ class SimulationResults(object): indices for the results and which describe the same water network physical model (i.e., have the same nodes and links), then the following functions are defined: - - ================== ============================================================ - Example function Description - ------------------ ------------------------------------------------------------ - ``C = A + B`` Add the values from A and B for each property - ``C = A - B`` Subtract the property values in B from A - ``C = A / B`` Divide the property values in A by the values in B - ``C = A / n`` Divide the property values in A by n [int]; - note that this only makes sense if calculating an average - ``C = pow(A, p)`` Raise the property values in A to the p-th power; - note the syntax ``C = pow(A, p, mod)`` can also be used - ``C = abs(A)`` Take the absolute value of the property values in A - ``C = -A`` Take the negative of all property values in A - ``C = +A`` Take the positive of all property values in A - ``C = A.max()`` Get the maximum value for each property for node/link - ``C = A.min()`` Get the minimum value for each property for node/link - ``n = A.len()`` Get the number of timesteps within A - ``C = A.sqrt()`` Take the element-wise square root for all properties - ``C = A.sum()`` Take the sum of each property across time for each node/link - ================== ============================================================ - - As an example, to calculate the relative difference between the results of two - simulations, one could do: ``rel_dif = abs(A - B) / A`` (warning - this will operate - on link statuses as well, which may result in meaningless results for that - parameter). 
- """ - def __init__(self): - # Simulation time series self.timestamp = str(datetime.datetime.now()) self.network_name = None - self.link = None - self.node = None + for attr in self.data_attributes: + self.__setattr__(attr, dict()) def __add__(self, other): - if not isinstance(other, SimulationResults): - raise ValueError("operating on a results object requires both be SimulationResults") - new = SimulationResults() - new.link = dict() - new.node = dict() + if not isinstance(other, type(self)): + raise ValueError(f"operating on a results object requires both be {type(self)}") + new = type(self.res)() new.network_name = "{}[{}] + {}[{}]".format( self.network_name, self.timestamp, other.network_name, other.timestamp ) - for key in self.link.keys(): - if key in other.link: - new.link[key] = self.link[key] + other.link[key] - for key in self.node.keys(): - if key in other.node: - new.node[key] = self.node[key] + other.node[key] + + for attr in new.data_attributes: + for key in self.__getattr__(attr).keys(): + if key in other.__getattr__(attr).keys(): + self_dict = self.__getattr__(attr) + other_dict = other.__getattr__(attr) + new.__getattr__(attr)[key] = self_dict[key] + other_dict[key] return new - + + def __sub__(self, other): - if not isinstance(other, SimulationResults): - raise ValueError("operating on a results object requires both be SimulationResults") - new = SimulationResults() - new.link = dict() - new.node = dict() + if not isinstance(other, type(self)): + raise ValueError(f"operating on a results object requires both be {type(self)}") + new = type(self)() new.network_name = "{}[{}] - {}[{}]".format( self.network_name, self.timestamp, other.network_name, other.timestamp ) - for key in self.link.keys(): - if key in other.link: - new.link[key] = self.link[key] - other.link[key] - for key in self.node.keys(): - if key in other.node: - new.node[key] = self.node[key] - other.node[key] + for attr in new.data_attributes: + for key in self.__getattr__(attr).keys(): + if key in other.__getattr__(attr).keys(): + self_dict = self.__getattr__(attr) + other_dict = other.__getattr__(attr) + new.__getattr__(attr)[key] = self_dict[key] - other_dict[key] return new def __abs__(self): - new = SimulationResults() - new.link = dict() - new.node = dict() + new = type(self)() new.network_name = "|{}[{}]|".format(self.network_name, self.timestamp) - for key in self.link.keys(): - new.link[key] = abs(self.link[key]) - for key in self.node.keys(): - new.node[key] = abs(self.node[key]) + + for attr in new.data_attributes: + for key in self.__getattr__(attr).keys(): + self_dict = self.__getattr__(attr) + self_dict[key] = abs(self_dict[key]) return new def __neg__(self): - new = SimulationResults() - new.link = dict() - new.node = dict() + new = type(self)() new.network_name = "-{}[{}]".format(self.network_name, self.timestamp) - for key in self.link.keys(): - new.link[key] = -self.link[key] - for key in self.node.keys(): - new.node[key] = -self.node[key] + + for attr in new.data_attributes: + for key in self.__getattr__(attr).keys(): + self_dict = self.__getattr__(attr) + self_dict[key] = -self_dict[key] return new def min(self): """Min operates on each axis separately, therefore it needs to be a function. 
The built-in ``min`` function will not work.""" - new = SimulationResults() - new.link = dict() - new.node = dict() + new = type(self)() new.network_name = "min({}[{}])".format(self.network_name, self.timestamp) - for key in self.link.keys(): - new.link[key] = self.link[key].min(axis=0) - for key in self.node.keys(): - new.node[key] = self.node[key].min(axis=0) + + for attr in new.data_attributes: + for key in self.__getattr__(attr).keys(): + self_dict = self.__getattr__(attr) + self_dict[key] = self_dict[key].min(axis=0) return new def max(self): """Max operates on each axis separately, therefore it needs to be a function. The built-in ``max`` function will not work.""" - new = SimulationResults() - new.link = dict() - new.node = dict() + new = type(self)() new.network_name = "max({}[{}])".format(self.network_name, self.timestamp) - for key in self.link.keys(): - new.link[key] = self.link[key].max(axis=0) - for key in self.node.keys(): - new.node[key] = self.node[key].max(axis=0) + + for attr in new.data_attributes: + for key in self.__getattr__(attr).keys(): + self_dict = self.__getattr__(attr) + self_dict[key] = self_dict[key].max(axis=0) return new - def len(self): - """This is not an iterator, but there is still a meaning to calling its length. - However, this means that ``len`` must be a function called from the object, not - the builtin function.""" - for key in self.link.keys(): - return len(self.link[key]) - - def sqrt(self): - """Element-wise square root of all values.""" - new = SimulationResults() - new.link = dict() - new.node = dict() - new.network_name = "sqrt({}[{}])".format(self.network_name, self.timestamp) - for key in self.link.keys(): - new.link[key] = np.sqrt(self.link[key]) - for key in self.node.keys(): - new.node[key] = np.sqrt(self.node[key]) - return new + # def len(self): + # """This is not an iterator, but there is still a meaning to calling its length. 
+ # However, this means that ``len`` must be a function called from the object, not + # the builtin function.""" + # for key in self.link.keys(): + # return len(self.link[key]) + + # def sqrt(self): + # """Element-wise square root of all values.""" + # new = type(self)() + # new.link = dict() + # new.node = dict() + # new.network_name = "sqrt({}[{}])".format(self.network_name, self.timestamp) + # for key in self.link.keys(): + # new.link[key] = np.sqrt(self.link[key]) + # for key in self.node.keys(): + # new.node[key] = np.sqrt(self.node[key]) + # return new def sum(self): """Sum across time for each node/link for each property.""" - new = SimulationResults() - new.link = dict() - new.node = dict() + new = type(self)() new.network_name = "sum({}[{}])".format(self.network_name, self.timestamp) - for key in self.link.keys(): - new.link[key] = self.link[key].sum(axis=0) - for key in self.node.keys(): - new.node[key] = self.node[key].sum(axis=0) + + for attr in new.data_attributes: + for key in self.__getattr__(attr).keys(): + self_dict = self.__getattr__(attr) + self_dict[key] = self_dict[key].sum(axis=0) return new def __pos__(self): - new = SimulationResults() - new.link = dict() - new.node = dict() + new = type(self)() new.network_name = "+{}[{}]".format(self.network_name, self.timestamp) - for key in self.link.keys(): - new.link[key] = +self.link[key] - for key in self.node.keys(): - new.node[key] = +self.node[key] + for attr in new.data_attributes: + for key in self.__getattr__(attr).keys(): + self_dict = self.__getattr__(attr) + self_dict[key] = +self_dict[key] return new - def __truediv__(self, other): - new = SimulationResults() - new.link = dict() - new.node = dict() - if isinstance(other, SimulationResults): - new.network_name = "{}[{}] / {}[{}]".format( - self.network_name, self.timestamp, other.network_name, other.timestamp - ) - for key in self.link.keys(): - if key in other.link: - new.link[key] = self.link[key] / other.link[key] - for key in self.node.keys(): - if key in other.node: - new.node[key] = self.node[key] / other.node[key] - return new - elif isinstance(other, int): - new.network_name = "{}[{}] / {}".format(self.network_name, self.timestamp, other) - for key in self.link.keys(): - new.link[key] = self.link[key] / other - for key in self.node.keys(): - new.node[key] = self.node[key] / other - return new - else: - raise ValueError( - "operating on a results object requires divisor be a SimulationResults or a float" - ) - - def __pow__(self, exp, mod=None): - new = SimulationResults() - new.link = dict() - new.node = dict() - new.network_name = "{}[{}] ** {}".format(self.network_name, self.timestamp, exp) - for key in self.link.keys(): - new.link[key] = pow(self.link[key], exp, mod) - for key in self.node.keys(): - new.node[key] = pow(self.node[key], exp, mod) - return new - - return new + # def __truediv__(self, other): + # new = type(self)() + # new.link = dict() + # new.node = dict() + # if isinstance(other, type(self)): + # new.network_name = "{}[{}] / {}[{}]".format( + # self.network_name, self.timestamp, other.network_name, other.timestamp + # ) + # for key in self.link.keys(): + # if key in other.link: + # new.link[key] = self.link[key] / other.link[key] + # for key in self.node.keys(): + # if key in other.node: + # new.node[key] = self.node[key] / other.node[key] + # return new + # elif isinstance(other, int): + # new.network_name = "{}[{}] / {}".format(self.network_name, self.timestamp, other) + # for key in self.link.keys(): + # new.link[key] = self.link[key] / 
other + # for key in self.node.keys(): + # new.node[key] = self.node[key] / other + # return new + # else: + # raise ValueError(f"operating on a results object requires both be {type(self)}") + + # def __pow__(self, exp, mod=None): + # new = type(self)() + # new.link = dict() + # new.node = dict() + # new.network_name = "{}[{}] ** {}".format(self.network_name, self.timestamp, exp) + # for key in self.link.keys(): + # new.link[key] = pow(self.link[key], exp, mod) + # for key in self.node.keys(): + # new.node[key] = pow(self.node[key], exp, mod) + # return new def _adjust_time(self, ts: int): """ @@ -231,10 +194,11 @@ def _adjust_time(self, ts: int): The number of seconds by which to adjust the result dataframe index """ ts = int(ts) - for key in self.link.keys(): - self.link[key].index += ts - for key in self.node.keys(): - self.node[key].index += ts + + for attr in self.data_attributes: + for key in self.__getattr__(attr).keys(): + self_dict = self.__getattr__(attr) + self_dict[key].index += ts def append(self, other): """ @@ -261,34 +225,75 @@ def append(self, other): if `other` is the wrong type """ - if not isinstance(other, SimulationResults): - raise ValueError("operating on a results object requires both to be SimulationResults") + if not isinstance(other, type(self)): + raise ValueError(f"operating on a results object requires both be {type(self)}") start_time = other.node["head"].index.values[0] keep = self.node["head"].index.values < start_time - for key in self.link.keys(): - if key in other.link: - t2 = pd.concat([self.link[key].loc[keep], other.link[key]]) - self.link[key] = t2 - else: - temp = other.link["flowrate"] * np.nan - t2 = pd.concat([self.link[key].loc[keep], temp]) - self.link[key] = t2 - for key in other.link.keys(): - if key not in self.link.keys(): - temp = self.link["flowrate"] * np.nan - t2 = pd.concat([temp.loc[keep], other.link[key]]) - self.link[key] = t2 - for key in self.node.keys(): - if key in other.node: - t2 = pd.concat([self.node[key].loc[keep], other.node[key]]) - self.node[key] = t2 - else: - temp = other.node["head"] * np.nan - t2 = pd.concat([self.node[key].loc[keep], temp]) - self.node[key] = t2 - for key in other.node.keys(): - if key not in self.node.keys(): - temp = self.node["head"] * np.nan - t2 = pd.concat([temp.loc[keep],other.node[key]]) - self.node[key] = t2 + for attr in self.data_attributes: + self_dict = self.__getattr__(attr) + other_dict = other.__getattr__(attr) + for key in self_dict.keys()+other_dict.keys(): + if key in self_dict.keys() and key in other_dict.keys(): + t2 = pd.concat([self_dict[key].loc[keep], other_dict[key]]) + self_dict[key] = t2 + elif key not in other_dict.keys(): + temp = other_dict[list(other_dict.keys())[0]] * np.nan + t2 = pd.concat([self_dict[key].loc[keep], temp]) + self_dict[key] = t2 + elif key not in self_dict.keys(): + temp = self_dict[list(self_dict.keys())[0]] * np.nan + t2 = pd.concat([temp.loc[keep], other_dict[key]]) + self_dict[key] = t2 + +class WasteWaterResults(ResultsBase): + data_attributes = ["attr1", "attr2"] + def __init__(self): + super(WasteWaterResults, self).__init__() + +class MSXResults(ResultsBase): + data_attributes = ["attr3", "attr4"] + def __init__(self): + super(WasteWaterResults, self).__init__() + +class HydraulicResults(ResultsBase): + """ + Water network simulation results class. + A small number of mathematical and statistical functions are also provided. 
+ These functions are applied to all dataframes within the results object (or + between two results objects) by name, elementwise. + + Assuming ``A`` and ``B`` are both results objects that have the same time + indices for the results and which describe the same water network physical + model (i.e., have the same nodes and links), then the following functions + are defined: + + ================== ============================================================ + Example function Description + ------------------ ------------------------------------------------------------ + ``C = A + B`` Add the values from A and B for each property + ``C = A - B`` Subtract the property values in B from A + ``C = A / B`` Divide the property values in A by the values in B + ``C = A / n`` Divide the property values in A by n [int]; + note that this only makes sense if calculating an average + ``C = pow(A, p)`` Raise the property values in A to the p-th power; + note the syntax ``C = pow(A, p, mod)`` can also be used + ``C = abs(A)`` Take the absolute value of the property values in A + ``C = -A`` Take the negative of all property values in A + ``C = +A`` Take the positive of all property values in A + ``C = A.max()`` Get the maximum value for each property for node/link + ``C = A.min()`` Get the minimum value for each property for node/link + ``n = A.len()`` Get the number of timesteps within A + ``C = A.sqrt()`` Take the element-wise square root for all properties + ``C = A.sum()`` Take the sum of each property across time for each node/link + ================== ============================================================ + + As an example, to calculate the relative difference between the results of two + simulations, one could do: ``rel_dif = abs(A - B) / A`` (warning - this will operate + on link statuses as well, which may result in meaningless results for that + parameter). + + """ + data_attributes = ["link", "node"] + def __init__(self): + super(HydraulicResults, self).__init__() From b5e1d0fed60e2fba06bf7331aaebcb3c39b94a7e Mon Sep 17 00:00:00 2001 From: kbonney Date: Tue, 14 Nov 2023 09:50:24 -0500 Subject: [PATCH 08/21] refactoring changes for results name change --- wntr/epanet/io.py | 4 ++-- wntr/sim/__init__.py | 2 +- wntr/sim/core.py | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/wntr/epanet/io.py b/wntr/epanet/io.py index 27936ba3c..6d2506bc8 100644 --- a/wntr/epanet/io.py +++ b/wntr/epanet/io.py @@ -2511,7 +2511,7 @@ def __init__(self, result_types=None, network=False, energy=False, statistics=Fa self.chem_units = None self.inp_file = None self.report_file = None - self.results = wntr.sim.SimulationResults() + self.results = wntr.sim.HydraulicResults() if result_types is None: self.items = [ member for name, member in ResultType.__members__.items() ] else: @@ -2599,7 +2599,7 @@ def read(self, filename, convergence_error=False, darcy_weisbach=False, convert= object returns a WaterNetworkResults object """ - self.results = wntr.sim.SimulationResults() + self.results = wntr.sim.HydraulicResults() logger.debug('Read binary EPANET data from %s',filename) dt_str = 'u1' #.format(self.idlen) diff --git a/wntr/sim/__init__.py b/wntr/sim/__init__.py index 3c485b2c6..0f7f9405c 100644 --- a/wntr/sim/__init__.py +++ b/wntr/sim/__init__.py @@ -3,6 +3,6 @@ simulations using the water network model. 
""" from wntr.sim.core import WaterNetworkSimulator, WNTRSimulator -from wntr.sim.results import SimulationResults +from wntr.sim.results import HydraulicResults from wntr.sim.solvers import NewtonSolver from wntr.sim.epanet import EpanetSimulator \ No newline at end of file diff --git a/wntr/sim/core.py b/wntr/sim/core.py index 663b5e5a0..a0bf8d596 100644 --- a/wntr/sim/core.py +++ b/wntr/sim/core.py @@ -1228,7 +1228,7 @@ def run_sim(self, solver=NewtonSolver, backup_solver=None, solver_options=None, self._register_controls_with_observers() node_res, link_res = wntr.sim.hydraulics.initialize_results_dict(self._wn) - results = wntr.sim.results.SimulationResults() + results = wntr.sim.results.HydraulicResults() results.error_code = None results.time = [] results.network_name = self._wn.name From 0648d09a79b042919a79fab3afc99c0bd611b500 Mon Sep 17 00:00:00 2001 From: kbonney Date: Wed, 15 Nov 2023 09:23:53 -0500 Subject: [PATCH 09/21] Updates to results object. Removed base class but kept data_attributes for generality --- wntr/epanet/io.py | 4 +- wntr/sim/__init__.py | 2 +- wntr/sim/core.py | 2 +- wntr/sim/results.py | 254 +++++++++++++++++++------------------------ 4 files changed, 113 insertions(+), 149 deletions(-) diff --git a/wntr/epanet/io.py b/wntr/epanet/io.py index 6d2506bc8..27936ba3c 100644 --- a/wntr/epanet/io.py +++ b/wntr/epanet/io.py @@ -2511,7 +2511,7 @@ def __init__(self, result_types=None, network=False, energy=False, statistics=Fa self.chem_units = None self.inp_file = None self.report_file = None - self.results = wntr.sim.HydraulicResults() + self.results = wntr.sim.SimulationResults() if result_types is None: self.items = [ member for name, member in ResultType.__members__.items() ] else: @@ -2599,7 +2599,7 @@ def read(self, filename, convergence_error=False, darcy_weisbach=False, convert= object returns a WaterNetworkResults object """ - self.results = wntr.sim.HydraulicResults() + self.results = wntr.sim.SimulationResults() logger.debug('Read binary EPANET data from %s',filename) dt_str = 'u1' #.format(self.idlen) diff --git a/wntr/sim/__init__.py b/wntr/sim/__init__.py index 0f7f9405c..3c485b2c6 100644 --- a/wntr/sim/__init__.py +++ b/wntr/sim/__init__.py @@ -3,6 +3,6 @@ simulations using the water network model. """ from wntr.sim.core import WaterNetworkSimulator, WNTRSimulator -from wntr.sim.results import HydraulicResults +from wntr.sim.results import SimulationResults from wntr.sim.solvers import NewtonSolver from wntr.sim.epanet import EpanetSimulator \ No newline at end of file diff --git a/wntr/sim/core.py b/wntr/sim/core.py index a0bf8d596..663b5e5a0 100644 --- a/wntr/sim/core.py +++ b/wntr/sim/core.py @@ -1228,7 +1228,7 @@ def run_sim(self, solver=NewtonSolver, backup_solver=None, solver_options=None, self._register_controls_with_observers() node_res, link_res = wntr.sim.hydraulics.initialize_results_dict(self._wn) - results = wntr.sim.results.HydraulicResults() + results = wntr.sim.results.SimulationResults() results.error_code = None results.time = [] results.network_name = self._wn.name diff --git a/wntr/sim/results.py b/wntr/sim/results.py index 424fcc5b0..0ef592a39 100644 --- a/wntr/sim/results.py +++ b/wntr/sim/results.py @@ -4,18 +4,9 @@ import pandas as pd -class ResultsStatus(enum.IntEnum): - converged = 1 - error = 0 - - -class ResultsBase: - """Base class for results of various simulations carried out by WNTR. - The basic structure of a subclass is an object with one or more attributes - containing dictionaries of dataframes. 
- Subclasses are differentiated by defining the class attribute `data_attributes` - which determines the names of the attributes which will hold dictionaries of - DataFrames. +class SimulationResults: + """ + Water network simulation results class. A small number of mathematical and statistical functions are also provided. These functions are applied to all dataframes within the results object (or @@ -25,23 +16,49 @@ class ResultsBase: indices for the results and which describe the same water network physical model (i.e., have the same nodes and links), then the following functions are defined: + + ================== ============================================================ + Example function Description + ------------------ ------------------------------------------------------------ + ``C = A + B`` Add the values from A and B for each property + ``C = A - B`` Subtract the property values in B from A + ``C = A / B`` Divide the property values in A by the values in B + ``C = A / n`` Divide the property values in A by n [int]; + note that this only makes sense if calculating an average + ``C = pow(A, p)`` Raise the property values in A to the p-th power; + note the syntax ``C = pow(A, p, mod)`` can also be used + ``C = abs(A)`` Take the absolute value of the property values in A + ``C = -A`` Take the negative of all property values in A + ``C = +A`` Take the positive of all property values in A + ``C = A.max()`` Get the maximum value for each property for node/link + ``C = A.min()`` Get the minimum value for each property for node/link + ``C = A.sum()`` Take the sum of each property across time for each node/link + ================== ============================================================ + + As an example, to calculate the relative difference between the results of two + simulations, one could do: ``rel_dif = abs(A - B) / A`` (warning - this will operate + on link statuses as well, which may result in meaningless results for that + parameter). 
+ """ + _data_attributes = ["link", "node"] def __init__(self): # Simulation time series self.timestamp = str(datetime.datetime.now()) self.network_name = None - for attr in self.data_attributes: + for attr in self._data_attributes: self.__setattr__(attr, dict()) + def __add__(self, other): - if not isinstance(other, type(self)): - raise ValueError(f"operating on a results object requires both be {type(self)}") - new = type(self.res)() + if not isinstance(other, SimulationResults): + raise ValueError(f"operating on a results object requires both be SimulationResults") + new = SimulationResults() new.network_name = "{}[{}] + {}[{}]".format( self.network_name, self.timestamp, other.network_name, other.timestamp ) - for attr in new.data_attributes: + for attr in new._data_attributes: for key in self.__getattr__(attr).keys(): if key in other.__getattr__(attr).keys(): self_dict = self.__getattr__(attr) @@ -51,13 +68,13 @@ def __add__(self, other): def __sub__(self, other): - if not isinstance(other, type(self)): - raise ValueError(f"operating on a results object requires both be {type(self)}") - new = type(self)() + if not isinstance(other, SimulationResults): + raise ValueError(f"operating on a results object requires both be SimulationResults") + new = SimulationResults() new.network_name = "{}[{}] - {}[{}]".format( self.network_name, self.timestamp, other.network_name, other.timestamp ) - for attr in new.data_attributes: + for attr in new._data_attributes: for key in self.__getattr__(attr).keys(): if key in other.__getattr__(attr).keys(): self_dict = self.__getattr__(attr) @@ -66,20 +83,20 @@ def __sub__(self, other): return new def __abs__(self): - new = type(self)() + new = SimulationResults() new.network_name = "|{}[{}]|".format(self.network_name, self.timestamp) - for attr in new.data_attributes: + for attr in new._data_attributes: for key in self.__getattr__(attr).keys(): self_dict = self.__getattr__(attr) self_dict[key] = abs(self_dict[key]) return new def __neg__(self): - new = type(self)() + new = SimulationResults() new.network_name = "-{}[{}]".format(self.network_name, self.timestamp) - for attr in new.data_attributes: + for attr in new._data_attributes: for key in self.__getattr__(attr).keys(): self_dict = self.__getattr__(attr) self_dict[key] = -self_dict[key] @@ -88,10 +105,10 @@ def __neg__(self): def min(self): """Min operates on each axis separately, therefore it needs to be a function. The built-in ``min`` function will not work.""" - new = type(self)() + new = SimulationResults() new.network_name = "min({}[{}])".format(self.network_name, self.timestamp) - for attr in new.data_attributes: + for attr in new._data_attributes: for key in self.__getattr__(attr).keys(): self_dict = self.__getattr__(attr) self_dict[key] = self_dict[key].min(axis=0) @@ -100,89 +117,75 @@ def min(self): def max(self): """Max operates on each axis separately, therefore it needs to be a function. The built-in ``max`` function will not work.""" - new = type(self)() + new = SimulationResults() new.network_name = "max({}[{}])".format(self.network_name, self.timestamp) - for attr in new.data_attributes: + for attr in new._data_attributes: for key in self.__getattr__(attr).keys(): self_dict = self.__getattr__(attr) self_dict[key] = self_dict[key].max(axis=0) return new - # def len(self): - # """This is not an iterator, but there is still a meaning to calling its length. 
- # However, this means that ``len`` must be a function called from the object, not - # the builtin function.""" - # for key in self.link.keys(): - # return len(self.link[key]) - - # def sqrt(self): - # """Element-wise square root of all values.""" - # new = type(self)() - # new.link = dict() - # new.node = dict() - # new.network_name = "sqrt({}[{}])".format(self.network_name, self.timestamp) - # for key in self.link.keys(): - # new.link[key] = np.sqrt(self.link[key]) - # for key in self.node.keys(): - # new.node[key] = np.sqrt(self.node[key]) - # return new - def sum(self): """Sum across time for each node/link for each property.""" - new = type(self)() + new = SimulationResults() new.network_name = "sum({}[{}])".format(self.network_name, self.timestamp) - for attr in new.data_attributes: + for attr in new._data_attributes: for key in self.__getattr__(attr).keys(): self_dict = self.__getattr__(attr) self_dict[key] = self_dict[key].sum(axis=0) return new def __pos__(self): - new = type(self)() + new = SimulationResults() new.network_name = "+{}[{}]".format(self.network_name, self.timestamp) - for attr in new.data_attributes: + for attr in new._data_attributes: for key in self.__getattr__(attr).keys(): self_dict = self.__getattr__(attr) self_dict[key] = +self_dict[key] return new - # def __truediv__(self, other): - # new = type(self)() - # new.link = dict() - # new.node = dict() - # if isinstance(other, type(self)): - # new.network_name = "{}[{}] / {}[{}]".format( - # self.network_name, self.timestamp, other.network_name, other.timestamp - # ) - # for key in self.link.keys(): - # if key in other.link: - # new.link[key] = self.link[key] / other.link[key] - # for key in self.node.keys(): - # if key in other.node: - # new.node[key] = self.node[key] / other.node[key] - # return new - # elif isinstance(other, int): - # new.network_name = "{}[{}] / {}".format(self.network_name, self.timestamp, other) - # for key in self.link.keys(): - # new.link[key] = self.link[key] / other - # for key in self.node.keys(): - # new.node[key] = self.node[key] / other - # return new - # else: - # raise ValueError(f"operating on a results object requires both be {type(self)}") + def __truediv__(self, other): + new = SimulationResults() + + if isinstance(other, SimulationResults): + new.network_name = "{}[{}] / {}[{}]".format( + self.network_name, self.timestamp, other.network_name, other.timestamp + ) + for attr in new._data_attributes: + for key in self.__getattr__(attr).keys(): + if key in other.__getattr__(attr).keys(): + self_dict = self.__getattr__(attr) + other_dict = other.__getattr__(attr) + new.__getattr__(attr)[key] = self_dict[key] / other_dict[key] + return new + + + elif isinstance(other, int): + new.network_name = "{}[{}] / {}".format(self.network_name, self.timestamp, other) + for key in self.link.keys(): + new.link[key] = self.link[key] / other + for key in self.node.keys(): + new.node[key] = self.node[key] / other + + for attr in new._data_attributes: + for key in self.__getattr__(attr).keys(): + self_dict = self.__getattr__(attr) + new.__getattr__(attr)[key] = self_dict[key] / other + return new + + else: + raise ValueError(f"using / on a results object requires the divisor to be SimulationResults or int") - # def __pow__(self, exp, mod=None): - # new = type(self)() - # new.link = dict() - # new.node = dict() - # new.network_name = "{}[{}] ** {}".format(self.network_name, self.timestamp, exp) - # for key in self.link.keys(): - # new.link[key] = pow(self.link[key], exp, mod) - # for key in 
self.node.keys(): - # new.node[key] = pow(self.node[key], exp, mod) - # return new + def __pow__(self, exp, mod=None): + new = SimulationResults() + new.network_name = "{}[{}] ** {}".format(self.network_name, self.timestamp, exp) + for attr in new._data_attributes: + for key in self.__getattr__(attr).keys(): + self_dict = self.__getattr__(attr) + self_dict[key] = pow(self_dict[key], exp, mod) + return new def _adjust_time(self, ts: int): """ @@ -192,13 +195,18 @@ def _adjust_time(self, ts: int): ---------- ts : int The number of seconds by which to adjust the result dataframe index + + Returns + ------- + self: SimulationResults """ ts = int(ts) - for attr in self.data_attributes: + for attr in self._data_attributes: for key in self.__getattr__(attr).keys(): self_dict = self.__getattr__(attr) self_dict[key].index += ts + return self def append(self, other): """ @@ -218,6 +226,10 @@ def append(self, other): ---------- other : SimulationResults Results objects from a different, and subsequent, simulation. + + Returns + ------- + self : SimulationResults Raises ------ @@ -225,11 +237,12 @@ def append(self, other): if `other` is the wrong type """ - if not isinstance(other, type(self)): - raise ValueError(f"operating on a results object requires both be {type(self)}") + if not isinstance(other, SimulationResults): + raise ValueError(f"operating on a results object requires both be SimulationResults") + #NOTE: Below two lines assume that results object has the node attribute and "head" key start_time = other.node["head"].index.values[0] keep = self.node["head"].index.values < start_time - for attr in self.data_attributes: + for attr in self._data_attributes: self_dict = self.__getattr__(attr) other_dict = other.__getattr__(attr) for key in self_dict.keys()+other_dict.keys(): @@ -237,63 +250,14 @@ def append(self, other): t2 = pd.concat([self_dict[key].loc[keep], other_dict[key]]) self_dict[key] = t2 elif key not in other_dict.keys(): - temp = other_dict[list(other_dict.keys())[0]] * np.nan + # grab first key to get the shape of the DF + first_key = list(other_dict.keys())[0] + temp = other_dict[first_key] * np.nan t2 = pd.concat([self_dict[key].loc[keep], temp]) self_dict[key] = t2 elif key not in self_dict.keys(): - temp = self_dict[list(self_dict.keys())[0]] * np.nan + first_key = list(self_dict.keys())[0] + temp = self_dict[first_key] * np.nan t2 = pd.concat([temp.loc[keep], other_dict[key]]) self_dict[key] = t2 - -class WasteWaterResults(ResultsBase): - data_attributes = ["attr1", "attr2"] - def __init__(self): - super(WasteWaterResults, self).__init__() - -class MSXResults(ResultsBase): - data_attributes = ["attr3", "attr4"] - def __init__(self): - super(WasteWaterResults, self).__init__() - -class HydraulicResults(ResultsBase): - """ - Water network simulation results class. - - A small number of mathematical and statistical functions are also provided. - These functions are applied to all dataframes within the results object (or - between two results objects) by name, elementwise. 
- - Assuming ``A`` and ``B`` are both results objects that have the same time - indices for the results and which describe the same water network physical - model (i.e., have the same nodes and links), then the following functions - are defined: - - ================== ============================================================ - Example function Description - ------------------ ------------------------------------------------------------ - ``C = A + B`` Add the values from A and B for each property - ``C = A - B`` Subtract the property values in B from A - ``C = A / B`` Divide the property values in A by the values in B - ``C = A / n`` Divide the property values in A by n [int]; - note that this only makes sense if calculating an average - ``C = pow(A, p)`` Raise the property values in A to the p-th power; - note the syntax ``C = pow(A, p, mod)`` can also be used - ``C = abs(A)`` Take the absolute value of the property values in A - ``C = -A`` Take the negative of all property values in A - ``C = +A`` Take the positive of all property values in A - ``C = A.max()`` Get the maximum value for each property for node/link - ``C = A.min()`` Get the minimum value for each property for node/link - ``n = A.len()`` Get the number of timesteps within A - ``C = A.sqrt()`` Take the element-wise square root for all properties - ``C = A.sum()`` Take the sum of each property across time for each node/link - ================== ============================================================ - - As an example, to calculate the relative difference between the results of two - simulations, one could do: ``rel_dif = abs(A - B) / A`` (warning - this will operate - on link statuses as well, which may result in meaningless results for that - parameter). - - """ - data_attributes = ["link", "node"] - def __init__(self): - super(HydraulicResults, self).__init__() + return self From fa76d592da42e016480e2797c2147fae40791b5b Mon Sep 17 00:00:00 2001 From: kbonney Date: Fri, 17 Nov 2023 16:34:16 -0500 Subject: [PATCH 10/21] updating results object --- wntr/sim/results.py | 78 ++++++++++++++++++++++----------------------- 1 file changed, 39 insertions(+), 39 deletions(-) diff --git a/wntr/sim/results.py b/wntr/sim/results.py index 0ef592a39..bff280080 100644 --- a/wntr/sim/results.py +++ b/wntr/sim/results.py @@ -47,8 +47,7 @@ def __init__(self): self.timestamp = str(datetime.datetime.now()) self.network_name = None for attr in self._data_attributes: - self.__setattr__(attr, dict()) - + setattr(self, attr, dict()) def __add__(self, other): if not isinstance(other, SimulationResults): @@ -59,11 +58,11 @@ def __add__(self, other): ) for attr in new._data_attributes: - for key in self.__getattr__(attr).keys(): - if key in other.__getattr__(attr).keys(): - self_dict = self.__getattr__(attr) - other_dict = other.__getattr__(attr) - new.__getattr__(attr)[key] = self_dict[key] + other_dict[key] + for key in getattr(self, attr).keys(): + if key in getattr(other, attr).keys(): + self_dict = getattr(self, attr) + other_dict = getattr(other, attr) + getattr(new, attr)[key] = self_dict[key] + other_dict[key] return new @@ -75,11 +74,11 @@ def __sub__(self, other): self.network_name, self.timestamp, other.network_name, other.timestamp ) for attr in new._data_attributes: - for key in self.__getattr__(attr).keys(): - if key in other.__getattr__(attr).keys(): - self_dict = self.__getattr__(attr) - other_dict = other.__getattr__(attr) - new.__getattr__(attr)[key] = self_dict[key] - other_dict[key] + for key in getattr(self, attr).keys(): + 
if key in getattr(other, attr).keys(): + self_dict = getattr(self, attr) + other_dict = getattr(other, attr) + getattr(new, attr)[key] = self_dict[key] - other_dict[key] return new def __abs__(self): @@ -87,8 +86,8 @@ def __abs__(self): new.network_name = "|{}[{}]|".format(self.network_name, self.timestamp) for attr in new._data_attributes: - for key in self.__getattr__(attr).keys(): - self_dict = self.__getattr__(attr) + for key in getattr(self, attr).keys(): + self_dict = getattr(self, attr) self_dict[key] = abs(self_dict[key]) return new @@ -97,8 +96,8 @@ def __neg__(self): new.network_name = "-{}[{}]".format(self.network_name, self.timestamp) for attr in new._data_attributes: - for key in self.__getattr__(attr).keys(): - self_dict = self.__getattr__(attr) + for key in getattr(self, attr).keys(): + self_dict = getattr(self, attr) self_dict[key] = -self_dict[key] return new @@ -109,8 +108,8 @@ def min(self): new.network_name = "min({}[{}])".format(self.network_name, self.timestamp) for attr in new._data_attributes: - for key in self.__getattr__(attr).keys(): - self_dict = self.__getattr__(attr) + for key in getattr(self, attr).keys(): + self_dict = getattr(self, attr) self_dict[key] = self_dict[key].min(axis=0) return new @@ -121,8 +120,8 @@ def max(self): new.network_name = "max({}[{}])".format(self.network_name, self.timestamp) for attr in new._data_attributes: - for key in self.__getattr__(attr).keys(): - self_dict = self.__getattr__(attr) + for key in getattr(self, attr).keys(): + self_dict = getattr(self, attr) self_dict[key] = self_dict[key].max(axis=0) return new @@ -132,8 +131,8 @@ def sum(self): new.network_name = "sum({}[{}])".format(self.network_name, self.timestamp) for attr in new._data_attributes: - for key in self.__getattr__(attr).keys(): - self_dict = self.__getattr__(attr) + for key in getattr(self, attr).keys(): + self_dict = getattr(self, attr) self_dict[key] = self_dict[key].sum(axis=0) return new @@ -141,8 +140,8 @@ def __pos__(self): new = SimulationResults() new.network_name = "+{}[{}]".format(self.network_name, self.timestamp) for attr in new._data_attributes: - for key in self.__getattr__(attr).keys(): - self_dict = self.__getattr__(attr) + for key in getattr(self, attr).keys(): + self_dict = getattr(self, attr) self_dict[key] = +self_dict[key] return new @@ -154,11 +153,11 @@ def __truediv__(self, other): self.network_name, self.timestamp, other.network_name, other.timestamp ) for attr in new._data_attributes: - for key in self.__getattr__(attr).keys(): - if key in other.__getattr__(attr).keys(): - self_dict = self.__getattr__(attr) - other_dict = other.__getattr__(attr) - new.__getattr__(attr)[key] = self_dict[key] / other_dict[key] + for key in getattr(self, attr).keys(): + if key in getattr(other, attr).keys(): + self_dict = getattr(self, attr) + other_dict = getattr(other, attr) + getattr(new, attr)[key] = self_dict[key] / other_dict[key] return new @@ -170,9 +169,9 @@ def __truediv__(self, other): new.node[key] = self.node[key] / other for attr in new._data_attributes: - for key in self.__getattr__(attr).keys(): - self_dict = self.__getattr__(attr) - new.__getattr__(attr)[key] = self_dict[key] / other + for key in getattr(self, attr).keys(): + self_dict = getattr(self, attr) + getattr(new, attr)[key] = self_dict[key] / other return new else: @@ -182,8 +181,8 @@ def __pow__(self, exp, mod=None): new = SimulationResults() new.network_name = "{}[{}] ** {}".format(self.network_name, self.timestamp, exp) for attr in new._data_attributes: - for key in 
self.__getattr__(attr).keys(): - self_dict = self.__getattr__(attr) + for key in getattr(self, attr).keys(): + self_dict = getattr(self, attr) self_dict[key] = pow(self_dict[key], exp, mod) return new @@ -203,8 +202,8 @@ def _adjust_time(self, ts: int): ts = int(ts) for attr in self._data_attributes: - for key in self.__getattr__(attr).keys(): - self_dict = self.__getattr__(attr) + for key in getattr(self, attr).keys(): + self_dict = getattr(self, attr) self_dict[key].index += ts return self @@ -243,9 +242,10 @@ def append(self, other): start_time = other.node["head"].index.values[0] keep = self.node["head"].index.values < start_time for attr in self._data_attributes: - self_dict = self.__getattr__(attr) - other_dict = other.__getattr__(attr) - for key in self_dict.keys()+other_dict.keys(): + self_dict = getattr(self, attr) + other_dict = getattr(other, attr) + all_keys = list(self_dict.keys()) + list(other_dict.keys()) + for key in all_keys: if key in self_dict.keys() and key in other_dict.keys(): t2 = pd.concat([self_dict[key].loc[keep], other_dict[key]]) self_dict[key] = t2 From 6930db97636f096785b6a7688dbd102854bc7843 Mon Sep 17 00:00:00 2001 From: kbonney Date: Fri, 17 Nov 2023 17:47:19 -0500 Subject: [PATCH 11/21] moved stopstart tests to own file --- wntr/tests/test_multiple_simulations.py | 174 ---------------------- wntr/tests/test_stop_start_sim.py | 182 ++++++++++++++++++++++++ 2 files changed, 182 insertions(+), 174 deletions(-) create mode 100644 wntr/tests/test_stop_start_sim.py diff --git a/wntr/tests/test_multiple_simulations.py b/wntr/tests/test_multiple_simulations.py index c266c38ab..cd290f9c4 100644 --- a/wntr/tests/test_multiple_simulations.py +++ b/wntr/tests/test_multiple_simulations.py @@ -81,180 +81,6 @@ def test_node_pressure(self): ) -class TestStopStartSim(unittest.TestCase): - @classmethod - def setUpClass(self): - import wntr - - self.wntr = wntr - - inp_file = join(ex_datadir, "Net3.inp") - - parser = self.wntr.epanet.InpFile() - self.wn = parser.read(inp_file) - self.wn.options.time.hydraulic_timestep = 3600 - self.wn.options.time.duration = 24 * 3600 - sim = self.wntr.sim.WNTRSimulator(self.wn) - self.res1 = sim.run_sim(solver_options={"TOL": 1e-8}) - - parser = self.wntr.epanet.InpFile() - self.wn = parser.read(inp_file) - self.wn.options.time.hydraulic_timestep = 3600 - self.wn.options.time.duration = 10 * 3600 - sim = self.wntr.sim.WNTRSimulator(self.wn) - self.res2 = sim.run_sim(solver_options={'TOL':1e-8}) - self.wn.set_initial_conditions(self.res2) - self.wn.options.time.pattern_start = self.wn.options.time.pattern_start + 10 * 3600 - self.wn.options.time.duration = 14 * 3600 - sim = self.wntr.sim.WNTRSimulator(self.wn) - self.res3 = sim.run_sim(solver_options={'TOL':1e-8}) - self.res3._adjust_time(10*3600) - # node_res = {} - # link_res = {} - # for key in self.res2.node.keys(): - # node_res[key] = pd.concat([self.res2.node[key],self.res3.node[key]],axis=0) - # for key in self.res2.link.keys(): - # link_res[key] = pd.concat([self.res2.link[key],self.res3.link[key]],axis=0) - # self.res2.node = node_res - # self.res2.link = link_res - self.res2.append(self.res3) - self.res4 = abs(self.res1 - self.res2).max() - - @classmethod - def tearDownClass(self): - pass - - def test_link_flowrate(self): - for link_name, link in self.wn.links(): - print(link_name, self.res1.link["flowrate"].loc[:,link_name].describe(), abs(self.res1 - self.res2).link["flowrate"].loc[:,link_name].describe()) - self.assertLess( - self.res4.link["flowrate"].at[link_name], - 
tolerances["flowrate"], - ) - - def test_link_velocity(self): - for link_name, link in self.wn.links(): - self.assertLess( - self.res4.link["velocity"].at[link_name], - tolerances["velocity"] - ) - - def test_node_demand(self): - for node_name, node in self.wn.nodes(): - self.assertLess( - self.res4.node["demand"].at[node_name], - tolerances["demand"] - ) - - def test_node_head(self): - for node_name, node in self.wn.nodes(): - self.assertLess( - self.res4.node["head"].at[node_name], - tolerances["head"] - ) - - def test_node_pressure(self): - for node_name, node in self.wn.nodes(): - self.assertLess( - self.res4.node["pressure"].at[node_name], - tolerances["pressure"] - ) - - - -class TestStopStartEpanetSim(unittest.TestCase): - @classmethod - def setUpClass(self): - import wntr - - self.wntr = wntr - - inp_file = join(ex_datadir, "Net3.inp") - - parser = self.wntr.epanet.InpFile() - self.wn = parser.read(inp_file) - self.wn.options.time.hydraulic_timestep = 3600 - self.wn.options.time.duration = 24 * 3600 - sim = self.wntr.sim.EpanetSimulator(self.wn) - self.res1 = sim.run_sim() - - parser = self.wntr.epanet.InpFile() - self.wn2 = parser.read(inp_file) - self.wn2.options.time.hydraulic_timestep = 3600 - self.wn2.options.time.duration = 11 * 3600 - sim = self.wntr.sim.EpanetSimulator(self.wn2) - self.res2 = sim.run_sim() - self.wn2.set_initial_conditions(self.res2) - self.wn2.options.time.pattern_start = self.wn2.options.time.pattern_start + 11 * 3600 - self.wn2.options.time.duration = 13 * 3600 - sim = self.wntr.sim.EpanetSimulator(self.wn2) - self.res3 = sim.run_sim() - self.res3._adjust_time(11*3600) - - self.res2.append(self.res3) - self.res4 = abs(self.res1 - self.res2).max() - - @classmethod - def tearDownClass(self): - pass - - def test_link_flowrate(self): - for link_name, link in self.wn.links(): - self.assertLess( - self.res4.link["flowrate"].at[link_name], - tolerances["flowrate"], - ) - - def test_link_velocity(self): - for link_name, link in self.wn.links(): - self.assertLess( - self.res4.link["velocity"].at[link_name], - tolerances["velocity"] - ) - - def test_link_headloss(self): - for link_name, link in self.wn.links(): - self.assertLess( - self.res4.link["headloss"].at[link_name], - tolerances["headloss"] - ) - - def test_link_status(self): - for link_name, link in self.wn.links(): - self.assertLess( - self.res4.link["status"].at[link_name], - tolerances["status"] - ) - - def test_link_setting(self): - for link_name, link in self.wn.links(): - self.assertLess( - self.res4.link["setting"].at[link_name], - tolerances["setting"] - ) - - def test_node_demand(self): - for node_name, node in self.wn.nodes(): - self.assertLess( - self.res4.node["demand"].at[node_name], - tolerances["demand"] - ) - - def test_node_head(self): - for node_name, node in self.wn.nodes(): - self.assertLess( - self.res4.node["head"].at[node_name], - tolerances["head"] - ) - - def test_node_pressure(self): - for node_name, node in self.wn.nodes(): - self.assertLess( - self.res4.node["pressure"].at[node_name], - tolerances["pressure"] - ) - - class TestPickle(unittest.TestCase): @classmethod def setUpClass(self): diff --git a/wntr/tests/test_stop_start_sim.py b/wntr/tests/test_stop_start_sim.py new file mode 100644 index 000000000..3017752c9 --- /dev/null +++ b/wntr/tests/test_stop_start_sim.py @@ -0,0 +1,182 @@ +import unittest +from os.path import abspath, dirname, join +import wntr + + +testdir = dirname(abspath(str(__file__))) +test_datadir = join(testdir, "networks_for_testing") +ex_datadir = 
join(testdir, "..", "..", "examples", "networks") + + +tolerances = { + "flowrate": 1.0e-5, # 10 mL/s + "velocity": 1.0e-2, # 0.01 m/s + "demand": 1.0e-5, # 10 mL/s + "head": 1.0e-2, # 0.01 m + "pressure": 1.0e-2, # 0.01 m H2O + "headloss": 1.0e-2, # 0.01 + "status": 0.5, # i.e., 0 since this is integer + "setting": 1.0e-2, # 0.01 +} + +class TestStopStartSim(unittest.TestCase): + @classmethod + def setUpClass(self): + inp_file = join(ex_datadir, "Net3.inp") + simulator = wntr.sim.WNTRSimulator + + # straight 24 hour simulation + self.wn = wntr.network.WaterNetworkModel(inp_file) + self.wn.options.time.hydraulic_timestep = 3600 + self.wn.options.time.duration = 24 * 3600 + sim = simulator(self.wn) + self.res1 = sim.run_sim(solver_options={"TOL": 1e-8}) + + # 24 hour simulation done in one 10 hour chunk and one 14 hour chunk + self.wn = wntr.network.WaterNetworkModel(inp_file) + self.wn.options.time.hydraulic_timestep = 3600 + self.wn.options.time.duration = 10 * 3600 + sim = simulator(self.wn) + self.res2 = sim.run_sim(solver_options={'TOL':1e-8}) + self.wn.set_initial_conditions(self.res2) + self.wn.options.time.pattern_start = self.wn.options.time.pattern_start + 10 * 3600 + self.wn.options.time.duration = 14 * 3600 + sim = simulator(self.wn) + self.res3 = sim.run_sim(solver_options={'TOL':1e-8}) + self.res3._adjust_time(10*3600) + self.res2.append(self.res3) + self.res4 = abs(self.res1 - self.res2).max() + + @classmethod + def tearDownClass(self): + pass + + def test_link_flowrate(self): + for link_name, link in self.wn.links(): + print(link_name, self.res1.link["flowrate"].loc[:,link_name].describe(), abs(self.res1 - self.res2).link["flowrate"].loc[:,link_name].describe()) + self.assertLess( + self.res4.link["flowrate"].at[link_name], + tolerances["flowrate"], + ) + + def test_link_velocity(self): + for link_name, link in self.wn.links(): + self.assertLess( + self.res4.link["velocity"].at[link_name], + tolerances["velocity"] + ) + + def test_node_demand(self): + for node_name, node in self.wn.nodes(): + self.assertLess( + self.res4.node["demand"].at[node_name], + tolerances["demand"] + ) + + def test_node_head(self): + for node_name, node in self.wn.nodes(): + self.assertLess( + self.res4.node["head"].at[node_name], + tolerances["head"] + ) + + def test_node_pressure(self): + for node_name, node in self.wn.nodes(): + self.assertLess( + self.res4.node["pressure"].at[node_name], + tolerances["pressure"] + ) + + + +class TestStopStartEpanetSim(unittest.TestCase): + @classmethod + def setUpClass(self): + inp_file = join(ex_datadir, "Net3.inp") + + # straight 24 hour simulation + self.wn = wntr.network.WaterNetworkModel(inp_file) + self.wn.options.time.hydraulic_timestep = 3600 + self.wn.options.time.duration = 24 * 3600 + sim = wntr.sim.EpanetSimulator(self.wn) + self.res1 = sim.run_sim() + + # 24 hour simulation done in one 10 hour chunk and one 14 hour chunk + self.wn = wntr.network.WaterNetworkModel(inp_file) + self.wn.options.time.hydraulic_timestep = 3600 + self.wn.options.time.duration = 11 * 3600 + sim = wntr.sim.EpanetSimulator(self.wn) + self.res2 = sim.run_sim() + self.wn.set_initial_conditions(self.res2) + self.wn.options.time.pattern_start = self.wn.options.time.pattern_start + 11 * 3600 + self.wn.options.time.duration = 13 * 3600 + sim = wntr.sim.EpanetSimulator(self.wn) + self.res3 = sim.run_sim() + self.res3._adjust_time(11*3600) + + self.res2.append(self.res3) + self.res4 = abs(self.res1 - self.res2).max() + + @classmethod + def tearDownClass(self): + pass + + def 
test_link_flowrate(self): + for link_name, link in self.wn.links(): + self.assertLess( + self.res4.link["flowrate"].at[link_name], + tolerances["flowrate"], + ) + + def test_link_velocity(self): + for link_name, link in self.wn.links(): + self.assertLess( + self.res4.link["velocity"].at[link_name], + tolerances["velocity"] + ) + + def test_link_headloss(self): + for link_name, link in self.wn.links(): + self.assertLess( + self.res4.link["headloss"].at[link_name], + tolerances["headloss"] + ) + + def test_link_status(self): + for link_name, link in self.wn.links(): + self.assertLess( + self.res4.link["status"].at[link_name], + tolerances["status"] + ) + + def test_link_setting(self): + for link_name, link in self.wn.links(): + self.assertLess( + self.res4.link["setting"].at[link_name], + tolerances["setting"] + ) + + def test_node_demand(self): + for node_name, node in self.wn.nodes(): + self.assertLess( + self.res4.node["demand"].at[node_name], + tolerances["demand"] + ) + + def test_node_head(self): + for node_name, node in self.wn.nodes(): + self.assertLess( + self.res4.node["head"].at[node_name], + tolerances["head"] + ) + + def test_node_pressure(self): + for node_name, node in self.wn.nodes(): + self.assertLess( + self.res4.node["pressure"].at[node_name], + tolerances["pressure"] + ) + + +if __name__ == "__main__": + unittest.main() \ No newline at end of file From c136e00c7605676ea9ed0c5100d08db2cc79672a Mon Sep 17 00:00:00 2001 From: kbonney Date: Fri, 17 Nov 2023 17:47:31 -0500 Subject: [PATCH 12/21] fixed bugs and simplified append method --- wntr/sim/results.py | 50 +++++++++++++++++++++++++++------------------ 1 file changed, 30 insertions(+), 20 deletions(-) diff --git a/wntr/sim/results.py b/wntr/sim/results.py index bff280080..9cf1c723b 100644 --- a/wntr/sim/results.py +++ b/wntr/sim/results.py @@ -88,7 +88,8 @@ def __abs__(self): for attr in new._data_attributes: for key in getattr(self, attr).keys(): self_dict = getattr(self, attr) - self_dict[key] = abs(self_dict[key]) + new_dict = getattr(new, attr) + new_dict[key] = abs(self_dict[key]) return new def __neg__(self): @@ -98,7 +99,8 @@ def __neg__(self): for attr in new._data_attributes: for key in getattr(self, attr).keys(): self_dict = getattr(self, attr) - self_dict[key] = -self_dict[key] + new_dict = getattr(new, attr) + new_dict[key] = -self_dict[key] return new def min(self): @@ -106,11 +108,11 @@ def min(self): The built-in ``min`` function will not work.""" new = SimulationResults() new.network_name = "min({}[{}])".format(self.network_name, self.timestamp) - for attr in new._data_attributes: for key in getattr(self, attr).keys(): self_dict = getattr(self, attr) - self_dict[key] = self_dict[key].min(axis=0) + new_dict = getattr(new, attr) + new_dict[key] = self_dict[key].min(axis=0) return new def max(self): @@ -122,7 +124,8 @@ def max(self): for attr in new._data_attributes: for key in getattr(self, attr).keys(): self_dict = getattr(self, attr) - self_dict[key] = self_dict[key].max(axis=0) + new_dict = getattr(new, attr) + new_dict[key] = self_dict[key].max(axis=0) return new def sum(self): @@ -133,7 +136,8 @@ def sum(self): for attr in new._data_attributes: for key in getattr(self, attr).keys(): self_dict = getattr(self, attr) - self_dict[key] = self_dict[key].sum(axis=0) + new_dict = getattr(new, attr) + new_dict[key] = self_dict[key].sum(axis=0) return new def __pos__(self): @@ -142,7 +146,8 @@ def __pos__(self): for attr in new._data_attributes: for key in getattr(self, attr).keys(): self_dict = getattr(self, 
attr) - self_dict[key] = +self_dict[key] + new_dict = getattr(new, attr) + new_dict[key] = +self_dict[key] return new def __truediv__(self, other): @@ -239,25 +244,30 @@ def append(self, other): if not isinstance(other, SimulationResults): raise ValueError(f"operating on a results object requires both be SimulationResults") #NOTE: Below two lines assume that results object has the node attribute and "head" key - start_time = other.node["head"].index.values[0] - keep = self.node["head"].index.values < start_time + for attr in self._data_attributes: self_dict = getattr(self, attr) other_dict = getattr(other, attr) + + # query for any dataframe to get shape of df in each results object + self_attr_dataframe = list(self_dict.values())[0] + other_attr_dataframe = list(other_dict.values())[0] + + all_keys = list(self_dict.keys()) + list(other_dict.keys()) for key in all_keys: + self_df = self_dict[key] + other_df = other_dict[key] + overlap = self_df.index.intersection(other_df.index) if key in self_dict.keys() and key in other_dict.keys(): - t2 = pd.concat([self_dict[key].loc[keep], other_dict[key]]) - self_dict[key] = t2 + t2 = pd.concat([self_df[~self_df.index.isin(overlap)], other_df]) + self_df = t2 elif key not in other_dict.keys(): - # grab first key to get the shape of the DF - first_key = list(other_dict.keys())[0] - temp = other_dict[first_key] * np.nan - t2 = pd.concat([self_dict[key].loc[keep], temp]) - self_dict[key] = t2 + temp = other_attr_dataframe * np.nan + t2 = pd.concat([self_df[~self_df.index.isin(overlap)], temp]) + self_df = t2 elif key not in self_dict.keys(): - first_key = list(self_dict.keys())[0] - temp = self_dict[first_key] * np.nan - t2 = pd.concat([temp.loc[keep], other_dict[key]]) - self_dict[key] = t2 + temp = self_attr_dataframe * np.nan + t2 = pd.concat([temp[~temp.index.isin(overlap)], other_df]) + self_df = t2 return self From f3c5e8063a3b3875f387fc0d1094d2396739f283 Mon Sep 17 00:00:00 2001 From: kbonney Date: Fri, 17 Nov 2023 17:47:40 -0500 Subject: [PATCH 13/21] beginning a test file for results object --- wntr/tests/test_results.py | 76 ++++++++++++++++++++++++++++++++++++++ 1 file changed, 76 insertions(+) create mode 100644 wntr/tests/test_results.py diff --git a/wntr/tests/test_results.py b/wntr/tests/test_results.py new file mode 100644 index 000000000..1d58a1e40 --- /dev/null +++ b/wntr/tests/test_results.py @@ -0,0 +1,76 @@ +# These tests run a demand-driven simulation with both WNTR and Epanet and compare the results for the example networks +import copy +import unittest +from os.path import abspath, dirname, join + +import pandas as pd + +testdir = dirname(abspath(str(__file__))) +test_datadir = join(testdir, "networks_for_testing") +ex_datadir = join(testdir, "..", "..", "examples", "networks") + +tolerances = { + "flowrate": 1.0e-5, # 10 mL/s + "velocity": 1.0e-2, # 0.01 m/s + "demand": 1.0e-5, # 10 mL/s + "head": 1.0e-2, # 0.01 m + "pressure": 1.0e-2, # 0.01 m H2O + "headloss": 1.0e-2, # 0.01 + "status": 0.5, # i.e., 0 since this is integer + "setting": 1.0e-2, # 0.01 +} + +class TestResultsOperations(unittest.TestCase): + @classmethod + def setUpClass(self): + import wntr + + self.wntr = wntr + self.tol = 1e-8 + self.ts = 3600 + + inp_file = join(ex_datadir, "Net3.inp") + self.wn = self.wntr.network.WaterNetworkModel(inp_file) + self.wn.options.time.hydraulic_timestep = 3600 + self.wn.options.time.duration = 4 * 3600 + + sim = self.wntr.sim.WNTRSimulator(self.wn) + self.res = sim.run_sim(solver_options={"TOL": self.tol}) + + @classmethod + def 
tearDownClass(self): + pass + + def test_adjust_time(self): + res_copy = copy.deepcopy(self.res) + res_copy._adjust_time(self.ts) + + for key in self.res.link.keys(): + assert self.res.link[key].index + self.ts == res_copy.node[key].index + for key in self.res.node.keys(): + assert self.res.node[key].index + self.ts == res_copy.node[key].index + + def test_append(self): + res_copy1 = copy.deepcopy(self.res) + res_copy2 = copy.deepcopy(self.res) + res_copy2._adjust_time(self.wn.options.time.duration) + res_copy1.append(res_copy2) + pass + + def test_arithmetic(self): + added_res = self.res + self.res + subtracted_res = self.res - self.res + divided_res = self.res / self.res + int_divided_res = self.res / 2 + pow_res = pow(self.res, 1/2) + abs_res = abs(self.res) + neg_res = -self.res + pos_res = +self.res + max_res = self.res.max() + min_res = self.res.min() + summed_res = self.res.sum() + foo + + +if __name__ == "__main__": + unittest.main() From 2346d00a4e6db89c6121949e6adfd5dd8b594714 Mon Sep 17 00:00:00 2001 From: kbonney Date: Mon, 20 Nov 2023 11:04:42 -0500 Subject: [PATCH 14/21] separating tests into more descriptive file names. simplifying node/link comparisons --- wntr/tests/test_multiple_simulations.py | 164 ------------------------ wntr/tests/test_pickle_reset.py | 74 +++++++++++ wntr/tests/test_reset_initial_values.py | 58 +++++++++ wntr/tests/test_stop_start_sim.py | 127 +++++------------- 4 files changed, 161 insertions(+), 262 deletions(-) delete mode 100644 wntr/tests/test_multiple_simulations.py create mode 100644 wntr/tests/test_pickle_reset.py create mode 100644 wntr/tests/test_reset_initial_values.py diff --git a/wntr/tests/test_multiple_simulations.py b/wntr/tests/test_multiple_simulations.py deleted file mode 100644 index cd290f9c4..000000000 --- a/wntr/tests/test_multiple_simulations.py +++ /dev/null @@ -1,164 +0,0 @@ -# These tests run a demand-driven simulation with both WNTR and Epanet and compare the results for the example networks -import pickle -import unittest -from os.path import abspath, dirname, join - -import pandas as pd - -testdir = dirname(abspath(str(__file__))) -test_datadir = join(testdir, "networks_for_testing") -ex_datadir = join(testdir, "..", "..", "examples", "networks") - -tolerances = { - "flowrate": 1.0e-5, # 10 mL/s - "velocity": 1.0e-2, # 0.01 m/s - "demand": 1.0e-5, # 10 mL/s - "head": 1.0e-2, # 0.01 m - "pressure": 1.0e-2, # 0.01 m H2O - "headloss": 1.0e-2, # 0.01 - "status": 0.5, # i.e., 0 since this is integer - "setting": 1.0e-2, # 0.01 -} - -class TestResetInitialValues(unittest.TestCase): - @classmethod - def setUpClass(self): - import wntr - - self.wntr = wntr - self.tol = 1e-8 - - inp_file = join(ex_datadir, "Net3.inp") - self.wn = self.wntr.network.WaterNetworkModel(inp_file) - self.wn.options.time.hydraulic_timestep = 3600 - self.wn.options.time.duration = 24 * 3600 - - sim = self.wntr.sim.WNTRSimulator(self.wn) - self.res1 = sim.run_sim(solver_options={"TOL": 1e-8}) - - self.wn.reset_initial_values() - self.res2 = sim.run_sim(solver_options={"TOL": 1e-8}) - - self.res4 = abs(self.res1 - self.res2).max() - - @classmethod - def tearDownClass(self): - pass - - def test_link_flowrate(self): - for link_name, link in self.wn.links(): - self.assertLess( - self.res4.link["flowrate"].at[link_name], - tolerances["flowrate"], - ) - - def test_link_velocity(self): - for link_name, link in self.wn.links(): - self.assertLess( - self.res4.link["velocity"].at[link_name], - tolerances["velocity"] - ) - - def test_node_demand(self): - for 
node_name, node in self.wn.nodes(): - self.assertLess( - self.res4.node["demand"].at[node_name], - tolerances["demand"] - ) - - def test_node_head(self): - for node_name, node in self.wn.nodes(): - self.assertLess( - self.res4.node["head"].at[node_name], - tolerances["head"] - ) - - def test_node_pressure(self): - for node_name, node in self.wn.nodes(): - self.assertLess( - self.res4.node["pressure"].at[node_name], - tolerances["pressure"] - ) - - -class TestPickle(unittest.TestCase): - @classmethod - def setUpClass(self): - import wntr - - self.wntr = wntr - - inp_file = join(ex_datadir, "Net3.inp") - - parser = self.wntr.epanet.InpFile() - self.wn = parser.read(inp_file) - self.wn.options.time.hydraulic_timestep = 3600 - self.wn.options.time.duration = 24 * 3600 - sim = self.wntr.sim.WNTRSimulator(self.wn) - self.res1 = sim.run_sim(solver_options={"TOL": 1e-8}) - - parser = self.wntr.epanet.InpFile() - self.wn2 = parser.read(inp_file) - self.wn2.options.time.hydraulic_timestep = 3600 - self.wn2.options.time.duration = 10 * 3600 - sim = self.wntr.sim.WNTRSimulator(self.wn2) - self.res2 = sim.run_sim(solver_options={"TOL": 1e-8}) - self.wn2.set_initial_conditions(self.res2, 36000) - self.wn2.options.time.pattern_start = self.wn2.options.time.pattern_start + 10 * 3600 - f = open("temp.pickle", "wb") - pickle.dump(self.wn2, f) - f.close() - f = open("temp.pickle", "rb") - self.wn2 = pickle.load(f) - f.close() - self.wn2.options.time.duration = 14*3600 - sim = self.wntr.sim.WNTRSimulator(self.wn2) - self.res3 = sim.run_sim(solver_options={'TOL':1e-8}) - self.res3._adjust_time(10*3600) - - self.res2.append(self.res3) - self.res4 = abs(self.res1 - self.res2).max() - - @classmethod - def tearDownClass(self): - pass - - - def test_link_flowrate(self): - for link_name, link in self.wn.links(): - self.assertLess( - self.res4.link["flowrate"].at[link_name], - tolerances["flowrate"], - ) - - def test_link_velocity(self): - for link_name, link in self.wn.links(): - self.assertLess( - self.res4.link["velocity"].at[link_name], - tolerances["velocity"] - ) - - def test_node_demand(self): - for node_name, node in self.wn.nodes(): - self.assertLess( - self.res4.node["demand"].at[node_name], - tolerances["demand"] - ) - - def test_node_head(self): - for node_name, node in self.wn.nodes(): - self.assertLess( - self.res4.node["head"].at[node_name], - tolerances["head"] - ) - - def test_node_pressure(self): - for node_name, node in self.wn.nodes(): - self.assertLess( - self.res4.node["pressure"].at[node_name], - tolerances["pressure"] - ) - - -if __name__ == "__main__": - unittest.main() diff --git a/wntr/tests/test_pickle_reset.py b/wntr/tests/test_pickle_reset.py new file mode 100644 index 000000000..e102321fb --- /dev/null +++ b/wntr/tests/test_pickle_reset.py @@ -0,0 +1,74 @@ +# Test the use of pickling and then unpickling a water network object for resetting values +import pickle +import unittest +from os.path import abspath, dirname, join + +import pandas as pd +import wntr + +testdir = dirname(abspath(str(__file__))) +test_datadir = join(testdir, "networks_for_testing") +ex_datadir = join(testdir, "..", "..", "examples", "networks") + +tolerances = { + "flowrate": 1.0e-5, # 10 mL/s + "velocity": 1.0e-2, # 0.01 m/s + "demand": 1.0e-5, # 10 mL/s + "head": 1.0e-2, # 0.01 m + "pressure": 1.0e-2, # 0.01 m H2O + "headloss": 1.0e-2, # 0.01 + "status": 0.5, # i.e., 0 since this is integer + "setting": 1.0e-2, # 0.01 +} + +class TestPickle(unittest.TestCase): + @classmethod + def setUpClass(self): + + inp_file = 
join(ex_datadir, "Net3.inp") + + self.wn = wntr.network.WaterNetworkModel(inp_file) + self.wn.options.time.hydraulic_timestep = 3600 + self.wn.options.time.duration = 24 * 3600 + sim = wntr.sim.WNTRSimulator(self.wn) + self.res1 = sim.run_sim(solver_options={"TOL": 1e-8}) + + self.wn2 = wntr.network.WaterNetworkModel(inp_file) + self.wn2.options.time.hydraulic_timestep = 3600 + self.wn2.options.time.duration = 10 * 3600 + sim = wntr.sim.WNTRSimulator(self.wn2) + self.res2 = sim.run_sim(solver_options={"TOL": 1e-8}) + self.wn2.set_initial_conditions(self.res2, 36000) + self.wn2.options.time.pattern_start = self.wn2.options.time.pattern_start + 10 * 3600 + f = open("temp.pickle", "wb") + pickle.dump(self.wn2, f) + f.close() + f = open("temp.pickle", "rb") + self.wn2 = pickle.load(f) + f.close() + self.wn2.options.time.duration = 14*3600 + sim = wntr.sim.WNTRSimulator(self.wn2) + self.res3 = sim.run_sim(solver_options={'TOL':1e-8}) + self.res3._adjust_time(10*3600) + + self.res2.append(self.res3) + self.res4 = abs(self.res1 - self.res2).max() + + @classmethod + def tearDownClass(self): + pass + + def test_nodes(self): + node_keys = ["demand", "head", "pressure"] + for key in node_keys: + max_res_diff = self.res4.node[key].max() + self.assertLess(max_res_diff, tolerances[key]) + + def test_links(self): + link_keys = ["flowrate", "velocity", "status", "setting"] + for key in link_keys: + max_res_diff = self.res4.link[key].max() + self.assertLess(max_res_diff, tolerances[key]) + +if __name__ == "__main__": + unittest.main() diff --git a/wntr/tests/test_reset_initial_values.py b/wntr/tests/test_reset_initial_values.py new file mode 100644 index 000000000..25033cb9e --- /dev/null +++ b/wntr/tests/test_reset_initial_values.py @@ -0,0 +1,58 @@ +# These tests run a demand-driven simulation with both WNTR and Epanet and compare the results for the example networks +import pickle +import unittest +from os.path import abspath, dirname, join + +import pandas as pd + +testdir = dirname(abspath(str(__file__))) +test_datadir = join(testdir, "networks_for_testing") +ex_datadir = join(testdir, "..", "..", "examples", "networks") + +tolerances = { + "flowrate": 1.0e-5, # 10 mL/s + "velocity": 1.0e-2, # 0.01 m/s + "demand": 1.0e-5, # 10 mL/s + "head": 1.0e-2, # 0.01 m + "pressure": 1.0e-2, # 0.01 m H2O + "headloss": 1.0e-2, # 0.01 + "status": 0.5, # i.e., 0 since this is integer + "setting": 1.0e-2, # 0.01 +} + +class TestResetInitialValues(unittest.TestCase): + @classmethod + def setUpClass(self): + import wntr + + self.wntr = wntr + self.tol = 1e-8 + + inp_file = join(ex_datadir, "Net3.inp") + self.wn = self.wntr.network.WaterNetworkModel(inp_file) + self.wn.options.time.hydraulic_timestep = 3600 + self.wn.options.time.duration = 24 * 3600 + + sim = self.wntr.sim.WNTRSimulator(self.wn) + self.res1 = sim.run_sim(solver_options={"TOL": 1e-8}) + + self.wn.reset_initial_values() + self.res2 = sim.run_sim(solver_options={"TOL": 1e-8}) + + self.res4 = abs(self.res1 - self.res2).max() + + @classmethod + def tearDownClass(self): + pass + + def test_nodes(self): + node_keys = ["demand", "head", "pressure"] + for key in node_keys: + max_res_diff = self.res4.node[key].max() + self.assertLess(max_res_diff, tolerances[key]) + + def test_links(self): + link_keys = ["flowrate", "velocity", "status", "setting"] + for key in link_keys: + max_res_diff = self.res4.link[key].max() + self.assertLess(max_res_diff, tolerances[key]) diff --git a/wntr/tests/test_stop_start_sim.py b/wntr/tests/test_stop_start_sim.py index 
3017752c9..d303dcf66 100644 --- a/wntr/tests/test_stop_start_sim.py +++ b/wntr/tests/test_stop_start_sim.py @@ -11,7 +11,7 @@ tolerances = { "flowrate": 1.0e-5, # 10 mL/s "velocity": 1.0e-2, # 0.01 m/s - "demand": 1.0e-5, # 10 mL/s + "demand": 1.0e-4, # 10 mL/s "head": 1.0e-2, # 0.01 m "pressure": 1.0e-2, # 0.01 m H2O "headloss": 1.0e-2, # 0.01 @@ -50,43 +50,18 @@ def setUpClass(self): @classmethod def tearDownClass(self): pass - - def test_link_flowrate(self): - for link_name, link in self.wn.links(): - print(link_name, self.res1.link["flowrate"].loc[:,link_name].describe(), abs(self.res1 - self.res2).link["flowrate"].loc[:,link_name].describe()) - self.assertLess( - self.res4.link["flowrate"].at[link_name], - tolerances["flowrate"], - ) - - def test_link_velocity(self): - for link_name, link in self.wn.links(): - self.assertLess( - self.res4.link["velocity"].at[link_name], - tolerances["velocity"] - ) - - def test_node_demand(self): - for node_name, node in self.wn.nodes(): - self.assertLess( - self.res4.node["demand"].at[node_name], - tolerances["demand"] - ) - - def test_node_head(self): - for node_name, node in self.wn.nodes(): - self.assertLess( - self.res4.node["head"].at[node_name], - tolerances["head"] - ) - - def test_node_pressure(self): - for node_name, node in self.wn.nodes(): - self.assertLess( - self.res4.node["pressure"].at[node_name], - tolerances["pressure"] - ) - + + def test_nodes(self): + node_keys = ["demand", "head", "pressure"] + for key in node_keys: + max_res_diff = self.res4.node[key].max() + self.assertLess(max_res_diff, tolerances[key]) + + def test_links(self): + link_keys = ["flowrate", "velocity", "status", "setting"] + for key in link_keys: + max_res_diff = self.res4.link[key].max() + self.assertLess(max_res_diff, tolerances[key]) class TestStopStartEpanetSim(unittest.TestCase): @@ -104,15 +79,15 @@ def setUpClass(self): # 24 hour simulation done in one 10 hour chunk and one 14 hour chunk self.wn = wntr.network.WaterNetworkModel(inp_file) self.wn.options.time.hydraulic_timestep = 3600 - self.wn.options.time.duration = 11 * 3600 + self.wn.options.time.duration = 10 * 3600 sim = wntr.sim.EpanetSimulator(self.wn) self.res2 = sim.run_sim() self.wn.set_initial_conditions(self.res2) - self.wn.options.time.pattern_start = self.wn.options.time.pattern_start + 11 * 3600 - self.wn.options.time.duration = 13 * 3600 + self.wn.options.time.pattern_start = self.wn.options.time.pattern_start + 10 * 3600 + self.wn.options.time.duration = 14 * 3600 sim = wntr.sim.EpanetSimulator(self.wn) self.res3 = sim.run_sim() - self.res3._adjust_time(11*3600) + self.res3._adjust_time(10*3600) self.res2.append(self.res3) self.res4 = abs(self.res1 - self.res2).max() @@ -120,62 +95,18 @@ def setUpClass(self): @classmethod def tearDownClass(self): pass - - def test_link_flowrate(self): - for link_name, link in self.wn.links(): - self.assertLess( - self.res4.link["flowrate"].at[link_name], - tolerances["flowrate"], - ) - - def test_link_velocity(self): - for link_name, link in self.wn.links(): - self.assertLess( - self.res4.link["velocity"].at[link_name], - tolerances["velocity"] - ) - - def test_link_headloss(self): - for link_name, link in self.wn.links(): - self.assertLess( - self.res4.link["headloss"].at[link_name], - tolerances["headloss"] - ) - - def test_link_status(self): - for link_name, link in self.wn.links(): - self.assertLess( - self.res4.link["status"].at[link_name], - tolerances["status"] - ) - - def test_link_setting(self): - for link_name, link in self.wn.links(): - 
self.assertLess( - self.res4.link["setting"].at[link_name], - tolerances["setting"] - ) - - def test_node_demand(self): - for node_name, node in self.wn.nodes(): - self.assertLess( - self.res4.node["demand"].at[node_name], - tolerances["demand"] - ) - - def test_node_head(self): - for node_name, node in self.wn.nodes(): - self.assertLess( - self.res4.node["head"].at[node_name], - tolerances["head"] - ) - - def test_node_pressure(self): - for node_name, node in self.wn.nodes(): - self.assertLess( - self.res4.node["pressure"].at[node_name], - tolerances["pressure"] - ) + + def test_nodes(self): + node_keys = ["demand", "head", "pressure"] + for key in node_keys: + max_res_diff = self.res4.node[key].max() + self.assertLess(max_res_diff, tolerances[key]) + + def test_links(self): + link_keys = ["flowrate", "velocity", "status", "setting", "headloss"] + for key in link_keys: + max_res_diff = self.res4.link[key].max() + self.assertLess(max_res_diff, tolerances[key]) if __name__ == "__main__": From 6069f2d99844bc46641ddf522c8ac6443deb432b Mon Sep 17 00:00:00 2001 From: kbonney Date: Mon, 20 Nov 2023 11:04:56 -0500 Subject: [PATCH 15/21] update test results --- wntr/tests/test_results.py | 91 +++++++++++++++++++++++++++++++------- 1 file changed, 74 insertions(+), 17 deletions(-) diff --git a/wntr/tests/test_results.py b/wntr/tests/test_results.py index 1d58a1e40..5ea5b9f73 100644 --- a/wntr/tests/test_results.py +++ b/wntr/tests/test_results.py @@ -2,6 +2,7 @@ import copy import unittest from os.path import abspath, dirname, join +from pandas.testing import assert_frame_equal, assert_series_equal, assert_index_equal import pandas as pd @@ -45,31 +46,87 @@ def test_adjust_time(self): res_copy = copy.deepcopy(self.res) res_copy._adjust_time(self.ts) - for key in self.res.link.keys(): - assert self.res.link[key].index + self.ts == res_copy.node[key].index - for key in self.res.node.keys(): - assert self.res.node[key].index + self.ts == res_copy.node[key].index + for attr in self.res._data_attributes: + for key in getattr(self.res, attr).keys(): + assert_index_equal(getattr(self.res, attr)[key].index + self.ts, getattr(res_copy, attr)[key].index) def test_append(self): res_copy1 = copy.deepcopy(self.res) res_copy2 = copy.deepcopy(self.res) res_copy2._adjust_time(self.wn.options.time.duration) res_copy1.append(res_copy2) - pass + assert res_copy1.node["head"].shape == (9,97) + assert (res_copy1.node["head"].loc[0] == res_copy1.node["head"].loc[14400]).all() def test_arithmetic(self): - added_res = self.res + self.res - subtracted_res = self.res - self.res - divided_res = self.res / self.res - int_divided_res = self.res / 2 - pow_res = pow(self.res, 1/2) - abs_res = abs(self.res) - neg_res = -self.res - pos_res = +self.res - max_res = self.res.max() - min_res = self.res.min() - summed_res = self.res.sum() - foo + df = self.res.node["head"] + + # addition + temp_res = self.res + self.res + temp_df = temp_res.node["head"] + test_df = df + df + assert_frame_equal(temp_df, test_df) + + # subtraction + temp_res = self.res - self.res + temp_df = temp_res.node["head"] + test_df = df - df + assert_frame_equal(temp_df, test_df) + + # division + temp_res = self.res / self.res + temp_df = temp_res.node["head"] + test_df = df / df + assert_frame_equal(temp_df, test_df) + + # int division + temp_res = self.res / 2 + temp_df = temp_res.node["head"] + test_df = df / 2 + assert_frame_equal(temp_df, test_df) + + # power + temp_res = pow(self.res, 1/2) + temp_df = temp_res.node["head"] + test_df = pow(df, 1/2) + 
assert_frame_equal(temp_df, test_df) + + # abs + temp_res = abs(self.res) + temp_df = temp_res.node["head"] + test_df = abs(df) + assert_frame_equal(temp_df, test_df) + + # neg + temp_res = -self.res + temp_df = temp_res.node["head"] + test_df = -df + assert_frame_equal(temp_df, test_df) + + # pos + temp_res = +self.res + temp_df = temp_res.node["head"] + test_df = +df + assert_frame_equal(temp_df, test_df) + + # max + temp_res = self.res.max() + temp_df = temp_res.node["head"] + test_df = df.max(axis=0) + assert_series_equal(temp_df, test_df) + + # min + temp_res = self.res.min() + temp_df = temp_res.node["head"] + test_df = df.min(axis=0) + assert_series_equal(temp_df, test_df) + + # sum + temp_res = self.res.sum() + temp_df = temp_res.node["head"] + test_df = df.sum(axis=0) + assert_series_equal(temp_df, test_df) + if __name__ == "__main__": From 95a274788c8b7353aedcdd4dc5e3148ebebe5991 Mon Sep 17 00:00:00 2001 From: kbonney Date: Mon, 20 Nov 2023 11:05:09 -0500 Subject: [PATCH 16/21] bug fix results object --- wntr/sim/results.py | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/wntr/sim/results.py b/wntr/sim/results.py index 9cf1c723b..ad8c430df 100644 --- a/wntr/sim/results.py +++ b/wntr/sim/results.py @@ -42,6 +42,7 @@ class SimulationResults: """ _data_attributes = ["link", "node"] + def __init__(self): # Simulation time series self.timestamp = str(datetime.datetime.now()) @@ -188,7 +189,8 @@ def __pow__(self, exp, mod=None): for attr in new._data_attributes: for key in getattr(self, attr).keys(): self_dict = getattr(self, attr) - self_dict[key] = pow(self_dict[key], exp, mod) + new_dict = getattr(new, attr) + new_dict[key] = pow(self_dict[key], exp, mod) return new def _adjust_time(self, ts: int): @@ -260,14 +262,11 @@ def append(self, other): other_df = other_dict[key] overlap = self_df.index.intersection(other_df.index) if key in self_dict.keys() and key in other_dict.keys(): - t2 = pd.concat([self_df[~self_df.index.isin(overlap)], other_df]) - self_df = t2 + self_dict[key] = pd.concat([self_df[~self_df.index.isin(overlap)], other_df]) elif key not in other_dict.keys(): temp = other_attr_dataframe * np.nan - t2 = pd.concat([self_df[~self_df.index.isin(overlap)], temp]) - self_df = t2 + self_dict[key] = pd.concat([self_df[~self_df.index.isin(overlap)], temp]) elif key not in self_dict.keys(): temp = self_attr_dataframe * np.nan - t2 = pd.concat([temp[~temp.index.isin(overlap)], other_df]) - self_df = t2 + self_dict[key] = pd.concat([temp[~temp.index.isin(overlap)], other_df]) return self From a7272a66c10bb22c52cfdca3b84cdc5a97fce8fb Mon Sep 17 00:00:00 2001 From: kbonney Date: Mon, 20 Nov 2023 11:20:46 -0500 Subject: [PATCH 17/21] updating test pickle --- wntr/tests/test_pickle_reset.py | 18 +++++++++++++++++- 1 file changed, 17 insertions(+), 1 deletion(-) diff --git a/wntr/tests/test_pickle_reset.py b/wntr/tests/test_pickle_reset.py index e102321fb..5d04b5975 100644 --- a/wntr/tests/test_pickle_reset.py +++ b/wntr/tests/test_pickle_reset.py @@ -1,4 +1,5 @@ # Test the use of pickling and then unpickling a water network object for resetting values +# Also tests pickling and unpickling in the middle of a simulation import pickle import unittest from os.path import abspath, dirname, join @@ -30,22 +31,37 @@ def setUpClass(self): self.wn = wntr.network.WaterNetworkModel(inp_file) self.wn.options.time.hydraulic_timestep = 3600 self.wn.options.time.duration = 24 * 3600 + + # pickle WN before running sims + f = open("temp.pickle", "wb") + 
pickle.dump(self.wn, f) + f.close() + + + # run sim with initial wn sim = wntr.sim.WNTRSimulator(self.wn) self.res1 = sim.run_sim(solver_options={"TOL": 1e-8}) + + # load pickled inital wn + f = open("temp.pickle", "rb") + self.wn2 = pickle.load(f) + f.close() - self.wn2 = wntr.network.WaterNetworkModel(inp_file) self.wn2.options.time.hydraulic_timestep = 3600 self.wn2.options.time.duration = 10 * 3600 sim = wntr.sim.WNTRSimulator(self.wn2) self.res2 = sim.run_sim(solver_options={"TOL": 1e-8}) self.wn2.set_initial_conditions(self.res2, 36000) self.wn2.options.time.pattern_start = self.wn2.options.time.pattern_start + 10 * 3600 + + # pickle wn and reload and set conditions from previous sim f = open("temp.pickle", "wb") pickle.dump(self.wn2, f) f.close() f = open("temp.pickle", "rb") self.wn2 = pickle.load(f) f.close() + self.wn2.options.time.duration = 14*3600 sim = wntr.sim.WNTRSimulator(self.wn2) self.res3 = sim.run_sim(solver_options={'TOL':1e-8}) From 3bccc3467567e511ccf58f8b8e01c915c3a7bec2 Mon Sep 17 00:00:00 2001 From: kbonney Date: Mon, 20 Nov 2023 11:21:09 -0500 Subject: [PATCH 18/21] adding ability to run as script to test --- wntr/tests/test_reset_initial_values.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/wntr/tests/test_reset_initial_values.py b/wntr/tests/test_reset_initial_values.py index 25033cb9e..d142201a7 100644 --- a/wntr/tests/test_reset_initial_values.py +++ b/wntr/tests/test_reset_initial_values.py @@ -56,3 +56,6 @@ def test_links(self): for key in link_keys: max_res_diff = self.res4.link[key].max() self.assertLess(max_res_diff, tolerances[key]) + +if __name__ == "__main__": + unittest.main() From 29fa5c9db44a4e3632d639f7ca305768ef3a47ea Mon Sep 17 00:00:00 2001 From: kbonney Date: Mon, 20 Nov 2023 12:11:39 -0500 Subject: [PATCH 19/21] added back in resultsstatus class --- wntr/sim/results.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/wntr/sim/results.py b/wntr/sim/results.py index ad8c430df..1c069b758 100644 --- a/wntr/sim/results.py +++ b/wntr/sim/results.py @@ -4,6 +4,10 @@ import pandas as pd +class ResultsStatus(enum.IntEnum): + converged = 1 + error = 0 + class SimulationResults: """ Water network simulation results class. 
From 07b3ff8b3939a8164502e94011aa09104aab14f8 Mon Sep 17 00:00:00 2001 From: kbonney Date: Mon, 8 Jan 2024 13:14:21 -0500 Subject: [PATCH 20/21] Pulling in updates to status conversion from realtime branch --- wntr/epanet/io.py | 23 +++++++++++++++++++---- 1 file changed, 19 insertions(+), 4 deletions(-) diff --git a/wntr/epanet/io.py b/wntr/epanet/io.py index 27936ba3c..ef1ed4369 100644 --- a/wntr/epanet/io.py +++ b/wntr/epanet/io.py @@ -2808,11 +2808,26 @@ def read(self, filename, convergence_error=False, darcy_weisbach=False, convert= self.results.link["headloss"] = pd.DataFrame(data=headloss, columns=linknames, index=reporttimes) status = np.array(df['linkstatus']) + if self.convert_status: - status[status <= 2] = 0 - status[status == 3] = 1 - status[status >= 5] = 1 - status[status == 4] = 2 + """ + EPANET status codes + 0 = closed (max head exceeded) + 1 = temporarily closed + 2 = closed + 3 = open + 4 = active (partially open) + 5 = open (max flow exceeded) + 6 = open (flow setting not met) + 7 = open (press setting not met) + """ + # 0 = 0, treat closed (max head exceeded) pump as closed + # 1 = 1, treat temporarily closed pipe as open + status[status == 2] = 0 # 2 = 0, closed + status[status == 3] = 1 # 3 = 1, open + status[status == 4] = 2 # 4 = 2, active + status[status >= 5] = 1 # 5,6,7 = 1, treat valve open under different conditions as open + self.results.link['status'] = pd.DataFrame(data=status, columns=linknames, index=reporttimes) setting = np.array(df['linksetting']) From a94cb7c008f305098c6a4ffb90af8bf8b6ba95f1 Mon Sep 17 00:00:00 2001 From: kbonney Date: Fri, 7 Jun 2024 15:04:24 -0400 Subject: [PATCH 21/21] adding a test for stepwise simulation (24 1hr steps) --- wntr/tests/test_stepwise_sim.py | 82 +++++++++++++++++++++++++++++++++ 1 file changed, 82 insertions(+) create mode 100644 wntr/tests/test_stepwise_sim.py diff --git a/wntr/tests/test_stepwise_sim.py b/wntr/tests/test_stepwise_sim.py new file mode 100644 index 000000000..b1fa165dc --- /dev/null +++ b/wntr/tests/test_stepwise_sim.py @@ -0,0 +1,82 @@ +import unittest +from os.path import abspath, dirname, join +import wntr +import pandas as pd + +testdir = dirname(abspath(str(__file__))) +test_datadir = join(testdir, "networks_for_testing") +ex_datadir = join(testdir, "..", "..", "examples", "networks") + +tolerances = { + "flowrate": 1.0e-5, # 10 mL/s + "velocity": 1.0e-2, # 0.01 m/s + "demand": 1.0e-4, # 10 mL/s + "head": 1.0e-2, # 0.01 m + "pressure": 1.0e-2, # 0.01 m H2O + "headloss": 1.0e-2, # 0.01 + "status": 0.5, # i.e., 0 since this is integer + "setting": 1.0e-2, # 0.01 +} + + +class TestStepwiseSim(unittest.TestCase): + @classmethod + def setUpClass(self): + + inp_file = join(ex_datadir, "Net3.inp") + + # straight 24 hour simulation + wn = wntr.network.WaterNetworkModel(inp_file) + wn.options.time.hydraulic_timestep = 3600 + wn.options.time.duration = 24 * 3600 + sim = wntr.sim.EpanetSimulator(wn) + continuous_res = sim.run_sim() + + # 24 hour simulation done in 24 1-hour chunks + wn = wntr.network.WaterNetworkModel(inp_file) + wn.options.time.hydraulic_timestep = 3600 + wn.options.time.duration = 1 * 3600 + + for i in range(0,24): + # run simulation for ith step + sim = wntr.sim.EpanetSimulator(wn) + i_res = sim.run_sim() + + # update wn with ith results + wn.set_initial_conditions(i_res) + wn.options.time.pattern_start = wn.options.time.pattern_start + (1 * 3600) + + + # adjust time of ith results + start_time = i * 3600 + i_res._adjust_time(start_time) + + # concatenate step results + if i == 0: + 
stepwise_res = i_res + else: + stepwise_res.append(i_res, overwrite=True) + + self.diff_res = abs(continuous_res - stepwise_res).max() + + def test_nodes(self): + node_keys = ["demand", "head", "pressure"] + for key in node_keys: + max_res_diff = self.diff_res.node[key].max() + self.assertLess(max_res_diff, tolerances[key]) + + def test_links(self): + link_keys = ["flowrate", "velocity", "status", "setting", "headloss"] + for key in link_keys: + max_res_diff = self.diff_res.link[key].max() + self.assertLess(max_res_diff, tolerances[key]) + + + +# self.diff_res.node["pressure"].plot() + +# self.diff_res.link["status"].plot() + + +if __name__ == "__main__": + unittest.main() \ No newline at end of file
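The stepwise pattern introduced by test_stepwise_sim.py can likewise be condensed into a small helper. In the sketch below, run_in_chunks is a hypothetical name and the Net3.inp path is an assumption; the body mirrors the loop in the test (EpanetSimulator, set_initial_conditions, pattern_start, _adjust_time, append), and the final comparison uses the results arithmetic exercised earlier in this series.

# Sketch of the stepwise pattern from test_stepwise_sim.py above.
# run_in_chunks() is a hypothetical helper, not part of the patch;
# it simply mirrors the loop used in the test.
import wntr


def run_in_chunks(inp_file, n_chunks=24, step=3600):
    """Run an EPANET simulation in n_chunks steps of `step` seconds and
    stitch the per-chunk results into a single results object."""
    wn = wntr.network.WaterNetworkModel(inp_file)
    wn.options.time.hydraulic_timestep = step
    wn.options.time.duration = step

    stepwise_res = None
    for i in range(n_chunks):
        sim = wntr.sim.EpanetSimulator(wn)
        i_res = sim.run_sim()

        # carry the end-of-chunk state and pattern clock into the next chunk
        wn.set_initial_conditions(i_res)
        wn.options.time.pattern_start = wn.options.time.pattern_start + step

        # shift the chunk results onto the global time axis, then stitch
        i_res._adjust_time(i * step)
        if stepwise_res is None:
            stepwise_res = i_res
        else:
            stepwise_res.append(i_res, overwrite=True)
    return stepwise_res


# Example: compare against a single continuous run using the results
# arithmetic added earlier in this patch set.
wn = wntr.network.WaterNetworkModel("networks/Net3.inp")  # path is an assumption
wn.options.time.hydraulic_timestep = 3600
wn.options.time.duration = 24 * 3600
continuous_res = wntr.sim.EpanetSimulator(wn).run_sim()

stepwise_res = run_in_chunks("networks/Net3.inp")
diff = abs(continuous_res - stepwise_res).max()
print(diff.node["pressure"].max())  # largest pressure discrepancy, in m H2O

The overwrite=True flag on append and the private _adjust_time call are used here exactly as in the test; treat this as a sketch of the pattern rather than a reference implementation of the public API.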