diff --git a/disco/cli/disco.py b/disco/cli/disco.py index e8e0aa79..7b64112c 100644 --- a/disco/cli/disco.py +++ b/disco/cli/disco.py @@ -21,6 +21,7 @@ from disco.cli.make_summary_tables import make_summary_tables from disco.cli.compute_hosting_capacity import compute_hosting_capacity from disco.cli.plots import plot +from disco.cli.select_time_points import select_time_points from disco.cli.summarize_hosting_capacity import summarize_hosting_capacity from disco.cli.config_generic_models import config_generic_models from disco.cli.upgrade_cost_analysis import upgrade_cost_analysis @@ -54,6 +55,7 @@ def cli(): cli.add_command(ingest_tables) cli.add_command(install_extensions) cli.add_command(make_summary_tables) +cli.add_command(select_time_points) cli.add_command(summarize_hosting_capacity) cli.add_command(upgrade_cost_analysis) cli.add_command(hosting_capacity_by_timestep) diff --git a/disco/cli/select_time_points.py b/disco/cli/select_time_points.py new file mode 100644 index 00000000..e9913cfe --- /dev/null +++ b/disco/cli/select_time_points.py @@ -0,0 +1,148 @@ +#!/usr/bin/env python + +""" +Down-selects load shape time points in the circuit based on +user-specified critical conditions. 
+""" + +import logging +import shutil +import sys +from pathlib import Path + +import click + +from jade.loggers import setup_logging +from jade.utils.utils import get_cli_string + +from disco.preprocess.select_timepoints2 import ( + CriticalCondition, + DemandCategory, + GenerationCategory, + main, +) + + +logger = logging.getLogger(__name__) + + +@click.command() +@click.argument("master_file", type=click.Path(exists=True), callback=lambda *x: Path(x[2])) +@click.option( + "-c", + "--critical-conditions", + type=click.Choice([x.value for x in CriticalCondition]), + default=tuple(x.value for x in CriticalCondition), + show_default=True, + multiple=True, + callback=lambda *x: tuple(CriticalCondition(y) for y in x[2]), + help="critical conditions to use for time-point selection", +) +@click.option( + "-d", + "--demand-categories", + type=click.Choice([x.value for x in DemandCategory]), + default=tuple(x.value for x in DemandCategory), + show_default=True, + multiple=True, + callback=lambda *x: tuple(DemandCategory(y) for y in x[2]), + help="Demand-based devices to use in time-point selection algorithm", +) +@click.option( + "-g", + "--generation-categories", + type=click.Choice([x.value for x in GenerationCategory]), + default=tuple(x.value for x in GenerationCategory), + show_default=True, + multiple=True, + callback=lambda *x: tuple(GenerationCategory(y) for y in x[2]), + help="Generation-based devices to use in time-point selection algorithm", +) +@click.option( + "--feederhead-only", + default=False, + # is_flag=True, + show_default=True, + help="Select critical timepoints for feederhead bus only.", +) +@click.option( + "-o", + "--output", + default="output_time_points", + callback=lambda *x: Path(x[2]), + help="Output directory", +) +@click.option( + "--create-new-circuit/--no-create-new-circuit", + default=True, + is_flag=True, + show_default=True, + help="Create new circuit with down-selected time points.", +) +@click.option( + 
"--fix-master-file/--no-fix-master-file", + is_flag=True, + show_default=True, + default=False, + help="Remove commands in the Master.dss file that interfere with time-point selection.", +) +@click.option( + "-f", + "--force", + is_flag=True, + show_default=True, + default=False, + help="Delete output directory if it exists.", +) +@click.option( + "-v", + "--verbose", + is_flag=True, + show_default=True, + default=False, + help="Enabled debug logging.", +) +def select_time_points( + master_file, + demand_categories, + generation_categories, + critical_conditions, + feederhead_only, + output, + create_new_circuit, + fix_master_file, + force, + verbose, +): + """Select load shape time points in the circuit based on the specified critical conditions. + + By default, the Master.dss file is not allowed to enable time-series mode. Specify + --fix-master-file to disable time-series mode and other disallowed parameters. + + """ + if output.exists(): + if force: + shutil.rmtree(output) + else: + print( + f"Output directory {output} exists. 
Choose a different path or set --force.", + file=sys.stderr, + ) + sys.exit(1) + + output.mkdir() + level = logging.DEBUG if verbose else logging.INFO + log_file = output / "disco.log" + setup_logging("disco", log_file, console_level=level, packages=["disco"]) + logger.info(get_cli_string()) + categories = {"demand": demand_categories, "generation": generation_categories} + main( + master_file, + categories=categories, + critical_conditions=critical_conditions, + feederhead_only=feederhead_only, + destination_dir=output, + create_new_circuit=create_new_circuit, + fix_master_file=fix_master_file, + recreate_profiles=False, + ) diff --git a/disco/cli/upgrade_cost_analysis.py b/disco/cli/upgrade_cost_analysis.py index 9e7f4dff..3fd67041 100644 --- a/disco/cli/upgrade_cost_analysis.py +++ b/disco/cli/upgrade_cost_analysis.py @@ -20,7 +20,7 @@ from disco.exceptions import DiscoBaseException, get_error_code_from_exception from disco.models.base import OpenDssDeploymentModel from disco.models.upgrade_cost_analysis_generic_input_model import ( - UpgradeCostAnalysisSimulationModel + UpgradeCostAnalysisSimulationModel, UpgradeSimulationParamsModel ) from disco.models.upgrade_cost_analysis_generic_output_model import ( JobUpgradeSummaryOutputModel, @@ -218,7 +218,7 @@ def run( start = time.time() ret = EXIT_CODE_GOOD try: - run_job(job, config, jobs_output_dir, file_log_level) + run_job(job, config, jobs_output_dir) all_failed = False except DiscoBaseException as exc: logger.exception("Unexpected DISCO error in upgrade cost analysis job=%s", job.name) @@ -273,7 +273,7 @@ def _get_return_code_filename(output_dir, job_name): return output_dir / job_name / "return_code" -def run_job(job, config, jobs_output_dir, file_log_level): +def run_job(job, config, jobs_output_dir): job_output_dir = jobs_output_dir / job.name job_output_dir.mkdir(exist_ok=True) job = UpgradeParameters( @@ -284,21 +284,16 @@ def run_job(job, config, jobs_output_dir, file_log_level): feeder="NA", ), ) - + 
upgrade_simulation_params_names = list(UpgradeSimulationParamsModel.schema()["properties"].keys()) global_config = { "thermal_upgrade_params": config.thermal_upgrade_params.dict(), "voltage_upgrade_params": config.voltage_upgrade_params.dict(), - "upgrade_simulation_params": { - "enable_pydss_controller": config.enable_pydss_controllers, - }, + "upgrade_simulation_params": dict((k, config.dict()[k]) for k in upgrade_simulation_params_names), "upgrade_cost_database": config.upgrade_cost_database, - "dc_ac_ratio": config.dc_ac_ratio, } - global_config["upgrade_simulation_params"]["pydss_controller"] = None - if (config.pydss_controllers.pv_controller is not None) and config.enable_pydss_controllers: - global_config["upgrade_simulation_params"]["pydss_controller"] = ( - config.pydss_controllers.pv_controller.dict(), - ) + # replace PyDSS PV Controller dictionary with Model + if (global_config["upgrade_simulation_params"]["pydss_controllers"]["pv_controller"] is not None) and global_config["upgrade_simulation_params"]["enable_pydss_controllers"]: + global_config["upgrade_simulation_params"]["pydss_controllers"] = config.pydss_controllers.pv_controller simulation = UpgradeSimulation( job=job, @@ -306,13 +301,10 @@ def run_job(job, config, jobs_output_dir, file_log_level): output=jobs_output_dir, ) simulation.run( - dc_ac_ratio=global_config["dc_ac_ratio"], - enable_pydss_solve=global_config["upgrade_simulation_params"]["enable_pydss_controller"], - pydss_controller_model=config.pydss_controllers.pv_controller, thermal_config=global_config["thermal_upgrade_params"], voltage_config=global_config["voltage_upgrade_params"], + upgrade_simulation_params_config=global_config["upgrade_simulation_params"], cost_database_filepath=global_config["upgrade_cost_database"], - verbose=file_log_level == logging.DEBUG, ) diff --git a/disco/exceptions.py b/disco/exceptions.py index dc6e8d3d..9dd9a055 100644 --- a/disco/exceptions.py +++ b/disco/exceptions.py @@ -35,6 +35,10 @@ class 
OpenDssConvergenceError(DiscoBaseException): """Raise when OpenDSS fails to converge""" +class OpenDssModelDisconnectedError(DiscoBaseException): + """Raise when OpenDSS model has isolated elements""" + + class PyDssConvergenceError(DiscoBaseException): """Raise when PyDSS fails to converge""" diff --git a/disco/extensions/upgrade_simulation/cli.py b/disco/extensions/upgrade_simulation/cli.py index 76641102..24d9452d 100644 --- a/disco/extensions/upgrade_simulation/cli.py +++ b/disco/extensions/upgrade_simulation/cli.py @@ -10,6 +10,9 @@ from disco.extensions.upgrade_simulation.upgrade_simulation import UpgradeSimulation from disco.pydss.pydss_configuration_base import DEFAULT_CONTROLLER_CONFIG_FILE from disco.version import __version__ as disco_version +from disco.models.upgrade_cost_analysis_generic_input_model import ( + UpgradeCostAnalysisSimulationModel, UpgradeSimulationParamsModel +) logger = logging.getLogger(__name__) @@ -52,30 +55,30 @@ def run(config_file, name, output, output_format, verbose): job_global_config=config.job_global_config, output=output ) + upgrade_simulation_params_config = config.job_global_config["upgrade_simulation_params"] try: - upgrade_simulation_params = config.job_global_config["upgrade_simulation_params"] - dc_ac_ratio = upgrade_simulation_params["dc_ac_ratio"] - enable_pydss_controller = upgrade_simulation_params["enable_pydss_controller"] - if enable_pydss_controller: + thermal_config = config.job_global_config["thermal_upgrade_params"] + voltage_config = config.job_global_config["voltage_upgrade_params"] + cost_database_filepath = config.job_global_config["upgrade_cost_database"] + upgrade_simulation_params_names = list(UpgradeSimulationParamsModel.schema()["properties"].keys()) + fields = set(upgrade_simulation_params_names) & set(upgrade_simulation_params_config.keys()) + temp = {key: upgrade_simulation_params_config[key] for key in fields} + upgrade_simulation_params_config = UpgradeSimulationParamsModel(**temp).dict() + + 
if config.job_global_config["upgrade_simulation_params"]["enable_pydss_controllers"]: pv_controllers = load_data(DEFAULT_CONTROLLER_CONFIG_FILE) - pydss_controller_model = PvControllerModel( - **pv_controllers[upgrade_simulation_params["pydss_controller_name"]] + controller_model = PvControllerModel( + **pv_controllers[config.job_global_config["upgrade_simulation_params"]["pydss_controller_name"]] ) + upgrade_simulation_params_config["pydss_controllers"] = controller_model else: - pv_controllers = None - pydss_controller_model= None + upgrade_simulation_params_config["pydss_controllers"] = None - thermal_config = config.job_global_config["thermal_upgrade_params"] - voltage_config = config.job_global_config["voltage_upgrade_params"] - cost_database_filepath = config.job_global_config["upgrade_cost_database"] ret = simulation.run( - dc_ac_ratio = dc_ac_ratio, - enable_pydss_solve=enable_pydss_controller, - pydss_controller_model=pydss_controller_model, thermal_config=thermal_config, voltage_config=voltage_config, + upgrade_simulation_params_config=upgrade_simulation_params_config, cost_database_filepath=cost_database_filepath, - verbose=verbose ) return ret except Exception: diff --git a/disco/extensions/upgrade_simulation/upgrade_configuration.py b/disco/extensions/upgrade_simulation/upgrade_configuration.py index b8df532d..3309022f 100644 --- a/disco/extensions/upgrade_simulation/upgrade_configuration.py +++ b/disco/extensions/upgrade_simulation/upgrade_configuration.py @@ -30,8 +30,8 @@ def __init__(self, **kwargs): self._pydss_inputs = self.get_default_pydss_config() # Customize pydss config - if "enable_pydss_solve" in kwargs: - self.enable_pydss_solve(kwargs["enable_pydss_solve"]) + if "enable_pydss_controllers" in kwargs: + self.enable_pydss_controllers(kwargs["enable_pydss_controllers"]) @classmethod def auto_config(cls, inputs, **kwargs): @@ -59,8 +59,8 @@ def get_default_pydss_config(): # this method not in use, simply return an empty dict. 
return {} - def enable_pydss_solve(self, value: bool): - self._pydss_inputs[ConfigType.SIMULATION_CONFIG]["default"]["enable_pydss_solve"] = value + def enable_pydss_controllers(self, value: bool): + self._pydss_inputs[ConfigType.SIMULATION_CONFIG]["default"]["enable_pydss_controllers"] = value if value is True: message = "Enable PyDSS solve." else: diff --git a/disco/extensions/upgrade_simulation/upgrade_simulation.py b/disco/extensions/upgrade_simulation/upgrade_simulation.py index 04fa2e44..e6be67c2 100644 --- a/disco/extensions/upgrade_simulation/upgrade_simulation.py +++ b/disco/extensions/upgrade_simulation/upgrade_simulation.py @@ -114,20 +114,16 @@ def generate_command(job, output, config_file, verbose=False): def run( self, - enable_pydss_solve, - pydss_controller_model, - dc_ac_ratio, thermal_config, voltage_config, + upgrade_simulation_params_config, cost_database_filepath, - verbose=False ): determine_thermal_upgrades( job_name = self.job.name, master_path=self.model.deployment.deployment_file, - enable_pydss_solve=enable_pydss_solve, thermal_config=thermal_config, - pydss_volt_var_model=pydss_controller_model, + upgrade_simulation_params_config=upgrade_simulation_params_config, internal_upgrades_technical_catalog_filepath=self.internal_upgrades_technical_catalog_filepath(), thermal_upgrades_dss_filepath=self.get_thermal_upgrades_dss_file(), upgraded_master_dss_filepath=self.get_upgraded_master_dss_file(), @@ -135,16 +131,13 @@ def run( feeder_stats_json_file = self.get_feeder_stats_json_file(), thermal_upgrades_directory=self.get_thermal_upgrades_directory(), overall_output_summary_filepath=self.get_overall_output_summary_file(), - dc_ac_ratio=dc_ac_ratio, - verbose=verbose ) determine_voltage_upgrades( job_name = self.job.name, master_path=self.model.deployment.deployment_file, - enable_pydss_solve=enable_pydss_solve, - pydss_volt_var_model=pydss_controller_model, thermal_config=thermal_config, voltage_config=voltage_config, + 
upgrade_simulation_params_config=upgrade_simulation_params_config, thermal_upgrades_dss_filepath=self.get_thermal_upgrades_dss_file(), voltage_upgrades_dss_filepath=self.get_voltage_upgrades_dss_file(), upgraded_master_dss_filepath=self.get_upgraded_master_dss_file(), @@ -152,8 +145,6 @@ def run( feeder_stats_json_file = self.get_feeder_stats_json_file(), voltage_upgrades_directory=self.get_voltage_upgrades_directory(), overall_output_summary_filepath=self.get_overall_output_summary_file(), - dc_ac_ratio=dc_ac_ratio, - verbose=verbose ) compute_all_costs( job_name = self.job.name, diff --git a/disco/extensions/upgrade_simulation/upgrades/automated_thermal_upgrades.py b/disco/extensions/upgrade_simulation/upgrades/automated_thermal_upgrades.py index 243c9cad..7f430754 100644 --- a/disco/extensions/upgrade_simulation/upgrades/automated_thermal_upgrades.py +++ b/disco/extensions/upgrade_simulation/upgrades/automated_thermal_upgrades.py @@ -7,14 +7,14 @@ from jade.utils.utils import load_data, dump_data from .thermal_upgrade_functions import * -from .voltage_upgrade_functions import plot_thermal_violations, plot_voltage_violations, plot_feeder +from .voltage_upgrade_functions import plot_thermal_violations, plot_voltage_violations, plot_feeder, generate_networkx_representation, check_network_connectivity -from disco.models.upgrade_cost_analysis_generic_input_model import UpgradeTechnicalCatalogModel +from disco.models.upgrade_cost_analysis_generic_input_model import UpgradeTechnicalCatalogModel, UpgradeSimulationParamsModel from disco.models.upgrade_cost_analysis_generic_output_model import UpgradeViolationResultModel, AllUpgradesTechnicalResultModel from disco import timer_stats_collector from disco.enums import LoadMultiplierType from disco.exceptions import UpgradesInvalidViolationIncrease - +from disco.extensions.upgrade_simulation.upgrades.common_functions import CircuitSolveParams, ReloadCircuitParams logger = logging.getLogger(__name__) @@ -23,9 +23,8 @@ def 
determine_thermal_upgrades( job_name, master_path, - enable_pydss_solve, - pydss_volt_var_model, thermal_config, + upgrade_simulation_params_config, internal_upgrades_technical_catalog_filepath, thermal_upgrades_dss_filepath, upgraded_master_dss_filepath, @@ -33,27 +32,24 @@ def determine_thermal_upgrades( feeder_stats_json_file, thermal_upgrades_directory, overall_output_summary_filepath, - dc_ac_ratio, ignore_switch=True, - verbose=False ): start_time = time.time() - logger.info( f"Simulation start time: {start_time}") - initial_simulation_params = {"enable_pydss_solve": enable_pydss_solve, "pydss_volt_var_model": pydss_volt_var_model, - "dc_ac_ratio": dc_ac_ratio} - logger.info("Initial simulation parameters: %s", initial_simulation_params) + logger.info( f"Simulation start time: {start_time}") + analysis_params = SimulationParams(timepoint_multipliers=upgrade_simulation_params_config["timepoint_multipliers"], + timeseries_analysis=upgrade_simulation_params_config["timeseries_analysis"]) + logger.info( f"Analysis Parameters: {analysis_params}") + create_plots = thermal_config["create_plots"] + initial_solve_params = CircuitSolveParams(enable_pydss_controllers=upgrade_simulation_params_config["enable_pydss_controllers"]) + reload_circuit_params = ReloadCircuitParams(dc_ac_ratio=upgrade_simulation_params_config["dc_ac_ratio"], + pydss_volt_var_model=upgrade_simulation_params_config["pydss_controllers"] + ) # start upgrades initial_dss_file_list = [master_path] - simulation_params = reload_dss_circuit(dss_file_list=initial_dss_file_list, commands_list=None, **initial_simulation_params) - timepoint_multipliers = thermal_config["timepoint_multipliers"] - - if timepoint_multipliers is not None: - multiplier_type = LoadMultiplierType.UNIFORM - else: - multiplier_type = LoadMultiplierType.ORIGINAL - simulation_params.update({"timepoint_multipliers": timepoint_multipliers, "multiplier_type": multiplier_type}) - + solve_params = 
reload_dss_circuit(dss_file_list=initial_dss_file_list, commands_list=None, solve_params=initial_solve_params, reload_circuit_params=reload_circuit_params) + G = generate_networkx_representation() # check if circuit is disconnected + check_network_connectivity(G, raise_exception=True) voltage_upper_limit = thermal_config["voltage_upper_limit"] voltage_lower_limit = thermal_config["voltage_lower_limit"] if thermal_config["read_external_catalog"]: @@ -87,18 +83,19 @@ def determine_thermal_upgrades( xfmr_upgrade_options = pd.DataFrame.from_dict(input_catalog_model.dict(by_alias=True)["transformer"]) # get these feeder details before running powerflow feeder_stats = {"feeder_metadata": {}, "stage_results": []} - feeder_stats["feeder_metadata"].update(get_feeder_stats(dss)) + feeder_stats["feeder_metadata"].update(get_feeder_stats()) ( initial_bus_voltages_df, initial_undervoltage_bus_list, initial_overvoltage_bus_list, initial_buses_with_violations, - ) = get_bus_voltages(voltage_upper_limit=voltage_upper_limit, voltage_lower_limit=voltage_lower_limit, **simulation_params) + ) = get_bus_voltage_violations(voltage_upper_limit=voltage_upper_limit, voltage_lower_limit=voltage_lower_limit, analysis_params=analysis_params, + solve_params=solve_params) initial_xfmr_loading_df = get_thermal_equipment_info(compute_loading=True, upper_limit=thermal_config["transformer_upper_limit"], - equipment_type="transformer", **simulation_params) + equipment_type="transformer", analysis_params=analysis_params, solve_params=solve_params) initial_line_loading_df = get_thermal_equipment_info(compute_loading=True, upper_limit=thermal_config["line_upper_limit"], - equipment_type="line", ignore_switch=ignore_switch, **simulation_params) + equipment_type="line", ignore_switch=ignore_switch, analysis_params=analysis_params, solve_params=solve_params) initial_overloaded_xfmr_list = list(initial_xfmr_loading_df.loc[initial_xfmr_loading_df["status"] == "overloaded"]["name"].unique()) @@ -108,8 
+105,15 @@ def determine_thermal_upgrades( circuit_source = orig_ckt_info['source_bus'] orig_regcontrols_df = get_regcontrol_info(correct_PT_ratio=False) orig_capacitors_df = get_capacitor_info(correct_PT_ratio=False) - feeder_stats["stage_results"].append(get_upgrade_stage_stats(dss, upgrade_stage="initial", upgrade_type="thermal", xfmr_loading_df=initial_xfmr_loading_df, line_loading_df=initial_line_loading_df, - bus_voltages_df=initial_bus_voltages_df, capacitors_df=orig_capacitors_df, regcontrols_df=orig_regcontrols_df) ) + + feeder_stats["stage_results"].append(get_upgrade_stage_stats(upgrade_stage="initial", upgrade_type="thermal", xfmr_loading_df=initial_xfmr_loading_df, line_loading_df=initial_line_loading_df, + bus_voltages_df=initial_bus_voltages_df, capacitors_df=orig_capacitors_df, regcontrols_df=orig_regcontrols_df)) + if upgrade_simulation_params_config["timeseries_analysis"]: + feeder_stats["timeseries_stage_results"] = [] + timeseries_upgrade_stats = get_upgrade_stage_timeseries_stats(upgrade_stage="initial", upgrade_type="thermal", capacitors_df=orig_capacitors_df, regcontrols_df=orig_regcontrols_df, + transformer_upper_limit=thermal_config["transformer_upper_limit"], line_upper_limit=thermal_config["line_upper_limit"], + voltage_upper_limit=voltage_upper_limit, voltage_lower_limit=voltage_lower_limit, solve_params=solve_params, ignore_switch=ignore_switch) + feeder_stats["timeseries_stage_results"].append(timeseries_upgrade_stats) dump_data(feeder_stats, feeder_stats_json_file, indent=2) # save feeder stats if len(initial_overloaded_xfmr_list) > 0 or len(initial_overloaded_line_list) > 0: n = len(initial_overloaded_xfmr_list) + len(initial_overloaded_line_list) @@ -123,7 +127,7 @@ def determine_thermal_upgrades( else: upgrade_status = "Thermal Upgrades not Required" # status - whether upgrades done or not logger.info(upgrade_status) - scenario = get_scenario_name(enable_pydss_solve, pydss_volt_var_model) + scenario = 
get_scenario_name(enable_pydss_controllers=solve_params.enable_pydss_controllers, pydss_volt_var_model=reload_circuit_params.pydss_volt_var_model) initial_results = UpgradeViolationResultModel( name=job_name, scenario=scenario, @@ -171,7 +175,7 @@ def determine_thermal_upgrades( while (len(overloaded_line_list) > 0 or len(overloaded_xfmr_list) > 0) and ( iteration_counter < max_upgrade_iteration): line_loading_df = get_thermal_equipment_info(compute_loading=True, upper_limit=thermal_config["line_upper_limit"], - equipment_type="line", ignore_switch=ignore_switch, **simulation_params) + equipment_type="line", ignore_switch=ignore_switch, analysis_params=analysis_params, solve_params=solve_params) overloaded_line_list = list(line_loading_df.loc[line_loading_df["status"] == "overloaded"]["name"].unique()) logger.info(f"Iteration_{iteration_counter}: Determined line loadings.") logger.info(f"Iteration_{iteration_counter}: Number of line violations: {len(overloaded_line_list)}") @@ -182,12 +186,12 @@ def determine_thermal_upgrades( line_design_pu=thermal_config["line_design_pu"], line_upgrade_options=line_upgrade_options.copy(deep=True), parallel_lines_limit=thermal_config["parallel_lines_limit"], - external_upgrades_technical_catalog=external_upgrades_technical_catalog,) + external_upgrades_technical_catalog=external_upgrades_technical_catalog, solve_params=solve_params) logger.info(f"Iteration_{iteration_counter}: Corrected line violations.") commands_list = commands_list + line_commands_list line_upgrades_df = pd.concat([line_upgrades_df, temp_line_upgrades_df]) xfmr_loading_df = get_thermal_equipment_info(compute_loading=True, upper_limit=thermal_config["transformer_upper_limit"], - equipment_type="transformer", **simulation_params) + equipment_type="transformer", analysis_params=analysis_params, solve_params=solve_params) overloaded_xfmr_list = list(xfmr_loading_df.loc[xfmr_loading_df["status"] == "overloaded"]["name"].unique()) 
logger.info(f"Iteration_{iteration_counter}: Determined xfmr loadings.") logger.info(f"Iteration_{iteration_counter}: Number of xfmr violations: {len(overloaded_xfmr_list)}") @@ -198,16 +202,16 @@ def determine_thermal_upgrades( xfmr_loading_df=xfmr_loading_df, xfmr_design_pu=thermal_config["transformer_design_pu"], xfmr_upgrade_options=xfmr_upgrade_options.copy(deep=True), - parallel_transformers_limit=thermal_config["parallel_transformers_limit"]) + parallel_transformers_limit=thermal_config["parallel_transformers_limit"], solve_params=solve_params) logger.info(f"Iteration_{iteration_counter}: Corrected xfmr violations.") commands_list = commands_list + xfmr_commands_list xfmr_upgrades_df = pd.concat([xfmr_upgrades_df, temp_xfmr_upgrades_df]) # compute loading after upgrades xfmr_loading_df = get_thermal_equipment_info(compute_loading=True, upper_limit=thermal_config["transformer_upper_limit"], - equipment_type="transformer", **simulation_params) + equipment_type="transformer", analysis_params=analysis_params, solve_params=solve_params) overloaded_xfmr_list = list(xfmr_loading_df.loc[xfmr_loading_df["status"] == "overloaded"]["name"].unique()) line_loading_df = get_thermal_equipment_info(compute_loading=True, upper_limit=thermal_config["line_upper_limit"], - equipment_type="line", ignore_switch=ignore_switch, **simulation_params) + equipment_type="line", ignore_switch=ignore_switch, analysis_params=analysis_params, solve_params=solve_params) overloaded_line_list = list(line_loading_df.loc[line_loading_df["status"] == "overloaded"]["name"].unique()) if len(overloaded_line_list) > before_upgrade_num_line_violations: @@ -239,13 +243,15 @@ def determine_thermal_upgrades( redirect_command_list = create_upgraded_master_dss(dss_file_list=initial_dss_file_list + [thermal_upgrades_dss_filepath], upgraded_master_dss_filepath=upgraded_master_dss_filepath, original_master_filename=os.path.basename(master_path)) write_text_file(string_list=redirect_command_list, 
text_file_path=upgraded_master_dss_filepath) - reload_dss_circuit(dss_file_list=[upgraded_master_dss_filepath], commands_list=None, **simulation_params,) - bus_voltages_df, undervoltage_bus_list, overvoltage_bus_list, buses_with_violations = get_bus_voltages(voltage_upper_limit=voltage_upper_limit, - voltage_lower_limit=voltage_lower_limit, **simulation_params) + + reload_dss_circuit(dss_file_list=[upgraded_master_dss_filepath], commands_list=None, solve_params=solve_params, reload_circuit_params=reload_circuit_params) + bus_voltages_df, undervoltage_bus_list, overvoltage_bus_list, buses_with_violations = get_bus_voltage_violations(voltage_upper_limit=voltage_upper_limit, + voltage_lower_limit=voltage_lower_limit, + analysis_params=analysis_params, solve_params=solve_params) xfmr_loading_df = get_thermal_equipment_info(compute_loading=True, upper_limit=thermal_config["transformer_upper_limit"], - equipment_type="transformer", **simulation_params) + equipment_type="transformer", analysis_params=analysis_params, solve_params=solve_params) line_loading_df = get_thermal_equipment_info(compute_loading=True, upper_limit=thermal_config["line_upper_limit"], - equipment_type="line", ignore_switch=ignore_switch, **simulation_params) + equipment_type="line", ignore_switch=ignore_switch, analysis_params=analysis_params, solve_params=solve_params) overloaded_xfmr_list = list(xfmr_loading_df.loc[xfmr_loading_df["status"] == "overloaded"]["name"].unique()) overloaded_line_list = list(line_loading_df.loc[line_loading_df["status"] == "overloaded"]["name"].unique()) # same equipment could be upgraded(edited) multiple times. Only consider highest capacity upgrade done. original_equipment details are currently not used. 
@@ -269,8 +275,17 @@ def determine_thermal_upgrades( feeder_stats = {} regcontrols_df = get_regcontrol_info(correct_PT_ratio=False) capacitors_df = get_capacitor_info(correct_PT_ratio=False) - feeder_stats["stage_results"].append( get_upgrade_stage_stats(dss, upgrade_stage="final", upgrade_type="thermal", xfmr_loading_df=xfmr_loading_df, line_loading_df=line_loading_df, - bus_voltages_df=bus_voltages_df, capacitors_df=capacitors_df, regcontrols_df=regcontrols_df) ) + feeder_stats["stage_results"].append(get_upgrade_stage_stats(upgrade_stage="final", upgrade_type="thermal", xfmr_loading_df=xfmr_loading_df, line_loading_df=line_loading_df, + bus_voltages_df=bus_voltages_df, capacitors_df=capacitors_df, regcontrols_df=regcontrols_df)) + if upgrade_simulation_params_config["timeseries_analysis"]: + timeseries_upgrade_stats = get_upgrade_stage_timeseries_stats(upgrade_stage="final", upgrade_type="thermal", capacitors_df=orig_capacitors_df, regcontrols_df=orig_regcontrols_df, + transformer_upper_limit=thermal_config["transformer_upper_limit"], line_upper_limit=thermal_config["line_upper_limit"], + voltage_upper_limit=voltage_upper_limit, voltage_lower_limit=voltage_lower_limit,solve_params=solve_params, ignore_switch=ignore_switch ) + feeder_stats["timeseries_stage_results"].append(timeseries_upgrade_stats) + # TODO: add description about this file required. 
this is to be added to input config: timeseries_analysis + if not "timeseries_metadata" in feeder_stats.keys(): + metadata_filename = os.path.join(os.path.dirname(master_path), "..", "metadata.csv") + feeder_stats["timeseries_metadata"] = pd.read_csv(metadata_filename).to_dict("records") dump_data(feeder_stats, feeder_stats_json_file, indent=2) end_time = time.time() logger.info(f"Simulation end time: {end_time}") diff --git a/disco/extensions/upgrade_simulation/upgrades/automated_voltage_upgrades.py b/disco/extensions/upgrade_simulation/upgrades/automated_voltage_upgrades.py index 4798b443..06a257ed 100644 --- a/disco/extensions/upgrade_simulation/upgrades/automated_voltage_upgrades.py +++ b/disco/extensions/upgrade_simulation/upgrades/automated_voltage_upgrades.py @@ -23,10 +23,9 @@ def determine_voltage_upgrades( job_name, master_path, - enable_pydss_solve, - pydss_volt_var_model, thermal_config, voltage_config, + upgrade_simulation_params_config, thermal_upgrades_dss_filepath, voltage_upgrades_dss_filepath, upgraded_master_dss_filepath, @@ -34,18 +33,10 @@ def determine_voltage_upgrades( feeder_stats_json_file, voltage_upgrades_directory, overall_output_summary_filepath, - dc_ac_ratio, ignore_switch=True, - verbose=False ): start_time = time.time() logger.info(f"Simulation Start time: {start_time}") - timepoint_multipliers = voltage_config["timepoint_multipliers"] - if timepoint_multipliers is not None: - multiplier_type = LoadMultiplierType.UNIFORM - else: - multiplier_type = LoadMultiplierType.ORIGINAL - create_plots = voltage_config["create_plots"] # default_capacitor settings and customization default_capacitor_settings = DEFAULT_CAPACITOR_SETTINGS default_capacitor_settings["capON"] = round( @@ -72,17 +63,24 @@ def determine_voltage_upgrades( if not os.path.exists(thermal_upgrades_dss_filepath): raise Exception(f"AutomatedThermalUpgrade did not produce thermal upgrades dss file") - initial_simulation_params = {"enable_pydss_solve": enable_pydss_solve, 
"pydss_volt_var_model": pydss_volt_var_model, - "dc_ac_ratio": dc_ac_ratio, "max_control_iterations": voltage_config["max_control_iterations"]} + create_plots = voltage_config["create_plots"] + analysis_params = SimulationParams(timepoint_multipliers=upgrade_simulation_params_config["timepoint_multipliers"], + timeseries_analysis=upgrade_simulation_params_config["timeseries_analysis"]) + initial_solve_params = CircuitSolveParams(enable_pydss_controllers=upgrade_simulation_params_config["enable_pydss_controllers"]) + reload_circuit_params = ReloadCircuitParams( + dc_ac_ratio=upgrade_simulation_params_config["dc_ac_ratio"], + pydss_volt_var_model=upgrade_simulation_params_config["pydss_controllers"], + ) + # start upgrades initial_dss_file_list = [master_path, thermal_upgrades_dss_filepath] - simulation_params = reload_dss_circuit(dss_file_list=initial_dss_file_list, commands_list=None, **initial_simulation_params) - simulation_params.update({"timepoint_multipliers": timepoint_multipliers, "multiplier_type": multiplier_type}) + solve_params = reload_dss_circuit(dss_file_list=initial_dss_file_list, commands_list=None, solve_params=initial_solve_params, reload_circuit_params=reload_circuit_params) + G = generate_networkx_representation() # check if circuit is disconnected + check_network_connectivity(G, raise_exception=True) # reading original objects (before upgrades) orig_ckt_info = get_circuit_info() orig_xfmrs_df = get_thermal_equipment_info(compute_loading=False, equipment_type="transformer") orig_regcontrols_df = get_regcontrol_info(correct_PT_ratio=True, nominal_voltage=voltage_config["nominal_voltage"]) orig_capacitors_df = get_capacitor_info(correct_PT_ratio=True, nominal_voltage=voltage_config['nominal_voltage']) - # Initialize dss upgrades file dss_commands_list = ["//This file has all the voltage upgrades\n"] upgrade_status = '' # status - whether voltage upgrades done or not @@ -93,13 +91,13 @@ def determine_voltage_upgrades( voltage_lower_limit = 
voltage_config["initial_lower_limit"] initial_xfmr_loading_df = get_thermal_equipment_info(compute_loading=True, upper_limit=thermal_config["transformer_upper_limit"], - equipment_type="transformer", **simulation_params) + equipment_type="transformer", analysis_params=analysis_params, solve_params=solve_params) initial_line_loading_df = get_thermal_equipment_info(compute_loading=True, upper_limit=thermal_config["line_upper_limit"], - equipment_type="line", ignore_switch=ignore_switch, **simulation_params) + equipment_type="line", ignore_switch=ignore_switch, analysis_params=analysis_params, solve_params=solve_params) initial_bus_voltages_df, initial_undervoltage_bus_list, initial_overvoltage_bus_list, \ - initial_buses_with_violations = get_bus_voltages( + initial_buses_with_violations = get_bus_voltage_violations( voltage_upper_limit=thermal_config['voltage_upper_limit'], voltage_lower_limit=thermal_config['voltage_lower_limit'], - **simulation_params) + analysis_params=analysis_params, solve_params=solve_params) initial_overloaded_xfmr_list = list(initial_xfmr_loading_df.loc[initial_xfmr_loading_df['status'] == 'overloaded']['name'].unique()) @@ -110,10 +108,15 @@ def determine_voltage_upgrades( feeder_stats = load_data(feeder_stats_json_file) else: feeder_stats = {"stage_results": []} - feeder_stats["stage_results"].append( get_upgrade_stage_stats(dss, upgrade_stage="initial", upgrade_type="voltage", xfmr_loading_df=initial_xfmr_loading_df, line_loading_df=initial_line_loading_df, + feeder_stats["stage_results"].append(get_upgrade_stage_stats(upgrade_stage="initial", upgrade_type="voltage", xfmr_loading_df=initial_xfmr_loading_df, line_loading_df=initial_line_loading_df, bus_voltages_df=initial_bus_voltages_df, regcontrols_df=orig_regcontrols_df, capacitors_df=orig_capacitors_df) ) + if upgrade_simulation_params_config["timeseries_analysis"]: + timeseries_upgrade_stats = get_upgrade_stage_timeseries_stats(upgrade_stage="initial", upgrade_type="voltage", 
capacitors_df=orig_capacitors_df, regcontrols_df=orig_regcontrols_df, + transformer_upper_limit=thermal_config["transformer_upper_limit"], line_upper_limit=thermal_config["line_upper_limit"], + voltage_upper_limit=voltage_upper_limit, voltage_lower_limit=voltage_lower_limit, solve_params=solve_params, ignore_switch=ignore_switch) + feeder_stats["timeseries_stage_results"].append(timeseries_upgrade_stats) dump_data(feeder_stats, feeder_stats_json_file, indent=2) - scenario = get_scenario_name(enable_pydss_solve, pydss_volt_var_model) + scenario = get_scenario_name(enable_pydss_controllers=solve_params.enable_pydss_controllers, pydss_volt_var_model=reload_circuit_params.pydss_volt_var_model) initial_results = UpgradeViolationResultModel( name = job_name, scenario = scenario, @@ -144,8 +147,8 @@ def determine_voltage_upgrades( overall_outputs = {"violation_summary": [temp_results]} dump_data(overall_outputs, overall_output_summary_filepath, indent=2, allow_nan=False) circuit_source = orig_ckt_info["source_bus"] - bus_voltages_df, undervoltage_bus_list, overvoltage_bus_list, buses_with_violations = get_bus_voltages( - voltage_upper_limit=voltage_upper_limit, voltage_lower_limit=voltage_lower_limit, **simulation_params) + bus_voltages_df, undervoltage_bus_list, overvoltage_bus_list, buses_with_violations = get_bus_voltage_violations( + voltage_upper_limit=voltage_upper_limit, voltage_lower_limit=voltage_lower_limit, analysis_params=analysis_params, solve_params=solve_params) logger.info(f"Number of overvoltage violations: {len(overvoltage_bus_list)}") logger.info(f"Number of undervoltage violations: {len(undervoltage_bus_list)}") # if there are no buses with violations based on initial check, don't get into upgrade process @@ -164,27 +167,31 @@ def determine_voltage_upgrades( upgrade_status = 'Voltage Upgrades Required' # status - whether voltage upgrades done or not logger.info("Voltage Upgrades Required.") comparison_dict = {"original": 
compute_voltage_violation_severity( - voltage_upper_limit=voltage_upper_limit, voltage_lower_limit=voltage_lower_limit, **simulation_params)} + voltage_upper_limit=voltage_upper_limit, voltage_lower_limit=voltage_lower_limit, analysis_params=analysis_params, solve_params=solve_params)} best_setting_so_far = "original" # start with capacitors if voltage_config["capacitor_action_flag"] and len(orig_capacitors_df) > 0: - capacitor_dss_commands = determine_capacitor_upgrades(voltage_upper_limit, voltage_lower_limit, default_capacitor_settings, orig_capacitors_df, - voltage_config, deciding_field, fig_folder=os.path.join(voltage_upgrades_directory, "interim"), - create_plots=create_plots, circuit_source=circuit_source,**simulation_params) + capacitor_dss_commands = determine_capacitor_upgrades(voltage_upper_limit=voltage_upper_limit, voltage_lower_limit=voltage_lower_limit, + default_capacitor_settings=default_capacitor_settings, orig_capacitors_df=orig_capacitors_df, + voltage_config=voltage_config, deciding_field=deciding_field, fig_folder=os.path.join(voltage_upgrades_directory, "interim"), + create_plots=create_plots, circuit_source=circuit_source, analysis_params=analysis_params, + solve_params=solve_params, title="Bus violations after existing capacitor sweep module_") - bus_voltages_df, undervoltage_bus_list, overvoltage_bus_list, buses_with_violations = get_bus_voltages( - voltage_upper_limit=voltage_upper_limit, voltage_lower_limit=voltage_lower_limit, **simulation_params) + bus_voltages_df, undervoltage_bus_list, overvoltage_bus_list, buses_with_violations = get_bus_voltage_violations( + voltage_upper_limit=voltage_upper_limit, voltage_lower_limit=voltage_lower_limit, analysis_params=analysis_params, solve_params=solve_params) if (len(buses_with_violations) > 0): # if violations increased after capacitor modifications, remove the capacitor changes. 
- comparison_dict["after_capacitor_modifications"] = compute_voltage_violation_severity(voltage_upper_limit=voltage_upper_limit, voltage_lower_limit=voltage_lower_limit, **simulation_params) + comparison_dict["after_capacitor_modifications"] = compute_voltage_violation_severity(voltage_upper_limit=voltage_upper_limit, voltage_lower_limit=voltage_lower_limit, + analysis_params=analysis_params, solve_params=solve_params) if comparison_dict["after_capacitor_modifications"][deciding_field] < comparison_dict[best_setting_so_far][deciding_field]: best_setting_so_far = "after_capacitor_modifications" dss_commands_list = dss_commands_list + capacitor_dss_commands else: - reload_dss_circuit(dss_file_list=initial_dss_file_list, commands_list=dss_commands_list, **simulation_params) - bus_voltages_df, undervoltage_bus_list, overvoltage_bus_list, buses_with_violations = get_bus_voltages( - voltage_upper_limit=voltage_upper_limit, voltage_lower_limit=voltage_lower_limit, **simulation_params) + + reload_dss_circuit(dss_file_list=initial_dss_file_list, commands_list=dss_commands_list, solve_params=solve_params, reload_circuit_params=reload_circuit_params) + bus_voltages_df, undervoltage_bus_list, overvoltage_bus_list, buses_with_violations = get_bus_voltage_violations( + voltage_upper_limit=voltage_upper_limit, voltage_lower_limit=voltage_lower_limit, analysis_params=analysis_params, solve_params=solve_params) else: logger.info("No capacitor banks exist in the system") # next: existing regulators @@ -199,17 +206,18 @@ def determine_voltage_upgrades( exclude_sub_ltc=True, only_sub_ltc=False, previous_dss_commands_list=dss_commands_list, fig_folder=os.path.join(voltage_upgrades_directory, "interim"), create_plots=create_plots, circuit_source=circuit_source, title="Bus violations after existing vreg sweep", - **simulation_params) + analysis_params=analysis_params, solve_params=solve_params, reload_circuit_params=reload_circuit_params) # added to commands list only if it is different 
from original dss_commands_list = dss_commands_list + reg_sweep_commands_list # determine voltage violations after changes - bus_voltages_df, undervoltage_bus_list, overvoltage_bus_list, buses_with_violations = get_bus_voltages(voltage_upper_limit=voltage_upper_limit, voltage_lower_limit=voltage_lower_limit, **simulation_params) + bus_voltages_df, undervoltage_bus_list, overvoltage_bus_list, buses_with_violations = get_bus_voltage_violations(voltage_upper_limit=voltage_upper_limit, voltage_lower_limit=voltage_lower_limit, + analysis_params=analysis_params, solve_params=solve_params) # Writing out the results before adding new devices logger.info("Write upgrades to dss file, before adding new devices.") write_text_file(string_list=dss_commands_list, text_file_path=voltage_upgrades_dss_filepath) # Use this block for adding a substation LTC, correcting its settings and running a sub LTC settings sweep. comparison_dict["before_addition_of_new_device"]= compute_voltage_violation_severity( - voltage_upper_limit=voltage_upper_limit, voltage_lower_limit=voltage_lower_limit, **simulation_params) + voltage_upper_limit=voltage_upper_limit, voltage_lower_limit=voltage_lower_limit, analysis_params=analysis_params, solve_params=solve_params) best_setting_so_far = "before_addition_of_new_device" if (voltage_config['use_ltc_placement']) and (len(buses_with_violations) > 0): subltc_results_dict = determine_substation_ltc_upgrades(voltage_upper_limit=voltage_upper_limit, voltage_lower_limit=voltage_lower_limit, @@ -217,14 +225,15 @@ def determine_voltage_upgrades( default_subltc_settings=default_subltc_settings, voltage_config=voltage_config, dss_file_list=initial_dss_file_list, comparison_dict=comparison_dict, deciding_field=deciding_field, previous_dss_commands_list=dss_commands_list, best_setting_so_far=best_setting_so_far, fig_folder=os.path.join(voltage_upgrades_directory, "interim"), create_plots=create_plots, - default_capacitor_settings=default_capacitor_settings, 
**simulation_params) + default_capacitor_settings=default_capacitor_settings, solve_params=solve_params, reload_circuit_params=reload_circuit_params, + analysis_params=analysis_params) best_setting_so_far = subltc_results_dict["best_setting_so_far"] comparison_dict = subltc_results_dict["comparison_dict"] subltc_upgrade_commands = subltc_results_dict["subltc_upgrade_commands"] dss_commands_list = dss_commands_list + subltc_upgrade_commands # determine voltage violations after changes - bus_voltages_df, undervoltage_bus_list, overvoltage_bus_list, buses_with_violations = get_bus_voltages( - voltage_upper_limit=voltage_upper_limit, voltage_lower_limit=voltage_lower_limit, **simulation_params) + bus_voltages_df, undervoltage_bus_list, overvoltage_bus_list, buses_with_violations = get_bus_voltage_violations( + voltage_upper_limit=voltage_upper_limit, voltage_lower_limit=voltage_lower_limit, analysis_params=analysis_params, solve_params=solve_params) if len(buses_with_violations) >= min((100 * len(initial_buses_with_violations)), 500, len(dss.Circuit.AllBusNames())): # if number of buses with violations is very high, the loop for adding new regulators will take very long @@ -240,16 +249,18 @@ def determine_voltage_upgrades( default_regcontrol_settings=default_regcontrol_settings, comparison_dict=comparison_dict, best_setting_so_far=best_setting_so_far, dss_file_list=initial_dss_file_list, previous_dss_commands_list=dss_commands_list, fig_folder=os.path.join(voltage_upgrades_directory, "interim"), - create_plots=create_plots, **simulation_params) + create_plots=create_plots, analysis_params=analysis_params, solve_params=solve_params, reload_circuit_params=reload_circuit_params) best_setting_so_far = new_reg_results_dict["best_setting_so_far"] comparison_dict = new_reg_results_dict["comparison_dict"] new_reg_upgrade_commands = new_reg_results_dict["new_reg_upgrade_commands"] dss_commands_list = dss_commands_list + new_reg_upgrade_commands # determine voltage violations 
after changes - bus_voltages_df, undervoltage_bus_list, overvoltage_bus_list, buses_with_violations = get_bus_voltages( - voltage_upper_limit=voltage_upper_limit, voltage_lower_limit=voltage_lower_limit, **simulation_params) + bus_voltages_df, undervoltage_bus_list, overvoltage_bus_list, buses_with_violations = get_bus_voltage_violations( + voltage_upper_limit=voltage_upper_limit, voltage_lower_limit=voltage_lower_limit, analysis_params=analysis_params, solve_params=solve_params) - dss_commands_list.append(f"Set MaxControlIter={simulation_params['max_control_iterations']}") + if reload_circuit_params.max_control_iterations is not None: + dss_commands_list.append(f"Set MaxControlIter={reload_circuit_params.max_control_iterations}") + if any("new " in string.lower() for string in dss_commands_list): # if new equipment is added. dss_commands_list.append("CalcVoltageBases") dss_commands_list.append("Solve") @@ -257,7 +268,7 @@ def determine_voltage_upgrades( redirect_command_list = create_upgraded_master_dss(dss_file_list=initial_dss_file_list + [voltage_upgrades_dss_filepath], upgraded_master_dss_filepath=upgraded_master_dss_filepath, original_master_filename=os.path.basename(master_path)) write_text_file(string_list=redirect_command_list, text_file_path=upgraded_master_dss_filepath) - reload_dss_circuit(dss_file_list=[upgraded_master_dss_filepath], commands_list=None, **simulation_params,) + reload_dss_circuit(dss_file_list=[upgraded_master_dss_filepath], commands_list=None, solve_params=solve_params, reload_circuit_params=reload_circuit_params) # reading new objects (after upgrades) new_ckt_info = get_circuit_info() new_regcontrols_df = get_regcontrol_info(correct_PT_ratio=True, nominal_voltage=voltage_config["nominal_voltage"]) @@ -269,13 +280,13 @@ def determine_voltage_upgrades( m = AllUpgradesTechnicalResultModel(voltage=all_processed) temp = { "voltage": m.dict(by_alias=True)["voltage"]} dump_data(temp, output_json_voltage_upgrades_filepath, indent=2) - 
bus_voltages_df, undervoltage_bus_list, overvoltage_bus_list, buses_with_violations = get_bus_voltages( - voltage_upper_limit=voltage_upper_limit, voltage_lower_limit=voltage_lower_limit, **simulation_params) + bus_voltages_df, undervoltage_bus_list, overvoltage_bus_list, buses_with_violations = get_bus_voltage_violations( + voltage_upper_limit=voltage_upper_limit, voltage_lower_limit=voltage_lower_limit, analysis_params=analysis_params, solve_params=solve_params) xfmr_loading_df = get_thermal_equipment_info(compute_loading=True, upper_limit=thermal_config["transformer_upper_limit"], - equipment_type="transformer", **simulation_params) + equipment_type="transformer", analysis_params=analysis_params, solve_params=solve_params) line_loading_df = get_thermal_equipment_info(compute_loading=True, upper_limit=thermal_config["line_upper_limit"], - equipment_type="line", ignore_switch=ignore_switch, **simulation_params) + equipment_type="line", ignore_switch=ignore_switch, analysis_params=analysis_params, solve_params=solve_params) overloaded_xfmr_list = list(xfmr_loading_df.loc[xfmr_loading_df['status'] == 'overloaded']['name'].unique()) overloaded_line_list = list(line_loading_df.loc[line_loading_df['status'] == 'overloaded']['name'].unique()) if (upgrade_status == "Voltage Upgrades Required") and create_plots: @@ -285,13 +296,22 @@ def determine_voltage_upgrades( feeder_stats = load_data(feeder_stats_json_file) else: feeder_stats = {} - feeder_stats["stage_results"].append( get_upgrade_stage_stats(dss, upgrade_stage="final", upgrade_type="voltage", xfmr_loading_df=xfmr_loading_df, line_loading_df=line_loading_df, + feeder_stats["stage_results"].append( get_upgrade_stage_stats(upgrade_stage="final", upgrade_type="voltage", xfmr_loading_df=xfmr_loading_df, line_loading_df=line_loading_df, bus_voltages_df=bus_voltages_df, regcontrols_df=new_regcontrols_df, capacitors_df=new_capacitors_df) ) + if upgrade_simulation_params_config["timeseries_analysis"]: + 
timeseries_upgrade_stats = get_upgrade_stage_timeseries_stats(upgrade_stage="final", upgrade_type="voltage", capacitors_df=orig_capacitors_df, regcontrols_df=orig_regcontrols_df, + transformer_upper_limit=thermal_config["transformer_upper_limit"], line_upper_limit=thermal_config["line_upper_limit"], + voltage_upper_limit=voltage_upper_limit, voltage_lower_limit=voltage_lower_limit, solve_params=solve_params, ignore_switch=ignore_switch) + feeder_stats["timeseries_stage_results"].append(timeseries_upgrade_stats) + if not "timeseries_metadata" in feeder_stats.keys(): + metadata_filename = os.path.join(os.path.dirname(master_path), "..", "metadata.csv") + feeder_stats["timeseries_metadata"] = pd.read_csv(metadata_filename).to_dict("records") dump_data(feeder_stats, feeder_stats_json_file, indent=2) end_time = time.time() logger.info(f"Simulation end time: {end_time}") simulation_time = end_time - start_time logger.info(f"Simulation time: {simulation_time}") + final_results = UpgradeViolationResultModel( name = job_name, scenario = scenario, diff --git a/disco/extensions/upgrade_simulation/upgrades/common_functions.py b/disco/extensions/upgrade_simulation/upgrades/common_functions.py index a779fe41..ae67bace 100644 --- a/disco/extensions/upgrade_simulation/upgrades/common_functions.py +++ b/disco/extensions/upgrade_simulation/upgrades/common_functions.py @@ -6,9 +6,11 @@ import numpy as np import pandas as pd import opendssdirect as dss +from pydantic import validator +from typing import Any, List, Dict from .pydss_parameters import * -from jade.utils.timing_utils import track_timing, Timer +from jade.utils.timing_utils import track_timing from disco import timer_stats_collector from disco.enums import LoadMultiplierType @@ -20,6 +22,7 @@ InvalidOpenDssElementError, ) from disco.models.upgrade_cost_analysis_generic_input_model import ( + UpgradeParamsBaseModel, _extract_specific_model_properties_, LineCodeCatalogModel, LineGeometryCatalogModel, LineModel, 
LineCatalogModel, @@ -42,8 +45,36 @@ 0: "none" # 0 maps to none, which means impedance units and line length units match } + +class CircuitSolveParams(UpgradeParamsBaseModel): + raise_exception: bool = True + calcvoltagebases: bool = False + enable_pydss_controllers: bool = False + pydss_controller_manager: Any = None + + +class ReloadCircuitParams(UpgradeParamsBaseModel): + dc_ac_ratio: float = None + max_control_iterations: int = None + pydss_volt_var_model: Any = None + + +class SimulationParams(UpgradeParamsBaseModel): + timepoint_multipliers: Dict = {} + timeseries_analysis: bool = False + multiplier_type: LoadMultiplierType = LoadMultiplierType.ORIGINAL + + @validator("multiplier_type") + def check_multiplier_type(cls, multiplier_type, values): + if values["timepoint_multipliers"]: + multiplier_type = LoadMultiplierType.UNIFORM + else: + multiplier_type = LoadMultiplierType.ORIGINAL + return multiplier_type + + @track_timing(timer_stats_collector) -def reload_dss_circuit(dss_file_list, commands_list=None, **kwargs): +def reload_dss_circuit(dss_file_list: List, solve_params: CircuitSolveParams, reload_circuit_params: ReloadCircuitParams, commands_list: List = None): """This function clears the circuit and loads dss files and commands. 
Also solves the circuit and checks for convergence errors @@ -63,30 +94,23 @@ def reload_dss_circuit(dss_file_list, commands_list=None, **kwargs): for dss_file in dss_file_list: logger.info(f"Redirecting '{dss_file}'.") check_dss_run_command(f"Redirect '{dss_file}'") - dc_ac_ratio = kwargs.get('dc_ac_ratio', None) - if dc_ac_ratio is not None: - change_pv_pctpmpp(dc_ac_ratio=dc_ac_ratio) + if reload_circuit_params.dc_ac_ratio is not None: + change_pv_pctpmpp(dc_ac_ratio=reload_circuit_params.dc_ac_ratio) if commands_list is not None: logger.info(f"Running {len(commands_list)} dss commands") for command_string in commands_list: check_dss_run_command(command_string) if "new " in command_string.lower(): check_dss_run_command("CalcVoltageBases") - enable_pydss_solve = kwargs.get("enable_pydss_solve", False) - raise_exception = kwargs.get("raise_exception", True) - if enable_pydss_solve: - pydss_params = define_initial_pydss_settings(**kwargs) - circuit_solve_and_check(raise_exception=raise_exception, **pydss_params) - return pydss_params - else: - max_control_iterations = kwargs.get("max_control_iterations", None) - if max_control_iterations is not None: - dss.Solution.MaxControlIterations(max_control_iterations) - circuit_solve_and_check(raise_exception=raise_exception) - return kwargs + if reload_circuit_params.max_control_iterations is not None: + dss.Solution.MaxControlIterations(reload_circuit_params.max_control_iterations) + if solve_params.enable_pydss_controllers: + solve_params.pydss_controller_manager = define_initial_pydss_settings(pydss_volt_var_model=reload_circuit_params.pydss_volt_var_model) + circuit_solve_and_check( solve_params ) + return solve_params -def run_selective_master_dss(master_filepath, **kwargs): +def run_selective_master_dss(master_filepath, solve_params): """This function executes master.dss file line by line and ignores some commands that Solve yearly mode, export or plot data. 
@@ -98,6 +122,8 @@ def run_selective_master_dss(master_filepath, **kwargs): ------- """ + solve_params_true_exception = solve_params.copy() + solve_params_true_exception.raise_exception = True run_dir = os.getcwd() check_dss_run_command("Clear") # logger.info("Redirecting master file:") @@ -114,33 +140,32 @@ def run_selective_master_dss(master_filepath, **kwargs): continue else: check_dss_run_command(f"{line}") - circuit_solve_and_check(raise_exception=True, **kwargs) + circuit_solve_and_check(solve_params_true_exception) os.chdir(run_dir) return + @track_timing(timer_stats_collector) -def circuit_solve_and_check(raise_exception=False, **kwargs): +def circuit_solve_and_check(solve_params=CircuitSolveParams()): """This function solves the circuit (both OpenDSS and PyDSS-if enabled) and can raise exception if convergence error occurs Parameters ---------- - raise_exception - kwargs - + solve_params Returns ------- """ - calcvoltagebases = kwargs.pop("calcvoltagebases", False) - if calcvoltagebases: + if solve_params.calcvoltagebases: check_dss_run_command("CalcVoltageBases") - dss_pass_flag = dss_solve_and_check(raise_exception=raise_exception) + dss_pass_flag = dss_solve_and_check(raise_exception=solve_params.raise_exception) pass_flag = dss_pass_flag - enable_pydss_solve = kwargs.get("enable_pydss_solve", False) - if enable_pydss_solve: # if pydss solver is also to be used - pydss_pass_flag = pydss_solve_and_check(raise_exception=raise_exception, **kwargs) + if solve_params.enable_pydss_controllers: # if pydss solver is also to be used + if solve_params.pydss_controller_manager is None: + raise Exception("pydss_controller_manager should be defined.") + pydss_pass_flag = pydss_solve_and_check(solve_params.pydss_controller_manager, raise_exception=solve_params.raise_exception) pass_flag = dss_pass_flag and pydss_pass_flag return pass_flag @@ -173,7 +198,7 @@ def dss_run_command_list(command_list): return -def write_text_file(string_list, text_file_path, **kwargs): 
+def write_text_file(string_list, text_file_path, num_new_lines=2): """This function writes the string contents of a list to a text file Parameters @@ -185,7 +210,6 @@ def write_text_file(string_list, text_file_path, **kwargs): ------- """ - num_new_lines = kwargs.get("num_new_lines", 2) breaks = "\n"*num_new_lines pathlib.Path(text_file_path).write_text(breaks.join(string_list)) @@ -261,19 +285,19 @@ def convert_length_units(length, unit_in, unit_out): return length*LENGTH_CONVERSION[unit_in]/LENGTH_CONVERSION[unit_out] -def get_scenario_name(enable_pydss_solve, pydss_volt_var_model): +def get_scenario_name(enable_pydss_controllers, pydss_volt_var_model): """This function determines the controller scenario Parameters ---------- - enable_pydss_solve : bool + enable_pydss_controllers : bool pydss_volt_var_model Returns ------- str """ - if enable_pydss_solve: + if enable_pydss_controllers: # scenario = pydss_volt_var_model.control1 # TODO can read in name instead scenario = "control_mode" else: @@ -316,7 +340,7 @@ def change_pv_pctpmpp(dc_ac_ratio): dss.PVsystems.Next() -def get_feeder_stats(dss): +def get_feeder_stats(): """This function gives metadata stats for a feeder Parameters @@ -349,46 +373,133 @@ def get_feeder_stats(dss): return data_dict -def get_upgrade_stage_stats(dss, upgrade_stage, upgrade_type, xfmr_loading_df, line_loading_df, bus_voltages_df, **kwargs): +def get_dss_object_counts(): + data_dict = { + "num_nodes": dss.Circuit.NumNodes(), + "num_loads": dss.Loads.Count(), + "num_lines": dss.Lines.Count(), + "num_transformers": dss.Transformers.Count(), + "num_pv_systems": dss.PVsystems.Count(), + "num_capacitors": dss.Capacitors.Count(), + "num_regulators": dss.RegControls.Count(), + } + + return data_dict + +def get_feeder_data(upgrade_stage, upgrade_type): + final_dict = {"stage": upgrade_stage, "upgrade_type": upgrade_type} + ckt_info_dict = get_circuit_info() + final_dict["feeder_components"] = ckt_info_dict + 
final_dict["feeder_components"].update(get_dss_object_counts()) + return final_dict + +def get_upgrade_stage_stats(upgrade_stage, upgrade_type, xfmr_loading_df, line_loading_df, bus_voltages_df, + line_properties=None, xfmr_properties=None, voltage_properties=None, + capacitor_properties=None, regcontrol_properties=None, capacitors_df=None, regcontrols_df=None): """This function gives upgrade stage stats for a feeder upgrade_stage can be Initial or Final upgrade_type can be thermal or voltage """ - final_dict = {"stage": upgrade_stage, "upgrade_type": upgrade_type} - ckt_info_dict = get_circuit_info() - final_dict["feeder_components"] = ckt_info_dict - final_dict["feeder_components"].update({ - "num_nodes": dss.Circuit.NumNodes(), - "num_loads": dss.Loads.Count(), - "num_lines": dss.Lines.Count(), - "num_transformers": dss.Transformers.Count(), - "num_pv_systems": dss.PVsystems.Count(), - "num_capacitors": dss.Capacitors.Count(), - "num_regulators": dss.RegControls.Count(), - } ) - equipment_dict = combine_equipment_health_stats(xfmr_loading_df, line_loading_df, bus_voltages_df, **kwargs) + final_dict = get_feeder_data(upgrade_stage, upgrade_type) + equipment_dict = combine_equipment_health_stats(xfmr_loading_df=xfmr_loading_df, line_loading_df=line_loading_df, bus_voltages_df=bus_voltages_df, + line_properties=line_properties, xfmr_properties=xfmr_properties, voltage_properties=voltage_properties, + capacitor_properties=capacitor_properties, regcontrol_properties=regcontrol_properties, capacitors_df=capacitors_df, + regcontrols_df=regcontrols_df) final_dict.update(equipment_dict) return final_dict -def combine_equipment_health_stats(xfmr_loading_df, line_loading_df, bus_voltages_df, **kwargs): - line_properties = kwargs.get("line_properties", - ['name', 'phases','normamps', 'kV', 'line_placement', 'length', 'units', 'max_amp_loading', - 'max_per_unit_loading', 'status']) - xfmr_properties = kwargs.get("xfmr_properties", - ['name', 'phases', 'windings', 'conns', 
'kV', 'kVA', 'amp_limit_per_phase','max_amp_loading', - 'max_per_unit_loading', 'status'] ) - voltage_properties = kwargs.get("voltage_properties", - ['name', 'max_per_unit_voltage', 'min_per_unit_voltage', 'overvoltage_violation', - 'max_voltage_deviation', 'undervoltage_violation', 'min_voltage_deviation']) - capacitors_df = kwargs.get("capacitors_df", pd.DataFrame()) - regcontrols_df = kwargs.get("regcontrols_df", pd.DataFrame()) - capacitor_properties = kwargs.get("capacitor_properties", - ['capacitor_name','capcontrol_present', 'capcontrol_type', 'capcontrol_name', 'kv', 'kvar', - 'phases', 'DeadTime', 'Delay', 'OFFsetting', 'ONsetting']) - regcontrol_properties = kwargs.get("regcontrol_properties", - ['name', 'transformer', 'vreg', 'band', 'ptratio', 'delay', 'at_substation_xfmr_flag']) +def get_upgrade_stage_timeseries_stats(upgrade_stage, upgrade_type, voltage_upper_limit, voltage_lower_limit, transformer_upper_limit, line_upper_limit, solve_params, + ignore_switch, capacitors_df=None, regcontrols_df=None, line_properties=None, xfmr_properties=None, voltage_properties=None, capacitor_properties=None, + regcontrol_properties=None, ): + """This function gives upgrade stage stats for a feeder + upgrade_stage can be Initial or Final + upgrade_type can be thermal or voltage + + """ + final_dict = get_feeder_data(upgrade_stage, upgrade_type) + equipment_dict = combine_equipment_health_timeseries_stats(voltage_upper_limit=voltage_upper_limit, voltage_lower_limit=voltage_lower_limit, transformer_upper_limit=transformer_upper_limit, + line_upper_limit=line_upper_limit, solve_params=solve_params, capacitors_df=capacitors_df, regcontrols_df=regcontrols_df, + line_properties=line_properties, xfmr_properties=xfmr_properties, voltage_properties=voltage_properties, capacitor_properties=capacitor_properties, + regcontrol_properties=regcontrol_properties, ignore_switch=ignore_switch) + final_dict.update(equipment_dict) + return final_dict + + +def 
combine_equipment_health_timeseries_stats(voltage_upper_limit, voltage_lower_limit, transformer_upper_limit, line_upper_limit, solve_params, + ignore_switch, capacitors_df=None, regcontrols_df=None, + line_properties=None, xfmr_properties=None, voltage_properties=None, capacitor_properties=None, + regcontrol_properties=None): + if line_properties is None: + line_properties = ['name', 'phases','normamps', 'kV', 'line_placement', 'length', 'units', 'max_amp_loading', + 'max_per_unit_loading', 'status'] + if xfmr_properties is None: + xfmr_properties = ['name', 'phases', 'windings', 'conns', 'kV', 'kVA', 'amp_limit_per_phase','max_amp_loading', + 'max_per_unit_loading', 'status'] + if voltage_properties is None: + voltage_properties = ['name', 'max_per_unit_voltage', 'min_per_unit_voltage', 'overvoltage_violation', + 'max_voltage_deviation', 'undervoltage_violation', 'min_voltage_deviation'] + if capacitor_properties is None: + capacitor_properties = ['capacitor_name','capcontrol_present', 'capcontrol_type', 'capcontrol_name', 'kv', 'kvar', + 'phases', 'DeadTime', 'Delay', 'OFFsetting', 'ONsetting'] + if regcontrol_properties is None: + regcontrol_properties = ['name', 'transformer', 'vreg', 'band', 'ptratio', 'delay', 'at_substation_xfmr_flag'] + + final_dict = {} + if (capacitors_df is None) or capacitors_df.empty: + final_dict["capacitor_control"] = [] + else: + final_dict["capacitor_control"] = capacitors_df[capacitor_properties].to_dict(orient="records") + + if (regcontrols_df is None) or regcontrols_df.empty: + final_dict["regulator_control"] = [] + else: + final_dict["regulator_control"] = regcontrols_df[regcontrol_properties].to_dict(orient="records") + + xfmr_dict = get_timeseries_thermal_comparison(equipment_type="transformer", compute_loading=True, upper_limit=transformer_upper_limit) + ref_xfmr_dict = {k:v.reset_index()[xfmr_properties].to_dict(orient="records") for k, v in xfmr_dict.items()} + + line_dict = 
get_timeseries_thermal_comparison(equipment_type="line", compute_loading=True, upper_limit=line_upper_limit, ignore_switch=ignore_switch) + ref_line_dict = {k:v.reset_index()[line_properties].to_dict(orient="records") for k, v in line_dict.items()} + + solve_params_no_exception = solve_params.copy() + solve_params_no_exception.raise_exception = False + voltage_dict = get_timeseries_voltage_comparison(voltage_upper_limit=voltage_upper_limit, voltage_lower_limit=voltage_lower_limit, + equipment_type="voltage", solve_params=solve_params_no_exception) + ref_voltage_dict = {k:v.reset_index()[voltage_properties].to_dict(orient="records") for k, v in voltage_dict.items()} + + # some file reformatting + # if "windings" in xfmr_properties: + # xfmr_loading_df["windings"] = xfmr_loading_df["windings"].astype(int) + final_dict["transformer"] = ref_xfmr_dict + final_dict["line"] = ref_line_dict + final_dict["bus_voltage"] = ref_voltage_dict + return final_dict + + +def combine_equipment_health_stats(xfmr_loading_df, line_loading_df, bus_voltages_df, + line_properties=None, xfmr_properties=None, voltage_properties=None, + capacitor_properties=None, regcontrol_properties=None, capacitors_df=None, regcontrols_df=None, + ): + if line_properties is None: + line_properties = ['name', 'phases','normamps', 'kV', 'line_placement', 'length', 'units', 'max_amp_loading', + 'max_per_unit_loading', 'status'] + if xfmr_properties is None: + xfmr_properties =['name', 'phases', 'windings', 'conns', 'kV', 'kVA', 'amp_limit_per_phase','max_amp_loading', + 'max_per_unit_loading', 'status'] + if voltage_properties is None: + voltage_properties = ['name', 'max_per_unit_voltage', 'min_per_unit_voltage', 'overvoltage_violation', + 'max_voltage_deviation', 'undervoltage_violation', 'min_voltage_deviation'] + if capacitors_df is None: + capacitors_df = pd.DataFrame() + if regcontrols_df is None: + regcontrols_df = pd.DataFrame() + if capacitor_properties is None: + capacitor_properties = 
['capacitor_name','capcontrol_present', 'capcontrol_type', 'capcontrol_name', 'kv', 'kvar', + 'phases', 'DeadTime', 'Delay', 'OFFsetting', 'ONsetting'] + if regcontrol_properties is None: + regcontrol_properties = ['name', 'transformer', 'vreg', 'band', 'ptratio', 'delay', 'at_substation_xfmr_flag'] final_dict = {} # some file reformatting @@ -438,10 +549,10 @@ def get_circuit_info(): return data_dict -def summarize_upgrades_outputs(overall_outputs, **kwargs): +def summarize_upgrades_outputs(overall_outputs, job_name=None,): """This function creates summary of upgrades and costs results""" summary = {"results": {}} - summary["results"]["name"] = kwargs.get("job_name", None) + summary["results"]["name"] = job_name violation_summary = pd.DataFrame(overall_outputs["violation_summary"]) thermal_violations = sum(violation_summary.loc[(violation_summary["stage"] == "final") & (violation_summary["upgrade_type"] == "thermal")][["num_line_violations", "num_transformer_violations"]].sum()) voltage_violations = sum(violation_summary.loc[(violation_summary["stage"] == "final") & (violation_summary["upgrade_type"] == "voltage")][["num_voltage_violation_buses"]].sum()) @@ -617,13 +728,15 @@ def create_voltage_output_summary(all_original_equipment, all_latest_equipment, return new_voltage_df -def create_overall_output_file(feeder_stats, upgrades_dict, costs_dict, **kwargs): +def create_overall_output_file(feeder_stats, upgrades_dict, costs_dict, thermal_equipment_type_list=None, voltage_equipment_type_list=None, job_name=None): """This function creates the overall output summary file Status can have values: unchanged, replaced, new, setting_changed """ output_cols = UpgradesCostResultSummaryModel.schema(True).get("properties").keys() - thermal_equipment_type_list = kwargs.get("thermal_equipment_type_list", ["transformer", "line"]) - voltage_equipment_type_list = kwargs.get("voltage_equipment_type_list", ["capacitor_control", "regulator_control"]) + if thermal_equipment_type_list 
is None: + thermal_equipment_type_list = ["transformer", "line"] + if voltage_equipment_type_list is None: + voltage_equipment_type_list = ["capacitor_control", "regulator_control"] props_dict = {"transformer": {"identifier": "name", "parameter_list": ["kVA"], }, "line": {"identifier": "name", "parameter_list": ["normamps"], }, "capacitor_control": {"identifier": "capacitor_name", "parameter_list": ["ONsetting", "OFFsetting", "Delay"], "upgrades_file_string": "capacitor", @@ -648,7 +761,7 @@ def create_overall_output_file(feeder_stats, upgrades_dict, costs_dict, **kwargs voltage_summary_df = create_voltage_output_summary(all_original_equipment, all_latest_equipment, voltage_equipment_type_list, props_dict, voltage_cost_df, upgrades_dict, output_cols) combined_df = pd.concat([thermal_summary_df, voltage_summary_df]) - combined_df["name"] = kwargs.get("job_name", None) + combined_df["name"] = job_name return combined_df @@ -852,7 +965,7 @@ def check_switch_property(all_df): return -def get_all_transformer_info_instance(upper_limit=None, compute_loading=True): +def get_snapshot_transformer_info(upper_limit=None, compute_loading=True): """This collects transformer information Returns @@ -865,7 +978,7 @@ def get_all_transformer_info_instance(upper_limit=None, compute_loading=True): return pd.DataFrame() all_df["name"] = all_df.index.str.split(".").str[1] all_df["equipment_type"] = all_df.index.str.split(".").str[0] - # extract only enabled lines + # extract only enabled objects all_df = all_df.loc[all_df["enabled"].str.lower() == "yes"] all_df["conn"] = all_df["conn"].str.strip() # remove trailing space from conn field # define empty new columns @@ -979,7 +1092,7 @@ def determine_line_placement(line_series): return info_dict -def get_all_line_info_instance(upper_limit=None, compute_loading=True, ignore_switch=True): +def get_snapshot_line_info(ignore_switch, upper_limit=None, compute_loading=True): """This collects line information. dss.Lines.Units() gives an integer. 
It can be mapped as below: @@ -996,7 +1109,7 @@ def get_all_line_info_instance(upper_limit=None, compute_loading=True, ignore_sw check_enabled_property(all_df, element_name="line") all_df["name"] = all_df.index.str.split(".").str[1] all_df["equipment_type"] = all_df.index.str.split(".").str[0] - # extract only enabled lines + # extract only enabled objects all_df = all_df.loc[all_df["enabled"].str.lower() == "yes"] all_df = add_info_line_definition_type(all_df) # define empty new columns @@ -1090,48 +1203,161 @@ def compare_multiple_dataframes(comparison_dict, deciding_column_name, compariso @track_timing(timer_stats_collector) -def get_thermal_equipment_info(compute_loading, equipment_type, upper_limit=None, ignore_switch=False, **kwargs): +def get_thermal_equipment_info(compute_loading, equipment_type, analysis_params=None, solve_params=None, upper_limit=None, ignore_switch=False): """This function determines the thermal equipment loading (line, transformer), based on timepoint multiplier Returns ------- DataFrame """ - timepoint_multipliers = kwargs.get("timepoint_multipliers", None) - multiplier_type = kwargs.get("multiplier_type", LoadMultiplierType.ORIGINAL) - # if there are no multipliers, run on rated load i.e. multiplier=1. 
0 - # if compute_loading is false, then just run once (no need to check multipliers) - if (timepoint_multipliers is None) or (not compute_loading) or (multiplier_type == LoadMultiplierType.ORIGINAL): - if compute_loading and multiplier_type != LoadMultiplierType.ORIGINAL: - apply_uniform_timepoint_multipliers(multiplier_name=1, field="with_pv", **kwargs) + # if compute_loading is false, then just run once (no need to check multipliers) + if not compute_loading: + if equipment_type == "line": + loading_df = get_snapshot_line_info(compute_loading=compute_loading, upper_limit=upper_limit, ignore_switch=ignore_switch) + elif equipment_type == "transformer": + loading_df = get_snapshot_transformer_info(compute_loading=compute_loading, upper_limit=upper_limit) + else: + raise Exception(f"Unsupported equipment type {equipment_type}. Acceptable values are line, transformer.") + return loading_df + + # if loading is to be computed - look at the analysis params + assert upper_limit is not None + assert analysis_params is not None + deciding_column_name = "max_per_unit_loading" + + if analysis_params.timeseries_analysis: # timeseries analysis + loading_df = apply_timeseries_thermal(equipment_type, compute_loading=compute_loading, upper_limit=upper_limit, ignore_switch=ignore_switch, + deciding_column_name=deciding_column_name) + return loading_df + + if analysis_params.multiplier_type == LoadMultiplierType.ORIGINAL: + assert not analysis_params.timepoint_multipliers + # if no timepoint multipliers and original multiplier type, then dont make any changes to circuit, and directly compute loading if equipment_type == "line": - loading_df = get_all_line_info_instance(compute_loading=compute_loading, upper_limit=upper_limit, ignore_switch=ignore_switch) + loading_df = get_snapshot_line_info(compute_loading=compute_loading, upper_limit=upper_limit, ignore_switch=ignore_switch) elif equipment_type == "transformer": - loading_df = 
get_all_transformer_info_instance(compute_loading=compute_loading, upper_limit=upper_limit) + loading_df = get_snapshot_transformer_info(compute_loading=compute_loading, upper_limit=upper_limit) + return loading_df + elif analysis_params.multiplier_type == LoadMultiplierType.UNIFORM: # apply timepoint multipliers + loading_df = determine_timepoint_multiplier_thermal_loading(equipment_type, compute_loading, upper_limit, ignore_switch, + deciding_column_name, solve_params, analysis_params.timepoint_multipliers) return loading_df - if multiplier_type == LoadMultiplierType.UNIFORM: - comparison_dict = {} - for pv_field in timepoint_multipliers["load_multipliers"].keys(): - logger.debug(pv_field) - for multiplier_name in timepoint_multipliers["load_multipliers"][pv_field]: - logger.debug("Multipler name: %s", multiplier_name) - # this changes the dss network load and pv - apply_uniform_timepoint_multipliers(multiplier_name=multiplier_name, field=pv_field, **kwargs) - if equipment_type.lower() == "line": - deciding_column_name = "max_per_unit_loading" - loading_df = get_all_line_info_instance(compute_loading=compute_loading, upper_limit=upper_limit, ignore_switch=ignore_switch) - elif equipment_type.lower() == "transformer": - deciding_column_name = "max_per_unit_loading" - loading_df = get_all_transformer_info_instance(compute_loading=compute_loading, upper_limit=upper_limit) - loading_df.set_index("name", inplace=True) - comparison_dict[pv_field+"_"+str(multiplier_name)] = loading_df - # compare all dataframe, and create one that contains all worst loading conditions (across all multiplier conditions) - loading_df = compare_multiple_dataframes(comparison_dict, deciding_column_name, comparison_type="max") else: - raise Exception(f"Undefined multiplier_type {multiplier_type} passed.") + raise Exception(f"Undefined multiplier_type {analysis_params.multiplier_type} passed.") + + +def determine_timepoint_multiplier_thermal_loading(equipment_type, compute_loading, 
upper_limit, ignore_switch, + deciding_column_name, solve_params, timepoint_multipliers): + assert timepoint_multipliers + assert solve_params is not None + comparison_dict = {} + for pv_field in timepoint_multipliers["load_multipliers"].keys(): + logger.debug(pv_field) + for multiplier_name in timepoint_multipliers["load_multipliers"][pv_field]: + logger.debug("Multipler name: %s", multiplier_name) + # this changes the dss network load and pv + apply_uniform_timepoint_multipliers(multiplier_name=multiplier_name, field=pv_field, solve_params=solve_params) + if equipment_type.lower() == "line": + loading_df = get_snapshot_line_info(compute_loading=compute_loading, upper_limit=upper_limit, ignore_switch=ignore_switch) + elif equipment_type.lower() == "transformer": + loading_df = get_snapshot_transformer_info(compute_loading=compute_loading, upper_limit=upper_limit) + loading_df.set_index("name", inplace=True) + comparison_dict[pv_field+"_"+str(multiplier_name)] = loading_df + # compare all dataframe, and create one that contains all worst loading conditions (across all multiplier conditions) + loading_df = compare_multiple_dataframes(comparison_dict, deciding_column_name, comparison_type="max") + return loading_df + +def get_timeseries_thermal_comparison(equipment_type, compute_loading, upper_limit, ignore_switch=None): + """ + Time-series data used to determine thermal violations for each timestep + """ + if equipment_type.lower() not in ["line", "transformer"]: + raise Exception(f"Incorrect equipment type {equipment_type} provided. 
Possible values: line, transformer.") + if equipment_type.lower() == "line": + assert ignore_switch is not None + comparison_dict = {} + check_dss_run_command("Set mode=yearly number=1") # number=1, power flow per solve and stepsize remains the same + startH = 0 + num_pts = len(dss.LoadShape.PMult()) + time_step = 1 + for present_step in range(startH, num_pts, time_step): + key_name = "timepoint_"+str(present_step) + logger.debug(f"Present step: {present_step}") + check_dss_run_command("Set Controlmode=Static") + check_dss_run_command(f"set hour = {present_step}") + dss.Solution.Solve() # solves for specific hour that has been set + dss_pass_flag = dss.Solution.Converged() + if not dss_pass_flag: + logger.info(f"OpenDSS Convergence Error") + raise OpenDssConvergenceError("OpenDSS solution did not converge") + if equipment_type.lower() == "line": + loading_df = get_snapshot_line_info(compute_loading=compute_loading, upper_limit=upper_limit, ignore_switch=ignore_switch) + elif equipment_type.lower() == "transformer": + loading_df = get_snapshot_transformer_info(compute_loading=compute_loading, upper_limit=upper_limit) + else: + raise Exception(f"Not handled: {equipment_type}") + loading_df.set_index("name", inplace=True) + comparison_dict[key_name] = loading_df + return comparison_dict + + +def apply_timeseries_thermal(equipment_type, compute_loading, upper_limit, deciding_column_name, ignore_switch): + """ + Time-series data used to determine thermal violations + + """ + if equipment_type.lower() not in ["line", "transformer"]: + raise Exception(f"Incorrect equipment type {equipment_type} provided. 
Possible values: line, transformer.") + + comparison_dict = get_timeseries_thermal_comparison(equipment_type, compute_loading, upper_limit, ignore_switch) + # compare all dataframe, and create one that contains all worst loading conditions (across all multiplier conditions) + loading_df = compare_multiple_dataframes(comparison_dict, deciding_column_name, comparison_type="max") return loading_df +def get_timeseries_voltage_comparison(equipment_type, voltage_upper_limit, voltage_lower_limit, + solve_params): + """ + Time-series data used to determine voltage violations + """ + if equipment_type != "voltage": + raise Exception(f"Incorrect equipment type {equipment_type} provided. Possible values: voltage.") + + comparison_dict = {} + check_dss_run_command("Set mode=yearly number=1") # 1 power flow per solve and stepsize remains the same + startH = 0 + num_pts = len(dss.LoadShape.PMult()) + + time_step = 1 + for present_step in range(startH, num_pts, time_step): + key_name = "timepoint_"+str(present_step) + logger.debug(f"Present step: {present_step}") + check_dss_run_command("Set Controlmode=Static") + check_dss_run_command(f"set hour = {present_step}") + dss.Solution.Solve() # solves for specific hour that has been set + dss_pass_flag = dss.Solution.Converged() + if not dss_pass_flag: + logger.info(f"OpenDSS Convergence Error") + raise OpenDssConvergenceError("OpenDSS solution did not converge") + bus_voltages_df, undervoltage_bus_list, overvoltage_bus_list, buses_with_violations = get_snapshot_bus_voltage_violations( + voltage_upper_limit=voltage_upper_limit, voltage_lower_limit=voltage_lower_limit, solve_params=solve_params) + bus_voltages_df.set_index("name", inplace=True) + comparison_dict[key_name] = bus_voltages_df + return comparison_dict + + +def apply_timeseries_voltage(equipment_type, voltage_upper_limit, voltage_lower_limit, + deciding_column_dict, solve_params): + comparison_dict = get_timeseries_voltage_comparison(equipment_type, voltage_upper_limit, 
voltage_lower_limit, + solve_params) + # compare all dataframe, and create one that contains all worst loading conditions (across all multiplier conditions) + bus_voltages_df, undervoltage_bus_list, overvoltage_bus_list, \ + buses_with_violations = compare_multiple_dataframes_voltage( + comparison_dict=comparison_dict, deciding_column_dict=deciding_column_dict, + voltage_upper_limit=voltage_upper_limit, voltage_lower_limit=voltage_lower_limit) + return bus_voltages_df, undervoltage_bus_list, overvoltage_bus_list, \ + buses_with_violations + def get_regcontrol_info(correct_PT_ratio=False, nominal_voltage=None): """This collects enabled regulator control information. If correcting PT ratio, the following information is followed (based on OpenDSS documentation) @@ -1368,57 +1594,68 @@ def check_dss_run_command(command_string): @track_timing(timer_stats_collector) -def get_bus_voltages(voltage_upper_limit, voltage_lower_limit, raise_exception=True, **kwargs): - """This function determines the voltages, based on timepoint multiplier +def get_bus_voltage_violations(voltage_upper_limit, voltage_lower_limit, analysis_params, solve_params): + """This function determines the bus voltages and violations Returns ------- DataFrame """ - timepoint_multipliers = kwargs.get("timepoint_multipliers", None) - multiplier_type = kwargs.get("multiplier_type", LoadMultiplierType.ORIGINAL) - # if there are no multipliers, run on rated load i.e. multiplier=1. 
0 - # if compute_loading is false, then just run once (no need to check multipliers) - if (timepoint_multipliers is None) or (multiplier_type == LoadMultiplierType.ORIGINAL): - if multiplier_type != LoadMultiplierType.ORIGINAL: - apply_uniform_timepoint_multipliers(multiplier_name=1, field="with_pv", **kwargs) - # determine voltage violations after changes - bus_voltages_df, undervoltage_bus_list, overvoltage_bus_list, buses_with_violations = get_bus_voltages_instance( - voltage_upper_limit=voltage_upper_limit, voltage_lower_limit=voltage_lower_limit, raise_exception=raise_exception, - **kwargs) + deciding_column_dict = {"max_per_unit_voltage": "max", "min_per_unit_voltage": "min"} + if analysis_params.timeseries_analysis: + bus_voltages_df, undervoltage_bus_list, \ + overvoltage_bus_list, buses_with_violations = apply_timeseries_voltage(deciding_column_dict=deciding_column_dict, + voltage_upper_limit=voltage_upper_limit, voltage_lower_limit=voltage_lower_limit, + equipment_type="voltage", solve_params=solve_params) + + return bus_voltages_df, undervoltage_bus_list, overvoltage_bus_list, buses_with_violations + + # if there are no multipliers, run on rated load + if analysis_params.multiplier_type == LoadMultiplierType.ORIGINAL: + assert not analysis_params.timepoint_multipliers + # apply_uniform_timepoint_multipliers(multiplier_name=1, field="with_pv", solve_params=solve_params) + # determine voltage violations after changes + bus_voltages_df, undervoltage_bus_list, overvoltage_bus_list, buses_with_violations = get_snapshot_bus_voltage_violations( + voltage_upper_limit=voltage_upper_limit, voltage_lower_limit=voltage_lower_limit, solve_params=solve_params) + return bus_voltages_df, undervoltage_bus_list, overvoltage_bus_list, buses_with_violations + + elif analysis_params.multiplier_type == LoadMultiplierType.UNIFORM: # apply timepoint multipliers + bus_voltages_df, undervoltage_bus_list, overvoltage_bus_list, buses_with_violations = 
determine_timepoint_multiplier_bus_voltage_violations( + voltage_upper_limit, voltage_lower_limit, solve_params, analysis_params.timepoint_multipliers, deciding_column_dict) return bus_voltages_df, undervoltage_bus_list, overvoltage_bus_list, buses_with_violations - if multiplier_type == LoadMultiplierType.UNIFORM: - comparison_dict = {} - for pv_field in timepoint_multipliers["load_multipliers"].keys(): - logger.debug(pv_field) - for multiplier_name in timepoint_multipliers["load_multipliers"][pv_field]: - logger.debug("Multipler name: %s", multiplier_name) - # this changes the dss network load and pv - apply_uniform_timepoint_multipliers(multiplier_name=multiplier_name, field=pv_field, **kwargs) - bus_voltages_df, undervoltage_bus_list, overvoltage_bus_list, buses_with_violations = get_bus_voltages_instance( - voltage_upper_limit=voltage_upper_limit, voltage_lower_limit=voltage_lower_limit, raise_exception=raise_exception, **kwargs) - bus_voltages_df.set_index("name", inplace=True) - comparison_dict[pv_field+"_"+str(multiplier_name)] = bus_voltages_df - # compare all dataframe, and create one that contains all worst loading conditions (across all multiplier conditions) - deciding_column_dict = {"max_per_unit_voltage": "max", "min_per_unit_voltage": "min"} - bus_voltages_df, undervoltage_bus_list, overvoltage_bus_list, buses_with_violations = compare_multiple_dataframes_voltage(comparison_dict=comparison_dict, - deciding_column_dict=deciding_column_dict, - voltage_upper_limit=voltage_upper_limit, - voltage_lower_limit=voltage_lower_limit) else: - raise Exception(f"Undefined multiplier_type {multiplier_type} passed.") - return bus_voltages_df, undervoltage_bus_list, overvoltage_bus_list, buses_with_violations + raise Exception(f"Undefined multiplier_type {analysis_params.multiplier_type} passed.") + +def determine_timepoint_multiplier_bus_voltage_violations(voltage_upper_limit, voltage_lower_limit, solve_params, timepoint_multipliers, deciding_column_dict): + 
comparison_dict = {} + for pv_field in timepoint_multipliers["load_multipliers"].keys(): + logger.debug(pv_field) + for multiplier_name in timepoint_multipliers["load_multipliers"][pv_field]: + logger.debug("Multipler name: %s", multiplier_name) + # this changes the dss network load and pv + apply_uniform_timepoint_multipliers(multiplier_name=multiplier_name, field=pv_field, solve_params=solve_params) + bus_voltages_df, undervoltage_bus_list, overvoltage_bus_list, buses_with_violations = get_snapshot_bus_voltage_violations( + voltage_upper_limit=voltage_upper_limit, voltage_lower_limit=voltage_lower_limit, solve_params=solve_params) + bus_voltages_df.set_index("name", inplace=True) + comparison_dict[pv_field+"_"+str(multiplier_name)] = bus_voltages_df + # compare all dataframe, and create one that contains all worst loading conditions (across all multiplier conditions) + bus_voltages_df, undervoltage_bus_list, overvoltage_bus_list, buses_with_violations = compare_multiple_dataframes_voltage(comparison_dict=comparison_dict, + deciding_column_dict=deciding_column_dict, + voltage_upper_limit=voltage_upper_limit, + voltage_lower_limit=voltage_lower_limit) + return bus_voltages_df, undervoltage_bus_list, overvoltage_bus_list, buses_with_violations + @track_timing(timer_stats_collector) -def get_bus_voltages_instance(voltage_upper_limit, voltage_lower_limit, raise_exception=True, **kwargs): +def get_snapshot_bus_voltage_violations(voltage_upper_limit, voltage_lower_limit, solve_params): """This computes per unit voltages for all buses in network Returns ------- DataFrame """ - circuit_solve_and_check(raise_exception=raise_exception, **kwargs) # this is added as a final check for convergence + circuit_solve_and_check(solve_params) # this is added as a final check for convergence all_dict = {} all_bus_names = dss.Circuit.AllBusNames() for bus_name in all_bus_names: @@ -1609,7 +1846,7 @@ def remove_duplicate_transformer_upgrades(xfmr_upgrades_df): return 
final_xfmr_upgrades_df -def get_pv_buses(dss): +def get_pv_buses(): pv_buses = [] flag = dss.PVsystems.First() while flag > 0: @@ -1618,7 +1855,7 @@ def get_pv_buses(dss): return pv_buses -def get_load_buses(dss): +def get_load_buses(): load_buses = [] flag = dss.Loads.First() while flag > 0: @@ -1739,8 +1976,8 @@ def create_timepoint_multipliers_dict(timepoint_multipliers): @track_timing(timer_stats_collector) -def apply_timepoint_multipliers_dict(reformatted_dict, multiplier_name, property_list=None, field="load_multipliers", - **kwargs): +def apply_timepoint_multipliers_dict(reformatted_dict, multiplier_name, solve_params, field, property_list=None + ): """This uses a dictionary with the format of output received from create_timepoint_multipliers_dict Currently, it only does works loads. But can be modified to accommodate other elements like PV as well. @@ -1753,6 +1990,8 @@ def apply_timepoint_multipliers_dict(reformatted_dict, multiplier_name, property ------- dict """ + solve_params_true_exception = solve_params.copy() + solve_params_true_exception.raise_exception = True name_list = list(reformatted_dict.keys()) if property_list is None: property_list = list(reformatted_dict[name_list[0]].keys()) @@ -1769,13 +2008,14 @@ def apply_timepoint_multipliers_dict(reformatted_dict, multiplier_name, property dss.Loads.kW(value) else: raise Exception(f"Property {property} not defined in multipliers dict") - circuit_solve_and_check(raise_exception=True, **kwargs) + + circuit_solve_and_check(solve_params_true_exception) else: raise Exception(f"Unsupported key in dictionary. Presently, load_multipliers is supported.") return reformatted_dict -def apply_uniform_timepoint_multipliers(multiplier_name, field, **kwargs): +def apply_uniform_timepoint_multipliers(multiplier_name, field, solve_params): """This function applies a uniform mulitplier to all elements. Currently, the multiplier only does works on loads. 
But can be modified to accommodate other elements like PV as well. It has two options, 1) all pv is enabled. 2) all pv is disabled. @@ -1792,5 +2032,5 @@ def apply_uniform_timepoint_multipliers(multiplier_name, field, **kwargs): raise Exception(f"Unknown parameter {field} passed in uniform timepoint multiplier dict." f"Acceptable values are 'with_pv', 'without_pv'") check_dss_run_command(f"set LoadMult = {multiplier_name}") - circuit_solve_and_check(raise_exception=True, **kwargs) + circuit_solve_and_check(solve_params) return True diff --git a/disco/extensions/upgrade_simulation/upgrades/cost_computation.py b/disco/extensions/upgrade_simulation/upgrades/cost_computation.py index 63ad031e..12f86431 100644 --- a/disco/extensions/upgrade_simulation/upgrades/cost_computation.py +++ b/disco/extensions/upgrade_simulation/upgrades/cost_computation.py @@ -106,7 +106,7 @@ def compute_all_costs( dump_data(reordered_dict, overall_output_summary_filepath, indent=2, cls=ExtendedJSONEncoder, allow_nan=False) -def compute_transformer_costs(xfmr_upgrades_df, xfmr_cost_database, **kwargs): +def compute_transformer_costs(xfmr_upgrades_df, xfmr_cost_database, backup_deciding_property="rated_kVA", misc_database=None): """This function computes the transformer costs. -Unit equipment cost for new_parallel and "upgrade" transformers are the same in the database. 
The difference would be the fixed costs added (if present in misc_database) @@ -124,7 +124,6 @@ def compute_transformer_costs(xfmr_upgrades_df, xfmr_cost_database, **kwargs): ---------- xfmr_upgrades_df xfmr_cost_database - kwargs Returns ------- @@ -135,8 +134,6 @@ def compute_transformer_costs(xfmr_upgrades_df, xfmr_cost_database, **kwargs): deciding_columns = ["rated_kVA", "phases", "primary_kV", "secondary_kV", "primary_connection_type", "secondary_connection_type", "num_windings"] output_columns_list = ["type", output_count_field, output_cost_field, "comment", "equipment_parameters"] - backup_deciding_property = kwargs.get("backup_deciding_property", "rated_kVA") - misc_database = kwargs.get("misc_database", None) # choose which properties are to be saved upgrade_type_list = ["upgrade", "new_parallel"] added_xfmr_df = xfmr_upgrades_df.loc[(xfmr_upgrades_df["upgrade_type"].isin(upgrade_type_list)) & (xfmr_upgrades_df["action"] == "add")] @@ -241,7 +238,7 @@ def reformat_xfmr_upgrades_file(xfmr_upgrades_df): return xfmr_upgrades_df -def compute_line_costs(line_upgrades_df, line_cost_database, **kwargs): +def compute_line_costs(line_upgrades_df, line_cost_database, backup_deciding_property="ampere_rating"): """This function computes the line costs. -Unit equipment cost for new_parallel and "upgrade" line are the not same in the database. 
There are different costs given for reconductored and new lines @@ -259,7 +256,7 @@ def compute_line_costs(line_upgrades_df, line_cost_database, **kwargs): ---------- line_upgrades_df line_cost_database - kwargs + Returns ------- @@ -269,7 +266,6 @@ def compute_line_costs(line_upgrades_df, line_cost_database, **kwargs): output_count_field = "count" deciding_columns = ["phases", "voltage_kV", "ampere_rating", "line_placement", "upgrade_type"] output_columns_list = ["type", output_count_field, output_cost_field, "comment", "equipment_parameters"] - backup_deciding_property = kwargs.get("backup_deciding_property", "ampere_rating") # choose which properties are to be saved upgrade_type_list = ["upgrade", "new_parallel"] @@ -371,7 +367,7 @@ def compute_capcontrol_cost(voltage_upgrades_df, controls_cost_database, keyword "change_cap_control": "controller_settings_modified"} cost_database_fields = {"add_new_cap_controller": "Add new capacitor controller", "change_cap_control": "Change capacitor controller settings", - "replace_cap_controller": "Replace capacitor controller" + "replace_cap_controller": "Replace capacitor controller" # this is not used currently } empty_cap_cost_dict = {"type": type_rows, "count": [0] * len(type_rows), "total_cost_usd": [0] * len(type_rows)} @@ -383,15 +379,15 @@ def compute_capcontrol_cost(voltage_upgrades_df, controls_cost_database, keyword if cap_upgrades_df.empty: # if there are no capacitor control upgrades return zero_cost_df cap_cost = [] - # if there are new capacitor controller + # if there are new capacitor controller count_new_controller = cap_upgrades_df[capcontrol_upgrade_fields["add_new_cap_controller"]].sum() unit_cost_new_controller = controls_cost_database.loc[controls_cost_database["type"] == cost_database_fields["add_new_cap_controller"]]["cost"].values[0] total_cost_new_controller = count_new_controller * unit_cost_new_controller + cap_cost.append( {"type": CapacitorControllerResultType.add_new_cap_controller.value, 
"count": count_new_controller, "total_cost_usd": total_cost_new_controller} ) - # if there are setting changes count_setting_changes = cap_upgrades_df[capcontrol_upgrade_fields["change_cap_control"]].sum() unit_cost_setting_changes = controls_cost_database.loc[controls_cost_database["type"] == diff --git a/disco/extensions/upgrade_simulation/upgrades/pydss_parameters.py b/disco/extensions/upgrade_simulation/upgrades/pydss_parameters.py index 76c202de..22d368cd 100644 --- a/disco/extensions/upgrade_simulation/upgrades/pydss_parameters.py +++ b/disco/extensions/upgrade_simulation/upgrades/pydss_parameters.py @@ -11,7 +11,7 @@ logger = logging.getLogger(__name__) -def define_initial_pydss_settings(**kwargs): +def define_initial_pydss_settings(pydss_volt_var_model): settings = ProjectModel( max_control_iterations=50, error_tolerance=0.0001, @@ -20,17 +20,14 @@ def define_initial_pydss_settings(**kwargs): dss.Text.Command("Set ControlMode={}".format(settings.control_mode.value)) dss.Solution.MaxControlIterations(settings.max_control_iterations) # we dont need to define controller everytime we solve the circuit, unless we're reloading the circuit - controller = CircuitElementController( - kwargs["pydss_volt_var_model"] - ) # Use all elements. + controller = CircuitElementController(pydss_volt_var_model) # Use all elements. 
pydss_controller_manager = ControllerManager.create([controller], settings) - kwargs.update({"pydss_controller_manager": pydss_controller_manager}) - return kwargs + return pydss_controller_manager -def pydss_solve_and_check(raise_exception=False, **kwargs): +def pydss_solve_and_check(pydss_controller_manager, raise_exception=False): logger.debug("Solving circuit using PyDSS controls") - pydss_pass_flag = kwargs["pydss_controller_manager"].run_controls() + pydss_pass_flag = pydss_controller_manager.run_controls() if not pydss_pass_flag: logger.info(f"PyDSS Convergence Error") if raise_exception: diff --git a/disco/extensions/upgrade_simulation/upgrades/thermal_upgrade_functions.py b/disco/extensions/upgrade_simulation/upgrades/thermal_upgrade_functions.py index e6b906dd..08f5f5f9 100644 --- a/disco/extensions/upgrade_simulation/upgrades/thermal_upgrade_functions.py +++ b/disco/extensions/upgrade_simulation/upgrades/thermal_upgrade_functions.py @@ -12,7 +12,8 @@ logger = logging.getLogger(__name__) @track_timing(timer_stats_collector) -def correct_line_violations(line_loading_df, line_design_pu, line_upgrade_options, parallel_lines_limit, **kwargs,): +def correct_line_violations(line_loading_df, line_design_pu, line_upgrade_options, parallel_lines_limit, solve_params, + external_upgrades_technical_catalog=None): """This function determines line upgrades to correct line violations. It also updates the opendss model with upgrades. 
@@ -27,6 +28,8 @@ def correct_line_violations(line_loading_df, line_design_pu, line_upgrade_option ------- """ + solve_params_true_exception = solve_params.copy() + solve_params_true_exception.raise_exception = True equipment_type = "Line" line_upgrades_df = pd.DataFrame() upgrades_dict = {} @@ -73,7 +76,7 @@ def correct_line_violations(line_loading_df, line_design_pu, line_upgrade_option temp_commands_list = [] # edit existing line if (new_config_type == "geometry") or (new_config_type == "linecode"): - external_upgrades_technical_catalog = kwargs.get("external_upgrades_technical_catalog", None) + assert external_upgrades_technical_catalog is not None command_string = ensure_line_config_exists(chosen_option, new_config_type, external_upgrades_technical_catalog) if command_string is not None: # if new line config definition had to be added temp_commands_list.append(command_string) @@ -103,12 +106,12 @@ def correct_line_violations(line_loading_df, line_design_pu, line_upgrade_option # run command for upgraded equipment, that resolves overloading for one equipment for command_item in temp_commands_list: check_dss_run_command(command_item) - circuit_solve_and_check(raise_exception=True, **kwargs) + circuit_solve_and_check(solve_params_true_exception) # raise_exception=True commands_list = commands_list + temp_commands_list # if higher upgrade is not available or chosen line upgrade rating is much higher than required, # dont oversize. 
Instead, place lines in parallelma else: - external_upgrades_technical_catalog = kwargs.get("external_upgrades_technical_catalog", None) + assert external_upgrades_technical_catalog is not None parallel_line_commands, temp_upgrades_dict_parallel = identify_parallel_lines(options=options, object_row=row, parallel_lines_limit=parallel_lines_limit, external_upgrades_technical_catalog=external_upgrades_technical_catalog) @@ -116,7 +119,7 @@ def correct_line_violations(line_loading_df, line_design_pu, line_upgrade_option for command_item in parallel_line_commands: check_dss_run_command(command_item) check_dss_run_command('CalcVoltageBases') - circuit_solve_and_check(raise_exception=True, **kwargs) + circuit_solve_and_check(solve_params_true_exception) # raise_exception=True commands_list = commands_list + parallel_line_commands upgrades_dict_parallel = upgrades_dict_parallel + temp_upgrades_dict_parallel # parallel upgrades is stored in a list (since it has same original_equipment name) index_names = ["original_equipment_name", "parameter_type"] @@ -129,14 +132,14 @@ def correct_line_violations(line_loading_df, line_design_pu, line_upgrade_option "original_equipment_name"]).reset_index() else: # if there is no overloading logger.info("This case has no line violations") - circuit_solve_and_check(raise_exception=True, **kwargs) # this is added as a final check for convergence + circuit_solve_and_check(solve_params_true_exception) # raise_exception=True # this is added as a final check for convergence output_fields = list(LineUpgradesTechnicalResultModel.schema(True).get("properties").keys()) # get fields with alias line_upgrades_df = line_upgrades_df[output_fields] return commands_list, line_upgrades_df @track_timing(timer_stats_collector) -def identify_parallel_lines(options, object_row, parallel_lines_limit, **kwargs): +def identify_parallel_lines(options, object_row, parallel_lines_limit, external_upgrades_technical_catalog): """This function identifies parallel line 
solutions, when a direct upgrade solution is not available from catalogue Parameters @@ -166,7 +169,6 @@ def identify_parallel_lines(options, object_row, parallel_lines_limit, **kwargs) raise ExceededParallelLinesLimit(f"Number of parallel lines required is {num_parallel_lines}, which exceeds limit of {parallel_lines_limit}." f" Increase parameter parallel_lines_limit in input config or ensure higher sized equipment is present in technical catalog.") new_config_type = chosen_option["line_definition_type"] - external_upgrades_technical_catalog = kwargs.get("external_upgrades_technical_catalog", None) upgrades_dict_parallel = [] for i in range(0, num_parallel_lines): curr_time = str(time.time()) @@ -208,15 +210,11 @@ def identify_parallel_lines(options, object_row, parallel_lines_limit, **kwargs) return commands_list, upgrades_dict_parallel -def define_line_object(line_name, chosen_option, action_type, **kwargs): +def define_line_object(line_name, chosen_option, action_type, bus1, bus2, original_units, length): """This function is used to create a command string to define an opendss line object """ command_string = f"{action_type} Line.{line_name} normamps={chosen_option['normamps']} emergamps={chosen_option['emergamps']}" if action_type == "New": - bus1 = kwargs.get("bus1", None) - bus2 = kwargs.get("bus2", None) - length = kwargs.get("length", None) - original_units = kwargs.get("original_units", None) if bus1 is None or bus2 is None or length is None or original_units is None: raise ValueError(f"Bus and length information is needed when defining a new line object.") if original_units != chosen_option["units"]: # if units are different @@ -316,7 +314,7 @@ def define_xfmr_object(xfmr_name, xfmr_info_series, action_type, buses_list=None @track_timing(timer_stats_collector) def correct_xfmr_violations(xfmr_loading_df, xfmr_design_pu, xfmr_upgrade_options, - parallel_transformers_limit, **kwargs): + parallel_transformers_limit, solve_params): """This function 
determines transformer upgrades to correct transformer violations. It also updates the opendss model with upgrades. @@ -331,6 +329,8 @@ def correct_xfmr_violations(xfmr_loading_df, xfmr_design_pu, xfmr_upgrade_option ------- """ + solve_params_true_exception = solve_params.copy() + solve_params_true_exception.raise_exception = True equipment_type = "Transformer" xfmr_upgrades_df = pd.DataFrame() upgrades_dict = {} @@ -404,7 +404,7 @@ def correct_xfmr_violations(xfmr_loading_df, xfmr_design_pu, xfmr_upgrade_option # "parameter_type": "new_equipment", "action": "add", "name": row["name"]}) check_dss_run_command(command_string) # run command for upgraded equipment - circuit_solve_and_check(raise_exception=True, **kwargs) + circuit_solve_and_check(solve_params_true_exception) # if higher upgrade is not available or chosen upgrade rating is much higher than required, # dont oversize. Instead, place equipment in parallel else: @@ -414,7 +414,7 @@ def correct_xfmr_violations(xfmr_loading_df, xfmr_design_pu, xfmr_upgrade_option for command_item in parallel_xfmr_commands: check_dss_run_command(command_item) check_dss_run_command('CalcVoltageBases') - circuit_solve_and_check(raise_exception=True, **kwargs) + circuit_solve_and_check(solve_params_true_exception) commands_list = commands_list + parallel_xfmr_commands upgrades_dict_parallel = upgrades_dict_parallel + temp_upgrades_dict_parallel # parallel upgrades is stored in a list (since it has same original_equipment name) index_names = ["original_equipment_name", "parameter_type"] @@ -428,7 +428,7 @@ def correct_xfmr_violations(xfmr_loading_df, xfmr_design_pu, xfmr_upgrade_option else: # if there is no overloading logger.info("This case has no transformer violations") - circuit_solve_and_check(raise_exception=True, **kwargs) # this is added as a final check for convergence + circuit_solve_and_check(solve_params_true_exception) # this is added as a final check for convergence output_fields = 
list(TransformerUpgradesTechnicalResultModel.schema(True).get("properties").keys()) # get fields with alias xfmr_upgrades_df = xfmr_upgrades_df[output_fields] return commands_list, xfmr_upgrades_df diff --git a/disco/extensions/upgrade_simulation/upgrades/upgrade_parameters.toml b/disco/extensions/upgrade_simulation/upgrades/upgrade_parameters.toml index 17f73367..bdfc7302 100644 --- a/disco/extensions/upgrade_simulation/upgrades/upgrade_parameters.toml +++ b/disco/extensions/upgrade_simulation/upgrades/upgrade_parameters.toml @@ -16,6 +16,7 @@ final_lower_limit = 0.95 nominal_voltage = 120 [upgrade_simulation_params] -enable_pydss_controller = true +enable_pydss_controllers = true dc_ac_ratio = 1.15 pydss_controller_name = "volt_var_ieee_1547_2018_catB" +timeseries_analysis = false \ No newline at end of file diff --git a/disco/extensions/upgrade_simulation/upgrades/voltage_upgrade_functions.py b/disco/extensions/upgrade_simulation/upgrades/voltage_upgrade_functions.py index 62c1fd3f..505f2914 100644 --- a/disco/extensions/upgrade_simulation/upgrades/voltage_upgrade_functions.py +++ b/disco/extensions/upgrade_simulation/upgrades/voltage_upgrade_functions.py @@ -11,6 +11,7 @@ from .common_functions import * from .thermal_upgrade_functions import define_xfmr_object from disco import timer_stats_collector +from disco.exceptions import OpenDssModelDisconnectedError from disco.models.upgrade_cost_analysis_generic_output_model import VoltageUpgradesTechnicalResultModel from opendssdirect import DSSException from jade.utils.timing_utils import track_timing, Timer @@ -65,8 +66,8 @@ def edit_capacitor_settings_for_convergence(voltage_config=None, control_command return new_control_command -def correct_capacitor_parameters(default_capacitor_settings, orig_capacitors_df, nominal_voltage, - **kwargs): +def correct_capacitor_parameters(default_capacitor_settings, orig_capacitors_df, nominal_voltage, + solve_params): """Corrects cap control parameters: change to voltage 
controlled, correct PT ratio. Add cap control if not present Parameters @@ -79,6 +80,8 @@ def correct_capacitor_parameters(default_capacitor_settings, orig_capacitors_df, ------- """ + resolve_params = solve_params.copy() + resolve_params.raise_exception = False # correct capacitor settings default_capcontrol_command = f"Type={default_capacitor_settings['cap_control']} " \ f"ONsetting={default_capacitor_settings['capON']} " \ @@ -97,12 +100,12 @@ def correct_capacitor_parameters(default_capacitor_settings, orig_capacitors_df, command_string = f"Edit CapControl.{row['capcontrol_name']} PTRatio={row['PTratio']} " \ f"{default_capcontrol_command}" check_dss_run_command(command_string) - pass_flag = circuit_solve_and_check(raise_exception=False, **kwargs) + pass_flag = circuit_solve_and_check(resolve_params) # raise exception=False if not pass_flag: command_string = edit_capacitor_settings_for_convergence(command_string) check_dss_run_command(command_string) # raise exception if no convergence even after change - circuit_solve_and_check(raise_exception=True, **kwargs) + circuit_solve_and_check(solve_params) # raise exception =True capacitors_commands_list.append(command_string) # if it is already voltage controlled, modify PT ratio if new is different after re-computation @@ -112,7 +115,7 @@ def correct_capacitor_parameters(default_capacitor_settings, orig_capacitors_df, command_string = f"Edit CapControl.{row['capcontrol_name']} PTRatio={row['PTratio']}" + orig_string check_dss_run_command(command_string) # this does not change original settings, so should not cause convergence - circuit_solve_and_check(raise_exception=True, **kwargs) + circuit_solve_and_check(solve_params) # raise exception =True capacitors_commands_list.append(command_string) # if there are capacitors without cap control, add a voltage-controlled cap control @@ -129,12 +132,12 @@ def correct_capacitor_parameters(default_capacitor_settings, orig_capacitors_df, 
f"terminal={default_capacitor_settings['terminal']} capacitor={row['capacitor_name']} " \ f"PTRatio={default_pt_ratio} {default_capcontrol_command}" check_dss_run_command(command_string) - pass_flag = circuit_solve_and_check(raise_exception=False, **kwargs) + pass_flag = circuit_solve_and_check(resolve_params) # raise exception =False if not pass_flag: command_string = edit_capacitor_settings_for_convergence(command_string) check_dss_run_command(command_string) # raise exception if no convergence even after change - circuit_solve_and_check(raise_exception=True, **kwargs) + circuit_solve_and_check(solve_params) # raise exception =True capacitors_commands_list.append(command_string) return capacitors_commands_list @@ -175,7 +178,7 @@ def find_line_connected_to_capacitor(capacitor_row, lines_df): @track_timing(timer_stats_collector) def sweep_capacitor_settings(voltage_config, initial_capacitors_df, default_capacitor_settings, voltage_upper_limit, - voltage_lower_limit, **kwargs): + voltage_lower_limit, solve_params, analysis_params): """This function sweeps through capacitor settings and returns dataframe of severity metrics for all the sweeps of capacitor controls with best settings. This function increases differences between cap ON and OFF voltages in user defined increments, default 1 volt, until upper and lower bounds are reached. 
@@ -197,13 +200,15 @@ def sweep_capacitor_settings(voltage_config, initial_capacitors_df, default_capa capacitor_sweep_list = [] # this list will contain severity of each capacitor setting sweep # get severity index for original/initial capacitor settings (ie before the settings sweep) temp_dict = {'cap_on_setting': 'original setting', 'cap_off_setting': 'original setting'} - pass_flag = circuit_solve_and_check(raise_exception=False, **kwargs) + resolve_params = solve_params.copy() + resolve_params.raise_exception = False + pass_flag = circuit_solve_and_check(resolve_params) # raise exception is False if not pass_flag: # if there is convergence issue at this setting, go onto next setting and dont save temp_dict['converged'] = False else: temp_dict['converged'] = True - severity_dict = compute_voltage_violation_severity( - voltage_upper_limit=voltage_upper_limit, voltage_lower_limit=voltage_lower_limit) + severity_dict = compute_voltage_violation_severity(voltage_upper_limit=voltage_upper_limit, voltage_lower_limit=voltage_lower_limit, + analysis_params=analysis_params, solve_params=solve_params) temp_dict.update(severity_dict) capacitor_sweep_list.append(temp_dict) # start settings sweep @@ -218,16 +223,16 @@ def sweep_capacitor_settings(voltage_config, initial_capacitors_df, default_capa for index, row in initial_capacitors_df.iterrows(): # apply settings to all capacitors check_dss_run_command(f"Edit CapControl.{row['capcontrol_name']} ONsetting={cap_on_setting} " f"OFFsetting={cap_off_setting}") - pass_flag = circuit_solve_and_check(raise_exception=False, **kwargs) + pass_flag = circuit_solve_and_check(resolve_params) # don't raise exception if not pass_flag: # if there is convergence issue at this setting, go onto next setting and dont save temp_dict['converged'] = False break else: temp_dict['converged'] = True - bus_voltages_df, undervoltage_bus_list, overvoltage_bus_list, buses_with_violations = get_bus_voltages( - voltage_upper_limit=voltage_upper_limit, 
voltage_lower_limit=voltage_lower_limit, raise_exception=False, **kwargs) + bus_voltages_df, undervoltage_bus_list, overvoltage_bus_list, buses_with_violations = get_bus_voltage_violations( + voltage_upper_limit=voltage_upper_limit, voltage_lower_limit=voltage_lower_limit, analysis_params=analysis_params, solve_params=resolve_params) # raise exception is false severity_dict = compute_voltage_violation_severity( - voltage_upper_limit=voltage_upper_limit, voltage_lower_limit=voltage_lower_limit) + voltage_upper_limit=voltage_upper_limit, voltage_lower_limit=voltage_lower_limit, analysis_params=analysis_params, solve_params=solve_params) temp_dict.update(severity_dict) capacitor_sweep_list.append(temp_dict) if (cap_on_setting - cap_control_gap / 2) <= (voltage_lower_limit * voltage_config["nominal_voltage"]): @@ -242,7 +247,7 @@ def sweep_capacitor_settings(voltage_config, initial_capacitors_df, default_capa return capacitor_sweep_df -def choose_best_capacitor_sweep_setting(capacitor_sweep_df, initial_capacitors_df, deciding_field, **kwargs): +def choose_best_capacitor_sweep_setting(capacitor_sweep_df, initial_capacitors_df, deciding_field, solve_params): """This function takes the dataframe containing severity metrics, identifies the best cap control setting out of all the sweeps and returns dataframe of capacitor controls with best settings @@ -277,7 +282,7 @@ def choose_best_capacitor_sweep_setting(capacitor_sweep_df, initial_capacitors_d creation_action='Edit') for command_string in capacitor_settings_commands_list: check_dss_run_command(command_string) - circuit_solve_and_check(raise_exception=True, **kwargs) + circuit_solve_and_check(solve_params) # raise exception should be true if setting_type == 'initial_setting': # if initial settings are best, no need to return command with settings capacitor_settings_commands_list = [] return capacitors_df, capacitor_settings_commands_list @@ -308,23 +313,23 @@ def create_capcontrol_settings_commands(properties_list, 
capacitors_df, creation def determine_capacitor_upgrades(voltage_upper_limit, voltage_lower_limit, default_capacitor_settings, orig_capacitors_df, - voltage_config, deciding_field, **kwargs): + voltage_config, deciding_field, analysis_params, solve_params, + fig_folder=None, create_plots=False, circuit_source=None, title=None): + """This function corrects capacitor parameters, sweeps through capacitor settings and determines the best capacitor setting. It returns the dss commands associated with all these actions """ - fig_folder = kwargs.get("fig_folder", None) - create_plots = kwargs.get("create_plots", False) - circuit_source = kwargs.get("circuit_source", None) - title = kwargs.get("title", "Bus violations after existing capacitor sweep module_") - + if create_plots: + assert title is not None + assert fig_folder is not None capacitor_dss_commands = [] logger.info("Capacitors are present in the network. Perform capacitor bank control modifications.") capcontrol_parameter_commands_list = correct_capacitor_parameters( default_capacitor_settings=default_capacitor_settings, orig_capacitors_df=orig_capacitors_df, - nominal_voltage=voltage_config['nominal_voltage'], **kwargs) + nominal_voltage=voltage_config['nominal_voltage'], solve_params=solve_params) capacitor_dss_commands = capacitor_dss_commands + capcontrol_parameter_commands_list - bus_voltages_df, undervoltage_bus_list, overvoltage_bus_list, buses_with_violations = get_bus_voltages( - voltage_upper_limit=voltage_upper_limit, voltage_lower_limit=voltage_lower_limit, **kwargs) + bus_voltages_df, undervoltage_bus_list, overvoltage_bus_list, buses_with_violations = get_bus_voltage_violations( + voltage_upper_limit=voltage_upper_limit, voltage_lower_limit=voltage_lower_limit, analysis_params=analysis_params, solve_params=solve_params) if len(buses_with_violations) > 0: # get capacitors dataframe before any settings changes are made nosetting_changes_capacitors_df = get_capacitor_info(correct_PT_ratio=False) @@ 
-333,18 +338,18 @@ def determine_capacitor_upgrades(voltage_upper_limit, voltage_lower_limit, defau initial_capacitors_df=nosetting_changes_capacitors_df, default_capacitor_settings=default_capacitor_settings, voltage_upper_limit=voltage_upper_limit, - voltage_lower_limit=voltage_lower_limit, **kwargs) + voltage_lower_limit=voltage_lower_limit, analysis_params=analysis_params, solve_params=solve_params) # choose best capacitor settings capacitors_df, capcontrol_settings_commands_list = choose_best_capacitor_sweep_setting( capacitor_sweep_df=capacitor_sweep_df, initial_capacitors_df=nosetting_changes_capacitors_df, - deciding_field=deciding_field, **kwargs) + deciding_field=deciding_field, solve_params=solve_params) capacitor_dss_commands = capacitor_dss_commands + capcontrol_settings_commands_list # determine voltage violations after capacitor changes - bus_voltages_df, undervoltage_bus_list, overvoltage_bus_list, buses_with_violations = get_bus_voltages( - voltage_upper_limit=voltage_upper_limit, voltage_lower_limit=voltage_lower_limit, **kwargs) - if (fig_folder is not None) and create_plots: - plot_voltage_violations(fig_folder=fig_folder, title=title+ - str(len(buses_with_violations)), buses_with_violations=buses_with_violations, circuit_source=circuit_source, enable_detailed=True) + bus_voltages_df, undervoltage_bus_list, overvoltage_bus_list, buses_with_violations = get_bus_voltage_violations( + voltage_upper_limit=voltage_upper_limit, voltage_lower_limit=voltage_lower_limit, analysis_params=analysis_params, solve_params=solve_params) + if create_plots: + plot_voltage_violations(fig_folder=fig_folder, title=title+ + str(len(buses_with_violations)), buses_with_violations=buses_with_violations, circuit_source=circuit_source, enable_detailed=True) return capacitor_dss_commands @@ -390,12 +395,13 @@ def get_capacitor_upgrades(orig_capacitors_df, new_capacitors_df): final_cap_upgrades["cap_on"] = new_capcontrols[cap_name]["ONsetting"] final_cap_upgrades["cap_off"] 
= new_capcontrols[cap_name]["OFFsetting"] final_cap_upgrades["ctrl_type"] = new_capcontrols[cap_name]["capcontrol_type"] - final_cap_upgrades["cap_settings"] = True + final_cap_upgrades["cap_settings"] = False # if there are differences between original and new controllers if cap_name in modified_capacitors: # if control type in original controller is voltage, only settings are changed if orig_capcontrols[cap_name]["capcontrol_type"].lower().startswith("volt"): final_cap_upgrades["ctrl_added"] = False + final_cap_upgrades["cap_settings"] = True # if original controller type was different (current or time), new controller (voltage type) is said to be added else: final_cap_upgrades["ctrl_added"] = True @@ -403,27 +409,27 @@ def get_capacitor_upgrades(orig_capacitors_df, new_capacitors_df): elif cap_name in new_addition: final_cap_upgrades["ctrl_added"] = True # else: # TODO else condition to be added? - processed_outputs.append( - VoltageUpgradesTechnicalResultModel(**{ - "equipment_type": final_cap_upgrades["cap_name"].split(".")[0], - "name": final_cap_upgrades["cap_name"].split(".")[1], - "new_controller_added": final_cap_upgrades["ctrl_added"], - "controller_settings_modified": final_cap_upgrades["cap_settings"], - "final_settings": { - "kvar": final_cap_upgrades["cap_kvar"], - "kv": final_cap_upgrades["cap_kv"], - "capcontrol_name": final_cap_upgrades["ctrl_name"], - "capcontrol_type": final_cap_upgrades["ctrl_type"], - "ONsetting": final_cap_upgrades["cap_on"], - "OFFsetting": final_cap_upgrades["cap_off"] - }, - "new_transformer_added": False, - "at_substation": False, - })) + processed_outputs.append( + VoltageUpgradesTechnicalResultModel(**{ + "equipment_type": final_cap_upgrades["cap_name"].split(".")[0], + "name": final_cap_upgrades["cap_name"].split(".")[1], + "new_controller_added": final_cap_upgrades["ctrl_added"], + "controller_settings_modified": final_cap_upgrades["cap_settings"], + "final_settings": { + "kvar": final_cap_upgrades["cap_kvar"], + 
"kv": final_cap_upgrades["cap_kv"], + "capcontrol_name": final_cap_upgrades["ctrl_name"], + "capcontrol_type": final_cap_upgrades["ctrl_type"], + "ONsetting": final_cap_upgrades["cap_on"], + "OFFsetting": final_cap_upgrades["cap_off"] + }, + "new_transformer_added": False, + "at_substation": False, + })) return processed_outputs -def compute_voltage_violation_severity(voltage_upper_limit, voltage_lower_limit, **kwargs): +def compute_voltage_violation_severity(voltage_upper_limit, voltage_lower_limit, analysis_params, solve_params): """This function computes voltage violation severity metrics, based on bus voltages Parameters @@ -434,8 +440,10 @@ def compute_voltage_violation_severity(voltage_upper_limit, voltage_lower_limit, ------- Dict """ - bus_voltages_df, undervoltage_bus_list, overvoltage_bus_list, buses_with_violations = get_bus_voltages( - voltage_upper_limit=voltage_upper_limit, voltage_lower_limit=voltage_lower_limit, raise_exception=False, **kwargs) + resolve_params = solve_params.copy() + resolve_params.raise_exception = False + bus_voltages_df, undervoltage_bus_list, overvoltage_bus_list, buses_with_violations = get_bus_voltage_violations( + voltage_upper_limit=voltage_upper_limit, voltage_lower_limit=voltage_lower_limit, analysis_params=analysis_params, solve_params=resolve_params) deviation_severity = bus_voltages_df['min_voltage_deviation'].sum() + bus_voltages_df['max_voltage_deviation'].sum() undervoltage_bus_list = list( bus_voltages_df.loc[bus_voltages_df['undervoltage_violation'] == True]['name'].unique()) @@ -448,7 +456,7 @@ def compute_voltage_violation_severity(voltage_upper_limit, voltage_lower_limit, return severity_dict -def correct_regcontrol_parameters(orig_regcontrols_df, **kwargs): +def correct_regcontrol_parameters(orig_regcontrols_df, solve_params): """This function corrects regcontrol ptratio is different from original. 
And generates commands list Parameters @@ -459,6 +467,8 @@ def correct_regcontrol_parameters(orig_regcontrols_df, **kwargs): ------- list """ + solve_params_true = solve_params.copy() + solve_params_true.raise_exception = True # correct regcontrol parameters settings default_regcontrol_command = " enabled=Yes" orig_string = ' !original, corrected PTratio only' @@ -468,7 +478,7 @@ def correct_regcontrol_parameters(orig_regcontrols_df, **kwargs): command_string = f"Edit RegControl.{row['name']} ptratio={row['ptratio']}" + default_regcontrol_command\ + orig_string check_dss_run_command(command_string) - circuit_solve_and_check(raise_exception=True, **kwargs) + circuit_solve_and_check(solve_params_true) # raise exception is true # this does not change original settings, so should not cause convergence issues regcontrols_commands_list.append(command_string) return regcontrols_commands_list @@ -476,7 +486,7 @@ def correct_regcontrol_parameters(orig_regcontrols_df, **kwargs): @track_timing(timer_stats_collector) def sweep_regcontrol_settings(voltage_config, initial_regcontrols_df, voltage_upper_limit, voltage_lower_limit, - exclude_sub_ltc=True, only_sub_ltc=False, **kwargs): + analysis_params, solve_params, exclude_sub_ltc=True, only_sub_ltc=False): """This function increases differences vreg in user defined increments, until upper and lower bounds are reached. 
At a time, same settings are applied to all regulator controls @@ -493,6 +503,12 @@ def sweep_regcontrol_settings(voltage_config, initial_regcontrols_df, voltage_up ------- """ + if exclude_sub_ltc: + assert not only_sub_ltc + if only_sub_ltc: + assert not exclude_sub_ltc + solve_params_no_exception = solve_params.copy() + solve_params_no_exception.raise_exception = False if exclude_sub_ltc: initial_df = initial_regcontrols_df.loc[initial_regcontrols_df['at_substation_xfmr_flag'] == False] if only_sub_ltc: @@ -500,15 +516,16 @@ def sweep_regcontrol_settings(voltage_config, initial_regcontrols_df, voltage_up regcontrol_sweep_list = [] # this list will contain severity of each setting sweep # get severity index for original/initial settings (ie before the settings sweep) temp_dict = {'setting': 'original'} - bus_voltages_df, undervoltage_bus_list, overvoltage_bus_list, buses_with_violations = get_bus_voltages( - voltage_upper_limit=voltage_upper_limit, voltage_lower_limit=voltage_lower_limit, raise_exception=False, **kwargs) - pass_flag = circuit_solve_and_check(raise_exception=False, **kwargs) + bus_voltages_df, undervoltage_bus_list, overvoltage_bus_list, buses_with_violations = get_bus_voltage_violations( + voltage_upper_limit=voltage_upper_limit, voltage_lower_limit=voltage_lower_limit, analysis_params=analysis_params, + solve_params=solve_params_no_exception) + pass_flag = circuit_solve_and_check(solve_params_no_exception) # raise exception = False if not pass_flag: # if there is convergence issue at this setting, go onto next setting and dont save temp_dict['converged'] = False else: temp_dict['converged'] = True severity_dict = compute_voltage_violation_severity( - voltage_upper_limit=voltage_upper_limit, voltage_lower_limit=voltage_lower_limit) + voltage_upper_limit=voltage_upper_limit, voltage_lower_limit=voltage_lower_limit, analysis_params=analysis_params, solve_params=solve_params) temp_dict.update(severity_dict) regcontrol_sweep_list.append(temp_dict) # 
generate list of voltage setpoints @@ -525,29 +542,29 @@ def sweep_regcontrol_settings(voltage_config, initial_regcontrols_df, voltage_up for index, row in initial_df.iterrows(): logger.debug(f"{vreg}_{band}") check_dss_run_command(f"Edit RegControl.{row['name']} vreg={vreg} band={band}") - pass_flag = circuit_solve_and_check(raise_exception=False, **kwargs) + pass_flag = circuit_solve_and_check(solve_params_no_exception) # raise exception = False if not pass_flag: # if there is convergence issue at this setting, go onto next setting and dont save temp_dict['converged'] = False break else: temp_dict['converged'] = True try: - bus_voltages_df, undervoltage_bus_list, overvoltage_bus_list, buses_with_violations = get_bus_voltages( + bus_voltages_df, undervoltage_bus_list, overvoltage_bus_list, buses_with_violations = get_bus_voltage_violations( voltage_upper_limit=voltage_upper_limit, - voltage_lower_limit=voltage_lower_limit, **kwargs) + voltage_lower_limit=voltage_lower_limit, analysis_params=analysis_params, solve_params=solve_params) except: # catch convergence error temp_dict['converged'] = False break severity_dict = compute_voltage_violation_severity( - voltage_upper_limit=voltage_upper_limit, voltage_lower_limit=voltage_lower_limit) + voltage_upper_limit=voltage_upper_limit, voltage_lower_limit=voltage_lower_limit, analysis_params=analysis_params, solve_params=solve_params) temp_dict.update(severity_dict) regcontrol_sweep_list.append(temp_dict) regcontrol_sweep_df = pd.DataFrame(regcontrol_sweep_list) return regcontrol_sweep_df -def choose_best_regcontrol_sweep_setting(regcontrol_sweep_df, initial_regcontrols_df, deciding_field, exclude_sub_ltc=True, - only_sub_ltc=False, **kwargs): +def choose_best_regcontrol_sweep_setting(regcontrol_sweep_df, initial_regcontrols_df, deciding_field, solve_params, exclude_sub_ltc=True, + only_sub_ltc=False): """This function takes the dataframe containing severity metrics, identifies the best regcontrol setting out of all the 
sweeps and returns dataframe of regcontrols with best settings @@ -560,6 +577,8 @@ def choose_best_regcontrol_sweep_setting(regcontrol_sweep_df, initial_regcontrol ------- DataFrame """ + solve_params_exception_true = solve_params.copy() + solve_params_exception_true.raise_exception = True if exclude_sub_ltc: initial_df = initial_regcontrols_df.loc[initial_regcontrols_df['at_substation_xfmr_flag'] == False] if only_sub_ltc: @@ -584,7 +603,7 @@ def choose_best_regcontrol_sweep_setting(regcontrol_sweep_df, initial_regcontrol creation_action='Edit') for command_string in regcontrol_settings_commands_list: check_dss_run_command(command_string) - circuit_solve_and_check(raise_exception=True, **kwargs) + circuit_solve_and_check(solve_params_exception_true) # raise exception = True if setting_type == 'original': # if original settings, no need to add to upgrades commands list regcontrol_settings_commands_list = [] logger.info("Original Regulator control settings are the best.") @@ -615,7 +634,7 @@ def create_regcontrol_settings_commands(properties_list, regcontrols_df, creatio return regcontrol_commands_list -def add_new_regcontrol_command(xfmr_info_series, default_regcontrol_settings, nominal_voltage, action_type='New', **kwargs): +def add_new_regcontrol_command(xfmr_info_series, default_regcontrol_settings, nominal_voltage, solve_params, action_type='New'): """This function runs and returns the dss command to add regulator control at a transformer. It also solves the circuit and calculates voltage bases after the regulator has been added to the circuit. It calls another function to create the dss command. 
@@ -630,6 +649,9 @@ def add_new_regcontrol_command(xfmr_info_series, default_regcontrol_settings, no ------- """ + solve_params_specific = solve_params.copy() + solve_params_specific.raise_exception = False + solve_params_specific.calcvoltagebases = True command_list = [] regcontrol_info_series = pd.Series(default_regcontrol_settings) regcontrol_info_series['transformer'] = xfmr_info_series['name'] @@ -671,7 +693,7 @@ def add_new_regcontrol_command(xfmr_info_series, default_regcontrol_settings, no check_dss_run_command(new_regcontrol_command) # run command check_dss_run_command('CalcVoltageBases') # max control iterations could exceed here as well, when adding a new regulator - pass_flag = circuit_solve_and_check(raise_exception=False, calcvoltagebases=True, **kwargs) # solve circuit + pass_flag = circuit_solve_and_check(solve_params_specific) # solve circuit command_list.append(new_regcontrol_command) return {'command_list': command_list, 'new_regcontrol_name': regcontrol_info_series['regcontrol_name'], 'pass_flag': pass_flag} @@ -706,8 +728,11 @@ def define_regcontrol_object(regcontrol_name, action_type, regcontrol_info_serie def sweep_and_choose_regcontrol_setting(voltage_config, initial_regcontrols_df, upper_limit, lower_limit, - dss_file_list, deciding_field, correct_parameters=False, exclude_sub_ltc=True, - only_sub_ltc=False, previous_dss_commands_list=None, **kwargs): + dss_file_list, deciding_field, solve_params, analysis_params, reload_circuit_params, previous_dss_commands_list, + create_plots=False, fig_folder=None, title=None, circuit_source=None, + correct_parameters=False, exclude_sub_ltc=True, + only_sub_ltc=False, + ): """This function combines the regcontrol settings sweep and choosing of best setting. 
Parameters @@ -725,50 +750,61 @@ def sweep_and_choose_regcontrol_setting(voltage_config, initial_regcontrols_df, ------- """ - fig_folder = kwargs.get("fig_folder", None) - create_plots = kwargs.get("create_plots", False) - circuit_source = kwargs.get("circuit_source", None) - title = kwargs.get("title", None) + if create_plots: + assert title is not None + assert fig_folder is not None + + if exclude_sub_ltc: + assert not only_sub_ltc + if only_sub_ltc: + assert not exclude_sub_ltc + reg_sweep_commands_list = [] if correct_parameters: # first correct regcontrol parameters (ptratio) including substation LTC, if present regcontrols_parameter_command_list = correct_regcontrol_parameters(orig_regcontrols_df=initial_regcontrols_df, - **kwargs) + solve_params=solve_params) reg_sweep_commands_list = regcontrols_parameter_command_list # sweep through settings and identify best setting regcontrol_sweep_df = sweep_regcontrol_settings(voltage_config=voltage_config, initial_regcontrols_df=initial_regcontrols_df, voltage_upper_limit=upper_limit, voltage_lower_limit=lower_limit, exclude_sub_ltc=exclude_sub_ltc, only_sub_ltc=only_sub_ltc, - **kwargs) + analysis_params=analysis_params, solve_params=solve_params) # reload circuit after settings sweep - reload_dss_circuit(dss_file_list=dss_file_list, commands_list=previous_dss_commands_list+reg_sweep_commands_list, **kwargs) + reload_dss_circuit(dss_file_list=dss_file_list, commands_list=previous_dss_commands_list+reg_sweep_commands_list, solve_params=solve_params, reload_circuit_params=reload_circuit_params) # choose best setting regcontrols_df, regcontrol_settings_commands_list = choose_best_regcontrol_sweep_setting( regcontrol_sweep_df=regcontrol_sweep_df, initial_regcontrols_df=initial_regcontrols_df, - exclude_sub_ltc=exclude_sub_ltc, only_sub_ltc=only_sub_ltc, deciding_field=deciding_field, **kwargs) + exclude_sub_ltc=exclude_sub_ltc, only_sub_ltc=only_sub_ltc, deciding_field=deciding_field, solve_params=solve_params) 
reg_sweep_commands_list = reg_sweep_commands_list + regcontrol_settings_commands_list - if (fig_folder is not None) and create_plots and (title is not None): - bus_voltages_df, undervoltage_bus_list, overvoltage_bus_list, buses_with_violations = get_bus_voltages( - voltage_upper_limit=upper_limit, voltage_lower_limit=lower_limit, **kwargs) - plot_voltage_violations(fig_folder=fig_folder, title=title+ - str(len(buses_with_violations)), buses_with_violations=buses_with_violations, circuit_source=circuit_source, enable_detailed=True) + if create_plots: + assert fig_folder is not None + assert title is not None + bus_voltages_df, undervoltage_bus_list, overvoltage_bus_list, buses_with_violations = get_bus_voltage_violations( + voltage_upper_limit=upper_limit, voltage_lower_limit=lower_limit, analysis_params=analysis_params, solve_params=solve_params) + plot_voltage_violations(fig_folder=fig_folder, title=title+str(len(buses_with_violations)), + buses_with_violations=buses_with_violations, circuit_source=circuit_source, enable_detailed=True) return regcontrols_df, reg_sweep_commands_list def determine_substation_ltc_upgrades(voltage_upper_limit, voltage_lower_limit, orig_regcontrols_df, orig_ckt_info, circuit_source, default_subltc_settings, voltage_config, dss_file_list, comparison_dict, deciding_field, - previous_dss_commands_list, best_setting_so_far, **kwargs): + previous_dss_commands_list, best_setting_so_far, analysis_params, solve_params, reload_circuit_params, fig_folder=None, create_plots=False, + default_capacitor_settings=None): """Function determine substation LTC upgrades: # Use this block for adding a substation LTC, correcting its settings and running a sub LTC settings sweep. # if LTC exists, first try to correct its non set point simulation settings. # If this does not correct everything, correct its set points through a sweep. 
# If LTC does not exist, add one including a xfmr if required, then do a settings sweep if required """ - fig_folder = kwargs.get("fig_folder", None) - create_plots = kwargs.get("create_plots", False) + if create_plots: + assert fig_folder is not None + + solve_params_true_exception = solve_params.copy() + solve_params_true_exception.raise_exception = True results_dict = {} all_commands_list = previous_dss_commands_list @@ -784,61 +820,74 @@ def determine_substation_ltc_upgrades(voltage_upper_limit, voltage_lower_limit, logger.info("Substation transformer does not exist. So adding transformer and regcontrol on it.") # check add substation transformer and add ltc reg control on it new_subxfmr_added_dict = add_new_node_and_xfmr(action_type='New', node=circuit_source, circuit_source=circuit_source, - xfmr_conn_type="wye" ,**kwargs) + xfmr_conn_type="wye", solve_params=solve_params) add_subxfmr_commands = new_subxfmr_added_dict['commands_list'] comparison_dict["temp_afterxfmr"] = compute_voltage_violation_severity(voltage_upper_limit=voltage_upper_limit, - voltage_lower_limit=voltage_lower_limit, **kwargs) + voltage_lower_limit=voltage_lower_limit, + analysis_params=analysis_params, solve_params=solve_params) updated_ckt_info = get_circuit_info() - bus_voltages_df, undervoltage_bus_list, overvoltage_bus_list, buses_with_violations = get_bus_voltages(voltage_upper_limit=voltage_upper_limit, - voltage_lower_limit=voltage_lower_limit, **kwargs) + bus_voltages_df, undervoltage_bus_list, overvoltage_bus_list, buses_with_violations = get_bus_voltage_violations(voltage_upper_limit=voltage_upper_limit, + voltage_lower_limit=voltage_lower_limit, analysis_params=analysis_params, + solve_params=solve_params) new_subltc_added_dict = add_new_regcontrol_command( xfmr_info_series=pd.Series(updated_ckt_info['substation_xfmr']), default_regcontrol_settings=default_subltc_settings, - nominal_voltage=voltage_config["nominal_voltage"], **kwargs) - comparison_dict["temp_afterltc"] = 
compute_voltage_violation_severity(voltage_upper_limit=voltage_upper_limit, voltage_lower_limit=voltage_lower_limit, **kwargs) + nominal_voltage=voltage_config["nominal_voltage"], solve_params=solve_params) + comparison_dict["temp_afterltc"] = compute_voltage_violation_severity(voltage_upper_limit=voltage_upper_limit, voltage_lower_limit=voltage_lower_limit, + analysis_params=analysis_params, solve_params=solve_params) add_subltc_commands = new_subltc_added_dict["command_list"] if not new_subltc_added_dict["pass_flag"]: logger.info("No convergence after adding regulator control at substation LTC. " "Check if there is any setting that has convergence. Else remove substation LTC") + solve_params_specific = solve_params.copy() + solve_params_specific.calcvoltagebases=True reload_dss_circuit(dss_file_list=dss_file_list, commands_list=all_commands_list + add_subxfmr_commands + add_subltc_commands, - calcvoltagebases=True, **kwargs) + solve_params=solve_params, reload_circuit_params=reload_circuit_params) # this needs to be collected again, since a new regulator control might have been added at the substation initial_sub_regcontrols_df = get_regcontrol_info(correct_PT_ratio=True, nominal_voltage=voltage_config["nominal_voltage"]) # sweep through settings and identify best setting + title = "Bus violations after subltc sweep with new subxfmr, LTC " subltc_controls_df, subltc_control_settings_commands_list = sweep_and_choose_regcontrol_setting( voltage_config=voltage_config, initial_regcontrols_df=initial_sub_regcontrols_df, deciding_field=deciding_field, upper_limit=voltage_upper_limit, lower_limit=voltage_lower_limit, exclude_sub_ltc=False, only_sub_ltc=True, dss_file_list=dss_file_list, - previous_dss_commands_list=all_commands_list + add_subxfmr_commands + add_subltc_commands, **kwargs) - circuit_solve_and_check(raise_exception=True, **kwargs) + previous_dss_commands_list=all_commands_list + add_subxfmr_commands + add_subltc_commands, analysis_params=analysis_params, + 
solve_params=solve_params, reload_circuit_params=reload_circuit_params, + fig_folder=fig_folder, create_plots=create_plots, title=title, circuit_source=circuit_source) + circuit_solve_and_check(solve_params=solve_params_true_exception) # raise exception=True subltc_upgrade_commands = add_subxfmr_commands + add_subltc_commands + subltc_control_settings_commands_list all_commands_list = all_commands_list + subltc_upgrade_commands # if substation transformer is present but there are no regulator controls on the subltc elif (orig_ckt_info['substation_xfmr'] is not None) and (not subltc_present_flag): logger.info("Substation transformer exists, but there are no regulator controls on it. Adding..") - new_subltc_added_dict = add_new_regcontrol_command( - xfmr_info_series=pd.Series(updated_ckt_info['substation_xfmr']), + xfmr_info_series=pd.Series(orig_ckt_info['substation_xfmr']), default_regcontrol_settings=default_subltc_settings, - nominal_voltage=voltage_config["nominal_voltage"], **kwargs) - comparison_dict["temp_afterltc"] = compute_voltage_violation_severity(voltage_upper_limit=voltage_upper_limit, voltage_lower_limit=voltage_lower_limit, **kwargs) + nominal_voltage=voltage_config["nominal_voltage"], solve_params=solve_params) + comparison_dict["temp_afterltc"] = compute_voltage_violation_severity(voltage_upper_limit=voltage_upper_limit, voltage_lower_limit=voltage_lower_limit, + analysis_params=analysis_params, solve_params=solve_params) add_subltc_commands = new_subltc_added_dict["command_list"] if not new_subltc_added_dict["pass_flag"]: logger.info("No convergence after adding regulator control at substation LTC. " "Check if there is any setting that has convergence. 
Else remove substation LTC") reload_dss_circuit(dss_file_list=dss_file_list, commands_list=all_commands_list + add_subltc_commands, - **kwargs) + solve_params=solve_params, reload_circuit_params=reload_circuit_params) # this needs to be collected again, since a new regulator control might have been added at the substation initial_sub_regcontrols_df = get_regcontrol_info(correct_PT_ratio=True, nominal_voltage=voltage_config["nominal_voltage"]) # sweep through settings and identify best setting + title = "Bus violations after subltc sweep with new LTC " subltc_controls_df, subltc_control_settings_commands_list = sweep_and_choose_regcontrol_setting( voltage_config=voltage_config, initial_regcontrols_df=initial_sub_regcontrols_df, deciding_field=deciding_field, upper_limit=voltage_upper_limit, lower_limit=voltage_lower_limit, exclude_sub_ltc=False, only_sub_ltc=True, dss_file_list=dss_file_list, - previous_dss_commands_list=all_commands_list + add_subltc_commands, **kwargs) - circuit_solve_and_check(raise_exception=True, **kwargs) + previous_dss_commands_list=all_commands_list + add_subltc_commands, analysis_params=analysis_params, + solve_params=solve_params, reload_circuit_params=reload_circuit_params, + fig_folder=fig_folder, create_plots=create_plots, title=title, circuit_source=circuit_source) + solve_params_specific = solve_params.copy() + solve_params_specific.raise_exception = True + circuit_solve_and_check(solve_params_specific) subltc_upgrade_commands = add_subltc_commands + subltc_control_settings_commands_list all_commands_list = all_commands_list + subltc_upgrade_commands # if substation transformer, and reg controls are both present @@ -847,30 +896,35 @@ def determine_substation_ltc_upgrades(voltage_upper_limit, voltage_lower_limit, initial_sub_regcontrols_df = get_regcontrol_info(correct_PT_ratio=True, nominal_voltage=voltage_config["nominal_voltage"]) # sweep through settings and identify best setting + title = "Bus violations after subltc setting sweep 
" subltc_controls_df, subltc_control_settings_commands_list = sweep_and_choose_regcontrol_setting( voltage_config=voltage_config, initial_regcontrols_df=initial_sub_regcontrols_df, deciding_field=deciding_field, upper_limit=voltage_upper_limit, lower_limit=voltage_lower_limit, exclude_sub_ltc=False, only_sub_ltc=True, dss_file_list=dss_file_list, - previous_dss_commands_list=all_commands_list, **kwargs) - circuit_solve_and_check(raise_exception=True, **kwargs) + previous_dss_commands_list=all_commands_list, analysis_params=analysis_params, solve_params=solve_params, reload_circuit_params=reload_circuit_params, + fig_folder=fig_folder, create_plots=create_plots, title=title, circuit_source=circuit_source) + circuit_solve_and_check(solve_params=solve_params_true_exception) # raise exception=True subltc_upgrade_commands = subltc_control_settings_commands_list all_commands_list = all_commands_list + subltc_upgrade_commands - reload_dss_circuit(dss_file_list=dss_file_list, commands_list=all_commands_list, calcvoltagebases=True, **kwargs) + solve_params_specific = solve_params.copy() + solve_params_specific.calcvoltagebases=True + reload_dss_circuit(dss_file_list=dss_file_list, commands_list=all_commands_list, solve_params=solve_params_specific, reload_circuit_params=reload_circuit_params) # determine voltage violations after changes - bus_voltages_df, undervoltage_bus_list, overvoltage_bus_list, buses_with_violations = get_bus_voltages( - voltage_upper_limit=voltage_upper_limit, voltage_lower_limit=voltage_lower_limit, **kwargs) + bus_voltages_df, undervoltage_bus_list, overvoltage_bus_list, buses_with_violations = get_bus_voltage_violations( + voltage_upper_limit=voltage_upper_limit, voltage_lower_limit=voltage_lower_limit, solve_params=solve_params, analysis_params=analysis_params) comparison_dict["after_sub_ltc_checking"] = compute_voltage_violation_severity(voltage_upper_limit=voltage_upper_limit, - voltage_lower_limit=voltage_lower_limit, **kwargs) + 
voltage_lower_limit=voltage_lower_limit, + analysis_params=analysis_params, solve_params=solve_params) if comparison_dict["after_sub_ltc_checking"][deciding_field] < comparison_dict[best_setting_so_far][deciding_field]: best_setting_so_far = "after_sub_ltc_checking" - if (fig_folder is not None) and create_plots: + if create_plots: plot_voltage_violations(fig_folder=fig_folder, title="Bus violations after substation ltc module_"+ str(len(buses_with_violations)), buses_with_violations=buses_with_violations, circuit_source=circuit_source, enable_detailed=True) else: all_commands_list = list(set(all_commands_list) - set(subltc_upgrade_commands)) subltc_upgrade_commands = [] - reload_dss_circuit(dss_file_list=dss_file_list, commands_list=all_commands_list, **kwargs) + reload_dss_circuit(dss_file_list=dss_file_list, commands_list=all_commands_list, solve_params=solve_params, reload_circuit_params=reload_circuit_params) if (best_setting_so_far == "after_sub_ltc_checking") and (len(buses_with_violations) > 0): # after this, also run settings sweep on all vregs (other than substation LTC), since this can impact those settings too. 
@@ -878,30 +932,36 @@ def determine_substation_ltc_upgrades(voltage_upper_limit, voltage_lower_limit, orig_regcontrols_df = orig_regcontrols_df.loc[orig_regcontrols_df['at_substation_xfmr_flag'] == False] if (not orig_regcontrols_df.empty) and voltage_config["existing_regulator_sweep_action"]: logger.info("After substation LTC module, settings sweep for existing reg control devices (excluding substation LTC).") - kwargs["title"] = "Bus violations after subltc and vreg sweep" + title = "Bus violations after subltc_vreg sweep " regcontrols_df, reg_sweep_commands_list = sweep_and_choose_regcontrol_setting(voltage_config=voltage_config, initial_regcontrols_df=orig_regcontrols_df, upper_limit=voltage_upper_limit, lower_limit=voltage_lower_limit, dss_file_list=dss_file_list, deciding_field=deciding_field, correct_parameters=False, - exclude_sub_ltc=True, only_sub_ltc=False, previous_dss_commands_list=all_commands_list, - **kwargs) + exclude_sub_ltc=True, only_sub_ltc=False, previous_dss_commands_list=all_commands_list, title=title, + create_plots=create_plots, fig_folder=fig_folder, + analysis_params=analysis_params, solve_params=solve_params, reload_circuit_params=reload_circuit_params) comparison_dict["after_sub_ltc_and_vreg_checking"] = compute_voltage_violation_severity(voltage_upper_limit=voltage_upper_limit, - voltage_lower_limit=voltage_lower_limit, **kwargs) + voltage_lower_limit=voltage_lower_limit, + analysis_params=analysis_params, + solve_params=solve_params) best_setting_so_far = "after_sub_ltc_and_vreg_checking" # added to commands list subltc_upgrade_commands = subltc_upgrade_commands + reg_sweep_commands_list orig_capacitors_df = get_capacitor_info(correct_PT_ratio=True, nominal_voltage=voltage_config['nominal_voltage']) if voltage_config["capacitor_action_flag"] and len(orig_capacitors_df) > 0: - default_capacitor_settings = kwargs.pop("default_capacitor_settings", None) - kwargs["title"] = "Bus violations after subltc_vreg_cap sweep" - 
capacitor_dss_commands = determine_capacitor_upgrades(voltage_upper_limit, voltage_lower_limit, default_capacitor_settings, orig_capacitors_df, - voltage_config, deciding_field, **kwargs) + assert default_capacitor_settings is not None + title = "Bus violations after subltc_vreg_cap sweep " + capacitor_dss_commands = determine_capacitor_upgrades(voltage_upper_limit=voltage_upper_limit, voltage_lower_limit=voltage_lower_limit, + default_capacitor_settings=default_capacitor_settings, orig_capacitors_df=orig_capacitors_df, + voltage_config=voltage_config, deciding_field=deciding_field, title=title, create_plots=create_plots, + fig_folder=fig_folder, solve_params=solve_params, analysis_params=analysis_params) subltc_upgrade_commands = subltc_upgrade_commands + capacitor_dss_commands comparison_dict["after_sub_ltc_vreg_cap_checking"] = compute_voltage_violation_severity(voltage_upper_limit=voltage_upper_limit, - voltage_lower_limit=voltage_lower_limit, **kwargs) + voltage_lower_limit=voltage_lower_limit, + analysis_params=analysis_params, + solve_params=solve_params) best_setting_so_far = "after_sub_ltc_vreg_cap_checking" - results_dict["comparison_dict"] = comparison_dict results_dict["best_setting_so_far"] = best_setting_so_far results_dict["subltc_upgrade_commands"] = subltc_upgrade_commands @@ -910,9 +970,12 @@ def determine_substation_ltc_upgrades(voltage_upper_limit, voltage_lower_limit, def determine_new_regulator_upgrades(voltage_config, buses_with_violations, voltage_upper_limit, voltage_lower_limit, deciding_field, circuit_source, default_regcontrol_settings, comparison_dict, best_setting_so_far, dss_file_list, - previous_dss_commands_list, fig_folder=None, create_plots=False, **kwargs): + previous_dss_commands_list, analysis_params, solve_params, reload_circuit_params, + fig_folder=None, create_plots=False): """Function to determine dss upgrade commands if new regulator is to be placed to resolve voltage violations in circuit. 
""" + if create_plots: + assert fig_folder is not None dss_commands_list = previous_dss_commands_list new_reg_upgrade_commands = [] logger.info("Place new regulators.") @@ -921,23 +984,26 @@ def determine_new_regulator_upgrades(voltage_config, buses_with_violations, volt circuit_source=circuit_source, initial_buses_with_violations=buses_with_violations, voltage_upper_limit=voltage_upper_limit, - voltage_lower_limit=voltage_lower_limit, create_plots=create_plots, + voltage_lower_limit=voltage_lower_limit, voltage_config=voltage_config, default_regcontrol_settings=default_regcontrol_settings, deciding_field=deciding_field, - fig_folder=fig_folder, **kwargs) + fig_folder=fig_folder, create_plots=create_plots, analysis_params=analysis_params, + solve_params=solve_params) if not regcontrol_cluster_commands: # if there are no regcontrol commands - reload_dss_circuit(dss_file_list=dss_file_list, commands_list=previous_dss_commands_list, **kwargs) + reload_dss_circuit(dss_file_list=dss_file_list, commands_list=previous_dss_commands_list, solve_params=solve_params, reload_circuit_params=reload_circuit_params) comparison_dict["after_addition_new_regcontrol"] = compute_voltage_violation_severity( - voltage_upper_limit=voltage_upper_limit, voltage_lower_limit=voltage_lower_limit, **kwargs) + voltage_upper_limit=voltage_upper_limit, voltage_lower_limit=voltage_lower_limit, analysis_params=analysis_params, solve_params=solve_params) return {"new_reg_upgrade_commands": [], "comparison_dict": comparison_dict, "best_setting_so_far": best_setting_so_far} - reg_upgrade_commands = get_newly_added_regulator_settings(dss_file_list, previous_dss_commands_list, regcontrol_cluster_commands, voltage_lower_limit, voltage_upper_limit, - voltage_config, deciding_field, **kwargs) + reg_upgrade_commands = get_newly_added_regulator_settings(dss_file_list=dss_file_list, previous_dss_commands_list=previous_dss_commands_list, + regcontrol_cluster_commands=regcontrol_cluster_commands, 
voltage_lower_limit=voltage_lower_limit, + voltage_upper_limit=voltage_upper_limit, voltage_config=voltage_config, deciding_field=deciding_field, + analysis_params=analysis_params, solve_params=solve_params, reload_circuit_params=reload_circuit_params) dss_commands_list = previous_dss_commands_list + reg_upgrade_commands comparison_dict["after_addition_new_regcontrol"] = compute_voltage_violation_severity( - voltage_upper_limit=voltage_upper_limit, voltage_lower_limit=voltage_lower_limit, **kwargs) + voltage_upper_limit=voltage_upper_limit, voltage_lower_limit=voltage_lower_limit, analysis_params=analysis_params, solve_params=solve_params) if comparison_dict["after_addition_new_regcontrol"][deciding_field] < comparison_dict[best_setting_so_far][deciding_field]: best_setting_so_far = "after_addition_new_regcontrol" new_reg_upgrade_commands = reg_upgrade_commands @@ -947,15 +1013,16 @@ def determine_new_regulator_upgrades(voltage_config, buses_with_violations, volt if remove_commands_list: dss_commands_list = [i for i in dss_commands_list if i not in remove_commands_list] - reload_dss_circuit(dss_file_list=dss_file_list, commands_list=dss_commands_list, **kwargs) + reload_dss_circuit(dss_file_list=dss_file_list, commands_list=dss_commands_list, solve_params=solve_params, reload_circuit_params=reload_circuit_params) comparison_dict["disabled_new_regcontrol"] = compute_voltage_violation_severity( - voltage_upper_limit=voltage_upper_limit, voltage_lower_limit=voltage_lower_limit, **kwargs) + voltage_upper_limit=voltage_upper_limit, voltage_lower_limit=voltage_lower_limit, + analysis_params=analysis_params, solve_params=solve_params) return {"new_reg_upgrade_commands": new_reg_upgrade_commands, "comparison_dict": comparison_dict, "best_setting_so_far": best_setting_so_far} def get_newly_added_regulator_settings(dss_file_list, previous_dss_commands_list, regcontrol_cluster_commands, voltage_lower_limit, voltage_upper_limit, - voltage_config, deciding_field, **kwargs): + 
voltage_config, deciding_field, analysis_params, solve_params, reload_circuit_params): """this function finalizes the settings with the newly added voltage regulator. It also takes into account errors encountered due to max control iterations being exceeded. """ @@ -966,26 +1033,30 @@ def get_newly_added_regulator_settings(dss_file_list, previous_dss_commands_list regcontrol_sweep_df = sweep_regcontrol_settings(voltage_config=voltage_config, initial_regcontrols_df=regcontrol_df, voltage_upper_limit=voltage_upper_limit, voltage_lower_limit=voltage_lower_limit, - exclude_sub_ltc=True, only_sub_ltc=False, **kwargs) + exclude_sub_ltc=True, only_sub_ltc=False, analysis_params=analysis_params, solve_params=solve_params) regcontrols_df, regcontrol_settings_commands_list = choose_best_regcontrol_sweep_setting(deciding_field=deciding_field, - regcontrol_sweep_df=regcontrol_sweep_df, initial_regcontrols_df=regcontrol_df, **kwargs) - reload_dss_circuit(dss_file_list=dss_file_list, commands_list=previous_dss_commands_list + regcontrol_cluster_commands + regcontrol_settings_commands_list, **kwargs) # reload circuit after settings sweep + regcontrol_sweep_df=regcontrol_sweep_df, initial_regcontrols_df=regcontrol_df, solve_params=solve_params, exclude_sub_ltc=True, only_sub_ltc=False) + reload_dss_circuit(dss_file_list=dss_file_list, commands_list=previous_dss_commands_list + regcontrol_cluster_commands + regcontrol_settings_commands_list, + solve_params=solve_params, reload_circuit_params=reload_circuit_params) # reload circuit after settings sweep reg_upgrade_commands = regcontrol_cluster_commands + regcontrol_settings_commands_list except DSSException as err: # if there is an error: dss._cffi_api_util.DSSException: (#485) if err.args[0] != 485: raise logger.info(f"First attempt at regulator settings sweep failed with error: {err}.") - reload_dss_circuit(dss_file_list=dss_file_list, commands_list=previous_dss_commands_list, **kwargs) # reload circuit before clustering + 
reload_dss_circuit(dss_file_list=dss_file_list, commands_list=previous_dss_commands_list, solve_params=solve_params, reload_circuit_params=reload_circuit_params) # reload circuit before clustering temp = [] for command in regcontrol_cluster_commands: # extract only new addition commands if "edit regcontrol." not in command.lower(): temp.append(command) # control iterations are exceeded - max_control_iterations = kwargs.get("max_control_iterations", dss.Solution.MaxControlIterations()) # get setting + max_control_iterations = reload_circuit_params.max_control_iterations + if max_control_iterations is None: + max_control_iterations = dss.Solution.MaxControlIterations() increase_control_iterations = max_control_iterations + 50 # here iterations are increased by 50, to reach a solution - logger.info(f"Increased MaxControlIterations from {dss.Solution.MaxControlIterations()} to {increase_control_iterations}") + logger.info(f"Increased MaxControlIterations from {max_control_iterations} to {increase_control_iterations}") dss.Solution.MaxControlIterations(increase_control_iterations) - kwargs["max_control_iterations"] = max_control_iterations + # reload_circuit_params_new = reload_circuit_params.copy() + reload_circuit_params.max_control_iterations = increase_control_iterations # The usual reason for exceeding MaxControlIterations is conflicting controls i.e., one or more RegControl devices are oscillating between taps # so try increasing band of reg control new_voltage_config = voltage_config.copy() @@ -993,26 +1064,27 @@ def get_newly_added_regulator_settings(dss_file_list, previous_dss_commands_list try: # First try for regulator controls (without subLTC) logger.info(f"Retrying settings sweep for existing reg control devices (other than sub LTC) with increased band to resolve error.") - reload_dss_circuit(dss_file_list=dss_file_list, commands_list=previous_dss_commands_list+temp, **kwargs) # reload circuit before settings edits + 
reload_dss_circuit(dss_file_list=dss_file_list, commands_list=previous_dss_commands_list+temp, reload_circuit_params=reload_circuit_params, solve_params=solve_params) # reload circuit before settings edits regcontrol_df = get_regcontrol_info() regcontrol_sweep_df = sweep_regcontrol_settings(voltage_config=new_voltage_config, initial_regcontrols_df=regcontrol_df, voltage_upper_limit=voltage_upper_limit, voltage_lower_limit=voltage_lower_limit, - exclude_sub_ltc=True, only_sub_ltc=False, **kwargs) + exclude_sub_ltc=True, only_sub_ltc=False, analysis_params=analysis_params, solve_params=solve_params) regcontrols_df, regcontrol_settings_commands_list = choose_best_regcontrol_sweep_setting(deciding_field=deciding_field, - regcontrol_sweep_df=regcontrol_sweep_df, initial_regcontrols_df=regcontrol_df, **kwargs) - reload_dss_circuit(dss_file_list=dss_file_list, commands_list=previous_dss_commands_list+temp+regcontrol_settings_commands_list, **kwargs) # reload circuit after settings sweep - bus_voltages_df, undervoltage_bus_list, overvoltage_bus_list, buses_with_violations = get_bus_voltages( - voltage_upper_limit=voltage_upper_limit, voltage_lower_limit=voltage_lower_limit, **kwargs) + regcontrol_sweep_df=regcontrol_sweep_df, initial_regcontrols_df=regcontrol_df, solve_params=solve_params, exclude_sub_ltc=True, only_sub_ltc=False) + reload_dss_circuit(dss_file_list=dss_file_list, commands_list=previous_dss_commands_list+temp+regcontrol_settings_commands_list, + solve_params=solve_params, reload_circuit_params=reload_circuit_params) # reload circuit after settings sweep + bus_voltages_df, undervoltage_bus_list, overvoltage_bus_list, buses_with_violations = get_bus_voltage_violations( + voltage_upper_limit=voltage_upper_limit, voltage_lower_limit=voltage_lower_limit, analysis_params=analysis_params, solve_params=solve_params) reg_upgrade_commands = temp + regcontrol_settings_commands_list if len(buses_with_violations) > 0: regcontrol_df = get_regcontrol_info() 
regcontrol_sweep_df = sweep_regcontrol_settings(voltage_config=new_voltage_config, initial_regcontrols_df=regcontrol_df, voltage_upper_limit=voltage_upper_limit, voltage_lower_limit=voltage_lower_limit, - exclude_sub_ltc=False, only_sub_ltc=True, **kwargs) + exclude_sub_ltc=False, only_sub_ltc=True, analysis_params=analysis_params, solve_params=solve_params) regcontrols_df, subltc_settings_commands_list = choose_best_regcontrol_sweep_setting(deciding_field=deciding_field, - regcontrol_sweep_df=regcontrol_sweep_df, initial_regcontrols_df=regcontrol_df, **kwargs) + regcontrol_sweep_df=regcontrol_sweep_df, initial_regcontrols_df=regcontrol_df, exclude_sub_ltc=False, only_sub_ltc=True, solve_params=solve_params) reg_upgrade_commands = reg_upgrade_commands + subltc_settings_commands_list except DSSException as err: @@ -1020,21 +1092,22 @@ def get_newly_added_regulator_settings(dss_file_list, previous_dss_commands_list raise # next try for sub LTC only (if it exists) logger.info(f"Control iterations exceeded. 
Retrying settings sweep for sub LTC with increased band to resolve error.") - reload_dss_circuit(dss_file_list=dss_file_list, commands_list=previous_dss_commands_list+temp, **kwargs) # reload circuit before settings edits + reload_dss_circuit(dss_file_list=dss_file_list, commands_list=previous_dss_commands_list+temp, + solve_params=solve_params, reload_circuit_params=reload_circuit_params) # reload circuit before settings edits regcontrol_df = get_regcontrol_info() regcontrol_sweep_df = sweep_regcontrol_settings(voltage_config=new_voltage_config, initial_regcontrols_df=regcontrol_df, voltage_upper_limit=voltage_upper_limit, voltage_lower_limit=voltage_lower_limit, - exclude_sub_ltc=False, only_sub_ltc=True, **kwargs) + exclude_sub_ltc=False, only_sub_ltc=True, analysis_params=analysis_params, solve_params=solve_params) regcontrols_df, regcontrol_settings_commands_list = choose_best_regcontrol_sweep_setting(deciding_field=deciding_field, - regcontrol_sweep_df=regcontrol_sweep_df, initial_regcontrols_df=regcontrol_df, **kwargs) - reload_dss_circuit(dss_file_list=dss_file_list, commands_list=previous_dss_commands_list+temp+regcontrol_settings_commands_list, **kwargs) # reload circuit after settings sweep + regcontrol_sweep_df=regcontrol_sweep_df, initial_regcontrols_df=regcontrol_df, exclude_sub_ltc=False, only_sub_ltc=True, solve_params=solve_params) + reload_dss_circuit(dss_file_list=dss_file_list, commands_list=previous_dss_commands_list+temp+regcontrol_settings_commands_list, solve_params=solve_params, + reload_circuit_params=reload_circuit_params) # reload circuit after settings sweep reg_upgrade_commands = temp + regcontrol_settings_commands_list return reg_upgrade_commands -def add_new_node_and_xfmr(node, circuit_source, xfmr_conn_type=None, action_type='New', - **kwargs): +def add_new_node_and_xfmr(node, circuit_source, solve_params, xfmr_conn_type=None, action_type='New'): """This function adds a new transformer by creating a new node (before or after a line, 
depending on whether it is a substation xfmr) action_type parameter is 'New' by default, unless we're redefining, and the 'Edit' has to be passed. @@ -1049,6 +1122,10 @@ def add_new_node_and_xfmr(node, circuit_source, xfmr_conn_type=None, action_type ------- """ + solve_params_specific = solve_params.copy() + solve_params.raise_exception = True + solve_params.calcvoltagebases = True + substation_node_flag = False commands_list = [] node = node.lower() @@ -1154,13 +1231,13 @@ def add_new_node_and_xfmr(node, circuit_source, xfmr_conn_type=None, action_type commands_list.append(edit_line_command_string) commands_list.append(new_xfmr_command_string) commands_list.append(f"// new node added {new_node.split('.')[0]},{x},{y}") - circuit_solve_and_check(raise_exception=True, calcvoltagebases=True, **kwargs) + circuit_solve_and_check(solve_params_specific) # taise exception=True, calcvoltagebases=True info_dict = {'commands_list': commands_list, 'new_xfmr_name': xfmr_name, 'modified_line_name': chosen_line_info["name"]} return info_dict -def disable_new_xfmr_and_edit_line(transformer_name_to_disable, line_name_to_modify, **kwargs): +def disable_new_xfmr_and_edit_line(transformer_name_to_disable, line_name_to_modify, solve_params): """This function disables an added transformer in the feeder. since OpenDSS disables by transformer by opening the circuit instead of creating a short circuit, this function will remove the transformer by first disabling it, then it will connect the line properly to @@ -1175,6 +1252,8 @@ def disable_new_xfmr_and_edit_line(transformer_name_to_disable, line_name_to_mod ------- """ + solve_params_specific = solve_params.copy() + solve_params_specific.raise_exception = True commands_list = [] # for regulators, added transformer is always placed after the line (i.e. after 'to' node of line) # i.e. 
for this transformer: primary bus: newly created node, secondary bus: existing node @@ -1203,11 +1282,11 @@ def disable_new_xfmr_and_edit_line(transformer_name_to_disable, line_name_to_mod check_dss_run_command(command_string) # Update system admittance matrix check_dss_run_command("CalcVoltageBases") - circuit_solve_and_check(raise_exception=True, **kwargs) + circuit_solve_and_check(solve_params_specific) return commands_list -def add_new_regcontrol_at_node(node, default_regcontrol_settings, nominal_voltage, **kwargs): +def add_new_regcontrol_at_node(node, default_regcontrol_settings, nominal_voltage, solve_params): """This function adds a new regcontrol at a node. It identifies the correct transformer and places regcontrol there. Identify whether or not a reg contrl exists at the transformer connected to this bus - if not, place new regcontrol @@ -1242,7 +1321,7 @@ def add_new_regcontrol_at_node(node, default_regcontrol_settings, nominal_voltag else: # if enabled regcontrol does not exist on transformer # this runs the command and returns the command list new_regcontrol_dict = add_new_regcontrol_command(xfmr_info_series=chosen_xfmr, default_regcontrol_settings=default_regcontrol_settings, - nominal_voltage=nominal_voltage, **kwargs) + nominal_voltage=nominal_voltage, solve_params=solve_params) return new_regcontrol_dict @@ -1287,7 +1366,7 @@ def identify_common_upstream_nodes(G, buses_list): def test_new_regulator_placement_on_common_nodes(voltage_upper_limit, voltage_lower_limit, nominal_voltage, common_upstream_nodes_list, circuit_source, - default_regcontrol_settings, deciding_field, **kwargs): + default_regcontrol_settings, deciding_field, solve_params, analysis_params): """ In each cluster group, place a new regulator control at each common upstream node, unless it is the source bus (since that already contains the LTC) or if it has a distribution transformer. 
@@ -1327,26 +1406,29 @@ def test_new_regulator_placement_on_common_nodes(voltage_upper_limit, voltage_lo logger.debug("Distribution transformer already exists on this node. Skip.") continue # add new transformer at this node - new_xfmr_added_dict = add_new_node_and_xfmr(action_type='New', node=node, circuit_source=circuit_source, **kwargs) + new_xfmr_added_dict = add_new_node_and_xfmr(action_type='New', node=node, circuit_source=circuit_source, solve_params=solve_params) if new_xfmr_added_dict is None: # if new transformer elements were not added, continue logger.debug("New transformer elements could not be added on this node.") continue # add new regulator control at this node # These are just default settings and do not have to be written in the output file new_regcontrol_dict = add_new_regcontrol_at_node(node=node, default_regcontrol_settings=default_regcontrol_settings, - nominal_voltage=nominal_voltage, **kwargs) + nominal_voltage=nominal_voltage, solve_params=solve_params) if new_regcontrol_dict is None: logger.debug("New regulator elements could not be added on this node.") if new_xfmr_added_dict is not None: disable_new_xfmr_and_edit_line(transformer_name_to_disable=new_xfmr_added_dict['new_xfmr_name'], - line_name_to_modify=new_xfmr_added_dict['modified_line_name']) + line_name_to_modify=new_xfmr_added_dict['modified_line_name'], solve_params=solve_params) continue intra_cluster_group_severity_dict[node] = {} intra_cluster_group_severity_dict[node]['add_new_devices_command_list'] = new_xfmr_added_dict['commands_list'] + \ new_regcontrol_dict['command_list'] - pass_flag = circuit_solve_and_check(raise_exception=False, **kwargs) + solve_params_specific = solve_params.copy() + solve_params_specific.raise_exception = False + pass_flag = circuit_solve_and_check(solve_params_specific) # raise_exception=False intra_cluster_group_severity_dict[node]['converged'] = pass_flag - severity_dict = 
compute_voltage_violation_severity(voltage_upper_limit=voltage_upper_limit, voltage_lower_limit=voltage_lower_limit, **kwargs) + severity_dict = compute_voltage_violation_severity(voltage_upper_limit=voltage_upper_limit, voltage_lower_limit=voltage_lower_limit, + analysis_params=analysis_params, solve_params=solve_params) intra_cluster_group_severity_dict[node].update(severity_dict) intra_cluster_group_severity_dict[node].update({'new_xfmr_name': new_xfmr_added_dict['new_xfmr_name'], 'modified_line_name': new_xfmr_added_dict['modified_line_name'], 'new_regcontrol_name': new_regcontrol_dict['new_regcontrol_name']}) @@ -1355,7 +1437,7 @@ def test_new_regulator_placement_on_common_nodes(voltage_upper_limit, voltage_lo check_dss_run_command(command_string) intra_cluster_group_severity_dict[node]['disable_new_devices_command_list'] = disable_new_xfmr_and_edit_line(transformer_name_to_disable=new_xfmr_added_dict['new_xfmr_name'], line_name_to_modify=new_xfmr_added_dict['modified_line_name'], - **kwargs) + solve_params=solve_params) if intra_cluster_group_severity_dict[node][deciding_field] == 0: break # For a given list of common nodes in a cluster, identify the node which leads to minimum number of violations @@ -1373,7 +1455,9 @@ def test_new_regulator_placement_on_common_nodes(voltage_upper_limit, voltage_lo chosen_node_dict = intra_cluster_group_severity_dict[chosen_node] chosen_node_dict['node'] = chosen_node re_enable_added_regcontrol_objects(chosen_node_dict) - circuit_solve_and_check(raise_exception=True, **kwargs) + solve_params_specific = solve_params.copy() + solve_params_specific.raise_exception = True + circuit_solve_and_check(solve_params_specific) return chosen_node_dict @@ -1549,7 +1633,7 @@ def perform_clustering(num_clusters, square_distance_array, buses_with_violation def per_cluster_group_regulator_analysis(G, buses_list, voltage_config, voltage_upper_limit, voltage_lower_limit, - default_regcontrol_settings, circuit_source, deciding_field, **kwargs): 
+ default_regcontrol_settings, circuit_source, deciding_field, analysis_params, solve_params): """This function performs analysis on one cluster group of buses with violations. It determines the common upstream buses for all the buses with violations in that cluster group. It places regulators on each of these common noeds, and determines the best node to place the regulator for that group. @@ -1566,7 +1650,7 @@ def per_cluster_group_regulator_analysis(G, buses_list, voltage_config, voltage_ chosen_node_dict = test_new_regulator_placement_on_common_nodes(voltage_upper_limit=voltage_upper_limit, voltage_lower_limit=voltage_lower_limit, nominal_voltage=nominal_voltage, deciding_field=deciding_field, common_upstream_nodes_list=common_upstream_nodes_list, circuit_source=circuit_source, - default_regcontrol_settings=default_regcontrol_settings, **kwargs) + default_regcontrol_settings=default_regcontrol_settings, solve_params=solve_params, analysis_params=analysis_params) if chosen_node_dict is None: # if there is no common node on which regulator can be placed (for this cluster group) return None @@ -1576,12 +1660,13 @@ def per_cluster_group_regulator_analysis(G, buses_list, voltage_config, voltage_ init_regcontrols_df = get_regcontrol_info(correct_PT_ratio=True, nominal_voltage=nominal_voltage) regcontrol_sweep_df = sweep_regcontrol_settings(voltage_config=voltage_config, initial_regcontrols_df=init_regcontrols_df, voltage_upper_limit=voltage_upper_limit, voltage_lower_limit=voltage_lower_limit, - exclude_sub_ltc=True, only_sub_ltc=False, **kwargs) + exclude_sub_ltc=True, only_sub_ltc=False, analysis_params=analysis_params, solve_params=solve_params) regcontrols_df, regcontrol_settings_commands_list = choose_best_regcontrol_sweep_setting( regcontrol_sweep_df=regcontrol_sweep_df, initial_regcontrols_df=init_regcontrols_df, deciding_field=deciding_field, exclude_sub_ltc=True, - only_sub_ltc=False, **kwargs) + only_sub_ltc=False, solve_params=solve_params) # determine 
violation severity after changes - severity_dict = compute_voltage_violation_severity(voltage_upper_limit=voltage_upper_limit, voltage_lower_limit=voltage_lower_limit, **kwargs) + severity_dict = compute_voltage_violation_severity(voltage_upper_limit=voltage_upper_limit, voltage_lower_limit=voltage_lower_limit, + analysis_params=analysis_params, solve_params=solve_params) cluster_group_info_dict.update(severity_dict) cluster_group_info_dict['settings_commands_list'] = regcontrol_settings_commands_list return cluster_group_info_dict @@ -1590,12 +1675,15 @@ def per_cluster_group_regulator_analysis(G, buses_list, voltage_config, voltage_ def cluster_and_place_regulator(G, square_distance_df, initial_buses_with_violations, num_clusters, voltage_config, voltage_upper_limit, voltage_lower_limit, default_regcontrol_settings, circuit_source, deciding_field, - **kwargs): + analysis_params, solve_params, + fig_folder=None, create_plots=False, + ): """ This function performs clustering on buses with violations, then iterates through each cluster group, performs regulator placement analysis Returns the best regulator placement for each cluster group, in the form of a dict. """ - fig_folder = kwargs.get("fig_folder", None) - create_plots = kwargs.get("create_plots", False) + if create_plots: + assert fig_folder is not None + if len(initial_buses_with_violations) == 1: # if there is only one violation, then clustering cant be performed. 
So directly assign bus to cluster clusters_dict = {0: initial_buses_with_violations} else: @@ -1609,14 +1697,15 @@ def cluster_and_place_regulator(G, square_distance_df, initial_buses_with_violat cluster_group_info_dict[cluster_id] = per_cluster_group_regulator_analysis(G=G, buses_list=buses_list, voltage_config=voltage_config, voltage_upper_limit=voltage_upper_limit, voltage_lower_limit=voltage_lower_limit, default_regcontrol_settings=default_regcontrol_settings, - circuit_source=circuit_source, deciding_field=deciding_field, **kwargs) + circuit_source=circuit_source, deciding_field=deciding_field, + analysis_params=analysis_params, solve_params=solve_params) if cluster_group_info_dict[cluster_id] is None: logger.debug("There is no common node on which regulator can be placed (for this cluster group)") return cluster_group_info_dict cluster_group_info_dict[cluster_id].update({"buses_list": buses_list,}) # determine voltage violations after changes - bus_voltages_df, undervoltage_bus_list, overvoltage_bus_list, buses_with_violations = get_bus_voltages( - voltage_upper_limit=voltage_upper_limit, voltage_lower_limit=voltage_lower_limit, **kwargs) + bus_voltages_df, undervoltage_bus_list, overvoltage_bus_list, buses_with_violations = get_bus_voltage_violations( + voltage_upper_limit=voltage_upper_limit, voltage_lower_limit=voltage_lower_limit, analysis_params=analysis_params, solve_params=solve_params) if (len(buses_with_violations)) == 0: logger.info("All nodal violations have been removed successfully by new regulator placement.") break @@ -1627,19 +1716,19 @@ def cluster_and_place_regulator(G, square_distance_df, initial_buses_with_violat @track_timing(timer_stats_collector) def determine_new_regulator_location(circuit_source, initial_buses_with_violations, voltage_upper_limit, voltage_lower_limit, - voltage_config, default_regcontrol_settings, max_regs, deciding_field, - **kwargs): + voltage_config, default_regcontrol_settings, max_regs, deciding_field, 
analysis_params, solve_params, + fig_folder=None, create_plots=False + ): """Function to determine new regulator location. This decision is made after testing out various clustering and placement options. """ - fig_folder = kwargs.get("fig_folder", None) - create_plots = kwargs.get("create_plots", False) + if create_plots: + assert fig_folder is not None # prepare for clustering G = generate_networkx_representation() upper_triang_paths_dict = get_upper_triangular_dist(G=G, buses_with_violations=initial_buses_with_violations) square_distance_df = get_full_distance_df(upper_triang_paths_dict=upper_triang_paths_dict) # if create_plots: - # fig_folder = kwargs.get('fig_folder', None) # plot_heatmap_distmatrix(square_array=square_distance_df, fig_folder=fig_folder) # currently not used options_dict = {} @@ -1653,15 +1742,15 @@ def determine_new_regulator_location(circuit_source, initial_buses_with_violatio initial_buses_with_violations=initial_buses_with_violations, num_clusters=option_num, voltage_config=voltage_config, voltage_upper_limit=voltage_upper_limit, voltage_lower_limit=voltage_lower_limit, default_regcontrol_settings=default_regcontrol_settings, circuit_source=circuit_source, - **kwargs) + analysis_params=analysis_params, solve_params=solve_params) options_dict[cluster_option_name] = {} options_dict[cluster_option_name]["details"] = temp_dict # get severity for this option severity_dict = compute_voltage_violation_severity( - voltage_upper_limit=voltage_upper_limit, voltage_lower_limit=voltage_lower_limit, **kwargs) + voltage_upper_limit=voltage_upper_limit, voltage_lower_limit=voltage_lower_limit, analysis_params=analysis_params, solve_params=solve_params) # determine voltage violations after changes - bus_voltages_df, undervoltage_bus_list, overvoltage_bus_list, buses_with_violations = get_bus_voltages( - voltage_upper_limit=voltage_upper_limit, voltage_lower_limit=voltage_lower_limit, **kwargs) + bus_voltages_df, undervoltage_bus_list, 
overvoltage_bus_list, buses_with_violations = get_bus_voltage_violations( + voltage_upper_limit=voltage_upper_limit, voltage_lower_limit=voltage_lower_limit, analysis_params=analysis_params, solve_params=solve_params) if (fig_folder is not None) and create_plots: plot_voltage_violations(fig_folder=fig_folder, title="Bus violations for "+cluster_option_name+" voltage regulators"+"_"+ str(len(buses_with_violations)), buses_with_violations=buses_with_violations, circuit_source=circuit_source, enable_detailed=True) @@ -1728,9 +1817,9 @@ def plot_feeder(fig_folder, title, circuit_source=None, enable_detailed=False): default_node_color = 'black' NodeLegend = { - "Load": get_load_buses(dss), - "PV": get_pv_buses(dss), - "Transformer": list(get_all_transformer_info_instance(compute_loading=False)['bus_names_only'].str[0].values), + "Load": get_load_buses(), + "PV": get_pv_buses(), + "Transformer": list(get_snapshot_transformer_info(compute_loading=False)['bus_names_only'].str[0].values), } if circuit_source is not None: NodeLegend["Circuit Source"] = [circuit_source] @@ -1787,8 +1876,8 @@ def plot_voltage_violations(fig_folder, title, buses_with_violations, circuit_so nx.draw_networkx_nodes(Un_G, pos=position_dict, alpha=1.0, node_size=default_node_size, node_color=default_node_color) NodeLegend = { - # "Load": get_load_buses(dss), - # "PV": get_pv_buses(dss), + # "Load": get_load_buses(), + # "PV": get_pv_buses(), # "Transformer": list(get_all_transformer_info_instance(compute_loading=False)['bus_names_only'].str[0].values), "Violation": buses_with_violations, } @@ -1964,7 +2053,9 @@ def get_graph_edges_dataframe(attr_fields): """ chosen_fields = ['bus1', 'bus2'] + attr_fields # prepare lines dataframe - all_lines_df = get_thermal_equipment_info(compute_loading=False, equipment_type="line") + all_lines_df = get_thermal_equipment_info(compute_loading=False, equipment_type="line", ignore_switch=False) + if all_lines_df.empty: + return pd.DataFrame() all_lines_df['bus1'] = 
all_lines_df['bus1'].str.split('.', expand=True)[0].str.lower() all_lines_df['bus2'] = all_lines_df['bus2'].str.split('.', expand=True)[0].str.lower() # convert length to metres @@ -2018,7 +2109,6 @@ def generate_networkx_representation(): Parameters ---------- - kwargs Returns ------- @@ -2029,7 +2119,6 @@ def generate_networkx_representation(): G = add_graph_bus_nodes(G=G, bus_coordinates_df=bus_coordinates_df) # add buses as nodes to the graph attr_fields = ['phases', 'length', 'name', 'equipment_type'] # define edge attributes edges_df = get_graph_edges_dataframe(attr_fields=attr_fields) # get edges dataframe (from lines and transformers) - # add edges to graph G = add_graph_edges(G=G, edges_df=edges_df, attr_fields=attr_fields, source='bus1', target='bus2') complete_flag = check_buscoordinates_completeness(bus_coordinates_df, verbose=True) # check if sufficient buscoordinates data is available if complete_flag: @@ -2038,6 +2127,19 @@ def generate_networkx_representation(): return G +def check_network_connectivity(G, raise_exception=True): + temp_graph = G.to_undirected() + connectivity_status = nx.is_connected(temp_graph) + if not connectivity_status: + isolated = list(nx.isolates(G)) + msg = f"OpenDSS Circuit is disconnected. These are the isolated elements: {isolated} " + if raise_exception: + raise OpenDssModelDisconnectedError(msg) + else: + logging.info("Feeder model has no isolated nodes.") + return connectivity_status + + def check_buscoordinates_completeness(bus_coordinates_df, verbose=False): """This function checks if complete bus coordinates are present. This is needed to plot feeder figures. 
""" diff --git a/disco/models/upgrade_cost_analysis_generic_input_model.py b/disco/models/upgrade_cost_analysis_generic_input_model.py index c732d7c4..37d15e7c 100644 --- a/disco/models/upgrade_cost_analysis_generic_input_model.py +++ b/disco/models/upgrade_cost_analysis_generic_input_model.py @@ -6,9 +6,11 @@ from PyDSS.controllers import PvControllerModel from disco.models.base import BaseAnalysisModel +from disco.enums import LoadMultiplierType from disco.models.upgrade_cost_analysis_equipment_model import * from disco.extensions.upgrade_simulation.upgrade_configuration import DEFAULT_UPGRADE_PARAMS_FILE + _DEFAULT_UPGRADE_PARAMS = None _SUPPORTED_UPGRADE_TYPES = ["thermal", "voltage"] @@ -318,10 +320,6 @@ class ThermalUpgradeParamsModel(UpgradeParamsBaseModel): upgrade_iteration_threshold: Optional[int] = Field( title="upgrade_iteration_threshold", description="Upgrade iteration threshold", default=5 ) - timepoint_multipliers: Optional[Dict] = Field( - title="timepoint_multipliers", - description='Dictionary to provide timepoint multipliers. 
example: timepoint_multipliers={"load_multipliers": {"with_pv": [1.2], "without_pv": [0.6]}}', - ) @validator("voltage_lower_limit") def check_voltage_lower_limits(cls, voltage_lower_limit, values): @@ -359,18 +357,6 @@ def check_catalog(cls, external_catalog, values): UpgradeTechnicalCatalogModel(**load_data(external_catalog)) return external_catalog - @validator("timepoint_multipliers") - def check_timepoint_multipliers(cls, timepoint_multipliers): - if timepoint_multipliers is None: - return timepoint_multipliers - if "load_multipliers" not in timepoint_multipliers: - raise ValueError("load_multipliers must be defined in timepoint_multipliers") - if ("with_pv" not in timepoint_multipliers["load_multipliers"]) and ("without_pv" not in timepoint_multipliers["load_multipliers"]): - raise ValueError( - 'Either "with_pv" or "without_pv" must be defined in timepoint_multipliers["load_multipliers"]' - ) - return timepoint_multipliers - class VoltageUpgradeParamsModel(UpgradeParamsBaseModel): """Voltage Upgrade Parameters for all jobs in a simulation""" @@ -433,11 +419,6 @@ class VoltageUpgradeParamsModel(UpgradeParamsBaseModel): description="Flag to enable or disable substation LTC upgrades module", default=True, ) - timepoint_multipliers: dict = Field( - title="timepoint_multipliers", - description='Dictionary to provide timepoint multipliers. 
example: timepoint_multipliers={"load_multipliers": {"with_pv": [1.2], "without_pv": [0.6]}}', - default=None, - ) capacitor_action_flag: bool = Field( title="capacitor_action_flag", description="Flag to enable or disable capacitor controls settings sweep module", @@ -471,10 +452,79 @@ def check_final_voltage_lower_limits(cls, final_lower_limit, values): f"final_upper_limit={upper} must be greater than final_lower_limit={final_lower_limit}" ) return final_lower_limit + + +class PyDssControllerModels(UpgradeParamsBaseModel): + """Defines the settings for PyDSS controllers""" + + pv_controller: Optional[PvControllerModel] = Field( + title="pv_controller", description="Settings for a PV controller" + ) + + +class UpgradeSimulationParamsModel(UpgradeParamsBaseModel): + """Parameters for all jobs in a simulation""" + # plot_violations: bool = Field( + # title="plot_violations", + # description="If True, create plots of violations before and after simulation.", + # default=True, + # ) + upgrade_order: List[str] = Field( + description="Order of upgrade algorithm. 'thermal' or 'voltage' can be removed from the " + "simulation by excluding them from this parameter.", + default=_SUPPORTED_UPGRADE_TYPES, + ) + include_pf1: bool = Field( + title="include_pf1", + description="Include PF1 scenario (no controls) if pydss_controllers are defined.", + default=True, + ) + dc_ac_ratio: float = Field( + title="dc_ac_ratio", + description="Apply DC-AC ratio for PV Systems", + default=None + ) + timeseries_analysis: bool = Field( + title="timeseries_analysis", + description="timeseries_analysis", + default=False + ) + timepoint_multipliers: Dict = Field( + title="timepoint_multipliers", + description='Dictionary to provide timepoint multipliers. 
example: timepoint_multipliers={"load_multipliers": {"with_pv": [1.2], "without_pv": [0.6]}}', + default = {} + ) + enable_pydss_controllers: bool = Field( + title="enable_pydss_controllers", + description="Flag to enable/disable use of PyDSS controllers", + default=False, + ) + pydss_controllers: PyDssControllerModels = Field( + title="pydss_controllers", + description="If enable_pydss_controllers is True, these PyDSS controllers are applied to each corresponding element type.", + default=PyDssControllerModels(), + ) + + @validator("upgrade_order") + def check_upgrade_order(cls, upgrade_order): + diff = set(upgrade_order).difference(_SUPPORTED_UPGRADE_TYPES) + if diff: + raise ValueError(f"Unsupported values in upgrade_order: {diff}") + return upgrade_order + + def has_pydss_controllers(self): + """Return True if a PyDSS controller is defined. + + Returns + ------- + bool + + """ + return self.pydss_controllers.pv_controller is not None @validator("timepoint_multipliers") def check_timepoint_multipliers(cls, timepoint_multipliers): - if timepoint_multipliers is None: + if not timepoint_multipliers: return timepoint_multipliers if "load_multipliers" not in timepoint_multipliers: raise ValueError("load_multipliers must be defined in timepoint_multipliers") @@ -518,15 +568,7 @@ def check_model_file(cls, opendss_model_file): return opendss_model_file -class PyDssControllerModels(UpgradeParamsBaseModel): - """Defines the settings for PyDSS controllers""" - - pv_controller: Optional[PvControllerModel] = Field( - title="pv_controller", description="Settings for a PV controller" - ) - - -class UpgradeCostAnalysisSimulationModel(UpgradeParamsBaseModel): +class UpgradeCostAnalysisSimulationModel(UpgradeSimulationParamsModel): """Defines the jobs in an upgrade cost analysis simulation.""" class Config: @@ -552,34 +594,6 @@ class Config: description="If True, calculate upgrade costs from database.", default=True, ) - upgrade_order: List[str] = Field( - description="Order of 
upgrade algorithm. 'thermal' or 'voltage' can be removed from the " - "simulation by excluding them from this parameter.", - default=_SUPPORTED_UPGRADE_TYPES, - ) - pydss_controllers: PyDssControllerModels = Field( - title="pydss_controllers", - description="If enable_pydss_controllers is True, these PyDSS controllers are applied to each corresponding element type.", - default=PyDssControllerModels(), - ) - plot_violations: bool = Field( - title="plot_violations", - description="If True, create plots of violations before and after simulation.", - default=True, - ) - enable_pydss_controllers: bool = Field( - title="enable_pydss_controllers", - description="Flag to enable/disable use of PyDSS controllers", - default=False, - ) - include_pf1: bool = Field( - title="include_pf1", - description="Include PF1 scenario (no controls) if pydss_controllers are defined.", - default=True, - ) - dc_ac_ratio: Optional[float] = Field( - title="dc_ac_ratio", description="Apply DC-AC ratio for PV Systems", default=None - ) jobs: List[UpgradeCostAnalysisGenericModel] @root_validator(pre=True) @@ -602,23 +616,6 @@ def check_database(cls, calculate_costs, values): load_cost_database(values["upgrade_cost_database"]) return calculate_costs - @validator("upgrade_order") - def check_upgrade_order(cls, upgrade_order): - diff = set(upgrade_order).difference(_SUPPORTED_UPGRADE_TYPES) - if diff: - raise ValueError(f"Unsupported values in upgrade_order: {diff}") - return upgrade_order - - def has_pydss_controllers(self): - """Return True if a PyDSS controller is defined. 
- - Returns - ------- - bool - - """ - return self.pydss_controllers.pv_controller is not None - class TransformerUnitCostModel(UpgradeParamsBaseModel): """Contains Transformer Unit Cost Database Model""" diff --git a/disco/models/upgrade_cost_analysis_generic_output_model.py b/disco/models/upgrade_cost_analysis_generic_output_model.py index 61bfaf21..161844c0 100644 --- a/disco/models/upgrade_cost_analysis_generic_output_model.py +++ b/disco/models/upgrade_cost_analysis_generic_output_model.py @@ -374,6 +374,7 @@ def list_values(cls): class CapacitorControllerResultType(ExtendedEnum): """Possible values for capacitor upgrade type""" + # replace_cap_controller = "Replace Capacitor controller" # this is not used currently add_new_cap_controller = "Capacitor controller" change_cap_control = "Capacitor controller setting change" diff --git a/disco/preprocess/__init__.py b/disco/preprocess/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/disco/preprocess/select_timepoints.py b/disco/preprocess/select_timepoints.py new file mode 100644 index 00000000..a1f2bcae --- /dev/null +++ b/disco/preprocess/select_timepoints.py @@ -0,0 +1,227 @@ +# -*- coding: utf-8 -*- +""" +Created on Fri Nov 25 07:43:46 2022 + +@author: ksedzro +""" + +import os +import numpy as np +import pandas as pd +import time + + + +def get_parameter(line, parameter_name, func=None): + if func!=None: + val = func(line.split(f'{parameter_name}=')[1].split(' ')[0]) + + else: + val = line.split(f'{parameter_name}=')[1].split(' ')[0] + if '\n' in val: + val = val.strip('\n') + + return val + + +def collect_category(file_path, bus_data, category='demand'): + """ + file_path can be: + loads.dss path + EVloads.dss path + PVsystems.dss path + category must be 'demand' or 'generation' + if file_path points to loads or EV loads, category must be 'demand' + if file_path points to PV systems or DERs, category must be 'generation' + """ + assert category in ['demand', 'generation'] + with 
open(file_path, 'r') as lr: + lines = lr.readlines() + + for line in lines: + if line.lower().startswith('new'): + line = line.lower() + size = 0 + profile_name = '' + bus = get_parameter(line, 'bus1') + #bus = line.split('bus1=')[1].split(' ')[0] + if '.' in bus: + bus = bus.split('.')[0] + if not bus in bus_data.keys(): + bus_data[bus] = {'bus':bus, 'demand':[], 'generation':[]} + + if 'kw=' in line: + size = get_parameter(line, 'kw', float) + # load_size = float(line.split('kw=')[1].split(' ')[0]) + elif 'kva=' in line: + size = get_parameter(line, 'kva', float) + # load_size = float(line.split('kva=')[1].split(' ')[0]) + elif 'pmpp=' in line: + size = get_parameter(line, 'pmpp', float) + if 'yearly' in line: + profile_name = get_parameter(line, 'yearly') + elif 'daily' in line: + profile_name = get_parameter(line, 'daily') + + bus_data[bus][category].append([size, profile_name]) + + return bus_data + +def collect_profiles(profile_files): + """ + This function builds a dictionary called profile_data. + The profile_data collects for each timeseries profile: + the name + the path to the actual CV profile data file + the numpy array of the profile timeseries data + It also builds and stores a new destination path where new reduced profile timeseries will be written + INPUT: + profile_files: a list of paths pointing to .dss profile files such as LoadShapes.dss, PVshapes.dss, etc. 
+ OUTPUT: + profile_data: a dictionary (see description above) + """ + profile_data = {} + for file_path in profile_files: + base_path, _ = os.path.split(file_path) + with open(file_path, 'r') as lr: + lines = lr.readlines() + for line in lines: + if line.lower().startswith('new'): + line = line.lower() + profile_name = line.split('loadshape.')[1].split(' ')[0] + rel_path = line.split('file=')[1].split(')')[0] + profile_path = os.path.join(base_path, rel_path) + with open(profile_path) as pr: + profile_array = np.loadtxt(pr, delimiter=",") + folder, filename = os.path.split(profile_path) + copy_dir = folder+'-new' + if not os.path.exists(copy_dir): + os.mkdir(copy_dir) + new_profile_path = os.path.join(copy_dir, filename) + profile_data[profile_name] = {'profile_name': profile_name, + 'profile_path': profile_path, + 'new_profile_path': new_profile_path, + 'time_series': profile_array + } + + return profile_data + +def agregate_series(bus_data, profile_data, critical_conditions): + ag_series = {} + critical_time_indices = [] + head_critical_time_indices = [] + for bus, dic in bus_data.items(): + ag_series[bus] = {'critical_time_idx':[],'condition':[],} + + if dic['demand']: + for data in dic['demand']: + + if 'demand' in ag_series[bus].keys(): + ag_series[bus]['demand'] += data[0]*profile_data[data[1]]['time_series'] + else: + ag_series[bus]['demand'] = data[0]*profile_data[data[1]]['time_series'] + if 'max_demand' in critical_conditions: + max_demand_idx = np.where(ag_series[bus]['demand'] == np.amax(ag_series[bus]['demand']))[0].tolist()[0] + ag_series[bus]['critical_time_idx'].append(max_demand_idx) + ag_series[bus]['condition'].append("max_demand") + + if 'min_demand' in critical_conditions: + min_demand_idx = np.where(ag_series[bus]['demand'] == np.amin(ag_series[bus]['demand']))[0].tolist()[0] + ag_series[bus]['critical_time_idx'].append(min_demand_idx) + ag_series[bus]['condition'].append("min_demand") + # ag_series[bus]['critical_time_idx'] += 
[max_demand_idx, min_demand_idx] + + if dic['generation']: + for data in dic['generation']: + if 'generation' in ag_series[bus].keys(): + ag_series[bus]['generation'] += data[0]*profile_data[data[1]]['time_series'] + else: + ag_series[bus]['generation'] = data[0]*profile_data[data[1]]['time_series'] + if 'max_generation' in critical_conditions: + max_gen_idx = np.where(ag_series[bus]['generation'] == np.amax(ag_series[bus]['generation']))[0].tolist()[0] + ag_series[bus]['critical_time_idx'].append(max_gen_idx) + ag_series[bus]['condition'].append("max_generation") + if 'demand' in ag_series[bus].keys() and 'max_net_generation' in critical_conditions: + arr = ag_series[bus]['generation'] - ag_series[bus]['demand'] + max_netgen_idx = np.where(arr == np.amax(arr))[0].tolist()[0] + ag_series[bus]['critical_time_idx'].append(max_netgen_idx) + ag_series[bus]['condition'].append("max_net_generation") + + total_gen = sum([dic['generation'] for bus, dic in ag_series.items() + if 'generation' in dic.keys()]) + total_dem = sum([dic['demand'] for bus, dic in ag_series.items() + if 'demand' in dic.keys()]) + net_total_gen = total_gen - total_dem + if 'max_demand' in critical_conditions: + max_demand_idx = np.where(total_dem == np.amax(total_dem))[0].tolist()[0] + head_critical_time_indices.append(max_demand_idx) + if 'min_demand' in critical_conditions: + min_demand_idx = np.where(total_dem == np.amin(total_dem))[0].tolist()[0] + head_critical_time_indices.append(min_demand_idx) + if 'max_generation' in critical_conditions: + max_gen_idx = np.where(total_gen == np.amax(total_gen))[0].tolist()[0] + head_critical_time_indices.append(max_gen_idx) + if 'max_net_generation' in critical_conditions: + max_netgen_idx = np.where(net_total_gen == np.amax(net_total_gen))[0].tolist()[0] + head_critical_time_indices.append(max_netgen_idx) + + critical_time_indices = [t for bus, dic in ag_series.items() + for t in dic['critical_time_idx'] + if 'critical_time_idx' in dic.keys()] + 
critical_time_indices += head_critical_time_indices + critical_time_indices = list(set(critical_time_indices)) + critical_time_indices.sort() + for profile, val in profile_data.items(): + base_len = len(val['time_series']) + compression_rate = len(critical_time_indices)/base_len + data = val['time_series'][critical_time_indices] + pd.DataFrame(data).to_csv(val['new_profile_path'], + index=False, header=False) + + return ag_series, head_critical_time_indices, critical_time_indices, compression_rate + + +def main(category_path_dict, + profile_files, + critical_conditions=['max_demand', 'min_demand', 'max_generation', 'max_net_generation']): + """ + INPUT: + category_path_dict: a dictionary where: + the keys are power conversion asset categories: "demand" and "generation" + the values are a list of paths pointing to the corresponding power conversions assets' .dss files such as "Loads.dss" and "PVSystems.dss" files + example: category_path_dict = {'demand': [LOAD_PATH, EVLOAD_PATH], 'generation': [PV_PATH]} + profile_files: a list of paths pointing to shape (profile) files such as "LoadShapes.dss" + OUTPUT: + bus_data: + profile_data: + ag_series: + head_time_indices: list of critical time indices when only feeder-head timeseries are considered + critical_time_indices: all critical time indices (individual buses as well as feeder-head considered) + compression_rate: ratio between the number of critical timepoints and the total number of timepoints in the timeseries + + """ + + bus_data={} + for category, file_paths in category_path_dict.items(): + for file_path in file_paths: + bus_data = collect_category(file_path, bus_data, category=category) + profile_data = collect_profiles(profile_files) + ag_series, head_time_indices, critical_time_indices, compression_rate = \ + agregate_series(bus_data, profile_data, critical_conditions) + return bus_data, profile_data, ag_series, head_time_indices, critical_time_indices, compression_rate + + +if __name__ == "__main__": + st = 
time.time() + PV_PATH = r"C:\Users\KSEDZRO\Documents\Projects\LA-equity-resilience\data\P12U\sb9_p12uhs3_1247_trans_264--p12udt8475\190\PVSystems.dss" + LOAD_PATH = r"C:\Users\KSEDZRO\Documents\Projects\LA-equity-resilience\data\P12U\sb9_p12uhs3_1247_trans_264--p12udt8475\Loads.dss" + LOADSHAPES_PATH = r"C:\Users\KSEDZRO\Documents\Projects\LA-equity-resilience\data\P12U\sb9_p12uhs3_1247_trans_264--p12udt8475\LoadShapes.dss" + PVSHAPES_PATH = r"C:\Users\KSEDZRO\Documents\Projects\LA-equity-resilience\data\P12U\sb9_p12uhs3_1247_trans_264--p12udt8475\PVShapes.dss" + category_path_dict = {'demand': [LOAD_PATH], 'generation': [PV_PATH]} + profile_files = [LOADSHAPES_PATH, PVSHAPES_PATH] + critical_conditions = ['max_demand', 'max_net_generation'] + + bus_data, profile_data, ag_series, head_time_indices, critical_time_indices, compression_rate = \ + main(category_path_dict, profile_files, critical_conditions) + et = time.time() + elapse_time = et-st diff --git a/disco/preprocess/select_timepoints2.py b/disco/preprocess/select_timepoints2.py new file mode 100644 index 00000000..bc76cc59 --- /dev/null +++ b/disco/preprocess/select_timepoints2.py @@ -0,0 +1,579 @@ +# -*- coding: utf-8 -*- +""" +Created on Fri Dec 2 12:13:24 2022 + +@author: ksedzro +""" + +import filecmp +import enum +import logging +import shutil +import time +from pathlib import Path + +import numpy as np +import pandas as pd +import opendssdirect as dss + + +logger = logging.getLogger(__name__) + + +_DISALLOWED_OPEN_DSS_COMMANDS = ("export", "plot", "show") +_SOLVE_LINE = "Solve mode=snapshot\n" + + +class CriticalCondition(enum.Enum): + """Possible critical conditions to use for time-point selection""" + + MAX_DEMAND = "max_demand" + MIN_DEMAND = "min_demand" + MAX_GENERATION = "max_generation" + MAX_NET_GENERATION = "max_net_generation" + + +class DemandCategory(enum.Enum): + + LOAD = "load" + + +class GenerationCategory(enum.Enum): + + PV_SYSTEM = "pv_system" + STORAGE = "storage" + + +class 
InvalidParameter(Exception): + """Raised when user input is invalid""" + + +def load_feeder(path_to_master: Path, destination_dir: Path, fix_master_file: bool): + """Compile an OpenDSS circuit after first ensuring that time-series mode is disabled.""" + if not path_to_master.exists(): + raise FileNotFoundError(path_to_master) + + if fix_master_file: + new_master = make_fixes_to_master(path_to_master) + else: + new_master = path_to_master + check_master_file(new_master) + + try: + dss.Text.Command(f"redirect {new_master}") + shutil.copyfile(new_master, destination_dir / new_master.name) + logger.info("Redirected to %s", new_master) + finally: + if path_to_master != new_master: + new_master.unlink() + + +def make_fixes_to_master(master_file): + suffix = master_file.suffix + # TODO this is being saved in the wrong location + new_master = master_file.parent / master_file.name.replace(suffix, f"_snapshot{suffix}") + def has_invalid_command(line): + for command in _DISALLOWED_OPEN_DSS_COMMANDS: + if line.startswith(command): + return True + return False + + with open(new_master, "w") as f_out: # overwrite file if it exists + with open(master_file, "r") as f_in: + for line in f_in: + lowered = line.strip().lower() + if lowered.startswith("solve") or has_invalid_command(lowered): + logger.warning("Removing line from new Master.dss: %s", line.strip()) + else: + f_out.write(line) + f_out.write(_SOLVE_LINE) + return new_master + + +def check_master_file(master_file: Path): + comment_chars = ("!", "#") + + def check_invalid_command(line): + for invalid_command in _DISALLOWED_OPEN_DSS_COMMANDS: + if line.startswith(invalid_command): + raise InvalidParameter(f"The command {invalid_command} is not allowed.") + + def is_commented(line): + for comment_char in comment_chars: + if line.startswith(comment_char): + return True + return False + + with open(master_file) as f_in: + found_solve = False + for line in f_in: + lowered = line.strip().lower() + if not lowered or 
is_commented(line): + continue + check_invalid_command(lowered) + if "solve" in lowered: + if lowered not in ("solve", _SOLVE_LINE.strip().lower()): + raise InvalidParameter( + "The solve command cannot have parameters besides mode=snapshot: " + f"{line.strip()}: {master_file}" + ) + if found_solve: + raise InvalidParameter( + f"Cannot have more than one call to Solve: {master_file}" + ) + found_solve = True + + +def get_profile(): + """Return the profile of the currently-selected OpenDSS element. + + Returns + ------- + str + Return "" if there is no load shape profile attached. + + """ + profile = dss.Properties.Value("yearly") + if not profile: + profile = dss.Properties.Value("daily") + if not profile: + profile = dss.Properties.Value("duty") + return profile + + +def get_param_values(param_class, bus_data, category): + def get_bus(): + bus = dss.Properties.Value("bus1") + if "." in bus: + bus = dss.Properties.Value("bus1").split(".")[0] + return bus + + if param_class == DemandCategory.LOAD: + flag = dss.Loads.First() + while flag > 0: + bus = get_bus() + if not bus in bus_data.keys(): + bus_data[bus] = {"bus": bus, "demand": [], "generation": []} + capacity = 0 + size = "" + if not size: + size = dss.Properties.Value("kW") + if not size: + size = dss.Properties.Value("kva") + if size: + capacity = float(size) + else: + raise Exception(f"Did not find size for {dss.CktElement.Name()}") + + profile_name = get_profile() + if not profile_name: + raise Exception(f"Did not find profile name for {dss.CktElement.Name()}") + bus_data[bus][category].append([capacity, profile_name]) + flag = dss.Loads.Next() + + elif param_class == GenerationCategory.PV_SYSTEM: + flag = dss.PVsystems.First() + while flag > 0: + bus = get_bus() + if not bus in bus_data.keys(): + bus_data[bus] = {"bus": bus, "demand": [], "generation": []} + capacity = 0 + size = "" + if not size: + size = dss.Properties.Value("pmpp") + if not size: + size = dss.Properties.Value("kva") + if size: + 
capacity = float(size) + else: + raise Exception(f"Did not find size for {dss.CktElement.Name()}") + + profile_name = get_profile() + if not profile_name: + raise Exception(f"Did not find profile name for {dss.CktElement.Name()}") + bus_data[bus][category].append([capacity, profile_name]) + flag = dss.PVsystems.Next() + + elif param_class == GenerationCategory.STORAGE: + flag = dss.Storages.First() + while flag > 0: + bus = get_bus() + if not bus in bus_data.keys(): + bus_data[bus] = {"bus": bus, "demand": [], "generation": []} + capacity = 0 + size = "" + if not size: + size = dss.Properties.Value("kwrated") + if not size: + size = dss.Properties.Value("kva") + if size: + capacity = float(size) + else: + raise Exception(f"Did not find size for {dss.CktElement.Name()}") + + profile_name = get_profile() + if not profile_name: + raise Exception(f"Did not find profile name for {dss.CktElement.Name()}") + bus_data[bus][category].append([capacity, profile_name]) + flag = dss.Storages.Next() + + else: + raise Exception(f"Invalid param_class={param_class}") + + return bus_data + + +def reset_profile_data(used_profiles, critical_time_indices, profile_types=("active", "reactive")): + flag = dss.LoadShape.First() + while flag > 0: + name = dss.LoadShape.Name() + number_of_timepoints = len(critical_time_indices) + # TODO: Kwami, should there be error checking on profile_types? 
+ original_p_mult = None + original_q_mult = None + + if name in used_profiles: + if "active" in profile_types: + original_p_mult = dss.LoadShape.PMult() + + if "reactive" in profile_types: + original_q_mult = dss.LoadShape.QMult() + + dss.LoadShape.Npts(number_of_timepoints) + + if original_p_mult is not None and len(original_p_mult) > 1: + if len(original_p_mult) > max(critical_time_indices): + dss.LoadShape.PMult(list(np.array(original_p_mult)[critical_time_indices])) + else: + raise Exception("IndexError: Index out of range") + if original_q_mult is not None and len(original_q_mult) > 1: + if len(original_q_mult) > max(critical_time_indices): + dss.LoadShape.QMult(list(np.array(original_q_mult)[critical_time_indices])) + else: + raise Exception("IndexError: Index out of range") + flag = dss.LoadShape.Next() + + +def save_circuit(output_folder: Path): + """Run the OpenDSS command to save a compiled circuit into a directory.""" + dss.Text.Command(f"Save Circuit Dir={output_folder}") + logger.info("Saved circuit to %s", output_folder) + + +def export_power_flow_results(path: Path): + """Export OpenDSS circuit element values into a directory.""" + path.mkdir(exist_ok=True) + for export_type, filename in { + "currents": "currents.csv", + "capacity": "capacity.csv", + "loads": "loads.csv", + "powers [mva]": "powers.csv", + "voltages": "voltages.csv", + }.items(): + dss.Text.Command(f"export {export_type} {path}/{filename}") + + +def compare_power_flow_results(before_path, after_path): + """Compare the exported results from two directories. + + Raises + ------ + Exception + Raised if the results do not match. 
+ + """ + before = {x.name: x for x in before_path.iterdir()} + after = {x.name: x for x in after_path.iterdir()} + assert sorted(before.keys()) == sorted(after.keys()) + match = True + for name in before: + if not filecmp.cmp(before[name], after[name]): + logger.error("Files before=%s and after=%s do not match", before[name], after[name]) + match = False + + # TODO: csv comparisons have _mostly_ minor differences. + # if not match: + # raise Exception("Before/after power flow results do not match. Refer to the log file.") + + +def get_profile_data(): + """ + This function builds a dictionary called profile_data. + The profile_data collects for each timeseries profile: + the name + the numpy array of the profile timeseries data + INPUT: + None + OUTPUT: + proile_data: a dictionary (see description above) + """ + profile_data = {} + flag = dss.LoadShape.First() + while flag > 0: + profile_name = dss.LoadShape.Name() + profile_array = np.array(dss.LoadShape.PMult()) + if profile_name in profile_data: + raise Exception(f"Detected duplicate profile name: {profile_name}") + profile_data[profile_name] = { + "profile_name": profile_name, + "time_series": profile_array, + } + flag = dss.LoadShape.Next() + + return profile_data + + +def aggregate_series( + bus_data, + profile_data, + critical_conditions, + feederhead_only, + recreate_profiles, + destination_dir, + create_new_circuit, +): + ag_series = {} + critical_time_indices = [] + head_critical_time_indices = [] + used_profiles = [] + for bus, dic in bus_data.items(): + ag_series[bus] = { + "critical_time_idx": [], + "condition": [], + } + + if dic["demand"]: + for data in dic["demand"]: + if "demand" in ag_series[bus]: + ag_series[bus]["demand"] += data[0] * profile_data[data[1]]["time_series"] + else: + ag_series[bus]["demand"] = data[0] * profile_data[data[1]]["time_series"] + used_profiles.append(data[1]) + if CriticalCondition.MAX_DEMAND in critical_conditions: + max_demand_idx = np.where( + 
ag_series[bus]["demand"] == np.amax(ag_series[bus]["demand"]) + )[0].tolist()[0] + ag_series[bus]["critical_time_idx"].append(max_demand_idx) + ag_series[bus]["condition"].append(CriticalCondition.MAX_DEMAND) + + if CriticalCondition.MIN_DEMAND in critical_conditions: + min_demand_idx = np.where( + ag_series[bus]["demand"] == np.amin(ag_series[bus]["demand"]) + )[0].tolist()[0] + ag_series[bus]["critical_time_idx"].append(min_demand_idx) + ag_series[bus]["condition"].append(CriticalCondition.MIN_DEMAND) + # ag_series[bus]['critical_time_idx'] += [max_demand_idx, min_demand_idx] + + if dic["generation"]: + for data in dic["generation"]: + if "generation" in ag_series[bus]: + ag_series[bus]["generation"] += data[0] * profile_data[data[1]]["time_series"] + else: + ag_series[bus]["generation"] = data[0] * profile_data[data[1]]["time_series"] + used_profiles.append(data[1]) + if CriticalCondition.MAX_GENERATION in critical_conditions: + max_gen_idx = np.where( + ag_series[bus]["generation"] == np.amax(ag_series[bus]["generation"]) + )[0].tolist()[0] + ag_series[bus]["critical_time_idx"].append(max_gen_idx) + ag_series[bus]["condition"].append(CriticalCondition.MAX_GENERATION) + if ( + "demand" in ag_series[bus] + and CriticalCondition.MAX_NET_GENERATION in critical_conditions + ): + arr = ag_series[bus]["generation"] - ag_series[bus]["demand"] + max_netgen_idx = np.where(arr == np.amax(arr))[0].tolist()[0] + ag_series[bus]["critical_time_idx"].append(max_netgen_idx) + ag_series[bus]["condition"].append(CriticalCondition.MAX_NET_GENERATION) + + total_gen = sum([dic["generation"] for bus, dic in ag_series.items() if "generation" in dic]) + total_dem = sum([dic["demand"] for bus, dic in ag_series.items() if "demand" in dic]) + net_total_gen = total_gen - total_dem + if CriticalCondition.MAX_DEMAND in critical_conditions: + max_demand_idx = np.where(total_dem == np.amax(total_dem))[0].tolist()[0] + head_critical_time_indices.append(max_demand_idx) + if 
CriticalCondition.MIN_DEMAND in critical_conditions: + min_demand_idx = np.where(total_dem == np.amin(total_dem))[0].tolist()[0] + head_critical_time_indices.append(min_demand_idx) + if CriticalCondition.MAX_GENERATION in critical_conditions: + max_gen_idx = np.where(total_gen == np.amax(total_gen))[0].tolist()[0] + head_critical_time_indices.append(max_gen_idx) + if CriticalCondition.MAX_NET_GENERATION in critical_conditions: + max_netgen_idx = np.where(net_total_gen == np.amax(net_total_gen))[0].tolist()[0] + head_critical_time_indices.append(max_netgen_idx) + + if feederhead_only: + critical_time_indices = head_critical_time_indices + else: + critical_time_indices = [ + t + for bus, dic in ag_series.items() + for t in dic["critical_time_idx"] + if "critical_time_idx" in dic + ] + critical_time_indices += head_critical_time_indices + critical_time_indices = sorted(set(critical_time_indices)) + compression_rate = 0 + if recreate_profiles: + destination_profile_dir = destination_dir / "new_profiles" + destination_profile_dir.mkdir() + + for profile, val in profile_data.items(): + if profile in used_profiles: + base_len = len(val["time_series"]) + compression_rate = len(critical_time_indices) / base_len + if recreate_profiles: + data = val["time_series"][critical_time_indices] + new_profile_path = destination_profile_dir / f"{profile}.csv" + pd.DataFrame(data).to_csv(new_profile_path, index=False, header=False) + + if create_new_circuit: + reset_profile_data(used_profiles, critical_time_indices) + before_path = destination_dir / "power_flow_results_before" + after_path = destination_dir / "power_flow_results_after" + destination_model_dir = destination_dir / "reduced_model" + destination_model_dir.mkdir() + save_circuit(destination_model_dir) + master_file = destination_model_dir / "Master.dss" + with open(master_file, "a") as f_out: + f_out.write("Calcvoltagebases\n") + f_out.write("Solve\n") + + return ag_series, head_critical_time_indices, critical_time_indices, 
compression_rate + + +def get_metadata(ag_series, head_critical_time_indices, critical_conditions, feederhead_only): + """ + This function exports metadata associated with selected timepoints. + INPUT: + ag_series: Dictionary + head_critical_time_indices: List + OUTPUT: + metadata_df: DataFrame + """ + buses_list = [] + critical_timepoint_list = [] + condition_list = [] + + # for feeder head + buses_list.append("feederhead") + critical_timepoint_list.append(head_critical_time_indices) + condition_list.append(critical_conditions) + + if not feederhead_only: + for bus, dic in ag_series.items(): + buses_list.append(bus) + critical_timepoint_list.append(dic["critical_time_idx"]) + condition_list.append(dic["condition"]) + # buses_list.append("feederhead") + # critical_timepoint_list.append(head_critical_time_indices) + # condition_list.append(critical_conditions) + df = pd.DataFrame( + list(zip(buses_list, critical_timepoint_list, condition_list)), + columns=["buses_list", "critical_timepoint_list", "condition_list"], + ) + expanded_df = df.explode(["critical_timepoint_list", "condition_list"]) + expanded_df.loc[expanded_df["condition_list"].notnull(), "condition_list"] = expanded_df.loc[ + expanded_df["condition_list"].notnull() + ]["condition_list"].apply(lambda x: x.value) + metadata_df = expanded_df.groupby("critical_timepoint_list").agg(list) + return metadata_df + + +def main( + path_to_master: Path, + categories, + critical_conditions=tuple(x for x in CriticalCondition), + feederhead_only=False, + recreate_profiles=False, + destination_dir=None, + create_new_circuit=True, + fix_master_file=False, +): + """ + INPUT: + category_path_dict: a dictionary where: + the keys are power conversion asset categories: "demand" and "generation" + the values are a list of paths pointing to the corresponding power conversions assets' .dss files such as "Loads.dss" and "PVSystems.dss" files + example: category_path_dict = {'demand': [LOAD_PATH, EVLOAD_PATH], 'generation': 
[PV_PATH]} + profile_files: a list of paths pointing to shape (profile) files such as "LoadShapes.dss" + OUTPUT: + bus_data: + profile_data: + ag_series: + head_time_indices: list of critical time indices when only feeder-head timeseries are considered + critical_time_indices: all critical time indices (individual buses as well as feeder-head considered) + compression_rate: ratio between the number of critical timepoints and the total number of timepoints in the timeseries + + """ + critical_conditions = set(critical_conditions) + if not destination_dir.exists(): + raise FileNotFoundError(f"destination_dir={destination_dir} does not exist") + + bus_data = {} + load_feeder(path_to_master, destination_dir, fix_master_file) + for category, param_classes in categories.items(): + for param_class in param_classes: + bus_data = get_param_values(param_class, bus_data, category) + profile_data = get_profile_data() + ag_series, head_critical_time_indices, critical_time_indices, compression_rate = aggregate_series( + bus_data, + profile_data, + critical_conditions, + feederhead_only, + recreate_profiles, + destination_dir, + create_new_circuit, + ) + metadata_df = get_metadata(ag_series, head_critical_time_indices, critical_conditions, feederhead_only) + metadata_df.to_csv(destination_dir / "metadata.csv") + + logger.info("head_time_indices = %s length = %s", head_critical_time_indices, len(head_critical_time_indices)) + logger.info( + "critical_time_indices = %s length = %s", critical_time_indices, len(critical_time_indices) + ) + + return ( + bus_data, + profile_data, + ag_series, + head_critical_time_indices, + critical_time_indices, + compression_rate, + ) + + +if __name__ == "__main__": + # path_to_master = Path( + # r"tests/data/generic-models/p1uhs23_1247/p1udt21301/PVDeployments/p1uhs23_1247__p1udt21301__random__2__15.dss" + # ) + path_to_master = Path( + 
r"C:\Users\SABRAHAMs\Desktop\NREL\current_projects\BlocPowerIthaca\feeder_model\baseline\OpenDSS\dss_files\Master_run.dss" + ) + destination = Path(r"C:\Users\SABRAHAMs\Desktop\NREL\current_projects\BlocPowerIthaca\feeder_model\baseline\OpenDSS\dss_files\test-output-timepoint") + destination.mkdir(exist_ok=True) + st = time.time() + category_class_dict = { + "demand": [DemandCategory.LOAD], + "generation": [GenerationCategory.PV_SYSTEM], + } + critical_conditions = [CriticalCondition.MAX_DEMAND, CriticalCondition.MAX_NET_GENERATION] + feederhead_only = True + + ( + bus_data, + profile_data, + ag_series, + head_critical_time_indices, + critical_time_indices, + compression_rate, + ) = main( + path_to_master, + category_class_dict, + critical_conditions=critical_conditions, + feederhead_only=feederhead_only, + destination_dir=destination, + recreate_profiles=True, + ) + et = time.time() + elapse_time = et - st diff --git a/docs/source/analysis-workflows/upgrade-cost-analysis.rst b/docs/source/analysis-workflows/upgrade-cost-analysis.rst index 382e4429..046ebd43 100644 --- a/docs/source/analysis-workflows/upgrade-cost-analysis.rst +++ b/docs/source/analysis-workflows/upgrade-cost-analysis.rst @@ -48,7 +48,6 @@ Here are optional parameters that you can customize in the same file: parallel_transformers_limit = 4 parallel_lines_limit = 4 upgrade_iteration_threshold = 5 - timepoint_multipliers = {} [voltage_upgrade_params] capacitor_sweep_voltage_gap = 1.0 @@ -57,7 +56,6 @@ Here are optional parameters that you can customize in the same file: max_regulators = 4 place_new_regulators = true use_ltc_placement = true - timepoint_multipliers = {} capacitor_action_flag = true existing_regulator_sweep_action = true diff --git a/tests/data/upgrade_cost_analysis_generic_mult.json b/tests/data/upgrade_cost_analysis_generic_mult.json index 3348ae96..c26577d0 100644 --- a/tests/data/upgrade_cost_analysis_generic_mult.json +++ b/tests/data/upgrade_cost_analysis_generic_mult.json @@ 
-8,8 +8,7 @@ "voltage_lower_limit": 0.95, "read_external_catalog": true, "external_catalog": "./disco/extensions/upgrade_simulation/upgrades/disco_technical_catalog.json", - "create_plots": true, - "timepoint_multipliers": {"load_multipliers": {"without_pv": [1, 1.3]}} + "create_plots": true }, "voltage_upgrade_params": { "initial_upper_limit": 1.05, @@ -28,9 +27,9 @@ "place_new_regulators": true, "capacitor_action_flag": true, "existing_regulator_sweep_action": true, - "create_plots": true, - "timepoint_multipliers": {"load_multipliers": { "without_pv": [1, 1.3]}} + "create_plots": true }, + "timepoint_multipliers": {"load_multipliers": {"without_pv": [1, 1.3]}}, "pydss_controllers": "", "enable_pydss_controllers": false, "include_pf1": true, diff --git a/tests/data/upgrade_cost_analysis_generic_pydss.json b/tests/data/upgrade_cost_analysis_generic_pydss.json new file mode 100644 index 00000000..66e34cd1 --- /dev/null +++ b/tests/data/upgrade_cost_analysis_generic_pydss.json @@ -0,0 +1,72 @@ +{ + "thermal_upgrade_params": { + "transformer_upper_limit": 1.5, + "line_upper_limit": 1.5, + "line_design_pu": 0.75, + "transformer_design_pu": 0.75, + "voltage_upper_limit": 1.05, + "voltage_lower_limit": 0.95, + "read_external_catalog": true, + "external_catalog": "./disco/extensions/upgrade_simulation/upgrades/disco_technical_catalog.json", + "create_plots": true + }, + "voltage_upgrade_params": { + "initial_upper_limit": 1.05, + "initial_lower_limit": 0.95, + "final_upper_limit": 1.05, + "final_lower_limit": 0.95, + "nominal_voltage": 120, + "capacitor_sweep_voltage_gap": 1, + "reg_control_bands": [ + 1, + 2 + ], + "reg_v_delta": 0.5, + "max_regulators": 4, + "use_ltc_placement": false, + "place_new_regulators": true, + "capacitor_action_flag": true, + "existing_regulator_sweep_action": true, + "create_plots": true + }, + "pydss_controllers": { + "pv_controller": + { + "Control1":"VVar", + "Control2":"None", + "Control3":"None", + "pf":1, + "pfMin":0.8, + "pfMax":1, + 
"Pmin":0, + "Pmax":1, + "uMin":0.9399999999999999, + "uDbMin":0.97, + "uDbMax":1.03, + "uMax":1.06, + "QlimPU":0.44, + "PFlim":0.9, + "enable_pf_limit":false, + "uMinC":1.06, + "uMaxC":1.1, + "PminVW":10, + "VWtype":"Rated Power", + "percent_p_cutin":10, + "percent_p_cutout":10, + "Efficiency":100, + "Priority":"Var", + "DampCoef":0.8 + }}, + "enable_pydss_controllers": true, + "include_pf1": true, + "upgrade_cost_database": "./disco/extensions/upgrade_simulation/upgrades/Generic_DISCO_cost_database_v2.xlsx", + "jobs": [ + { + "model_type": "UpgradeCostAnalysisGenericModel", + "name": "sb10_p7uhs7_1247_trans_301__p7udt173__random__7__85_pydss", + "opendss_model_file": "./tests/data/upgrade-models/sb10_p7uhs7_1247_trans_301/p7udt173/PVDeployments/sb10_p7uhs7_1247_trans_301__p7udt173__random__7__85.dss", + "blocked_by": [], + "estimated_run_minutes": null + } + ] +} \ No newline at end of file diff --git a/tests/integration/test_time_point_selection.py b/tests/integration/test_time_point_selection.py new file mode 100644 index 00000000..8b3fc78f --- /dev/null +++ b/tests/integration/test_time_point_selection.py @@ -0,0 +1,156 @@ +import fileinput +import math +import shutil +import subprocess +from pathlib import Path + +import opendssdirect as dss +import pytest + +from disco.preprocess.select_timepoints2 import ( + CriticalCondition, + DemandCategory, + GenerationCategory, + InvalidParameter, + main, + get_profile, +) + + +MASTER_FILE = ( + Path("tests") + / "data" + / "generic-models" + / "p1uhs23_1247" + / "p1udt21301" + / "PVDeployments" + / "p1uhs23_1247__p1udt21301__random__2__15.dss" +) + + +def test_time_point_selection(tmp_path): + categories = { + "demand": [DemandCategory.LOAD], + "generation": [GenerationCategory.PV_SYSTEM], + } + critical_conditions = [CriticalCondition.MAX_DEMAND, CriticalCondition.MAX_NET_GENERATION] + ( + bus_data, + profile_data, + ag_series, + head_time_indices, + critical_time_indices, + compression_rate, + ) = main( + 
MASTER_FILE, + categories=categories, + critical_conditions=critical_conditions, + destination_dir=tmp_path, + ) + dss.Text.Command(f"Clear") + dss.Text.Command(f"Redirect {MASTER_FILE}") + connected_profiles = get_connected_load_shape_profiles() + before = get_pmult_data(connected_profiles) + + new_master = tmp_path / "new_model" / "Master.dss" + assert new_master.exists() + dss.Text.Command(f"Clear") + dss.Text.Command(f"Redirect {new_master}") + after = get_pmult_data(connected_profiles) + assert sorted(before.keys()) == sorted(after.keys()) + + count = 0 + for name in before: + for i, main_index in enumerate(critical_time_indices): + num_decimals = 2 + val1 = round(before[name][main_index], num_decimals) + val2 = round(after[name][i], num_decimals) + assert math.isclose( + val1, val2 + ), f"Mismatch for LoadShape {name} at time_point={main_index} before={val1} after={val2}" + count += 1 + + +def test_invalid_master_file(tmp_path): + bad_file = MASTER_FILE.parent / (MASTER_FILE.name + ".tmp") + shutil.copyfile(MASTER_FILE, bad_file) + with fileinput.input(files=[bad_file], inplace=True) as f: + for line in f: + if "Solve" in line: + print("Solve mode=yearly") + else: + print(line, end="") + + categories = { + "demand": [DemandCategory.LOAD], + "generation": [GenerationCategory.PV_SYSTEM], + } + critical_conditions = [CriticalCondition.MAX_DEMAND, CriticalCondition.MAX_NET_GENERATION] + try: + with pytest.raises(InvalidParameter): + main( + bad_file, + categories=categories, + critical_conditions=critical_conditions, + destination_dir=tmp_path, + fix_master_file=False, + ) + main( + bad_file, + categories=categories, + critical_conditions=critical_conditions, + destination_dir=tmp_path, + fix_master_file=True, + ) + finally: + if bad_file.exists(): + bad_file.unlink() + + +def get_connected_load_shape_profiles(): + load_shapes = set() + for cls in (dss.Loads, dss.PVsystems, dss.Storages): + flag = cls.First() + while flag > 0: + profile = get_profile() + if 
profile: + load_shapes.add(profile) + flag = cls.Next() + + return load_shapes + + +def get_pmult_data(connected_profiles): + flag = dss.LoadShape.First() + load_shapes = {} + while flag > 0: + name = dss.LoadShape.Name() + if name in connected_profiles: + assert name not in load_shapes, name + load_shapes[name] = dss.LoadShape.PMult() + flag = dss.LoadShape.Next() + + return load_shapes + + +def test_time_point_selection_cli(tmp_path): + cmd = [ + "disco", + "select-time-points", + str(MASTER_FILE), + "-d", + "load", + "-g", + "pv_system", + "-c", + CriticalCondition.MAX_DEMAND.value, + "-c", + CriticalCondition.MIN_DEMAND.value, + "-c", + CriticalCondition.MAX_DEMAND.value, + "-c", + CriticalCondition.MAX_NET_GENERATION.value, + "-o", + str(tmp_path / "output"), + ] + subprocess.run(cmd, check=True)