diff --git a/disco/cli/disco.py b/disco/cli/disco.py index 58a3b670..7b64112c 100644 --- a/disco/cli/disco.py +++ b/disco/cli/disco.py @@ -19,6 +19,8 @@ from disco.cli.create_pipeline import create_pipeline from disco.cli.ingest_tables import ingest_tables from disco.cli.make_summary_tables import make_summary_tables +from disco.cli.compute_hosting_capacity import compute_hosting_capacity +from disco.cli.plots import plot from disco.cli.select_time_points import select_time_points from disco.cli.summarize_hosting_capacity import summarize_hosting_capacity from disco.cli.config_generic_models import config_generic_models @@ -57,3 +59,5 @@ def cli(): cli.add_command(summarize_hosting_capacity) cli.add_command(upgrade_cost_analysis) cli.add_command(hosting_capacity_by_timestep) +cli.add_command(compute_hosting_capacity) +cli.add_command(plot) diff --git a/disco/cli/disco_internal.py b/disco/cli/disco_internal.py index 080b9410..37d443a9 100644 --- a/disco/cli/disco_internal.py +++ b/disco/cli/disco_internal.py @@ -10,10 +10,8 @@ from disco.cli.compute_time_series_impact_analysis import compute_time_series_impact_analysis from disco.cli.make_cba_tables import make_cba_tables from disco.cli.make_upgrade_tables import make_upgrade_tables -from disco.cli.compute_hosting_capacity import compute_hosting_capacity from disco.cli.cba_post_process import cba_post_process from disco.cli.compute_cba import compute_cba -from disco.cli.plots import plot logger = logging.getLogger(__name__) @@ -30,7 +28,5 @@ def cli(): cli.add_command(compute_time_series_impact_analysis) cli.add_command(make_cba_tables) cli.add_command(make_upgrade_tables) -cli.add_command(compute_hosting_capacity) cli.add_command(cba_post_process) cli.add_command(compute_cba) -cli.add_command(plot) diff --git a/disco/cli/pv_deployments.py b/disco/cli/pv_deployments.py index 77d2a059..ff40da45 100644 --- a/disco/cli/pv_deployments.py +++ b/disco/cli/pv_deployments.py @@ -37,7 +37,7 @@ def 
create_pv_deployments(input_path: str, hierarchy: str, config: dict): print(json.dumps(summary, indent=2)) -def create_pv_configs(input_path: str, hierarchy: str, config: dict): +def create_pv_configs(input_path: str, hierarchy: str, config: dict, control_name: str, limit: int): """A method for generating pv config JSON files """ hierarchy = DeploymentHierarchy(hierarchy) config = SimpleNamespace(**config) @@ -46,7 +46,7 @@ def create_pv_configs(input_path: str, hierarchy: str, config: dict): print(f"'-p' or '--placement' option is ignored for this action.") manager = PVConfigManager(input_path, hierarchy, config) - config_files = manager.generate_pv_configs() + config_files = manager.generate_pv_configs(control_name=control_name, limit=limit) print(f"PV configs created! Total: {len(config_files)}") @@ -112,8 +112,12 @@ def redirect_pv_shapes(input_path: str, hierarchy: str, config: dict): hierarchy = DeploymentHierarchy(hierarchy) config = SimpleNamespace(**config) manager = PVDataManager(input_path, hierarchy, config) - manager.redirect_substation_pv_shapes() - manager.redirect_feeder_pv_shapes() + if hierarchy == DeploymentHierarchy.SUBSTATION: + manager.redirect_substation_pv_shapes() + elif hierarchy == DeploymentHierarchy.FEEDER: + manager.redirect_feeder_pv_shapes() + else: + raise NotImplementedError(f"{hierarchy=}") def generate_pv_deployment_jobs(input_path: str, hierarchy: str, config: dict): @@ -170,12 +174,28 @@ def pv_deployments(): required=True, help="Choose the action related to pv deployments" ) +@click.option( + "-c", "--control-name", + type=click.STRING, + default="volt_var_ieee_1547_2018_catB", + show_default=True, + help="Choose the control name to assign to pv configs in the action create-configs." +) @click.option( "-h", "--hierarchy", type=click.Choice(HIERARCHY_CHOICE, case_sensitive=False), required=True, help="Choose the deployment hierarchy." 
) +@click.option( + "-l", "--kw-limit", + type=click.FLOAT, + default=5, + show_default=True, + help="Capacity threshold to use for assigning the value of --control-name. The action " + "create-configs will only assign a control if a PV's capacity is greater than this value in " + "kW.", +) @click.option( "-p", "--placement", type=click.Choice(PLACEMENT_CHOICE, case_sensitive=False), @@ -280,7 +300,9 @@ def pv_deployments(): def source_tree_1( input_path, action, + control_name, hierarchy, + kw_limit, placement, category, master_filename, @@ -322,7 +344,11 @@ def source_tree_1( "random_seed": random_seed } action_function = ACTION_MAPPING[action] - action_function(input_path, hierarchy, config) + args = [input_path, hierarchy, config] + if action == "create-configs": + args.append(control_name) + args.append(kw_limit) + action_function(*args) pv_deployments.add_command(source_tree_1) diff --git a/disco/pipelines/creator.py b/disco/pipelines/creator.py index 5c036385..b285e107 100644 --- a/disco/pipelines/creator.py +++ b/disco/pipelines/creator.py @@ -80,8 +80,8 @@ def make_postprocess_command(self): with_loadshape = config_params["with_loadshape"] auto_select_time_points = config_params["auto_select_time_points"] pf1 = config_params["pf1"] - base_cmd = f"disco-internal compute-hosting-capacity {inputs}" - plot_cmd = f"disco-internal plot {inputs}" + base_cmd = f"disco compute-hosting-capacity {inputs}" + plot_cmd = f"disco plot {inputs}" scenarios = [CONTROL_MODE_SCENARIO] if pf1: scenarios.append(PF1_SCENARIO) @@ -192,10 +192,10 @@ def make_postprocess_command(self): commands.append(f"disco make-summary-tables {inputs}") if hosting_capacity: for scenario in TIME_SERIES_SCENARIOS: - commands.append(f"disco-internal compute-hosting-capacity {inputs} --scenario={scenario}") + commands.append(f"disco compute-hosting-capacity {inputs} --scenario={scenario}") for scenario in TIME_SERIES_SCENARIOS: - commands.append(f"disco-internal plot {inputs} --scenario={scenario}") 
+ commands.append(f"disco plot {inputs} --scenario={scenario}") # Postprocess to ingest results into sqlite database task_name = self.template.data["task_name"] diff --git a/disco/sources/source_tree_1/pv_deployments.py b/disco/sources/source_tree_1/pv_deployments.py index d5163cd7..8550f5f6 100644 --- a/disco/sources/source_tree_1/pv_deployments.py +++ b/disco/sources/source_tree_1/pv_deployments.py @@ -117,10 +117,11 @@ def disable_loadshapes_redirect(self) -> None: def load_feeder(self) -> None: """OpenDSS redirect master DSS file""" dss.Text.Command("Clear") - logger.info("OpenDSS loads feeder - %s", self.master_file) - r = dss.Text.Command(f"Redirect '{self.master_file}'") - if r is not None: - logger.exception("OpenDSSError: %s. Feeder: %s", str(r), self.master_file) + logger.info("OpenDSS load feeder - %s", self.master_file) + try: + dss.Text.Command(f"Redirect '{self.master_file}'") + except Exception: + logger.exception(f"OpenDSSError: master_file={self.master_file}") raise def search_head_line(self) -> None: @@ -330,8 +331,7 @@ def get_master_file(self, input_path: str) -> Optional[str]: """Return the full path of master file""" master_file = os.path.join(input_path, self.config.master_filename) if not os.path.exists(master_file): - logger.exception("'%s' not found in '%s'. System exits!", self.config.master_filename, self.feeder_path) - raise + raise FileNotFoundError(f"{self.config.master_filename} not found in {self.feeder_path}") return master_file def load_pvdss_instance(self) -> PVDSSInstance: @@ -351,7 +351,7 @@ def load_pvdss_instance(self) -> PVDSSInstance: flag = pvdss_instance.ensure_energy_meter() if flag: pvdss_instance.load_feeder() # Need to reload after master file updated. 
- except Exception as error: + except Exception: logger.exception("Failed to load master file - %s", master_file) raise return pvdss_instance @@ -365,12 +365,10 @@ def deploy_all_pv_scenarios(self) -> dict: total_loads = pvdss_instance.get_total_loads() feeder_stats = pvdss_instance.get_feeder_stats(total_loads) if total_loads.total_load <= 0: - feeder_stats_string = json.dumps(feeder_stats.__dict__) - logger.exception( - "Failed to generate PV scenarios on feeder - %s, stats: %s", - feeder_name, feeder_stats_string + stats_str = json.dumps(feeder_stats.__dict__) + raise ValueError( + f"Failed to generate PV scenarios on feeder - {feeder_name}, stats: {stats_str}" ) - raise # combined bus distance customer_distance = pvdss_instance.get_customer_distance() @@ -393,12 +391,11 @@ def deploy_all_pv_scenarios(self) -> dict: feeder_stats = pvdss_instance.get_feeder_stats(total_loads, existing_pvs) if feeder_stats.pcent_base_pv > self.config.max_penetration: feeder_stats_string = json.dumps(feeder_stats.__dict__) - logger.exception( - "Failed to generate PV scenarios on feeder - %s. \ - The existing PV amount exceeds the maximum penetration level of %s\%. Stats: %s", - feeder_name, self.cofig.max_penetration, feeder_stats_string + raise ValueError( + f"Failed to generate PV scenarios on feeder - {feeder_name}. " + f"The existing PV amount exceeds the maximum penetration level of " + f"{self.config.max_penetration}%. Stats: {feeder_stats_string}", ) - raise snum = self.config.sample_number + 1 start = self.config.min_penetration @@ -596,12 +593,11 @@ def get_all_remaining_pv_to_install(self, data: SimpleNamespace) -> dict: all_remaining_pv_to_install = total_pv - data.total_existing_pv if all_remaining_pv_to_install <= 0: minimum_penetration = (data.total_existing_pv * 100) / max(0.0001, data.total_load) - logger.exception( - "Failed to generate PV scenarios on feeder - %s. \ - The system has more than the target PV penetration. 
\ - Please increase penetration to at least %s.", self.feeder_path, minimum_penetration + raise ValueError( + f"Failed to generate PV scenarios on feeder - {self.feeder_path}. " + "The system has more than the target PV penetration. " + f"Please increase penetration to at least {minimum_penetration}." ) - raise return all_remaining_pv_to_install def get_priority_buses(self, data: SimpleNamespace) -> list: @@ -650,7 +646,10 @@ def add_pv_string(self, bus: str, pv_type: str, pv_size: float, pv_string: str) pv_name = self.generate_pv_name(bus, pv_type) dss.Circuit.SetActiveBus(bus) node_list = bus.split(".") - ph = len(node_list) - 1 + if len(node_list) == 1: + ph = 3 + else: + ph = len(node_list) - 1 if ph == 3: conn = "delta" kv = round(dss.Bus.kVBase()*(3)**0.5, 4) @@ -731,7 +730,7 @@ def get_pv_shapes_file(self, input_path: str) -> str: pv_shapes_file = os.path.join(input_path, PV_SHAPES_FILENAME) return pv_shapes_file - def create_all_pv_configs(self) -> None: + def create_all_pv_configs(self, control_name, limit) -> None: """Create PV configs JSON file""" root_path = self.get_pv_deployments_path() if not os.path.exists(root_path): @@ -760,7 +759,7 @@ def create_all_pv_configs(self) -> None: continue pv_systems_file = os.path.join(pen_dir, PV_SYSTEMS_FILENAME) if os.path.exists(pv_systems_file): - pv_conf, pv_prof = self.assign_profile(pv_systems_file, pv_shapes_file, pv_systems) + pv_conf, pv_prof = self.assign_profile(pv_systems_file, pv_shapes_file, pv_systems, control_name, limit) pv_configs += pv_conf pv_profiles.update(pv_prof) self.attach_profile(pv_systems_file, pv_profiles) @@ -782,16 +781,17 @@ def get_customer_types(self): bus_key = "bus1=" shape_key = "yearly=" - load_key = "Load." + load_key = "load." 
bus_customer_types, load_customer_types = {}, {} with open(loads_file, "r") as f: for line in f.readlines(): - if bus_key not in line.lower(): + lowered = line.lower().strip() + if bus_key not in lowered: continue - bus = line.split(bus_key)[1].split(" ")[0].split(".")[0] - shape_name = line.split(shape_key)[1].split(" ")[0] - load = line.split(load_key)[1].split(" ")[0] + bus = lowered.split(bus_key)[1].split(" ")[0].split(".")[0] + shape_name = lowered.split(shape_key)[1].split(" ")[0] + load = lowered.split(load_key)[1].split(" ")[0] if "com_" in shape_name: customer_type = "commercial" elif "res_" in shape_name: @@ -813,7 +813,8 @@ def assign_profile( pv_systems_file: str, pv_shapes_file: str, pv_systems: set, - limit: int = 5 + control_name: str, + limit: int, ) -> dict: """Assign PV profile to PV systems.""" pv_dict = self.get_pvsys(pv_systems_file) @@ -823,9 +824,7 @@ def assign_profile( pv_value = value["pmpp"] if pv_name in pv_systems: continue - if float(pv_value) > limit: - control_name = "volt_var_ieee_1547_2018_catB" - else: + if float(pv_value) <= limit: control_name = "pf1" pv_profile = random.choice(shape_list) @@ -1242,13 +1241,11 @@ def __init__(self, input_path: str, hierarchy: DeploymentHierarchy, config: Simp def redirect(self, input_path: str) -> bool: """Given a path, update the master file by redirecting PVShapes.dss""" - pv_shapes_file = os.path.join(input_path, PV_SHAPES_FILENAME) self._copy_pv_shapes_file(input_path) master_file = os.path.join(input_path, self.config.master_filename) if not os.path.exists(master_file): - logger.exception("'%s' not found in '%s'. 
System exits!", self.config.master_filename, input_path) - raise + raise FileNotFoundError(f"{self.config.master_filename} not found in {input_path}") index = 0 with open(master_file, "r") as fr: @@ -1299,7 +1296,8 @@ def redirect_substation_pv_shapes(self) -> None: substation_paths = self.get_substation_paths() logger.info("Running PVShapes redirect in %s substation directories...", len(substation_paths)) with ProcessPoolExecutor() as executor: - executor.map(self.redirect, substation_paths) + for _ in executor.map(self.redirect, substation_paths): + pass logger.info("Substation PVShapes redirect done!") def redirect_feeder_pv_shapes(self) -> None: @@ -1307,7 +1305,8 @@ def redirect_feeder_pv_shapes(self) -> None: feeder_paths = self.get_feeder_paths() logger.info("Running PVShapes redirect in %s feeder directories...", len(feeder_paths)) with ProcessPoolExecutor() as executor: - executor.map(self.redirect, feeder_paths) + for _ in executor.map(self.redirect, feeder_paths): + pass logger.info("Feeder PVShapes redirect done!") def rename(self, feeder_path: str) -> None: @@ -1345,7 +1344,8 @@ def rename_feeder_loads(self, feeder_paths: list) -> None: deployed.append(feeder_path) with ProcessPoolExecutor() as executor: - executor.map(self.rename, deployed) + for _ in executor.map(self.rename, deployed): + pass logger.info("Feeder Loads rename done, total %s", len(deployed)) def revert(self, feeder_path: str): @@ -1370,7 +1370,8 @@ def revert(self, feeder_path: str): def revert_master_files(self, feeder_paths: list) -> None: """Revert master files with LoadShapes.dss redirect enabled""" with ProcessPoolExecutor() as executor: - executor.map(self.revert, feeder_paths) + for _ in executor.map(self.revert, feeder_paths): + pass logger.info("Feeder Redirect LoadShapes.dss enabled in master files, total %s", len(feeder_paths)) def restore_feeder_data(self) -> None: @@ -1427,6 +1428,7 @@ def backup_loads_file(self, loads_file: str) -> bool: shutil.copyfile(loads_file, 
original_loads_file) try: + # TODO DT: remove these os.chmod(original_loads_file, 0o666) except Exception: pass @@ -1479,7 +1481,11 @@ def build_load_dictionary(self, load_lines: list) -> dict: kw = self.get_attribute(line, "kw=") kvar = self.get_attribute(line, "kvar=") kva = self.get_attribute(line, "kva=") - phases = self.get_attribute(line, "phases=") + if "phases=" in line: + phases = self.get_attribute(line, "phases=") + else: + # Kwami says that this is a safe default if the line does not define the value. + phases = 3 init_name = self.get_attribute(line, "new load.") if "." in bus_node: @@ -1514,10 +1520,9 @@ def build_load_dictionary(self, load_lines: list) -> dict: load_dict[bus, name]["kvar"] = kvar if "kva=" in lowered_line: load_dict[bus, name]["kva"] = kva - if "phases=" in lowered_line: - load_dict[bus, name]["phases"] = phases load_dict[bus,name]["line_idx"] = idx load_dict[bus,name]["kv"] = kv + load_dict[bus, name]["phases"] = phases rekeyed_load_dict = {v["line_idx"]: v for k, v in load_dict.items()} return rekeyed_load_dict @@ -1560,7 +1565,8 @@ def transform_feeder_loads(self) -> None: feeder_paths = self.get_feeder_paths() logger.info("Transforming loads files in %s feeders...", len(feeder_paths)) with ProcessPoolExecutor() as executor: - executor.map(self.transform, feeder_paths) + for _ in executor.map(self.transform, feeder_paths): + pass logger.info("Feeder Loads transformed, total %s feeders.", len(feeder_paths)) @@ -1732,13 +1738,13 @@ def __init__(self,input_path: str, hierarchy: DeploymentHierarchy, config: Simpl """ super().__init__(input_path, hierarchy, config) - def generate_pv_configs(self) -> list: + def generate_pv_configs(self, control_name="volt_var_ieee_1547_2018_catB", limit=5) -> list: """Generate pv config JSON files based on PV deployments""" config_files = [] feeder_paths = self.get_feeder_paths() for feeder_path in feeder_paths: generator = get_pv_scenario_generator(feeder_path, self.config) - result = 
generator.create_all_pv_configs() + result = generator.create_all_pv_configs(control_name, limit) config_files.extend(result) generator.create_pv_systems_sum_group_file(pv_config_files=result) diff --git a/docs/source/pv-deployments.rst b/docs/source/pv-deployments.rst index e803685a..32ab4c80 100644 --- a/docs/source/pv-deployments.rst +++ b/docs/source/pv-deployments.rst @@ -34,17 +34,24 @@ There are several actions here related to PV deployments manipulation, including Redirect PVShapes ----------------- -Before performing PV deployments, we need to ensure the ``PVShapes.dss`` is redirected in the master -file located in substation and feeder directories. Two steps are required: +This workflow will generate OpenDSS files with varying counts and sizes of PVSystems. It will +assign load shape profiles to those PVSystems from a pool of profiles. You must define these +profiles in a ``PVShapes.dss`` file and copy that file to all substation and/or feeder +directories. -First, you need to generate the PV profiles into a ``PVShapes.dss`` file on your own, and then -copy the ``PVShapes.dss`` into each substation and feeder directories. +All ``Master.dss`` files need to redirect to ``PVShapes.dss``. We recommend that you add these lines to +your files. If you do that, you can skip to the next section. -Second, run the command below. +If your directory structure aligns with the ``source-tree-1`` expectations, the disco CLI command +below will add the redirects automatically. + +.. todo:: Make this code handle all cases generically. + +Run this command: .. 
code-block:: bash - $ disco pv-deployments source-tree-1 -a direct-pvshapes -h INPUT_PATH + $ disco pv-deployments source-tree-1 -a redirect-pvshapes -h INPUT_PATH Transform Loads diff --git a/setup.py b/setup.py index 466d047d..c478900f 100644 --- a/setup.py +++ b/setup.py @@ -63,11 +63,11 @@ def install_jade_extensions(): "NREL-jade~=0.9.3", "chevron~=0.14.0", "click~=8.0", - "dsspy~=2.2", + "dsspy~=2.3.0", "filelock~=3.8", "matplotlib~=3.6", "networkx~=2.8", - "opendssdirect.py~=0.7.0", + "opendssdirect.py~=0.8.4", "openpyxl~=3.0", "pandas~=1.5.0", "pydantic~=1.6",