diff --git a/CHANGELOG.md b/CHANGELOG.md
index 035def73..e3a908fe 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,6 +4,20 @@ All notable changes to the INCORE documents generated by Sphinx package will be
The format is based on [Keep a Changelog](http://keepachangelog.com/)
and this project adheres to [Semantic Versioning](http://semver.org/).
+
+## [4.6.0] - 2023-10-11
+
+### Added
+- Commercial building recovery markdown and notebook [#271](https://github.com/IN-CORE/incore-docs/issues/271)
+- Local hazard tutorial [#272](https://github.com/IN-CORE/incore-docs/issues/272)
+
+### Fixed
+- Broken notebook using restoration util class [#260](https://github.com/IN-CORE/incore-docs/issues/260)
+
+### Changed
+- Added an additional input dataset and a citation to INDP [#274](https://github.com/IN-CORE/incore-docs/issues/274)
+
+
## [4.5.0] - 2023-08-16
### Added
diff --git a/manual_jb/content/_config.yml b/manual_jb/content/_config.yml
index 43bc383d..19f76afb 100644
--- a/manual_jb/content/_config.yml
+++ b/manual_jb/content/_config.yml
@@ -1,9 +1,9 @@
-title: "IN-CORE Manual
4.5.0"
+title: "IN-CORE Manual
4.6.0"
project: "IN-CORE Manual"
author: IN-CORE Community
copyright: "2023" # Copyright year to be placed in the footer
logo: images/resilience-logo.png
-version: "4.5.0" # Version is not part of JB config, we use it for autobuild of incore-docs
+version: "4.6.0" # Version is not part of JB config, we use it for autobuild of incore-docs
exclude_patterns: [_build, .DS_Store, "**.ipynb_checkpoints"]
repository:
diff --git a/manual_jb/content/_toc.yml b/manual_jb/content/_toc.yml
index 7349c981..1b234108 100644
--- a/manual_jb/content/_toc.yml
+++ b/manual_jb/content/_toc.yml
@@ -61,6 +61,7 @@ chapters:
- file: notebooks/create_tornado/create_tornado.ipynb
- file: notebooks/retrofit_plan_Joplin_2021_12_01/retrofit_plan_Joplin_2021_12_01.ipynb
- file: notebooks/create_network_dataset/create_network_dataset.ipynb
+ - file: notebooks/create_local_hazard/create_local_hazard.ipynb
- file: faq
- file: workshops
sections:
diff --git a/manual_jb/content/analyses.md b/manual_jb/content/analyses.md
index f1da1bc9..56c5f4f6 100644
--- a/manual_jb/content/analyses.md
+++ b/manual_jb/content/analyses.md
@@ -7,38 +7,39 @@
5. [Capital shocks](analyses/capital_shocks)
6. [Combined wind wave surge building damage](analyses/combined_wind_wave_surge_building_dmg)
7. [Combined wind wave surge building loss](analyses/combined_wind_wave_surge_building_loss)
-8. [Cumulative building damage](analyses/cumulative_building_dmg)
-9. [Electric power facility damage](analyses/epf_dmg)
-10. [Electric power facility repair cost](analyses/epf_repair_cost)
-11. [Electric power facility restoration](analyses/epf_restoration)
-12. [Electric power network functionality](analyses/epn_functionality)
-13. [Household-level housing sequential recovery](analyses/housing_household_recovery)
-14. [Galveston Computable General Equilibrium (CGE)](analyses/galveston_cge.md)
-15. [Housing recovery](analyses/housing_recovery)
-16. [Housing unit allocation](analyses/housingunitallocation)
-17. [Interdependent Network Design Problem](analyses/indp)
-18. [Joplin Computable General Equilibrium (CGE)](analyses/joplin_cge)
-19. [Joplin empirical restoration](analyses/joplin_empirical_restoration)
-20. [Mean damage](analyses/mean_dmg)
-21. [Multi-objective retrofit optimization](analyses/multi_retrofit_optimization)
-22. [Monte Carlo failure probability](analyses/mc_failure_prob)
-23. [Network cascading interdependency functionality](analyses/nci_functionality)
-24. [Nonstructural building damage](analyses/non_structural_building_dmg)
-25. [Pipeline damage](analyses/pipeline_dmg)
-26. [Pipeline damage with repair rate](analyses/pipeline_dmg_w_repair_rate)
-27. [Pipeline functionality](analyses/pipeline_functionality)
-28. [Pipeline repair cost](analyses/pipeline_repair_cost)
-29. [Pipeline restoration](analyses/pipeline_restoration)
-30. [Population dislocation](analyses/populationdislocation)
-31. [Portfolio recovery](analyses/portfolio_recovery)
-32. [Residential building recovery](analyses/residential_building_recovery)
-33. [Road damage](analyses/road_dmg)
-34. [Salt Lake City Computable General Equilibrium (CGE)](analyses/slc_cge.md)
-35. [Seaside Computable General Equilibrium (CGE)](analyses/seaside_cge)
-36. [Social Vulnerability](analyses/social_vulnerability)
-37. [Tornado electric power network (EPN) damage](analyses/tornadoepn_dmg)
-38. [Transportation recovery](analyses/transportation_recovery)
-39. [Water facility damage](analyses/waterfacility_dmg)
-40. [Water network functionality](analyses/wfn_functionality)
-41. [Water facility repair cost](analyses/water_facility_repair_cost)
-42. [Water facility restoration](analyses/water_facility_restoration)
+8. [Commercial building recovery](analyses/commercial_building_recovery)
+9. [Cumulative building damage](analyses/cumulative_building_dmg)
+10. [Electric power facility damage](analyses/epf_dmg)
+11. [Electric power facility repair cost](analyses/epf_repair_cost)
+12. [Electric power facility restoration](analyses/epf_restoration)
+13. [Electric power network functionality](analyses/epn_functionality)
+14. [Household-level housing sequential recovery](analyses/housing_household_recovery)
+15. [Galveston Computable General Equilibrium (CGE)](analyses/galveston_cge)
+16. [Housing recovery](analyses/housing_recovery)
+17. [Housing unit allocation](analyses/housingunitallocation)
+18. [Interdependent Network Design Problem](analyses/indp)
+19. [Joplin Computable General Equilibrium (CGE)](analyses/joplin_cge)
+20. [Joplin empirical restoration](analyses/joplin_empirical_restoration)
+21. [Mean damage](analyses/mean_dmg)
+22. [Multi-objective retrofit optimization](analyses/multi_retrofit_optimization)
+23. [Monte Carlo failure probability](analyses/mc_failure_prob)
+24. [Network cascading interdependency functionality](analyses/nci_functionality)
+25. [Nonstructural building damage](analyses/non_structural_building_dmg)
+26. [Pipeline damage](analyses/pipeline_dmg)
+27. [Pipeline damage with repair rate](analyses/pipeline_dmg_w_repair_rate)
+28. [Pipeline functionality](analyses/pipeline_functionality)
+29. [Pipeline repair cost](analyses/pipeline_repair_cost)
+30. [Pipeline restoration](analyses/pipeline_restoration)
+31. [Population dislocation](analyses/populationdislocation)
+32. [Portfolio recovery](analyses/portfolio_recovery)
+33. [Residential building recovery](analyses/residential_building_recovery)
+34. [Road damage](analyses/road_dmg)
+35. [Salt Lake City Computable General Equilibrium (CGE)](analyses/slc_cge)
+36. [Seaside Computable General Equilibrium (CGE)](analyses/seaside_cge)
+37. [Social Vulnerability](analyses/social_vulnerability)
+38. [Tornado electric power network (EPN) damage](analyses/tornadoepn_dmg)
+39. [Transportation recovery](analyses/transportation_recovery)
+40. [Water facility damage](analyses/waterfacility_dmg)
+41. [Water network functionality](analyses/wfn_functionality)
+42. [Water facility repair cost](analyses/water_facility_repair_cost)
+43. [Water facility restoration](analyses/water_facility_restoration)
diff --git a/manual_jb/content/analyses/commercial_building_recovery.md b/manual_jb/content/analyses/commercial_building_recovery.md
new file mode 100644
index 00000000..dd66b359
--- /dev/null
+++ b/manual_jb/content/analyses/commercial_building_recovery.md
@@ -0,0 +1,88 @@
+# Commercial building recovery
+
+**Description**
+
+This analysis computes the recovery time needed for each commercial building to reach full restoration from its
+damage state. Currently, the only supported hazard is tornadoes.
+
+The methodology incorporates a multi-layer Monte Carlo simulation approach and determines a two-step recovery
+time that includes delay and repair, so a building's total recovery time is the sum of its delay time and repair
+time. The delay model is adapted from the REDi framework and calculates the outcomes resulting from delay-impeding
+factors such as post-disaster inspection, insurance claims, financing, and government permits. The repair model
+follows the FEMA P-58 approach and is controlled by fragility functions.
+
+The outputs of this analysis are CSV files with time-stepping recovery probabilities at the building level.
+
+**Contributors**
+
+- Science: Wanting Lisa Wang, John W. van de Lindt
+- Implementation: Wanting Lisa Wang and NCSA IN-CORE Dev Team
+
+**Related publications**
+
+- Wang, W.L., Watson, M., van de Lindt, J.W. and Xiao, Y., 2023. Commercial Building Recovery Methodology for Use
+ in Community Resilience Modeling. Natural Hazards Review, 24(4), p.04023031.
+
+**Input parameters**
+
+key name | type | name | description
+--- | --- | --- | ---
+`result_name` * | `str` | Result name | Name of the result dataset.
+`num_samples` * | `int` | Samples number | Number of sample scenarios.
+`seed` | `int` | Seed | Initial seed for the probabilistic model.
+`repair_key` | `str` | Repair key | A repair key to use in mapping dataset.
+
+**Input datasets**
+
+key name | type | name | description
+--- | --- | --- | ---
+`buildings` * | `ergo:buildingInventoryVer4`<br>`ergo:buildingInventoryVer5`<br>`ergo:buildingInventoryVer6`<br>`ergo:buildingInventoryVer7` | Building dataset | A building dataset.
+`dfr3_mapping_set` * | `incore:dfr3MappingSet` | DFR3 Mapping Set | DFR3 Mapping Set.
+`sample_damage_states` * | `incore:sampleDamageState` | Damage states | Sample damage states.
+`mcs_failure` * | `incore:failureProbability` | MCS failure | Monte Carlo simulation (MCS) failure probability results.
+`delay_factors` * | `incore:buildingRecoveryFactors` | Delay factors | Delay-impeding factors such as post-disaster inspection, insurance claims, and government permits, based on the building's damage state. Provided by the REDi framework.
+
+**Output datasets**
+
+key name | type | parent key | name | description
+--- | --- | --- | --- | ---
+`time_stepping_recovery` * | `incore:buildingRecovery` | `buildings` | Results | A dataset containing results (format: CSV) with percentages of commercial building recovery.
+`recovery` * | `incore:buildingRecoveryTime` | `buildings` | Building Recovery Time | A dataset containing results (format: CSV) with commercial building recovery time.
+`total_delay` * | `incore:buildingRecoveryDelay` | `buildings` | Building Recovery Delay | A dataset containing results (format: CSV) with commercial building delay time.
+
+(* required)
+
+**Execution**
+
+code snippet:
+
+```
+ # Imports used by this snippet (see the full notebook linked below)
+ from pyincore import IncoreClient, RepairService, MappingSet
+ from pyincore.analyses.commercialbuildingrecovery.commercialbuildingrecovery import CommercialBuildingRecovery
+
+ # Connect to IN-CORE services and create a commercial building recovery instance
+ client = IncoreClient()
+ comm_recovery = CommercialBuildingRecovery(client)
+
+ # Load input building inventory dataset (Joplin; ID from the full notebook)
+ buildings = "5dbc8478b9219c06dd242c0d"  # ergo:buildingInventoryVer6
+ comm_recovery.load_remote_input_dataset("buildings", buildings)
+
+ # Load repair mapping
+ mapping_id = "60edfa3efc0f3a7af53a21b5"
+ repair_service = RepairService(client)
+ mapping_set = MappingSet(repair_service.get_mapping(mapping_id))
+ comm_recovery.set_input_dataset('dfr3_mapping_set', mapping_set)
+
+ # Load remaining input datasets (IDs from the full notebook)
+ sample_damage_states = "651dcf587faf5a0eae9e3443"  # 10 samples, 28k buildings - MCS output format
+ mcs_failure = "651dced2adce7a5cb977f06e"
+ delay_factors = "64ee0bcd553ecf0768e21e55"
+ comm_recovery.load_remote_input_dataset("sample_damage_states", sample_damage_states)
+ comm_recovery.load_remote_input_dataset("mcs_failure", mcs_failure)
+ comm_recovery.load_remote_input_dataset("delay_factors", delay_factors)
+
+ # Specify the result name and a seed for the probabilistic model
+ result_name = "joplin_recovery"
+ seed = 1234  # illustrative value
+
+ # Set analysis parameters
+ comm_recovery.set_parameter("result_name", result_name)
+ comm_recovery.set_parameter("seed", seed)
+ comm_recovery.set_parameter("num_samples", 10)
+
+ # Run commercial recovery analysis
+ comm_recovery.run_analysis()
+```
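+
+A minimal sketch of retrieving the results afterwards, mirroring the full notebook linked below:
+
+```
+ # Retrieve the time-stepping recovery output and convert it to a Pandas DataFrame
+ result = comm_recovery.get_output_dataset("time_stepping_recovery")
+ df = result.get_dataframe_from_csv()
+ df.head()
+```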
+
+full analysis: [commercial_building_recovery.ipynb](https://github.com/IN-CORE/incore-docs/blob/main/notebooks/commercial_building_recovery.ipynb)
\ No newline at end of file
diff --git a/manual_jb/content/analyses/indp.md b/manual_jb/content/analyses/indp.md
index 4163b9a0..00dc589d 100644
--- a/manual_jb/content/analyses/indp.md
+++ b/manual_jb/content/analyses/indp.md
@@ -10,26 +10,30 @@ systems subject to budget and operational constraints.
- Implementation: Hesam Talebiyan, Chen Wang, and NCSA IN-CORE Dev Team
+**Related publications**
+- Talebiyan, Hesam. "Interdependent Restoration of Infrastructure Networks with Humans in the Loop: decentralized and strategic decision processes." (2021) Diss., Rice University. [https://hdl.handle.net/1911/111232](https://hdl.handle.net/1911/111232)
+
+
**Input parameters**
- key name | type | name | description
---------------------------------|---------|-----------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
- `network_type` * | `str` | Network Type | Type of the network, which is set to `from_csv` for Seaside networks. e.g. *from_csv*, *incore*.
- `MAGS` * | `list` | MAGS | The earthquake return period.
- `sample_range` * | `range` | Sample Range | The range of sample scenarios to be analyzed.
- `dislocation_data_type` | `str` | Dislocation Data Type | Dislocation Data Type.
- `return_model` | `str` | Return Model | Type of the model for the return of the dislocated population. Options: *step_function* and *linear*.
- `testbed_name` | `str` | Testbed Name | Sets the name of the testbed in analysis.
- `extra_commodity` * | `dict` | Extra Commodity | Multi-commodity parameters dictionary.
+ key name | type | name | description
+--------------------------------|---------|-----------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
+ `network_type` * | `str` | Network Type | Type of the network, which is set to `from_csv` for Seaside networks. e.g. *from_csv*, *incore*.
+ `MAGS` * | `list` | MAGS | The earthquake return period.
+ `sample_range` * | `range` | Sample Range | The range of sample scenarios to be analyzed.
+ `dislocation_data_type` | `str` | Dislocation Data Type | Dislocation Data Type.
+ `return_model` | `str` | Return Model | Type of the model for the return of the dislocated population. Options: *step_function* and *linear*.
+ `testbed_name` | `str` | Testbed Name | Sets the name of the testbed in analysis.
+ `extra_commodity` * | `dict` | Extra Commodity | Multi-commodity parameters dictionary.
`RC` * | `list` | Resource Caps | List of resource caps or the number of available resources in each step of the analysis. Each item of the list is a dictionary whose items show the type of resource and the available number of that type of resource. For example: * If `network_type`=*from_csv*, you have two options:* if, for example, `R_c`= [{"budget": 3}, {"budget": 6}], then the analysis is done for the cases when there are 3 and 6 resources available of type "budget" (total resource assignment).* if, for example, `R_c`= [{"budget": {1:1, 2:1}}, {"budget": {1:1, 2:2}}, {"budget": {1:3, 2:3}}] and given there are 2 layers, then the analysis is done for the case where each layer gets 1 resource of type "budget", AND the case where layer 1 gets 1 and layer 2 gets 2 resources of type "budget", AND the case where each layer gets 3 resources of type "budget" (Prescribed resource for each layer).
- `layers` * | `list` | Layers | List of layers in the analysis.
- `method` * | `str` | Method | There are two choices of method: 1. `INDP`: runs Interdependent Network Restoration Problem (INDP). 2. `TDINDP`: runs time-dependent INDP (td-INDP). In both cases, if "time_resource" is True, then the repair time for each element is considered in devising the restoration plans.
- `t_steps` | `int` | Time steps | Number of time steps of the analysis.
- `time_resource` | `bool` | Time Resource | If time resource is True, then the repair time for each element is considered in devising the restoration plans.
- `save_model` | `bool` | Save Model | If the optimization model should be saved to file. The default is False.
- `solver_engine` | `str` | Solver Engine | Solver to use for optimization model. Default to use SCIP solver.
- `solver_path` | `str` | Solver Engine Path | Executable Path to the Solver to use for optimization model. Default to SCIP solver.
- `solver_time_limit` | `int` | Solve Time Limit | Solving time limit in seconds.
+ `layers` * | `list` | Layers | List of layers in the analysis.
+ `method` * | `str` | Method | There are two choices of method: 1. `INDP`: runs Interdependent Network Restoration Problem (INDP). 2. `TDINDP`: runs time-dependent INDP (td-INDP). In both cases, if "time_resource" is True, then the repair time for each element is considered in devising the restoration plans.
+ `t_steps` | `int` | Time steps | Number of time steps of the analysis.
+ `time_resource` | `bool` | Time Resource | If time resource is True, then the repair time for each element is considered in devising the restoration plans.
+ `save_model` | `bool` | Save Model | If the optimization model should be saved to file. The default is False.
+ `solver_engine` | `str` | Solver Engine | Solver to use for optimization model. Default to use SCIP solver.
+ `solver_path` | `str` | Solver Engine Path | Executable Path to the Solver to use for optimization model. Default to SCIP solver.
+ `solver_time_limit` | `int` | Solve Time Limit | Solving time limit in seconds.
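+
+A minimal sketch of setting a few of these parameters (values are illustrative, drawn from the option descriptions above; `indp_analysis` is assumed to be an INDP analysis object that has already been created):
+
+```
+ # Seaside-style setup: CSV-based networks, one earthquake return period, one sample
+ indp_analysis.set_parameter("network_type", "from_csv")
+ indp_analysis.set_parameter("MAGS", [1000])
+ indp_analysis.set_parameter("sample_range", range(0, 1))
+ indp_analysis.set_parameter("return_model", "step_function")
+
+ # Analyze the cases of 3 and 6 total resources of type "budget"
+ indp_analysis.set_parameter("RC", [{"budget": 3}, {"budget": 6}])
+ indp_analysis.set_parameter("layers", [1, 3])  # illustrative layer IDs
+ indp_analysis.set_parameter("method", "INDP")
+ indp_analysis.set_parameter("time_resource", True)
+```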
**Input datasets**
@@ -54,6 +58,7 @@ systems subject to budget and operational constraints.
`epf_failure_state` * | `incore:sampleFailureState` | Electric Power Facility Failure State | MCS failure state of electric power facilities.
`epf_damage_state` * | `incore:sampleDamageState` | Electric Power Facility Damage State | MCS damage state of electric power facilities
`pop_dislocation` * | `incore:popDislocation` | Population Dislocation | Population dislocation.
+ `dt_params` | `incore:dTParams` | Dislocation time parameters | Parameters for population dislocation time.
`bldgs2elec` | `incore:bldgs2elec` | Building To Electric Power Facility | Relation between building and electric power facility.
`bldgs2wter` | `incore:bldgs2wter` | Building To Water Facility | Relation between building and water facility.
diff --git a/manual_jb/content/notebooks/create_local_hazard/create_local_hazard.ipynb b/manual_jb/content/notebooks/create_local_hazard/create_local_hazard.ipynb
new file mode 100644
index 00000000..324acfb1
--- /dev/null
+++ b/manual_jb/content/notebooks/create_local_hazard/create_local_hazard.ipynb
@@ -0,0 +1,541 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "# Create Local Hazards"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "This tutorial provides examples of how to create local hazards for tornadoes, earthquakes, tsunamis, floods, and hurricanes, respectively."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 8,
+ "metadata": {
+ "ExecuteTime": {
+ "end_time": "2023-10-05T17:23:46.971951Z",
+ "start_time": "2023-10-05T17:23:46.967669Z"
+ }
+ },
+ "outputs": [],
+ "source": [
+ "import os\n",
+ "import json\n",
+ "from pyincore import Hurricane, Flood, Earthquake, Tornado\n",
+ "from pyincore.models.hazard.tsunami import Tsunami"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "You will need to indicate the local data path initially."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 9,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "dir = \"files\""
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Create Tornadoes"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "**Inputs**: JSON representation of a dataset describing a tornado. Each available dataset in Shapefile format."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 10,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "{\n",
+ " \"name\": \"pytest - Joplin Tornado\",\n",
+ " \"description\": \"Joplin tornado hazard\",\n",
+ " \"tornadoType\": \"dataset\",\n",
+ " \"threshold\": null,\n",
+ " \"thresholdUnit\": \"mph\"\n",
+ "}\n"
+ ]
+ }
+ ],
+ "source": [
+ "with open(os.path.join(dir, \"tornado_dataset.json\"), 'r') as file:\n",
+ " tornado_dataset_json = file.read()\n",
+ " print(json.dumps(json.loads(tornado_dataset_json), indent=4))"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 11,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "[{'demands': ['wind'], 'units': ['mph'], 'loc': '-94.37, 37.04', 'hazardValues': [102.62435891472981]}]\n"
+ ]
+ }
+ ],
+ "source": [
+ "# create the tornado object\n",
+ "tornado = Tornado.from_json_file(os.path.join(dir, \"tornado_dataset.json\"))\n",
+ "\n",
+ "# attach dataset from local file\n",
+ "tornado.hazardDatasets[0].from_file((os.path.join(dir, \"joplin_tornado/joplin_path_wgs84.shp\")), \n",
+ " data_type=\"incore:tornadoWindfield\")\n",
+ "\n",
+ "payload = [\n",
+ " {\n",
+ " \"demands\": [\"wind\"],\n",
+ " \"units\": [\"mph\"],\n",
+ " \"loc\": \"-94.37, 37.04\"\n",
+ " }\n",
+ "]\n",
+ "\n",
+ "values = tornado.read_hazard_values(payload, seed=1234) # removing the seed will give random values\n",
+ "print(values)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Create Earthquakes"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "**Inputs**: JSON representation of a dataset describing an earthquake. Each available dataset in TIF format."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 12,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "{\n",
+ " \"name\": \"pytest - Memphis Deterministic EQ\",\n",
+ " \"description\": \"Memphis dataset based deterministic hazard - Pytest\",\n",
+ " \"eqType\": \"dataset\",\n",
+ " \"hazardDatasets\": [\n",
+ " {\n",
+ " \"hazardType\": \"deterministic\",\n",
+ " \"demandType\": \"SA\",\n",
+ " \"demandUnits\": \"g\",\n",
+ " \"period\": \"0.2\",\n",
+ " \"eqParameters\": {\n",
+ " \"srcLatitude\": \"35.927\",\n",
+ " \"srcLongitude\": \"-89.919\",\n",
+ " \"magnitude\": \"7.9\",\n",
+ " \"depth\": \"10.0\"\n",
+ " }\n",
+ " },\n",
+ " {\n",
+ " \"hazardType\": \"deterministic\",\n",
+ " \"demandType\": \"PGA\",\n",
+ " \"demandUnits\": \"g\",\n",
+ " \"period\": \"0.0\",\n",
+ " \"eqParameters\": {\n",
+ " \"srcLatitude\": \"35.927\",\n",
+ " \"srcLongitude\": \"-89.919\",\n",
+ " \"magnitude\": \"7.9\",\n",
+ " \"depth\": \"10.0\"\n",
+ " }\n",
+ " }\n",
+ " ]\n",
+ "}\n"
+ ]
+ }
+ ],
+ "source": [
+ "with open(os.path.join(dir, \"eq-dataset.json\"), 'r') as file:\n",
+ " earthquake_dataset_json = file.read()\n",
+ " print(json.dumps(json.loads(earthquake_dataset_json), indent=4))"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 13,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "[{'demands': ['PGA', '0.2 SA'], 'units': ['g', 'g'], 'loc': '35.03,-89.93', 'hazardValues': [0.3149999976158142, 0.4729999899864197]}]\n"
+ ]
+ }
+ ],
+ "source": [
+ "# create the earthquake object\n",
+ "eq = Earthquake.from_json_file(os.path.join(dir, \"eq-dataset.json\"))\n",
+ "\n",
+ "# attach datasets from local files\n",
+ "eq.hazardDatasets[0].from_file((os.path.join(dir, \"eq-dataset-SA.tif\")),\n",
+ " data_type=\"ergo:probabilisticEarthquakeRaster\")\n",
+ "eq.hazardDatasets[1].from_file((os.path.join(dir, \"eq-dataset-PGA.tif\")),\n",
+ " data_type=\"ergo:probabilisticEarthquakeRaster\")\n",
+ "\n",
+ "payload = [\n",
+ " {\n",
+ " \"demands\": [\"PGA\", \"0.2 SA\"],\n",
+ " \"units\": [\"g\", \"g\"],\n",
+ " \"loc\": \"35.03,-89.93\"\n",
+ " }\n",
+ "]\n",
+ "\n",
+ "values = eq.read_hazard_values(payload)\n",
+ "print(values)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Create Tsunamis"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "**Inputs**: JSON representation of a dataset describing a tsunami. Each available dataset in TIF format."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 14,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "{\n",
+ " \"name\": \"pytest - Seaside Probabilistic Tsunami - 100 yr\",\n",
+ " \"description\": \"pytest - Seaside dataset based probabilistic tsunami hazard. This is just a test!\",\n",
+ " \"tsunamiType\": \"dataset\",\n",
+ " \"hazardDatasets\": [\n",
+ " {\n",
+ " \"hazardType\": \"probabilistic\",\n",
+ " \"demandType\": \"Vmax\",\n",
+ " \"demandUnits\": \"m/s\",\n",
+ " \"recurrenceInterval\": \"100\",\n",
+ " \"recurrenceUnit\": \"years\"\n",
+ " },\n",
+ " {\n",
+ " \"hazardType\": \"probabilistic\",\n",
+ " \"demandType\": \"Mmax\",\n",
+ " \"demandUnits\": \"m^3/s^2\",\n",
+ " \"recurrenceInterval\": \"100\",\n",
+ " \"recurrenceUnit\": \"years\"\n",
+ " },\n",
+ " {\n",
+ " \"hazardType\": \"probabilistic\",\n",
+ " \"demandType\": \"Hmax\",\n",
+ " \"demandUnits\": \"m\",\n",
+ " \"recurrenceInterval\": \"100\",\n",
+ " \"recurrenceUnit\": \"years\"\n",
+ " }\n",
+ " ]\n",
+ "}\n"
+ ]
+ }
+ ],
+ "source": [
+ "with open(os.path.join(dir, \"tsunami.json\"), 'r') as file:\n",
+ " tsunami_dataset_json = file.read()\n",
+ " print(json.dumps(json.loads(tsunami_dataset_json), indent=4))"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 15,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "[{'demands': ['hmax'], 'units': ['m'], 'loc': '46.006,-123.935', 'hazardValues': [2.9000000953674316]}]\n"
+ ]
+ }
+ ],
+ "source": [
+ "# create the tsunami object\n",
+ "tsunami = Tsunami.from_json_file(os.path.join(dir, \"tsunami.json\"))\n",
+ "\n",
+ "# attach datasets from local files\n",
+ "tsunami.hazardDatasets[0].from_file((os.path.join(dir, \"Tsu_100yr_Vmax.tif\")),\n",
+ " data_type=\"ncsa:probabilisticTsunamiRaster\")\n",
+ "tsunami.hazardDatasets[1].from_file((os.path.join(dir, \"Tsu_100yr_Mmax.tif\")),\n",
+ " data_type=\"ncsa:probabilisticTsunamiRaster\")\n",
+ "tsunami.hazardDatasets[2].from_file((os.path.join(dir, \"Tsu_100yr_Hmax.tif\")),\n",
+ " data_type=\"ncsa:probabilisticTsunamiRaster\")\n",
+ "\n",
+ "payload = [\n",
+ " {\n",
+ " \"demands\": [\"hmax\"],\n",
+ " \"units\": [\"m\"],\n",
+ " \"loc\": \"46.006,-123.935\"\n",
+ " }\n",
+ "]\n",
+ "\n",
+ "values = tsunami.read_hazard_values(payload)\n",
+ "print(values)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Create Floods"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "**Inputs**: JSON representation of a dataset describing a flood. Each available dataset in TIF format."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 16,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "{\n",
+ " \"name\": \"Lumberton Deterministic Flood - riverine flooding\",\n",
+ " \"description\": \"Lumberton dataset based deterministic hazard - 2 datasets\",\n",
+ " \"floodType\": \"dataset\",\n",
+ " \"hazardDatasets\": [\n",
+ " {\n",
+ " \"hazardType\": \"deterministic\",\n",
+ " \"demandType\": \"inundationDepth\",\n",
+ " \"demandUnits\": \"ft\",\n",
+ " \"floodParameters\": {\n",
+ " \"model\": \"riverine flooding\"\n",
+ " }\n",
+ " },\n",
+ " {\n",
+ " \"hazardType\": \"deterministic\",\n",
+ " \"demandType\": \"waterSurfaceElevation\",\n",
+ " \"demandUnits\": \"ft\",\n",
+ " \"floodParameters\": {\n",
+ " \"model\": \"riverine flooding\"\n",
+ " }\n",
+ " }\n",
+ " ]\n",
+ "}\n"
+ ]
+ }
+ ],
+ "source": [
+ "with open(os.path.join(dir, \"flood-dataset.json\"), 'r') as file:\n",
+ " flood_dataset_json = file.read()\n",
+ " print(json.dumps(json.loads(flood_dataset_json), indent=4))"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 17,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "[{'demands': ['waterSurfaceElevation'], 'units': ['m'], 'loc': '34.60,-79.16', 'hazardValues': [41.970442822265625]}]\n"
+ ]
+ }
+ ],
+ "source": [
+ "# create the flood object\n",
+ "flood = Flood.from_json_file(os.path.join(dir, \"flood-dataset.json\"))\n",
+ "\n",
+ "# attach datasets from local files\n",
+ "flood.hazardDatasets[0].from_file((os.path.join(dir, \"flood-inundationDepth-50ft.tif\")),\n",
+ " data_type=\"ncsa:probabilisticFloodRaster\")\n",
+ "flood.hazardDatasets[1].from_file(os.path.join(dir, \"flood-WSE-50ft.tif\"),\n",
+ " data_type=\"ncsa:probabilisticFloodRaster\")\n",
+ "\n",
+ "payload = [\n",
+ " {\n",
+ " \"demands\": [\"waterSurfaceElevation\"],\n",
+ " \"units\": [\"m\"],\n",
+ " \"loc\": \"34.60,-79.16\"\n",
+ " }\n",
+ " ]\n",
+ "\n",
+ "values = flood.read_hazard_values(payload)\n",
+ "print(values)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Create Hurricanes"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "**Inputs**: JSON representation of a dataset describing a hurricane. Each available dataset in TIF format."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 18,
+ "metadata": {
+ "scrolled": true
+ },
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "{\n",
+ " \"name\": \"pytest - Galveston Deterministic Hurricane - Kriging \",\n",
+ " \"description\": \"Galveston dataset based deterministic hazard - 3 datasets\",\n",
+ " \"hurricaneType\": \"dataset\",\n",
+ " \"hazardDatasets\": [\n",
+ " {\n",
+ " \"hazardType\": \"deterministic\",\n",
+ " \"demandType\": \"waveHeight\",\n",
+ " \"demandUnits\": \"m\",\n",
+ " \"hurricaneParameters\": {\n",
+ " \"model\": \"Kriging\"\n",
+ " }\n",
+ " },\n",
+ " {\n",
+ " \"hazardType\": \"deterministic\",\n",
+ " \"demandType\": \"surgeLevel\",\n",
+ " \"demandUnits\": \"m\",\n",
+ " \"hurricaneParameters\": {\n",
+ " \"model\": \"Kriging\"\n",
+ " }\n",
+ " },\n",
+ " {\n",
+ " \"hazardType\": \"deterministic\",\n",
+ " \"demandType\": \"inundationDuration\",\n",
+ " \"demandUnits\": \"hr\",\n",
+ " \"hurricaneParameters\": {\n",
+ " \"model\": \"Kriging\"\n",
+ " }\n",
+ " }\n",
+ " ]\n",
+ "}\n"
+ ]
+ }
+ ],
+ "source": [
+ "with open(os.path.join(dir, \"hurricane-dataset.json\"), 'r') as file:\n",
+ " hurricane_dataset_json = file.read()\n",
+ " print(json.dumps(json.loads(hurricane_dataset_json), indent=4))"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 19,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "[{'demands': ['waveHeight', 'surgeLevel'], 'units': ['m', 'm'], 'loc': '29.22,-95.06', 'hazardValues': [1.54217780024576, 3.663398872786693]}, {'demands': ['waveHeight', 'surgeLevel'], 'units': ['cm', 'cm'], 'loc': '29.23,-95.05', 'hazardValues': [162.9628933899723, 369.7690109274975]}, {'demands': ['waveHeight', 'inundationDuration'], 'units': ['in', 'hr'], 'loc': '29.22,-95.06', 'hazardValues': [60.7156942134556, 18.346923306935572]}]\n"
+ ]
+ }
+ ],
+ "source": [
+ "# create the hurricane object\n",
+ "hurricane = Hurricane.from_json_file((os.path.join(dir, \"hurricane-dataset.json\")))\n",
+ "\n",
+ "# attach datasets from local files\n",
+ "hurricane.hazardDatasets[0].from_file((os.path.join(dir, \"Wave_Raster.tif\")),\n",
+ " data_type=\"ncsa:deterministicHurricaneRaster\")\n",
+ "hurricane.hazardDatasets[1].from_file(os.path.join(dir, \"Surge_Raster.tif\"),\n",
+ " data_type=\"ncsa:deterministicHurricaneRaster\")\n",
+ "hurricane.hazardDatasets[2].from_file(os.path.join(dir, \"Inundation_Raster.tif\"),\n",
+ " data_type=\"ncsa:deterministicHurricaneRaster\")\n",
+ "\n",
+ "payload = [\n",
+ " {\n",
+ " \"demands\": [\"waveHeight\", \"surgeLevel\"],\n",
+ " \"units\": [\"m\", \"m\"],\n",
+ " \"loc\": \"29.22,-95.06\"\n",
+ " },\n",
+ " {\n",
+ " \"demands\": [\"waveHeight\", \"surgeLevel\"],\n",
+ " \"units\": [\"cm\", \"cm\"],\n",
+ " \"loc\": \"29.23,-95.05\"\n",
+ " },\n",
+ " {\n",
+ " \"demands\": [\"waveHeight\", \"inundationDuration\"],\n",
+ " \"units\": [\"in\", \"hr\"],\n",
+ " \"loc\": \"29.22,-95.06\"\n",
+ " }\n",
+ " ]\n",
+ "\n",
+ "values = hurricane.read_hazard_values(payload)\n",
+ "print(values)"
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 3 (ipykernel)",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.9.6"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 4
+}
diff --git a/manual_jb/content/notebooks/create_local_hazard/files/Inundation_Raster.tif b/manual_jb/content/notebooks/create_local_hazard/files/Inundation_Raster.tif
new file mode 100644
index 00000000..c8559768
Binary files /dev/null and b/manual_jb/content/notebooks/create_local_hazard/files/Inundation_Raster.tif differ
diff --git a/manual_jb/content/notebooks/create_local_hazard/files/Surge_Raster.tif b/manual_jb/content/notebooks/create_local_hazard/files/Surge_Raster.tif
new file mode 100644
index 00000000..fbfccf7b
Binary files /dev/null and b/manual_jb/content/notebooks/create_local_hazard/files/Surge_Raster.tif differ
diff --git a/manual_jb/content/notebooks/create_local_hazard/files/Tsu_100yr_Hmax.tif b/manual_jb/content/notebooks/create_local_hazard/files/Tsu_100yr_Hmax.tif
new file mode 100644
index 00000000..2ea0525b
Binary files /dev/null and b/manual_jb/content/notebooks/create_local_hazard/files/Tsu_100yr_Hmax.tif differ
diff --git a/manual_jb/content/notebooks/create_local_hazard/files/Tsu_100yr_Mmax.tif b/manual_jb/content/notebooks/create_local_hazard/files/Tsu_100yr_Mmax.tif
new file mode 100644
index 00000000..ecfc84a6
Binary files /dev/null and b/manual_jb/content/notebooks/create_local_hazard/files/Tsu_100yr_Mmax.tif differ
diff --git a/manual_jb/content/notebooks/create_local_hazard/files/Tsu_100yr_Vmax.tif b/manual_jb/content/notebooks/create_local_hazard/files/Tsu_100yr_Vmax.tif
new file mode 100644
index 00000000..d6998804
Binary files /dev/null and b/manual_jb/content/notebooks/create_local_hazard/files/Tsu_100yr_Vmax.tif differ
diff --git a/manual_jb/content/notebooks/create_local_hazard/files/Wave_Raster.tif b/manual_jb/content/notebooks/create_local_hazard/files/Wave_Raster.tif
new file mode 100644
index 00000000..9c8b9584
Binary files /dev/null and b/manual_jb/content/notebooks/create_local_hazard/files/Wave_Raster.tif differ
diff --git a/manual_jb/content/notebooks/create_local_hazard/files/eq-dataset-PGA.tif b/manual_jb/content/notebooks/create_local_hazard/files/eq-dataset-PGA.tif
new file mode 100644
index 00000000..67824055
Binary files /dev/null and b/manual_jb/content/notebooks/create_local_hazard/files/eq-dataset-PGA.tif differ
diff --git a/manual_jb/content/notebooks/create_local_hazard/files/eq-dataset-SA.tif b/manual_jb/content/notebooks/create_local_hazard/files/eq-dataset-SA.tif
new file mode 100644
index 00000000..a641cab0
Binary files /dev/null and b/manual_jb/content/notebooks/create_local_hazard/files/eq-dataset-SA.tif differ
diff --git a/manual_jb/content/notebooks/create_local_hazard/files/eq-dataset.json b/manual_jb/content/notebooks/create_local_hazard/files/eq-dataset.json
new file mode 100644
index 00000000..0400b97a
--- /dev/null
+++ b/manual_jb/content/notebooks/create_local_hazard/files/eq-dataset.json
@@ -0,0 +1,31 @@
+{
+ "name": "pytest - Memphis Deterministic EQ",
+ "description": "Memphis dataset based deterministic hazard - Pytest",
+ "eqType": "dataset",
+ "hazardDatasets": [
+ {
+ "hazardType": "deterministic",
+ "demandType": "SA",
+ "demandUnits": "g",
+ "period": "0.2",
+ "eqParameters": {
+ "srcLatitude": "35.927",
+ "srcLongitude": "-89.919",
+ "magnitude": "7.9",
+ "depth": "10.0"
+ }
+ },
+ {
+ "hazardType": "deterministic",
+ "demandType": "PGA",
+ "demandUnits": "g",
+ "period": "0.0",
+ "eqParameters": {
+ "srcLatitude": "35.927",
+ "srcLongitude": "-89.919",
+ "magnitude": "7.9",
+ "depth": "10.0"
+ }
+ }
+ ]
+}
\ No newline at end of file
diff --git a/manual_jb/content/notebooks/create_local_hazard/files/flood-WSE-50ft.tif b/manual_jb/content/notebooks/create_local_hazard/files/flood-WSE-50ft.tif
new file mode 100644
index 00000000..3f85d539
Binary files /dev/null and b/manual_jb/content/notebooks/create_local_hazard/files/flood-WSE-50ft.tif differ
diff --git a/manual_jb/content/notebooks/create_local_hazard/files/flood-dataset.json b/manual_jb/content/notebooks/create_local_hazard/files/flood-dataset.json
new file mode 100644
index 00000000..8297796a
--- /dev/null
+++ b/manual_jb/content/notebooks/create_local_hazard/files/flood-dataset.json
@@ -0,0 +1,23 @@
+{
+ "name": "Lumberton Deterministic Flood - riverine flooding",
+ "description": "Lumberton dataset based deterministic hazard - 2 datasets",
+ "floodType": "dataset",
+ "hazardDatasets": [
+ {
+ "hazardType": "deterministic",
+ "demandType": "inundationDepth",
+ "demandUnits": "ft",
+ "floodParameters": {
+ "model": "riverine flooding"
+ }
+ },
+ {
+ "hazardType": "deterministic",
+ "demandType": "waterSurfaceElevation",
+ "demandUnits": "ft",
+ "floodParameters": {
+ "model": "riverine flooding"
+ }
+ }
+ ]
+}
\ No newline at end of file
diff --git a/manual_jb/content/notebooks/create_local_hazard/files/flood-inundationDepth-50ft.tif b/manual_jb/content/notebooks/create_local_hazard/files/flood-inundationDepth-50ft.tif
new file mode 100644
index 00000000..b0bd1597
Binary files /dev/null and b/manual_jb/content/notebooks/create_local_hazard/files/flood-inundationDepth-50ft.tif differ
diff --git a/manual_jb/content/notebooks/create_local_hazard/files/hurricane-dataset.json b/manual_jb/content/notebooks/create_local_hazard/files/hurricane-dataset.json
new file mode 100644
index 00000000..1a759d97
--- /dev/null
+++ b/manual_jb/content/notebooks/create_local_hazard/files/hurricane-dataset.json
@@ -0,0 +1,31 @@
+{
+ "name": "pytest - Galveston Deterministic Hurricane - Kriging ",
+ "description": "Galveston dataset based deterministic hazard - 3 datasets",
+ "hurricaneType": "dataset",
+ "hazardDatasets": [
+ {
+ "hazardType": "deterministic",
+ "demandType": "waveHeight",
+ "demandUnits": "m",
+ "hurricaneParameters": {
+ "model": "Kriging"
+ }
+ },
+ {
+ "hazardType": "deterministic",
+ "demandType": "surgeLevel",
+ "demandUnits": "m",
+ "hurricaneParameters": {
+ "model": "Kriging"
+ }
+ },
+ {
+ "hazardType": "deterministic",
+ "demandType": "inundationDuration",
+ "demandUnits": "hr",
+ "hurricaneParameters": {
+ "model": "Kriging"
+ }
+ }
+ ]
+}
\ No newline at end of file
diff --git a/manual_jb/content/notebooks/create_local_hazard/files/joplin_tornado/joplin_path_wgs84.dbf b/manual_jb/content/notebooks/create_local_hazard/files/joplin_tornado/joplin_path_wgs84.dbf
new file mode 100644
index 00000000..e326eeab
Binary files /dev/null and b/manual_jb/content/notebooks/create_local_hazard/files/joplin_tornado/joplin_path_wgs84.dbf differ
diff --git a/manual_jb/content/notebooks/create_local_hazard/files/joplin_tornado/joplin_path_wgs84.prj b/manual_jb/content/notebooks/create_local_hazard/files/joplin_tornado/joplin_path_wgs84.prj
new file mode 100644
index 00000000..9e354778
--- /dev/null
+++ b/manual_jb/content/notebooks/create_local_hazard/files/joplin_tornado/joplin_path_wgs84.prj
@@ -0,0 +1 @@
+GEOGCS["GCS_WGS_1984", DATUM["D_WGS_1984", SPHEROID["WGS_1984", 6378137.0, 298.257223563]], PRIMEM["Greenwich", 0.0], UNIT["degree", 0.017453292519943295], AXIS["Longitude", EAST], AXIS["Latitude", NORTH]]
\ No newline at end of file
diff --git a/manual_jb/content/notebooks/create_local_hazard/files/joplin_tornado/joplin_path_wgs84.qpj b/manual_jb/content/notebooks/create_local_hazard/files/joplin_tornado/joplin_path_wgs84.qpj
new file mode 100644
index 00000000..5fbc831e
--- /dev/null
+++ b/manual_jb/content/notebooks/create_local_hazard/files/joplin_tornado/joplin_path_wgs84.qpj
@@ -0,0 +1 @@
+GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.0174532925199433,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]]
diff --git a/manual_jb/content/notebooks/create_local_hazard/files/joplin_tornado/joplin_path_wgs84.shp b/manual_jb/content/notebooks/create_local_hazard/files/joplin_tornado/joplin_path_wgs84.shp
new file mode 100644
index 00000000..a529f8a6
Binary files /dev/null and b/manual_jb/content/notebooks/create_local_hazard/files/joplin_tornado/joplin_path_wgs84.shp differ
diff --git a/manual_jb/content/notebooks/create_local_hazard/files/joplin_tornado/joplin_path_wgs84.shx b/manual_jb/content/notebooks/create_local_hazard/files/joplin_tornado/joplin_path_wgs84.shx
new file mode 100644
index 00000000..a6093723
Binary files /dev/null and b/manual_jb/content/notebooks/create_local_hazard/files/joplin_tornado/joplin_path_wgs84.shx differ
diff --git a/manual_jb/content/notebooks/create_local_hazard/files/tornado_dataset.json b/manual_jb/content/notebooks/create_local_hazard/files/tornado_dataset.json
new file mode 100644
index 00000000..fa1fee51
--- /dev/null
+++ b/manual_jb/content/notebooks/create_local_hazard/files/tornado_dataset.json
@@ -0,0 +1,7 @@
+{
+ "name": "pytest - Joplin Tornado",
+ "description": "Joplin tornado hazard",
+ "tornadoType": "dataset",
+ "threshold": null,
+ "thresholdUnit": "mph"
+}
diff --git a/manual_jb/content/notebooks/create_local_hazard/files/tsunami.json b/manual_jb/content/notebooks/create_local_hazard/files/tsunami.json
new file mode 100644
index 00000000..dbc597c2
--- /dev/null
+++ b/manual_jb/content/notebooks/create_local_hazard/files/tsunami.json
@@ -0,0 +1,28 @@
+{
+ "name": "pytest - Seaside Probabilistic Tsunami - 100 yr",
+ "description": "pytest - Seaside dataset based probabilistic tsunami hazard. This is just a test!",
+ "tsunamiType": "dataset",
+ "hazardDatasets": [
+ {
+ "hazardType": "probabilistic",
+ "demandType": "Vmax",
+ "demandUnits": "m/s",
+ "recurrenceInterval": "100",
+ "recurrenceUnit": "years"
+ },
+ {
+ "hazardType": "probabilistic",
+ "demandType": "Mmax",
+ "demandUnits": "m^3/s^2",
+ "recurrenceInterval": "100",
+ "recurrenceUnit": "years"
+ },
+ {
+ "hazardType": "probabilistic",
+ "demandType": "Hmax",
+ "demandUnits": "m",
+ "recurrenceInterval": "100",
+ "recurrenceUnit": "years"
+ }
+ ]
+}
\ No newline at end of file
diff --git a/manual_jb/content/notebooks_other.md b/manual_jb/content/notebooks_other.md
index 815ceaca..0bf47a91 100644
--- a/manual_jb/content/notebooks_other.md
+++ b/manual_jb/content/notebooks_other.md
@@ -11,6 +11,7 @@ Note that some Notebooks might not work with the latest version of pyIncore.
[capital_shocks.ipynb](https://github.com/IN-CORE/incore-docs/blob/main/notebooks/capital_shocks.ipynb)
[combined_wind_wave_surge_building_dmg.ipynb](https://github.com/IN-CORE/incore-docs/blob/main/notebooks/combined_wind_wave_surge_building_dmg.ipynb)
[combined_wind_wave_surge_building_loss.ipynb](https://github.com/IN-CORE/incore-docs/blob/main/notebooks/combined_wind_wave_surge_building_loss.ipynb)
+[commercial_building_recovery.ipynb](https://github.com/IN-CORE/incore-docs/blob/main/notebooks/commercial_building_recovery.ipynb)
[cumulative_building_dmg.ipynb](https://github.com/IN-CORE/incore-docs/blob/main/notebooks/cumulative_building_dmg.ipynb)
[epf_dmg.ipynb](https://github.com/IN-CORE/incore-docs/blob/main/notebooks/epf_dmg.ipynb)
[epf_repair_cost.ipynb](https://github.com/IN-CORE/incore-docs/blob/main/notebooks/epf_repair_cost.ipynb)
diff --git a/manual_jb/content/tutorials.md b/manual_jb/content/tutorials.md
index 4e3833c7..9671df78 100644
--- a/manual_jb/content/tutorials.md
+++ b/manual_jb/content/tutorials.md
@@ -10,20 +10,22 @@ You can also check out IN-CORE resources for other materials to help you navigat
We encourage IN-CORE users to contribute to and improve the knowledge base. Contact us with suggestions for expanding and extending the tutorials.
1. [Create Dataset](notebooks/create_dataset/create_dataset.ipynb)
-2. [Create dfr3 Object](notebooks/create_dfr3_object/create_dfr3_object.ipynb)
+2. [Create DFR3 Object](notebooks/create_dfr3_object/create_dfr3_object.ipynb)
3. [Create Earthquake](notebooks/create_earthquake/create_earthquake.ipynb)
4. [Create Tornado](notebooks/create_tornado/create_tornado.ipynb)
-5. [Retrofit plan Joplin](notebooks/retrofit_plan_Joplin_2021_12_01/retrofit_plan_Joplin_2021_12_01.ipynb)
-6. [Create network dataset](notebooks/create_network_dataset/create_network_dataset.ipynb)
+5. [Retrofit Plan Joplin](notebooks/retrofit_plan_Joplin_2021_12_01/retrofit_plan_Joplin_2021_12_01.ipynb)
+6. [Create Network Dataset](notebooks/create_network_dataset/create_network_dataset.ipynb)
+7. [Create Local Hazard](notebooks/create_local_hazard/create_local_hazard.ipynb)
More tutorials will be added soon.
Download the full Notebooks:
-- Create Dataset object: [create_dataset.zip](https://github.com/IN-CORE/incore-docs/blob/main/notebooks/create_dataset.zip)
-- Create DFR3 object: [create_dfr3_object.zip](https://github.com/IN-CORE/incore-docs/blob/main/notebooks/create_dfr3_object.zip)
+- Create Dataset Object: [create_dataset.zip](https://github.com/IN-CORE/incore-docs/blob/main/notebooks/create_dataset.zip)
+- Create DFR3 Object: [create_dfr3_object.zip](https://github.com/IN-CORE/incore-docs/blob/main/notebooks/create_dfr3_object.zip)
- Create Earthquake Hazard: [create_earthquake.zip](https://github.com/IN-CORE/incore-docs/blob/main/notebooks/create_earthquake.zip)
- Create Tornado Hazard: [create_tornado.zip](https://github.com/IN-CORE/incore-docs/blob/main/notebooks/create_tornado.zip)
-- Retrofit plan Joplin: [retrofit_plan_Joplin_2021_12_01.zip](https://github.com/IN-CORE/incore-docs/blob/main/notebooks/retrofit_plan_Joplin_2021_12_01.zip)
-- Create network dataset: [create_network_dataset.zip](https://github.com/IN-CORE/incore-docs/blob/main/notebooks/create_network_dataset.zip)
+- Retrofit Plan Joplin: [retrofit_plan_Joplin_2021_12_01.zip](https://github.com/IN-CORE/incore-docs/blob/main/notebooks/retrofit_plan_Joplin_2021_12_01.zip)
+- Create Network Dataset: [create_network_dataset.zip](https://github.com/IN-CORE/incore-docs/blob/main/notebooks/create_network_dataset.zip)
+- Create Local Hazard: [create_local_hazard.zip](https://github.com/IN-CORE/incore-docs/blob/main/notebooks/create_local_hazard.zip)
diff --git a/notebooks/commercial_building_recovery.ipynb b/notebooks/commercial_building_recovery.ipynb
new file mode 100644
index 00000000..3f43e07c
--- /dev/null
+++ b/notebooks/commercial_building_recovery.ipynb
@@ -0,0 +1,169 @@
+{
+ "cells": [
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "ExecuteTime": {
+ "end_time": "2023-09-29T15:02:55.785647Z",
+ "start_time": "2023-09-29T15:02:55.777915Z"
+ }
+ },
+ "outputs": [],
+ "source": [
+ "from pyincore import IncoreClient, RepairService, MappingSet\n",
+ "from pyincore.analyses.commercialbuildingrecovery.commercialbuildingrecovery import CommercialBuildingRecovery\n",
+ "\n",
+ "import pyincore.globals as pyglobals"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
+ "outputs": [],
+ "source": [
+ "# Connect to IN-CORE service\n",
+ "client = IncoreClient()"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "### Joplin Commercial Building Recovery."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Joplin\n",
+ "buildings = \"5dbc8478b9219c06dd242c0d\" # ergo:buildingInventoryVer6 5dbc8478b9219c06dd242c0d\n",
+ "\n",
+ "# Create commercial recovery instance\n",
+ "com_recovery = CommercialBuildingRecovery(client)\n",
+ "com_recovery.load_remote_input_dataset(\"buildings\", buildings)\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Recovery mapping\n",
+ "mapping_id = \"60edfa3efc0f3a7af53a21b5\"\n",
+ "# Create repair service\n",
+ "repair_service = RepairService(client)\n",
+ "mapping_set = MappingSet(repair_service.get_mapping(mapping_id))\n",
+ "com_recovery.set_input_dataset('dfr3_mapping_set', mapping_set)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# input datsets ids\n",
+ "sample_damage_states = \"651dcf587faf5a0eae9e3443\" # 10 samples 28k buildings - MCS output format\n",
+ "mcs_failure = '651dced2adce7a5cb977f06e'\n",
+ "delay_factors = \"64ee0bcd553ecf0768e21e55\""
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Load input datasets\n",
+ "com_recovery.load_remote_input_dataset(\"sample_damage_states\", sample_damage_states)\n",
+ "com_recovery.load_remote_input_dataset(\"mcs_failure\", mcs_failure)\n",
+ "com_recovery.load_remote_input_dataset(\"delay_factors\", delay_factors)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Input parameters\n",
+ "num_samples = 10"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Specify the result name\n",
+ "result_name = \"joplin_commercial_test\"\n",
+ "\n",
+ "# Set analysis parameters\n",
+ "com_recovery.set_parameter(\"result_name\", result_name)\n",
+ "com_recovery.set_parameter(\"num_samples\", num_samples)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Run the analysis (NOTE: with SettingWithCopyWarning)\n",
+ "com_recovery.run_analysis()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
+ "outputs": [],
+ "source": [
+ "# Retrieve result dataset\n",
+ "result = com_recovery.get_output_dataset(\"time_stepping_recovery\")\n",
+ "\n",
+ "# Convert dataset to Pandas DataFrame\n",
+ "df = result.get_dataframe_from_csv()\n",
+ "\n",
+ "# Display top 5 rows of output data\n",
+ "df.head()"
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 3 (ipykernel)",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.9.17"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 1
+}
diff --git a/notebooks/create_local_hazard.zip b/notebooks/create_local_hazard.zip
new file mode 100644
index 00000000..b0711956
Binary files /dev/null and b/notebooks/create_local_hazard.zip differ
diff --git a/notebooks/power_facility_restoration.ipynb b/notebooks/power_facility_restoration.ipynb
index ee77d38c..46fc8c5f 100644
--- a/notebooks/power_facility_restoration.ipynb
+++ b/notebooks/power_facility_restoration.ipynb
@@ -146,6 +146,21 @@
"from pyincore.analyses.epfrestoration import EpfRestorationUtil"
]
},
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "47f82bf6-cc0b-4327-9156-7fc2f235652f",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "inventory_restoration_map = epf_rest.get_output_dataset(\"inventory_restoration_map\")\n",
+ "pf_results = epf_rest.get_output_dataset(\"pf_results\")\n",
+ "time_results = epf_rest.get_output_dataset(\"time_results\")\n",
+ "time_interval = epf_rest.get_parameter(\"time_interval\")\n",
+ "pf_interval = epf_rest.get_parameter(\"pf_interval\")\n",
+ "end_time = epf_rest.get_parameter(\"end_time\")"
+ ]
+ },
{
"cell_type": "code",
"execution_count": null,
@@ -153,7 +168,7 @@
"metadata": {},
"outputs": [],
"source": [
- "epf_rest_util = EpfRestorationUtil(epf_rest)"
+ "epf_rest_util = EpfRestorationUtil(inventory_restoration_map, pf_results, time_results, time_interval, pf_interval, end_time)"
]
},
{
@@ -180,19 +195,11 @@
"time = epf_rest_util.get_restoration_time(guid=\"75941d02-93bf-4ef9-87d3-d882384f6c10\", damage_state=\"DS_1\", pf=0.81)\n",
"time"
]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "6c94c900",
- "metadata": {},
- "outputs": [],
- "source": []
}
],
"metadata": {
"kernelspec": {
- "display_name": "Python 3",
+ "display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
@@ -206,7 +213,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.8.10"
+ "version": "3.9.17"
}
},
"nbformat": 4,
diff --git a/notebooks/water_facility_restoration.ipynb b/notebooks/water_facility_restoration.ipynb
index 1ef38531..37fcc8c2 100644
--- a/notebooks/water_facility_restoration.ipynb
+++ b/notebooks/water_facility_restoration.ipynb
@@ -168,6 +168,21 @@
"from pyincore.analyses.waterfacilityrestoration import WaterFacilityRestorationUtil"
]
},
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "ca500f0a-12bf-45c0-b5a2-096504e48102",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "inventory_restoration_map = wf_rest.get_output_dataset(\"inventory_restoration_map\")\n",
+ "pf_results = wf_rest.get_output_dataset(\"pf_results\")\n",
+ "time_results = wf_rest.get_output_dataset(\"time_results\")\n",
+ "time_interval = wf_rest.get_parameter(\"time_interval\")\n",
+ "pf_interval = wf_rest.get_parameter(\"pf_interval\")\n",
+ "end_time = wf_rest.get_parameter(\"end_time\")"
+ ]
+ },
{
"cell_type": "code",
"execution_count": null,
@@ -176,7 +191,7 @@
"outputs": [],
"source": [
"# Functionality for particular component and damage state\n",
- "wf_util = WaterFacilityRestorationUtil(wf_rest)\n",
+ "wf_util = WaterFacilityRestorationUtil(inventory_restoration_map, pf_results, time_results, time_interval, pf_interval, end_time)\n",
"functionality = wf_util.get_percentage_func(guid=\"e1bce78d-00a1-4605-95f3-3776ff907f73\",\n",
" damage_state=\"DS_0\", time=2.0)\n",
"functionality"
@@ -193,19 +208,11 @@
"time = wf_util.get_restoration_time(guid=\"e1bce78d-00a1-4605-95f3-3776ff907f73\", damage_state=\"DS_1\", pf=0.81)\n",
"time"
]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "e2586370",
- "metadata": {},
- "outputs": [],
- "source": []
}
],
"metadata": {
"kernelspec": {
- "display_name": "Python 3",
+ "display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
@@ -219,7 +226,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.8.10"
+ "version": "3.9.17"
}
},
"nbformat": 4,