diff --git a/CHANGELOG.md b/CHANGELOG.md
index 0a54bf80..b2c06dac 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,6 +4,25 @@ All notable changes to the INCORE documents generated by Sphinx package will be
The format is based on [Keep a Changelog](http://keepachangelog.com/)
and this project adheres to [Semantic Versioning](http://semver.org/).
+
+## [4.12.0] - 2024-06-12
+
+### Changed
+- Rename Building Portfolio Analysis to Building Cluster Recovery Analysis [#559](https://github.com/IN-CORE/pyincore/issues/559)
+- Update flood input to nonstructural damage output for combined wind-wave-surge building damage [#566](https://github.com/IN-CORE/incore-docs/issues/566)
+- Update pyincore, pyincore-viz and pyincore-data installation instructions to include how to use the libmamba solver [#375](https://github.com/IN-CORE/incore-docs/issues/375)
+- Update docs to reflect Non-structural Building Damage change [#562](https://github.com/IN-CORE/pyincore/issues/562)
+- Rename Monte Carlo failure probability analysis to Monte Carlo limit state Probability Analysis [#557](https://github.com/IN-CORE/pyincore/issues/557)
+- Add building structural damage documentation and notebook [#402](https://github.com/IN-CORE/incore-docs/issues/402)
+
+### Added
+- Gas facility damage analysis documentation and example notebook [#387](https://github.com/IN-CORE/incore-docs/issues/387)
+- Traffic flow recovery analysis documentation and example notebook [#389](https://github.com/IN-CORE/incore-docs/issues/389)
+- Social vulnerability score analysis documentation and example notebook [#392](https://github.com/IN-CORE/incore-docs/issues/392)
+- Google Analytics to the main documentation [#399](https://github.com/IN-CORE/incore-docs/issues/399)
+- Google Analytics to the api sphinx documentation [#396](https://github.com/IN-CORE/incore-docs/issues/396)
+- Buyout decision analysis documentation and example notebook [#401](https://github.com/IN-CORE/incore-docs/issues/401)
+
## [4.11.0] - 2024-04-30
### Changed
diff --git a/manual_jb/Dockerfile b/manual_jb/Dockerfile
index 048155db..942312a7 100644
--- a/manual_jb/Dockerfile
+++ b/manual_jb/Dockerfile
@@ -8,11 +8,17 @@ RUN apt-get -y update
WORKDIR /src
COPY requirements.txt /src/
+COPY insert_ga_to_header.py /src/
RUN python3 -m pip install -r requirements.txt
+
COPY content/ /src/content/
RUN jupyter-book build content/
+# Run the insert_ga_to_header.py script to insert Google Analytics code
+RUN python3 -m pip install beautifulsoup4
+RUN python3 insert_ga_to_header.py
+
# ----------------------------------------------------------------------
# Building actual container
# ----------------------------------------------------------------------
@@ -22,3 +28,5 @@ FROM nginx
RUN mkdir -p /usr/share/nginx/html/doc/incore
COPY --from=builder /src/content/_build/html/ /usr/share/nginx/html/doc/incore/
+COPY config /usr/share/nginx/html/doc/incore/config
+
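The insert_ga_to_header.py script invoked above is not included in this diff; a minimal sketch of what it could look like, assuming it walks the Jupyter Book build output and injects a reference to the runtime `config/googleAnalytics.js` loader into each page's `<head>` (the paths and tag placement are assumptions):

```
# Hypothetical sketch of insert_ga_to_header.py
import pathlib
from bs4 import BeautifulSoup

BUILD_DIR = pathlib.Path("content/_build/html")  # Jupyter Book HTML output (assumed)
GA_LOADER_SRC = "config/googleAnalytics.js"      # loader served next to the docs by nginx (assumed)

for html_file in BUILD_DIR.rglob("*.html"):
    soup = BeautifulSoup(html_file.read_text(encoding="utf-8"), "html.parser")
    if soup.head is None:
        continue  # skip fragments without a <head>
    if soup.head.find("script", src=GA_LOADER_SRC):
        continue  # already patched; keep the step idempotent on rebuilds
    soup.head.append(soup.new_tag("script", src=GA_LOADER_SRC))
    html_file.write_text(str(soup), encoding="utf-8")
```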
diff --git a/manual_jb/config/config.json b/manual_jb/config/config.json
new file mode 100644
index 00000000..279dd30c
--- /dev/null
+++ b/manual_jb/config/config.json
@@ -0,0 +1,3 @@
+{
+ "GA_KEY": "Test-Google-Analytics-Key-Replace-Me"
+}
\ No newline at end of file
diff --git a/manual_jb/config/googleAnalytics.js b/manual_jb/config/googleAnalytics.js
new file mode 100644
index 00000000..6e43a7e9
--- /dev/null
+++ b/manual_jb/config/googleAnalytics.js
@@ -0,0 +1,31 @@
+// googleAnalytics.js
+(function() {
+ // Fetch the runtime configuration
+ fetch('config/config.json')
+ .then(response => {
+ if (!response.ok) {
+ throw new Error('Configuration file not found');
+ }
+ return response.json();
+ })
+ .then(config => {
+ if (!config.GA_KEY) {
+ throw new Error('GA_KEY is missing in the configuration');
+ }
+
+ // Create the script tag for Google Tag Manager
+ const scriptTag = document.createElement('script');
+ scriptTag.async = true;
+ scriptTag.src = `https://www.googletagmanager.com/gtag/js?id=${config.GA_KEY}`;
+ document.head.appendChild(scriptTag);
+
+ // Initialize Google Analytics
+ window.dataLayer = window.dataLayer || [];
+
+ function gtag() { dataLayer.push(arguments); }
+
+ gtag('js', new Date());
+ gtag('config', config.GA_KEY);
+ })
+ .catch(error => console.warn('GA setup skipped:', error.message));
+})();
\ No newline at end of file
diff --git a/manual_jb/content/_config.yml b/manual_jb/content/_config.yml
index e613ae35..4d14ef38 100644
--- a/manual_jb/content/_config.yml
+++ b/manual_jb/content/_config.yml
@@ -3,7 +3,7 @@ project: "IN-CORE Manual"
author: IN-CORE Community
copyright: "2023" # Copyright year to be placed in the footer
logo: images/resilience-logo.png
-version: "4.11.0" # Version is not part of JB config, we use it for autobuild of incore-docs
+version: "4.12.0" # Version is not part of JB config, we use it for autobuild of incore-docs
exclude_patterns: [_build, .DS_Store, "**.ipynb_checkpoints"]
repository:
diff --git a/manual_jb/content/_toc.yml b/manual_jb/content/_toc.yml
index 0f95e065..d87649bc 100644
--- a/manual_jb/content/_toc.yml
+++ b/manual_jb/content/_toc.yml
@@ -15,9 +15,13 @@ chapters:
- file: analyses
sections:
- file: analyses/bridge_dmg
+ - file: analyses/building_cluster_recovery
- file: analyses/building_dmg
- file: analyses/building_func
- file: analyses/building_loss
+ - file: analyses/building_nonstructural_dmg
+ - file: analyses/buyout_decision
+ - file: analyses/building_structural_dmg
- file: analyses/capital_shocks
- file: analyses/combined_wind_wave_surge_building_dmg
- file: analyses/combined_wind_wave_surge_building_loss
@@ -28,6 +32,7 @@ chapters:
- file: analyses/epf_restoration
- file: analyses/epn_functionality
- file: analyses/galveston_cge
+ - file: analyses/gas_facility_damage
- file: analyses/housing_household_recovery
- file: analyses/housing_recovery
- file: analyses/housingunitallocation
@@ -35,10 +40,9 @@ chapters:
- file: analyses/joplin_cge
- file: analyses/joplin_empirical_restoration
- file: analyses/mean_dmg
- - file: analyses/mc_failure_prob
+ - file: analyses/mc_limit_state_prob
- file: analyses/multi_retrofit_optimization
- file: analyses/nci_functionality
- - file: analyses/non_structural_building_dmg
- file: analyses/pipeline_dmg
- file: analyses/pipeline_dmg_w_repair_rate
- file: analyses/pipeline_functionality
@@ -51,7 +55,9 @@ chapters:
- file: analyses/slc_cge
- file: analyses/seaside_cge
- file: analyses/social_vulnerability
+ - file: analyses/social_vulnerability_score
- file: analyses/tornadoepn_dmg
+ - file: analyses/traffic_flow_recovery
- file: analyses/transportation_recovery
- file: analyses/waterfacility_dmg
- file: analyses/wfn_functionality
diff --git a/manual_jb/content/analyses.md b/manual_jb/content/analyses.md
index ea5c938a..af62ba5f 100644
--- a/manual_jb/content/analyses.md
+++ b/manual_jb/content/analyses.md
@@ -1,46 +1,51 @@
# Analyses
1. [Bridge damage](analyses/bridge_dmg.md)
-2. [Building damage](analyses/building_dmg)
-3. [Building functionality](analyses/building_func)
+2. [Building cluster recovery](analyses/building_cluster_recovery)
+3. [Building damage](analyses/building_dmg)
4. [Building economic loss](analyses/building_loss)
-5. [Capital shocks](analyses/capital_shocks)
-6. [Combined wind wave surge building damage](analyses/combined_wind_wave_surge_building_dmg)
-7. [Combined wind wave surge building loss](analyses/combined_wind_wave_surge_building_loss)
-8. [Commercial building recovery](analyses/commercial_building_recovery)
-9. [Cumulative building damage](analyses/cumulative_building_dmg)
-10. [Electric power facility damage](analyses/epf_dmg)
-11. [Electric power facility repair cost](analyses/epf_repair_cost)
-12. [Electric power facility restoration](analyses/epf_restoration)
-13. [Electric power network functionality](analyses/epn_functionality)
-14. [Galveston Computable General Equilibrium (CGE)](analyses/galveston_cge.md)
-15. [Household-level housing sequential recovery](analyses/housing_household_recovery)
-16. [Housing recovery](analyses/housing_recovery)
-17. [Housing unit allocation](analyses/housingunitallocation)
-18. [Interdependent Network Design Problem](analyses/indp)
-19. [Joplin Computable General Equilibrium (CGE)](analyses/joplin_cge)
-20. [Joplin empirical building restoration](analyses/joplin_empirical_building_restoration)
-21. [Machine Learning Enabled Computable General Equilibrium (CGE) - Salt Lake City](analyses/ml_slc_cge.md)
-22. [Mean damage](analyses/mean_dmg)
-23. [Monte Carlo failure probability](analyses/mc_failure_prob)
-24. [Multi-objective retrofit optimization](analyses/multi_retrofit_optimization)
-25. [Network cascading interdependency functionality](analyses/nci_functionality)
-26. [Nonstructural building damage](analyses/non_structural_building_dmg)
-27. [Pipeline damage](analyses/pipeline_dmg)
-28. [Pipeline damage with repair rate](analyses/pipeline_dmg_w_repair_rate)
-29. [Pipeline functionality](analyses/pipeline_functionality)
-30. [Pipeline repair cost](analyses/pipeline_repair_cost)
-31. [Pipeline restoration](analyses/pipeline_restoration)
-32. [Population dislocation](analyses/populationdislocation)
-33. [Portfolio recovery](analyses/portfolio_recovery)
-34. [Residential building recovery](analyses/residential_building_recovery)
-35. [Road damage](analyses/road_dmg)
-36. [Salt Lake City Computable General Equilibrium (CGE)](analyses/slc_cge.md)
-37. [Seaside Computable General Equilibrium (CGE)](analyses/seaside_cge)
-38. [Social Vulnerability](analyses/social_vulnerability)
-39. [Tornado electric power network (EPN) damage](analyses/tornadoepn_dmg)
-40. [Transportation recovery](analyses/transportation_recovery)
-41. [Water facility damage](analyses/waterfacility_dmg)
-42. [Water network functionality](analyses/wfn_functionality)
-43. [Water facility repair cost](analyses/water_facility_repair_cost)
-44. [Water facility restoration](analyses/water_facility_restoration)
+5. [Building functionality](analyses/building_func)
+6. [Building nonstructural damage](analyses/building_nonstructural_dmg)
+7. [Building structural damage](analyses/building_structural_dmg)
+8. [Buyout decision](analyses/buyout_decision)
+9. [Capital shocks](analyses/capital_shocks)
+10. [Combined wind wave surge building damage](analyses/combined_wind_wave_surge_building_dmg)
+11. [Combined wind wave surge building loss](analyses/combined_wind_wave_surge_building_loss)
+12. [Commercial building recovery](analyses/commercial_building_recovery)
+13. [Cumulative building damage](analyses/cumulative_building_dmg)
+14. [Electric power facility damage](analyses/epf_dmg)
+15. [Electric power facility repair cost](analyses/epf_repair_cost)
+16. [Electric power facility restoration](analyses/epf_restoration)
+17. [Electric power network functionality](analyses/epn_functionality)
+18. [Galveston Computable General Equilibrium (CGE)](analyses/galveston_cge.md)
+19. [Gas facility damage](analyses/gas_facility_damage)
+20. [Household-level housing sequential recovery](analyses/housing_household_recovery)
+21. [Housing recovery](analyses/housing_recovery)
+22. [Housing unit allocation](analyses/housingunitallocation)
+23. [Interdependent Network Design Problem](analyses/indp)
+24. [Joplin Computable General Equilibrium (CGE)](analyses/joplin_cge)
+25. [Joplin empirical building restoration](analyses/joplin_empirical_building_restoration)
+26. [Machine Learning Enabled Computable General Equilibrium (CGE) - Salt Lake City](analyses/ml_slc_cge.md)
+27. [Mean damage](analyses/mean_dmg)
+28. [Monte Carlo limit state probability](analyses/mc_limit_state_prob)
+29. [Multi-objective retrofit optimization](analyses/multi_retrofit_optimization)
+30. [Network cascading interdependency functionality](analyses/nci_functionality)
+31. [Pipeline damage](analyses/pipeline_dmg)
+32. [Pipeline damage with repair rate](analyses/pipeline_dmg_w_repair_rate)
+33. [Pipeline functionality](analyses/pipeline_functionality)
+34. [Pipeline repair cost](analyses/pipeline_repair_cost)
+35. [Pipeline restoration](analyses/pipeline_restoration)
+36. [Population dislocation](analyses/populationdislocation)
+37. [Residential building recovery](analyses/residential_building_recovery)
+38. [Road damage](analyses/road_dmg)
+39. [Salt Lake City Computable General Equilibrium (CGE)](analyses/slc_cge.md)
+40. [Seaside Computable General Equilibrium (CGE)](analyses/seaside_cge)
+41. [Social Vulnerability](analyses/social_vulnerability)
+42. [Social Vulnerability Score](analyses/social_vulnerability_score)
+43. [Tornado electric power network (EPN) damage](analyses/tornadoepn_dmg)
+44. [Traffic flow recovery](analyses/traffic_flow_recovery)
+45. [Transportation recovery](analyses/transportation_recovery)
+46. [Water facility damage](analyses/waterfacility_dmg)
+47. [Water network functionality](analyses/wfn_functionality)
+48. [Water facility repair cost](analyses/water_facility_repair_cost)
+49. [Water facility restoration](analyses/water_facility_restoration)
diff --git a/manual_jb/content/analyses/portfolio_recovery.md b/manual_jb/content/analyses/building_cluster_recovery.md
similarity index 56%
rename from manual_jb/content/analyses/portfolio_recovery.md
rename to manual_jb/content/analyses/building_cluster_recovery.md
index a5fbbc40..2f91425b 100644
--- a/manual_jb/content/analyses/portfolio_recovery.md
+++ b/manual_jb/content/analyses/building_cluster_recovery.md
@@ -1,8 +1,8 @@
-# Portfolio recovery
+# Building Cluster Recovery
**Description**
-The code creates two output files *building-recovery.csv* and *portfolio-recovery.csv*
+The code creates two output files *building-recovery.csv* and *cluster-recovery.csv*
**Input Parameters**
@@ -29,9 +29,9 @@ key name | type | name | description
**Output Datasets**
-key name | type | name | description
---- | --- | --- | ---
-`result` * | [`incore:portfolioRecovery`](https://incore.ncsa.illinois.edu/semantics/api/types/incore:portfolioRecovery) | Results | A dataset containing results (format: CSV).
+key name | type | name | description
+--- |--------------------------------------------------------------------------------------------------------------| --- | ---
+`result` * | [`incore:clusterRecovery`](https://incore.ncsa.illinois.edu/semantics/api/types/incore:clusterRecovery) | Results | A dataset containing results (format: CSV).
(* required)
@@ -41,25 +41,25 @@ code snippet:
```
# Create instance
- bldg_portfolio_recovery = BuildingPortfolioRecoveryAnalysis(client)
+ bldg_cluster_recovery = BuildingClusterRecovery(client)
# Load input datasets
- bldg_portfolio_recovery.load_remote_input_dataset("building_data", bldg_data_dataset)
- bldg_portfolio_recovery.load_remote_input_dataset("occupancy_mapping", occupancy_dataset)
- bldg_portfolio_recovery.load_remote_input_dataset("building_damage", bldg_damage_dataset)
- bldg_portfolio_recovery.load_remote_input_dataset("dmg_ratios", mean_repair_dataset)
- bldg_portfolio_recovery.load_remote_input_dataset("utility", utility_dataset)
- bldg_portfolio_recovery.load_remote_input_dataset("utility_partial", utility_partial_dataset)
- bldg_portfolio_recovery.load_remote_input_dataset("coefFL", coefFL_dataset)
+ bldg_cluster_recovery.load_remote_input_dataset("building_data", bldg_data_dataset)
+ bldg_cluster_recovery.load_remote_input_dataset("occupancy_mapping", occupancy_dataset)
+ bldg_cluster_recovery.load_remote_input_dataset("building_damage", bldg_damage_dataset)
+ bldg_cluster_recovery.load_remote_input_dataset("dmg_ratios", mean_repair_dataset)
+ bldg_cluster_recovery.load_remote_input_dataset("utility", utility_dataset)
+ bldg_cluster_recovery.load_remote_input_dataset("utility_partial", utility_partial_dataset)
+ bldg_cluster_recovery.load_remote_input_dataset("coefFL", coefFL_dataset)
# Set parameters
- bldg_portfolio_recovery.set_parameter("uncertainty", True)
- bldg_portfolio_recovery.set_parameter("sample_size", 35) # default none. Gets size form input dataset
- bldg_portfolio_recovery.set_parameter("random_sample_size", 50) # default 10000
- bldg_portfolio_recovery.set_parameter("no_of_weeks", 100) # default 250
+ bldg_cluster_recovery.set_parameter("uncertainty", True)
+    bldg_cluster_recovery.set_parameter("sample_size", 35) # default None. Gets size from input dataset
+ bldg_cluster_recovery.set_parameter("random_sample_size", 50) # default 10000
+ bldg_cluster_recovery.set_parameter("no_of_weeks", 100) # default 250
- # Creates two output files building-recovery.csv and portfolio-recovery.csv
- bldg_portfolio_recovery.run_analysis()
+ # Creates two output files building-recovery.csv and cluster-recovery.csv
+ bldg_cluster_recovery.run_analysis()
```
-full analysis: [portfolio_recovery.ipynb](https://github.com/IN-CORE/incore-docs/blob/main/notebooks/portfolio_recovery.ipynb)
\ No newline at end of file
+full analysis: [building_cluster_recovery.ipynb](https://github.com/IN-CORE/incore-docs/blob/main/notebooks/building_cluster_recovery.ipynb)
diff --git a/manual_jb/content/analyses/building_func.md b/manual_jb/content/analyses/building_func.md
index 94675b5f..2dd119ba 100644
--- a/manual_jb/content/analyses/building_func.md
+++ b/manual_jb/content/analyses/building_func.md
@@ -11,7 +11,7 @@ If both the nearest pole to the building and the substation where buildings belo
functional, buildings are considered to be able to receive electric power.
The outputs of this analysis are 1) a CSV file with probabilities of functionality samples
-for direct comparison with [MC failure probability](mc_failure_prob) outputs and
+for direct comparison with [MC limit state probability](mc_limit_state_prob) outputs and
2) a CSV file with probabilities of functionality.
**Input parameters**
diff --git a/manual_jb/content/analyses/building_nonstructural_dmg.md b/manual_jb/content/analyses/building_nonstructural_dmg.md
new file mode 100644
index 00000000..ff09e124
--- /dev/null
+++ b/manual_jb/content/analyses/building_nonstructural_dmg.md
@@ -0,0 +1,92 @@
+# Building Nonstructural Damage
+
+**Description**
+
+This analysis computes the non-structural damage to buildings based on a particular hazard. Currently supported
+hazards are **earthquake**, **flood**, and **hurricane**.
+
+The process is similar to evaluating other types of structural damage. The probabilities for each building damage
+state are obtained using fragility curves and a hazard definition; for an earthquake scenario, each building site has
+a specific PGA (Peak Ground Acceleration), a measure of the earthquake hazard at its location.
+The liquefaction effect, defined as a change in stress condition in which material that is ordinarily
+a solid behaves like a liquid, can be considered as well. The LMF (Liquefaction Modification Factor)
+values are applied as multiplication factors to the median fragility values and must be present
+in the dataset. This analysis supports various types of fragility curves assigned to the building,
+e.g. acceleration-sensitive (AS) and drift-sensitive (DS).
+
+The code covers Normal and LogNormal fragilities with 3 limit states and creates an output CSV file
+with the corresponding damage states. The second output file is a JSON file with information about the hazard and fragilities.
+
+**Input Parameters**
+
+ key name | type | name | description
+----------------------------|--------|-----------------|---------------------------------------------------------------------
+ `result_name` * | `str` | Result name | Name of the result dataset.
+ `hazard_type` | `str` | Hazard type | Hazard type (earthquake, flood, hurricane).
+ `hazard_id` | `str` | Hazard id | ID of the hazard from the Hazard service.
+ `fragility_key` | `str` | Fragility Key | Fragility key used in mapping dataset.
+ `use_liquefaction`         | `bool` | Liquefaction    | Use liquefaction, if applicable to the hazard.<br>Default *False*.
+ `liq_geology_dataset_id`   | `str`  | Liquefaction id | A liquefaction susceptibility dataset.
+ `use_hazard_uncertainty`   | `bool` | Uncertainty     | Use hazard uncertainty.<br>Default is *False*.
+ `num_cpu`                  | `int`  | Number of CPUs  | Number of CPUs used for parallel computations.<br>Default *1*.
+
+**Input Hazards**
+
+ key name | type | name | description
+----------|----------------------------------------|--------|-------------------------------------------------------------
+ `hazard` | `earthquake`<br>`flood`<br>`hurricane` | Hazard | Supported hazard object for using local and remote hazards.
+
+**Input Datasets**
+
+ key name | type | name | description
+---------------------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|------------------|---------------------
+ `buildings` * | [`ergo:buildingInventoryVer4`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:buildingInventoryVer4)<br>[`ergo:buildingInventoryVer5`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:buildingInventoryVer5)<br>[`ergo:buildingInventoryVer6`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:buildingInventoryVer6)<br>[`ergo:buildingInventoryVer7`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:buildingInventoryVer7) | Building dataset | A building dataset.
+ `dfr3_mapping_set` * | [`incore:dfr3MappingSet`](https://incore.ncsa.illinois.edu/semantics/api/types/incore:dfr3MappingSet) | DFR3 Mapping Set | DFR3 Mapping Set.
+
+**Output datasets**
+
+ key name | type | parent key | name | description
+------------------------------|-------------------------------------------------------------------------------------------------------------------------------------------------|-------------|---------|-------------------------------------------------------------------------
+ `result` * | [`ergo:nsBuildingInventoryDamageVer4`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:nsBuildingInventoryDamageVer4) | `buildings` | Results | A dataset containing results<br>(format: CSV).
+ `damage_result` * | [`incore:nsBuildingInventoryDamageSupplement`](https://incore.ncsa.illinois.edu/semantics/api/types/incore:nsBuildingInventoryDamageSupplement) | `buildings` | Results | Information about applied hazard value and fragility<br>(format: JSON).
+
+(* required)
+
+**Execution**
+
+code snippet:
+
+```
+ # Create an instance
+ non_structural_building_dmg = NonStructBuildingDamage(client)
+
+ # Load input datasets
+ non_structural_building_dmg.load_remote_input_dataset("buildings", building_dataset_id)
+
+ # Load fragility mapping
+ fragility_service = FragilityService(client)
+ mapping_set = MappingSet(fragility_service.get_mapping(mapping_id))
+ non_structural_building_dmg.set_input_dataset('dfr3_mapping_set', mapping_set)
+
+ # Specify the result name
+ result_name = "non_structural_building_dmg_result"
+
+ # Set analysis parameters
+ non_structural_building_dmg.set_parameter("result_name", result_name)
+ non_structural_building_dmg.set_parameter("hazard_type", hazard_type)
+ non_structural_building_dmg.set_parameter("hazard_id", hazard_id)
+ non_structural_building_dmg.set_parameter("fragility_key", NonStructBuildingUtil.DEFAULT_FRAGILITY_KEY_AS)
+ non_structural_building_dmg.set_parameter("num_cpu", 4)
+
+ # Shelby County Liquefaction Susceptibility
+ use_liquefaction = True
+ liq_geology_dataset_id = "5a284f55c7d30d13bc0824ba"
+
+ non_structural_building_dmg.set_parameter("use_liquefaction", use_liquefaction)
+ non_structural_building_dmg.set_parameter("liq_geology_dataset_id", liq_geology_dataset_id)
+
+ non_structural_building_dmg.run_analysis()
+```
+
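+After the run, the outputs listed above can be pulled back for a quick check. A minimal sketch, assuming the standard pyincore dataset accessors (`get_output_dataset` and `get_dataframe_from_csv`):
+
+```
+    # Inspect the damage state results as a pandas DataFrame
+    dmg_result = non_structural_building_dmg.get_output_dataset("result")
+    dmg_df = dmg_result.get_dataframe_from_csv()
+    print(dmg_df.head())
+```
+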
+full analysis: [building_nonstructural_dmg.ipynb](https://github.com/IN-CORE/incore-docs/blob/main/notebooks/building_nonstructural_dmg.ipynb)
diff --git a/manual_jb/content/analyses/building_structural_dmg.md b/manual_jb/content/analyses/building_structural_dmg.md
new file mode 100644
index 00000000..b280dc67
--- /dev/null
+++ b/manual_jb/content/analyses/building_structural_dmg.md
@@ -0,0 +1,83 @@
+# Building structural damage
+
+**Description**
+
+This analysis computes building structural damage based on a particular hazard. Currently supported hazards are: **earthquake**,
+**tsunami**, **tornado**, **hurricane** and **flood**.
+
+The process for computing the structural damage is similar to other parts of the built environment. First, a fragility
+is obtained based on the hazard type and attributes of the building. Based on the fragility, the hazard intensity at the
+location of the building is computed. Using this information, the probability of exceeding each limit state is computed,
+along with the probability of damage. For the case of an earthquake hazard, soil information can be used to
+modify the damage probabilities to include damage due to liquefaction.
+
+The outputs of this analysis are a CSV file with probabilities of damage and a JSON file with information about the hazard and fragilities.
+
+**Input parameters**
+
+key name | type | name | description
+--- | --- | --- | ---
+`result_name` * | `str` | Result name | Name of the result dataset.
+`hazard_type` * | `str` | Hazard type | Hazard type (earthquake, tsunami, tornado, hurricaneWindfields).
+`hazard_id` * | `str` | Hazard id | ID of the hazard from the Hazard service.
+`fragility_key` | `str` | Fragility key | Fragility key used in mapping dataset.
+`use_liquefaction` | `bool` | Liquefaction | Use liquefaction, if applicable to the hazard.<br>Default is *False*.
+`use_hazard_uncertainty` | `bool` | Uncertainty | Use hazard uncertainty.<br>Default is *False*.
+`seed` * | `int` | Seed | Initial value to seed the random number generator.
+`num_cpu` | `int` | Number of CPUs | Number of CPUs used for parallel computation.<br>Default is *1*.
+`liquefaction_geology_dataset_id` | `str` | Liquefaction id | Liquefaction geology/susceptibility dataset id.<br>If not provided, liquefaction will be ignored.
+
+**Input datasets**
+
+key name | type | name | description
+--- | --- | --- | ---
+`buildings` * | [`ergo:buildingInventoryVer4`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:buildingInventoryVer4)<br>[`ergo:buildingInventoryVer5`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:buildingInventoryVer5)<br>[`ergo:buildingInventoryVer6`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:buildingInventoryVer6)<br>[`ergo:buildingInventoryVer7`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:buildingInventoryVer7) | Building dataset | A building dataset.
+`dfr3_mapping_set` * | [`incore:dfr3MappingSet`](https://incore.ncsa.illinois.edu/semantics/api/types/incore:dfr3MappingSet) | DFR3 Mapping Set | DFR3 Mapping Set.
+`retrofit_strategy` | [`incore:retrofitStrategy`](https://incore.ncsa.illinois.edu/semantics/api/types/incore:retrofitStrategy) | Retrofit Strategy | Building retrofit strategy that contains guid and retrofit method.
+
+**Input Hazards**
+
+key name | type | name | description
+--- |------------------------------------------------------------------|---------------| ---
+`hazard` | `earthquake`<br>`tornado`<br>`hurricane`<br>`flood`<br>`tsunami` | Hazard | Supported hazard object for using local and remote hazards.
+
+
+**Output datasets**
+
+key name | type | parent key | name | description
+--- | --- | --- | --- | ---
+`ds_result` * | [`ergo:buildingDamageVer5`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:buildingDamageVer5) | `buildings` | Results | A dataset containing results<br>(format: CSV).
+`damage_result` * | [`incore:buildingDamageSupplement`](https://incore.ncsa.illinois.edu/semantics/api/types/incore:buildingDamageSupplement) | `buildings` | Results | Information about applied hazard value and fragility<br>(format: JSON).
+
+(* required)
+
+**Execution**
+
+code snippet:
+
+```
+ # Create building structural damage
+ bldg_dmg = BuildingStructuralDamage(client)
+
+ # Load input dataset
+ bldg_dmg.load_remote_input_dataset("buildings", bldg_dataset_id)
+
+ # Load fragility mapping
+ fragility_service = FragilityService(client)
+ mapping_set = MappingSet(fragility_service.get_mapping(mapping_id))
+ bldg_dmg.set_input_dataset('dfr3_mapping_set', mapping_set)
+
+ # Specify the result name
+ result_name = "memphis_bldg_dmg_result"
+
+ # Set analysis parameters
+ bldg_dmg.set_parameter("result_name", result_name)
+ bldg_dmg.set_parameter("hazard_type", hazard_type)
+ bldg_dmg.set_parameter("hazard_id", hazard_id)
+ bldg_dmg.set_parameter("num_cpu", 10)
+
+ # Run building structural damage analysis
+ bldg_dmg.run_analysis()
+```
+
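+The optional `retrofit_strategy` dataset listed above is attached the same way as the other inputs. A brief sketch, where `retrofit_strategy_id` is a hypothetical dataset id:
+
+```
+    # Optionally apply a retrofit strategy (guid and retrofit method per building)
+    bldg_dmg.load_remote_input_dataset("retrofit_strategy", retrofit_strategy_id)
+```
+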
+full analysis: [building_structural_dmg.ipynb](https://github.com/IN-CORE/incore-docs/blob/main/notebooks/building_structural_dmg.ipynb)
diff --git a/manual_jb/content/analyses/buyout_decision.md b/manual_jb/content/analyses/buyout_decision.md
new file mode 100644
index 00000000..2ddbcf71
--- /dev/null
+++ b/manual_jb/content/analyses/buyout_decision.md
@@ -0,0 +1,67 @@
+# Buyout Decision
+
+**Description**
+
+This analysis helps identify candidate properties for buyout and allows practitioners and researchers to evaluate
+the potential equity outcomes of their selection under different scenario events.
+
+The output of this analysis is a CSV file with buildings to consider for buyout based on the set criteria. This can assist local
+practitioners in identifying candidate properties for buyout and allows practitioners and researchers to evaluate the potential equity
+outcomes of their selection.
+
+**Input parameters**
+
+key name | type | name | description
+--- |-------------------------|-------------------------| ---
+`result_name` * | `str` | Result name | Name of the result dataset.
+`residential_archetypes` * | `list` | Residential archetypes | Residential archetypes to consider for buyout.
+`fema_buyout_cap` * | `float` | FEMA buyout cap | FEMA buyout cap is the maximum appraised value considered for buyout.
+
+**Input datasets**
+
+key name | type | name | description
+--- |-------------------------|-------------------------| ---
+`buildings` * | [`ergo:buildingInventoryVer4`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:buildingInventoryVer4)<br>[`ergo:buildingInventoryVer5`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:buildingInventoryVer5)<br>[`ergo:buildingInventoryVer6`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:buildingInventoryVer6)<br>[`ergo:buildingInventoryVer7`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:buildingInventoryVer7) | Building dataset | A building dataset.
+`past_building_damage` * | [`ergo:buildingDamageVer6`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:buildingDamageVer6) | Previous Building damage | Building damage from a previous event.
+`future_building_damage` * | [`ergo:buildingDamageVer6`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:buildingDamageVer6) | Future/predicted Building damage | Building damage from a future/predicted event.
+`housing_unit_allocation` * | [`incore:housingUnitAllocation`](https://incore.ncsa.illinois.edu/semantics/api/types/incore:housingUnitAllocation) | Housing unit allocation | Housing unit allocation.
+`population_dislocation` * | [`incore:popDislocation`](https://incore.ncsa.illinois.edu/semantics/api/types/incore:popDislocation) | Population dislocation | Population dislocation results.
+
+
+**Output datasets**
+
+key name | type | parent key | name | description
+--- |-------------------------------------------------------------------------------------------------------| --- | --- | ---
+`result` * | [`incore:buyoutDecision`](https://incore.ncsa.illinois.edu/semantics/api/types/incore:buyoutDecision) | `buildings` | Results | A dataset containing buyout decision results<br>(format: CSV).
+
+(* required)
+
+**Execution**
+
+code snippet:
+
+```
+ # Create buyout analysis
+ buyout_decision = BuyoutDecision(client)
+
+ # Load input datasets
+ buyout_decision.load_remote_input_dataset("buildings", buildings_id)
+ buyout_decision.load_remote_input_dataset("housing_unit_allocation", hua_id)
+ buyout_decision.load_remote_input_dataset("past_building_damage", past_building_damage_id)
+ buyout_decision.load_remote_input_dataset("future_building_damage", future_building_damage_id)
+ buyout_decision.load_remote_input_dataset("population_dislocation", past_pop_dislocation_id)
+
+
+ # Specify the result name
+ result_name = "galveston_buyout_result"
+
+ # Set analysis parameters
+ buyout_decision.set_parameter("fema_buyout_cap", fema_buyout_cap)
+ buyout_decision.set_parameter("residential_archetypes", residential_archetypes)
+ buyout_decision.set_parameter("result_name", result_name)
+
+ # Run buyout analysis
+ buyout_decision.run_analysis()
+```
+
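+The two criteria parameters referenced above are plain Python values; illustrative placeholders (not calibrated guidance) could look like this:
+
+```
+    # Hypothetical example values for the buyout criteria (placeholders only)
+    fema_buyout_cap = 250000.0                # float: maximum appraised value considered for buyout
+    residential_archetypes = [1, 2, 3, 4, 5]  # list: archetype codes to consider for buyout
+```
+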
+full analysis: [buyout_decision.ipynb](https://github.com/IN-CORE/incore-docs/blob/main/notebooks/buyout_decision.ipynb)
diff --git a/manual_jb/content/analyses/combined_wind_wave_surge_building_dmg.md b/manual_jb/content/analyses/combined_wind_wave_surge_building_dmg.md
index 060bff0d..e7488157 100644
--- a/manual_jb/content/analyses/combined_wind_wave_surge_building_dmg.md
+++ b/manual_jb/content/analyses/combined_wind_wave_surge_building_dmg.md
@@ -18,7 +18,7 @@ key name | type | name | description
--- | --- | --- | ---
`wind_damage` * | [`ergo:buildingDamageVer6`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:buildingDamageVer6) | Building wind damage | A building wind damage dataset.
`surge_wave_damage` * | [`ergo:buildingDamageVer6`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:buildingDamageVer6) | Building surge-wave damage | A building surge-wave damage dataset.
-`flood_damage` * | [`ergo:buildingDamageVer6`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:buildingDamageVer6) | Building flood damage | A building flood damage dataset.
+`flood_damage` * | [`ergo:nsBuildingInventoryDamageVer4`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:nsBuildingInventoryDamageVer4) | Building flood damage | A building flood damage dataset.
**Output datasets**
@@ -46,7 +46,7 @@ code snippet:
w_bldg_dmg.run_analysis()
# Create flood building damage
- f_bldg_dmg = BuildingDamage(client)
+ f_bldg_dmg = NonStructBuildingDamage(client)
# Run building damage analysis
f_bldg_dmg.run_analysis()
diff --git a/manual_jb/content/analyses/combined_wind_wave_surge_building_loss.md b/manual_jb/content/analyses/combined_wind_wave_surge_building_loss.md
index 8d347fd1..8dde011b 100644
--- a/manual_jb/content/analyses/combined_wind_wave_surge_building_loss.md
+++ b/manual_jb/content/analyses/combined_wind_wave_surge_building_loss.md
@@ -20,7 +20,7 @@ key name | type | name | description
`buildings` * | [`ergo:buildingInventoryVer4`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:buildingInventoryVer4)<br>[`ergo:buildingInventoryVer5`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:buildingInventoryVer5)<br>[`ergo:buildingInventoryVer6`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:buildingInventoryVer6)<br>[`ergo:buildingInventoryVer7`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:buildingInventoryVer7) | Building dataset | A building dataset.
`wind_damage` * | [`ergo:buildingDamageVer6`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:buildingDamageVer6) | Building wind damage | A building wind damage dataset.
`surge_wave_damage` * | [`ergo:buildingDamageVer6`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:buildingDamageVer6) | Building surge-wave damage | A building surge-wave damage dataset.
-`flood_damage` * | [`ergo:buildingDamageVer6`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:buildingDamageVer6) | Building flood damage | A building flood damage dataset.
+`flood_damage` * | [`ergo:nsBuildingInventoryDamageVer4`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:nsBuildingInventoryDamageVer4) | Building flood damage | A building flood damage dataset.
`structural_cost` * | [`incore:structuralCostRatio`](https://incore.ncsa.illinois.edu/semantics/api/types/incore:structuralCostRatio) | Building structural cost ratios | A dataset with building structural cost ratios for each archetype.
`content_cost` * | [`incore:contentCostRatio`](https://incore.ncsa.illinois.edu/semantics/api/types/incore:contentCostRatio) | Building content cost ratios | A dataset with building content cost ratios for each damage state.
@@ -49,7 +49,7 @@ code snippet:
w_bldg_dmg.run_analysis()
# Create flood building damage
- f_bldg_dmg = BuildingDamage(client)
+ f_bldg_dmg = NonStructBuildingDamage(client)
# Run building damage analysis
f_bldg_dmg.run_analysis()
diff --git a/manual_jb/content/analyses/gas_facility_damage.md b/manual_jb/content/analyses/gas_facility_damage.md
new file mode 100644
index 00000000..b5c4c131
--- /dev/null
+++ b/manual_jb/content/analyses/gas_facility_damage.md
@@ -0,0 +1,86 @@
+# Gas facility damage
+
+**Description**
+
+This analysis computes gas facility damage based on a particular hazard. The currently supported hazard is
+**earthquake**.
+
+The process for computing the structural damage is similar to other parts of the built environment. First, a fragility
+is obtained based on the hazard type and attributes of the gas facility. Based on the fragility, the hazard intensity at the
+location of the facility is computed. Using this information, the probability of exceeding each limit state is computed,
+along with the probability of damage. For the case of an earthquake hazard, soil information can be used to
+modify the damage probabilities to include damage due to liquefaction.
+
+The outputs of this analysis are a CSV file with probabilities of damage and a JSON file with information about the hazard and fragilities.
+
+**Input Parameters**
+
+key name | type | name | description
+--- | --- | --- | ---
+`result_name` * | `str` | Result name | Name of the result dataset.
+`hazard_type` | `str` | Hazard type | Hazard type (earthquake).
+`hazard_id` | `str` | Hazard id | ID of the hazard from the Hazard service.
+`fragility_key` | `str` | Fragility key | Fragility key used in mapping dataset.
+`use_liquefaction` | `bool` | Liquefaction | Use liquefaction, if applicable to the hazard.<br>Default is *False*.
+`use_hazard_uncertainty` | `bool` | Uncertainty | Use hazard uncertainty.
+`liquefaction_geology_dataset_id` | `str` | Liquefaction id | Liquefaction susceptibility dataset.
+`liquefaction_fragility_key` | `str` | Fragility key | Fragility key used in mapping dataset.
+`num_cpu` | `int` | Number of CPUs | Number of CPUs used for parallel computations.<br>Default *1*.
+
+**Input Hazards**
+
+key name | type | name | description
+--- |-------|---------------| ---
+`hazard` | `earthquake` | Hazard | Supported hazard object for using local and remote hazards.
+
+**Input Datasets**
+
+key name | type | name | description
+--- |---------------------------------------------------------------------------------------------------------------| --- | ---
+`gas_facilities` * | [`ergo:gasFacilityInventory`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:gasFacilityInventory) | Facility dataset | A gas facility dataset.
+`dfr3_mapping_set` * | [`incore:dfr3MappingSet`](https://incore.ncsa.illinois.edu/semantics/api/types/incore:dfr3MappingSet) | DFR3 Mapping Set | DFR3 Mapping Set.
+
+**Output datasets**
+
+key name | type | parent key | name | description
+--- |---------------------------------------------------------------------------------------------------------------------------------|------------------| --- | ---
+`result` * | [`ergo:gasFacilityInventoryDamage`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:gasFacilityInventoryDamage) | `gas_facilities` | Results | A dataset containing limit state and damage state probabilities<br>(format: CSV).
+`metadata` * | [`incore:gasFacilityDamageSupplement`](https://incore.ncsa.illinois.edu/semantics/api/types/incore:gasFacilityDamageSupplement) | `gas_facilities` | Results | Information about applied hazard value and fragility for each facility<br>(format: JSON).
+
+(* required)
+
+**Execution**
+
+code snippet:
+
+```
+ # Create gas facility damage analysis
+ gf_dmg = GasFacilityDamage(client)
+
+ # Load gas facility inventory dataset
+ gf_dmg.load_remote_input_dataset("gas_facilities", facility_dataset_id)
+
+ # Load fragility mapping
+ fragility_service = FragilityService(client)
+ mapping_set = MappingSet(fragility_service.get_mapping(mapping_id))
+ gf_dmg.set_input_dataset('dfr3_mapping_set', mapping_set)
+
+ # Specify result name
+ result_name = "gf-dmg-results"
+
+ # Set analysis parameters
+ gf_dmg.set_parameter("result_name", result_name)
+ gf_dmg.set_parameter("hazard_type", hazard_type)
+ gf_dmg.set_parameter("hazard_id", hazard_id)
+ gf_dmg.set_parameter("fragility_key", fragility_key)
+ gf_dmg.set_parameter("use_liquefaction", liquefaction)
+ gf_dmg.set_parameter("liquefaction_geology_dataset_id", liq_geology_dataset_id)
+ gf_dmg.set_parameter("liquefaction_fragility_key", liq_fragility_key)
+ gf_dmg.set_parameter("use_hazard_uncertainty", uncertainty)
+ gf_dmg.set_parameter("num_cpu", 4)
+
+ # Run gas facility damage analysis
+ gf_dmg.run_analysis()
+```
+
+full analysis: [gas_facility_dmg.ipynb](https://github.com/IN-CORE/incore-docs/blob/main/notebooks/gas_facility_dmg.ipynb)
diff --git a/manual_jb/content/analyses/joplin_empirical_building_restoration.md b/manual_jb/content/analyses/joplin_empirical_building_restoration.md
index a4cc0ee3..e0db68cc 100644
--- a/manual_jb/content/analyses/joplin_empirical_building_restoration.md
+++ b/manual_jb/content/analyses/joplin_empirical_building_restoration.md
@@ -32,10 +32,10 @@ key name | type | name | description
**Input datasets**
-key name | type | name | description
---- | --- | --- | ---
-`buildings` * | [`ergo:buildingInventoryVer4`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:buildingInventoryVer4)<br>[`ergo:buildingInventoryVer5`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:buildingInventoryVer5)<br>[`ergo:buildingInventoryVer6`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:buildingInventoryVer6)<br>[`ergo:buildingInventoryVer7`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:buildingInventoryVer7) | Building dataset | A building dataset.
-`building_dmg` * | [`ergo:buildingDamageVer4`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:buildingDamageVer4)<br>[`ergo:buildingDamageVer5`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:buildingDamageVer5)<br>[`ergo:nsBuildingInventoryDamage`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:nsBuildingInventoryDamage)<br>[`ergo:nsBuildingInventoryDamageVer2`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:nsBuildingInventoryDamageVer2) | Building damage | A building damage dataset.
+key name | type | name | description
+--- |------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| --- | ---
+`buildings` * | [`ergo:buildingInventoryVer4`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:buildingInventoryVer4)<br>[`ergo:buildingInventoryVer5`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:buildingInventoryVer5)<br>[`ergo:buildingInventoryVer6`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:buildingInventoryVer6)<br>[`ergo:buildingInventoryVer7`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:buildingInventoryVer7) | Building dataset | A building dataset.
+`building_dmg` * | [`ergo:buildingDamageVer4`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:buildingDamageVer4)<br>[`ergo:buildingDamageVer5`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:buildingDamageVer5)<br>[`ergo:nsBuildingInventoryDamage`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:nsBuildingInventoryDamage)<br>[`ergo:nsBuildingInventoryDamageVer2`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:nsBuildingInventoryDamageVer2)<br>[`ergo:nsBuildingInventoryDamageVer3`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:nsBuildingInventoryDamageVer3)<br>[`ergo:nsBuildingInventoryDamageVer4`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:nsBuildingInventoryDamageVer4) | Building damage | A building damage dataset.
**Output datasets**
diff --git a/manual_jb/content/analyses/mc_failure_prob.md b/manual_jb/content/analyses/mc_failure_prob.md
deleted file mode 100644
index 2dbc3e4d..00000000
--- a/manual_jb/content/analyses/mc_failure_prob.md
+++ /dev/null
@@ -1,62 +0,0 @@
-# Monte Carlo failure probability
-
-**Description**
-
-This analysis calculates a probability of failure using a stochastic process. Failure probability and Failure state are derived
-using the dictionary of failed damage states in the input infrastructure dataset. Failure probability is calculated from all
-stochastic runs, failure state shows all infrastructure standings as a string of *failed* (0) and *not failed* (1) states
-of each individual run.
-
-The output of this analysis are two CSV files; a failure proability *base_name*_failure_probability.csv with allocated house units
-and *base_name*_failure_state.csv.
-
-**Input Parameters**
-
-key name | type | name | description
---- | --- | --- | ---
-`result_name` * | `str` | Result name | Name of the result dataset.
-`num_samples` * | `int` | Samples | Number of Monte Carlo samples.
-`damage_interval_keys` * | `List[str]` | Damage keys | Column names of the damage intervals.
-`failure_state_keys` * | `List[str]` | Failure keys | Column names of damage intervals.
-`num_cpu` | `int` | Number of CPUs | Number of CPUs used for parallel computations.<br>Default *1*.
-`seed` | `int` | Seed | Initial seed for the probabilistic model to ensure reproducibility.
-
-**Input Datasets**
-
-key name | type | name | description
---- | --- | --- | ---
-`damage` * | [`ergo:buildingDamageVer4`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:buildingDamageVer4)<br>[`ergo:buildingDamageVer5`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:buildingDamageVer5)<br>[`ergo:bridgeDamage`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:bridgeDamage)<br>[`ergo:bridgeDamageVer2`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:bridgeDamageVer2)<br>[`incore:epfDamage`](https://incore.ncsa.illinois.edu/semantics/api/types/incore:epfDamage)<br>[`incore:epfDamageVer2`](https://incore.ncsa.illinois.edu/semantics/api/types/incore:epfDamageVer2)<br>[`ergo:nsBuildingInventoryDamage`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:nsBuildingInventoryDamage)<br>[`ergo:nsBuildingInventoryDamageVer2`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:nsBuildingInventoryDamageVer2)<br>[`incore:pipelineDamage`](https://incore.ncsa.illinois.edu/semantics/api/types/incore:pipelineDamage)<br>[`incore:pipelineDamageVer2`](https://incore.ncsa.illinois.edu/semantics/api/types/incore:pipelineDamageVer2)<br>[`ergo:roadDamage`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:roadDamage)<br>[`ergo:roadDamageVer2`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:roadDamageVer2)<br>[`ergo:waterFacilityDamageVer4`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:waterFacilityDamageVer4)<br>[`ergo:waterFacilityDamageVer5`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:waterFacilityDamageVer5) | Infrastructure damage | A file with infrastructure damage intervals.
-
-**Output Datasets**
-
-key name | type | name | description
---- | --- | --- | ---
-`failure_probability` * | [`incore:failureProbability`](https://incore.ncsa.illinois.edu/semantics/api/types/incore:failureProbability) | Results | A dataset containing failure probability results<br>(format: CSV).
-`sample_failure_state` * | [`incore:sampleFailureState`](https://incore.ncsa.illinois.edu/semantics/api/types/incore:sampleFailureState) | Results | A dataset containing failure state for each sample<br>(format: CSV).
-
-(* required)
-
-**Execution**
-
-code snippet:
-
-```
- # Create instance
- mc = MonteCarloFailureProbability(client)
-
- # Load remote datasets
- mc.load_remote_input_dataset("damage", damage_id)
-
- # Set analysis parameters
- mc.set_parameter("result_name", "mc_failure_probability")
- mc.set_parameter("num_cpu", 8)
- mc.set_parameter("num_samples", 10)
- mc.set_parameter("damage_interval_keys", ["DS_0", "DS_1", "DS_2", "DS_3"])
- mc.set_parameter("failure_state_keys", ["DS_1", "DS_2", "DS_3"])
- mc.set_parameter("seed", 1111)
-
- # Run Monte Carlo failure
- mc.run_analysis()
-```
-
-full analysis: [mc_failure_prob.ipynb](https://github.com/IN-CORE/incore-docs/blob/main/notebooks/mc_failure_prob.ipynb)
diff --git a/manual_jb/content/analyses/mc_limit_state_prob.md b/manual_jb/content/analyses/mc_limit_state_prob.md
new file mode 100644
index 00000000..605e6fb3
--- /dev/null
+++ b/manual_jb/content/analyses/mc_limit_state_prob.md
@@ -0,0 +1,63 @@
+# Monte Carlo Limit State Probability
+
+**Description**
+
+This analysis calculates the probability of limit states using a stochastic process. Limit state probability and
+damage state are derived using the dictionary of failed damage states in the input infrastructure dataset. Limit state
+probability is calculated from all stochastic runs; the failure state output records each piece of infrastructure
+as a string of *failed* (0) and *not failed* (1) states, one entry per individual run.
+
+The outputs of this analysis are CSV files with the limit state probability (*base_name*_failure_probability.csv),
+the per-sample failure states (*base_name*_failure_state.csv), and the per-sample damage states (see the output datasets below).
+
+**Input Parameters**
+
+key name | type | name | description
+--- | --- | --- | ---
+`result_name` * | `str` | Result name | Name of the result dataset.
+`num_samples` * | `int` | Samples | Number of Monte Carlo samples.
+`damage_interval_keys` * | `List[str]` | Damage keys | Column names of the damage intervals.
+`failure_state_keys` * | `List[str]` | Failure keys | Column names of the damage intervals considered to be failure states.
+`num_cpu` | `int` | Number of CPUs | Number of CPUs used for parallel computations.<br>Default *1*.
+`seed` | `int` | Seed | Initial seed for the probabilistic model to ensure reproducibility.
+
+**Input Datasets**
+
+key name | type | name | description
+--- |------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| --- | ---
+`damage` * | [`ergo:buildingDamageVer4`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:buildingDamageVer4)<br>[`ergo:buildingDamageVer5`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:buildingDamageVer5)<br>[`ergo:bridgeDamage`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:bridgeDamage)<br>[`ergo:bridgeDamageVer2`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:bridgeDamageVer2)<br>[`incore:epfDamage`](https://incore.ncsa.illinois.edu/semantics/api/types/incore:epfDamage)<br>[`incore:epfDamageVer2`](https://incore.ncsa.illinois.edu/semantics/api/types/incore:epfDamageVer2)<br>[`ergo:nsBuildingInventoryDamage`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:nsBuildingInventoryDamage)<br>[`ergo:nsBuildingInventoryDamageVer2`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:nsBuildingInventoryDamageVer2)<br>[`ergo:nsBuildingInventoryDamageVer3`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:nsBuildingInventoryDamageVer3)<br>[`ergo:nsBuildingInventoryDamageVer4`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:nsBuildingInventoryDamageVer4)<br>[`incore:pipelineDamage`](https://incore.ncsa.illinois.edu/semantics/api/types/incore:pipelineDamage)<br>[`incore:pipelineDamageVer2`](https://incore.ncsa.illinois.edu/semantics/api/types/incore:pipelineDamageVer2)<br>[`ergo:roadDamage`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:roadDamage)<br>[`ergo:roadDamageVer2`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:roadDamageVer2)<br>[`ergo:waterFacilityDamageVer4`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:waterFacilityDamageVer4)<br>[`ergo:waterFacilityDamageVer5`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:waterFacilityDamageVer5) | Infrastructure damage | A file with infrastructure damage intervals.
+
+**Output Datasets**
+
+key name | type | name | description
+--- | --- | --- | ---
+`failure_probability` * | [`incore:failureProbability`](https://incore.ncsa.illinois.edu/semantics/api/types/incore:failureProbability) | Results | A dataset containing failure probability results<br>(format: CSV).
+`sample_failure_state` * | [`incore:sampleFailureState`](https://incore.ncsa.illinois.edu/semantics/api/types/incore:sampleFailureState) | Results | A dataset containing failure state for each sample<br>(format: CSV).
+`sample_damage_states` * | [`incore:sampleDamageState`](https://incore.ncsa.illinois.edu/semantics/api/types/incore:sampleDamageState) | Results | A dataset containing damage state for each sample<br>(format: CSV).
+
+(* required)
+
+**Execution**
+
+code snippet:
+
+```
+ # Create instance
+ mc = MonteCarloLimitStateProbability(client)
+
+ # Load remote datasets
+ mc.load_remote_input_dataset("damage", damage_id)
+
+ # Set analysis parameters
+ mc.set_parameter("result_name", "mc_limit_state_probability")
+ mc.set_parameter("num_cpu", 8)
+ mc.set_parameter("num_samples", 10)
+ mc.set_parameter("damage_interval_keys", ["DS_0", "DS_1", "DS_2", "DS_3"])
+ mc.set_parameter("failure_state_keys", ["DS_1", "DS_2", "DS_3"])
+ mc.set_parameter("seed", 1111)
+
+ # Run Monte Carlo limit state analysis
+ mc.run_analysis()
+```
+
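+The per-sample failure states can be turned back into empirical probabilities to cross-check the `failure_probability` output. A short sketch, assuming the standard pyincore dataset accessors and that the sample states are stored in a comma-separated `failure` column (the column name is an assumption):
+
+```
+    # Pull both outputs back as pandas DataFrames
+    fp_df = mc.get_output_dataset("failure_probability").get_dataframe_from_csv()
+    states_df = mc.get_output_dataset("sample_failure_state").get_dataframe_from_csv()
+
+    # Each row holds one 0/1 outcome per Monte Carlo sample: 0 = failed, 1 = not failed
+    samples = states_df["failure"].str.split(",", expand=True).astype(int)
+    empirical_failure_probability = (samples == 0).mean(axis=1)
+
+    # The "damage" input can also be chained from another analysis in the same session, e.g.
+    # mc.set_input_dataset("damage", bldg_dmg.get_output_dataset("ds_result"))
+```
+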
+full analysis: [mc_limit_state_prob.ipynb](https://github.com/IN-CORE/incore-docs/blob/main/notebooks/mc_limit_state_prob.ipynb)
diff --git a/manual_jb/content/analyses/mean_dmg.md b/manual_jb/content/analyses/mean_dmg.md
index 1520c3ea..14e32c5f 100644
--- a/manual_jb/content/analyses/mean_dmg.md
+++ b/manual_jb/content/analyses/mean_dmg.md
@@ -18,10 +18,10 @@ key name | type | name | description
**Input datasets**
-key name | type | name | description
---- | --- | --- | ---
-`damage` * | [`ergo:buildingDamageVer4`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:buildingDamageVer4)<br>[`ergo:buildingDamageVer5`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:buildingDamageVer5)<br>[`ergo:nsBuildingInventoryDamage`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:nsBuildingInventoryDamage)<br>[`ergo:nsBuildingInventoryDamageVer2`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:nsBuildingInventoryDamageVer2)<br>[`ergo:bridgeDamage`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:bridgeDamage)<br>[`ergo:bridgeDamageVer2`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:bridgeDamageVer2)<br>[`ergo:waterFacilityDamageVer4`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:waterFacilityDamageVer4)<br>[`ergo:waterFacilityDamageVer5`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:waterFacilityDamageVer5)<br>[`ergo:roadDamage`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:roadDamage)<br>[`ergo:roadDamageVer2`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:roadDamageVer2)<br>[`incore:epfDamage`](https://incore.ncsa.illinois.edu/semantics/api/types/incore:epfDamage)<br>[`incore:epfDamageVer2`](https://incore.ncsa.illinois.edu/semantics/api/types/incore:epfDamageVer2)<br>[`incore:pipelineDamage`](https://incore.ncsa.illinois.edu/semantics/api/types/incore:pipelineDamage)<br>[`incore:pipelineDamageVer2`](https://incore.ncsa.illinois.edu/semantics/api/types/incore:pipelineDamageVer2) | Infrastructure dataset | An infrastructure dataset.
-`dmg_ratios` * | [`ergo:buildingDamageRatios`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:buildingDamageRatios)<br>[`ergo:bridgeDamageRatios`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:bridgeDamageRatios)<br>[`ergo:buildingContentDamageRatios`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:buildingContentDamageRatios)<br>[`ergo:buildingASDamageRatios`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:buildingASDamageRatios)<br>[`ergo:buildingDSDamageRatios`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:buildingDSDamageRatios)<br>[`ergo:roadDamageRatios`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:roadDamageRatios) | Damage ratios | A damage ratios dataset.
+key name | type | name | description
+--- |------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| --- | ---
+`damage` * | [`ergo:buildingDamageVer4`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:buildingDamageVer4)
[`ergo:buildingDamageVer5`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:buildingDamageVer5)
[`ergo:nsBuildingInventoryDamage`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:nsBuildingInventoryDamage)
[`ergo:nsBuildingInventoryDamageVer2`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:nsBuildingInventoryDamageVer2)
[`ergo:nsBuildingInventoryDamageVer3`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:nsBuildingInventoryDamageVer3)
[`ergo:nsBuildingInventoryDamageVer4`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:nsBuildingInventoryDamageVer4)
[`ergo:bridgeDamage`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:bridgeDamage)
[`ergo:bridgeDamageVer2`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:bridgeDamageVer2)
[`ergo:waterFacilityDamageVer4`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:waterFacilityDamageVer4)
[`ergo:waterFacilityDamageVer5`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:waterFacilityDamageVer5)
[`ergo:roadDamage`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:roadDamage)
[`ergo:roadDamageVer2`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:roadDamageVer2)
[`incore:epfDamage`](https://incore.ncsa.illinois.edu/semantics/api/types/incore:epfDamage)
[`incore:epfDamageVer2`](https://incore.ncsa.illinois.edu/semantics/api/types/incore:epfDamageVer2)
[`incore:pipelineDamage`](https://incore.ncsa.illinois.edu/semantics/api/types/incore:pipelineDamage)
[`incore:pipelineDamageVer2`](https://incore.ncsa.illinois.edu/semantics/api/types/incore:pipelineDamageVer2) | Infrastructure dataset | An infrastructure dataset.
+`dmg_ratios` * | [`ergo:buildingDamageRatios`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:buildingDamageRatios)
[`ergo:bridgeDamageRatios`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:bridgeDamageRatios)
[`ergo:buildingContentDamageRatios`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:buildingContentDamageRatios)
[`ergo:buildingASDamageRatios`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:buildingASDamageRatios)
[`ergo:buildingDSDamageRatios`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:buildingDSDamageRatios)
[`ergo:roadDamageRatios`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:roadDamageRatios) | Damage ratios | A damage ratios dataset.
**Output datasets**
@@ -55,4 +55,4 @@ code snippet:
md.run_analysis()
```
-full analysis: [mean_dmg.ipynb](https://github.com/IN-CORE/incore-docs/blob/main/notebooks/mean_dmg.ipynb)
\ No newline at end of file
+full analysis: [mean_dmg.ipynb](https://github.com/IN-CORE/incore-docs/blob/main/notebooks/mean_dmg.ipynb)
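+
+As with the other analyses, the result can be pulled into a DataFrame after `md.run_analysis()` completes. A minimal sketch, assuming the output dataset key is `result`:
+
+```
+    # Retrieve the mean damage result and convert it to a DataFrame
+    mean_df = md.get_output_dataset("result").get_dataframe_from_csv()
+    mean_df.head()
+```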
diff --git a/manual_jb/content/analyses/non_structural_building_dmg.md b/manual_jb/content/analyses/non_structural_building_dmg.md
deleted file mode 100644
index 0dcc5f2f..00000000
--- a/manual_jb/content/analyses/non_structural_building_dmg.md
+++ /dev/null
@@ -1,92 +0,0 @@
-# Nonstructural building damage
-
-**Description**
-
-This analysis computes the non-structural damage to buildings based on a particular hazard. Currently, supported hazard is: **earthquake**.
-
-The process is similar to evaluating other structural damages. The probabilities for building damage
-state are obtained using fragility curves and a hazard definition, each building site will have
-a specific PGA (Peak Ground Acceleration), a measurement of an earthquake hazard for each scenario.
-Liquefaction effect, which is defined as a change in stress condition, in which material that is ordinarily
-a solid behaves like a liquid can be considered as well. The LMF (Liquefaction Modification Factor)
-values are implemented as multiplication factors to the median fragility values and they must be present
-in the dataset. This analysis uses two types of fragility curves assigned to the building; acceleration-sensitive (AS) and
-drift-sensitive (DS).
-
-The code covers Normal and LogNormal fragilities with 3 limit states for AS and DS and creates an output CSV file
-with corresponding damage states. The second output file is a JSON with information about hazard and fragilities.
-
-
-**Input Parameters**
-
-key name | type | name | description
---- | --- | --- | ---
-`result_name` * | `str` | Result name | Name of the result dataset.
-`hazard_type` | `str` | Hazard type | Hazard type (earthquake).
-`hazard_id` | `str` | Hazard id | ID of the hazard from the Hazard service.
-`fragility_key_as` | `str` | AS fragility | Acceleration-sensitive fragility key used in mapping dataset.
-`fragility_key_ds` | `str` | DS fragility | Drift-sensitive fragility key used in mapping dataset.
-`use_liquefation` | `bool` | Liquefaction | Use liquefaction, if applicable to the hazard.
Default *False*.
-`liq_geology_dataset_id` | `str` | Liquefaction id | A liquefaction susceptibility dataset.
-`use_hazard_uncertainty` | `bool` | Uncertainty | Use hazard uncertainty. Default is
*False*.
-`num_cpu` | `int` | Number of CPUs | Number of CPUs used for parallel computations.
Default *1*.
-
-**Input Hazards**
-
-key name | type | name | description
---- |---------------|---------------| ---
-`hazard` | `earthquake` | Hazard | Supported hazard object for using local and remote hazards.
-
-
-**Input Datasets**
-
-key name | type | name | description
---- | --- | --- | ---
-`buildings` * | [`ergo:buildingInventoryVer4`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:buildingInventoryVer4)
[`ergo:buildingInventoryVer5`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:buildingInventoryVer5)
[`ergo:buildingInventoryVer6`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:buildingInventoryVer6)
[`ergo:buildingInventoryVer7`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:buildingInventoryVer7) | Building dataset | A building dataset.
-`dfr3_mapping_set` * | [`incore:dfr3MappingSet`](https://incore.ncsa.illinois.edu/semantics/api/types/incore:dfr3MappingSet) | DFR3 Mapping Set | DFR3 Mapping Set.
-
-**Output datasets**
-
-key name | type | parent key | name | description
---- | --- | --- | --- | ---
-`result` * | [`ergo:nsBuildingInventoryDamageVer2`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:nsBuildingInventoryDamageVer2) | `buildings` | Results | A dataset containing results
with both, acceleration sensitivy (AS) related
and drift sensitivity (DS) related damage states
(format: CSV).
-`damage_result` * | [`incore:nsBuildingInventoryDamageSupplement`](https://incore.ncsa.illinois.edu/semantics/api/types/incore:nsBuildingInventoryDamageSupplement) | `buildings` | Results | Information about applied hazard value and fragility
(format: JSON).
-
-(* required)
-
-**Execution**
-
-code snippet:
-
-```
- # Create an instance
- non_structural_building_dmg = NonStructBuildingDamage(client)
-
- # Load input datasets
- non_structural_building_dmg.load_remote_input_dataset("buildings", building_dataset_id)
-
- # Load fragility mapping
- fragility_service = FragilityService(client)
- mapping_set = MappingSet(fragility_service.get_mapping(mapping_id))
- non_structural_building_dmg.set_input_dataset('dfr3_mapping_set', mapping_set)
-
- # Specify the result name
- result_name = "non_structural_building_dmg_result"
-
- # Set analysis parameters
- non_structural_building_dmg.set_parameter("result_name", result_name)
- non_structural_building_dmg.set_parameter("hazard_type", hazard_type)
- non_structural_building_dmg.set_parameter("hazard_id", hazard_id)
- non_structural_building_dmg.set_parameter("num_cpu", 4)
-
- # Shelby County Liquefaction Susceptibility
- use_liquefaction = True
- liq_geology_dataset_id = "5a284f55c7d30d13bc0824ba"
-
- non_structural_building_dmg.set_parameter("use_liquefaction", use_liquefaction)
- non_structural_building_dmg.set_parameter("liq_geology_dataset_id", liq_geology_dataset_id)
-
- non_structural_building_dmg.run_analysis()
-```
-
-full analysis: [non_structural_building_dmg.ipynb](https://github.com/IN-CORE/incore-docs/blob/main/notebooks/non_structural_building_dmg.ipynb)
diff --git a/manual_jb/content/analyses/social_vulnerability.md b/manual_jb/content/analyses/social_vulnerability.md
index 4dfaed02..e4a72785 100644
--- a/manual_jb/content/analyses/social_vulnerability.md
+++ b/manual_jb/content/analyses/social_vulnerability.md
@@ -38,7 +38,7 @@ key name | type | name | description
key name | type | parent key | name | description
--- | --- | --- | --- | ---
-`social_vulnerability_score` * | [`incore:socialVulnerabilityScore`](https://incore.ncsa.illinois.edu/semantics/api/types/incore:socialVulnerabilityScore) | `social_vulnerability_score` | Results | A dataset containing results (format: CSV)
with social vulnerability score at the required geographic level.
+`sv_result` * | [`incore:socialVulnerabilityScore`](https://incore.ncsa.illinois.edu/semantics/api/types/incore:socialVulnerabilityScore) | `social_vulnerability_score` | Results | A dataset containing results (format: CSV)
with social vulnerability score at the required geographic level.
(* required)
diff --git a/manual_jb/content/analyses/social_vulnerability_score.md b/manual_jb/content/analyses/social_vulnerability_score.md
new file mode 100644
index 00000000..23dccdc5
--- /dev/null
+++ b/manual_jb/content/analyses/social_vulnerability_score.md
@@ -0,0 +1,71 @@
+# Social vulnerability score
+
+**Description**
+
+This analysis computes a social vulnerability score for each zone in the census data. The computation
+extracts zoning information and derives a social vulnerability score by comparing demographic features of interest
+against national average values.
+
+The output of the computation is a dataset in CSV format.
+
+**Contributors**
+
+- Science: Elaina Sutley, Amin Enderami
+- Implementation: Amin Enderami, Santiago Núñez-Corrales, and NCSA IN-CORE Dev Team
+
+**Related publications**
+- Enderami, S. A., and Sutley, E. (2024). Social vulnerability score: a scalable index for representing social
+vulnerability in virtual community resilience testbeds. Natural Hazards. https://doi.org/10.1007/s11069-024-06499-z
+
+**Input parameters**
+
+key name | type | name | description
+--- | --- | --- | ---
+`result_name` * | `str` | Result name | Name of the result dataset.
+`year` * | `int` | Year | Census year for the input datasets.
+`state` * | `str` | State | State according to US Census.
+`county` * | `str` | County | County according to US Census.
+`census_geo_level` * | `str` | Census geographic level | Geographic level of granularity described by the dataset.
+
+**Input datasets**
+
+key name | type | name | description
+--- | --- | --- | ---
+`national_vulnerability_feature_averages` * | [`incore:socialVulnerabilityFeatureAverages`](https://incore.ncsa.illinois.edu/semantics/api/types/incore:socialVulnerabilityFeatureAverages) | National vulnerability feature averages | National averages for features determining social vulnerability.
+`social_vulnerability_demographic_factors` * | [`incore:socialVulnerabilityDemFactors`](https://incore.ncsa.illinois.edu/semantics/api/types/incore:socialVulnerabilityDemFactors) | Demographic factors | Social vulnerability demographic factors for a given geographic type.
+
+**Output datasets**
+
+key name | type | parent key | name | description
+--- | --- | --- | --- | ---
+`sv_result` * | [`incore:socialVulnerabilityScore`](https://incore.ncsa.illinois.edu/semantics/api/types/incore:socialVulnerabilityScore) | `social_vulnerability_score` | Results | A dataset containing results (format: CSV)
with social vulnerability score at the required geographic level.
+
+(* required)
+
+**Execution**
+
+code snippet:
+
+```
+ # Create social vulnerability score analysis instance
+ svc = SocialVulnerabilityScore(client)
+
+ # Load input dataset
+ svc.load_remote_input_dataset("national_vulnerability_feature_averages", national_vulnerability_feature_averages)
+ svc.load_remote_input_dataset("social_vulnerability_demographic_factors", social_vulnerability_demographic_factors)
+
+ # Specify the result name
+    result_name = "social_vulnerability"
+
+ # Set analysis parameters
+ svc.set_parameter("result_name", result_name)
+ svc.set_parameter("year", year)
+ svc.set_parameter("state", state)
+ svc.set_parameter("county", county)
+ svc.set_parameter("census_geo_level", census_geo_level)
+
+ # Run social vulnerability analysis
+ svc.run_analysis()
+```
+
+full analysis: [social_vulnerability_score.ipynb](https://github.com/IN-CORE/incore-docs/blob/main/notebooks/social_vulnerability_score.ipynb)
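+
+The resulting scores can be inspected directly from the output dataset. A minimal sketch, assuming the `svc` instance from the snippet above and the `sv_result` output key listed in the table:
+
+```
+    # Retrieve the social vulnerability score results as a DataFrame
+    sv_df = svc.get_output_dataset("sv_result").get_dataframe_from_csv()
+    sv_df.head()
+```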
diff --git a/manual_jb/content/analyses/traffic_flow_recovery.md b/manual_jb/content/analyses/traffic_flow_recovery.md
new file mode 100644
index 00000000..28cafab7
--- /dev/null
+++ b/manual_jb/content/analyses/traffic_flow_recovery.md
@@ -0,0 +1,73 @@
+# Traffic flow recovery
+
+**Description**
+
+This analysis computes the damage to bridges by first calling the bridge damage analysis. It then uses the nodes and links
+of the traffic flow path, together with Average Daily Traffic (ADT) data for the bridges, to calculate the post-disaster recovery of the traffic flow network.
+Additionally, the analysis can be used in stochastic calculations by supplying an integer value
+to seed the random number generator.
+
+The output of this analysis is a CSV file with recovery trajectory timelines and data.
+
+**Input Parameters**
+
+key name | type | name | description
+--- | --- | --- | ---
+`pm` * | `int` | Performance metrics | Performance metric used in the recovery calculation.
+`ini_num_population` * | `int` | Population number | An initial population number.
+`population_size` * | `int` | Population size | A population size.
+`num_generation` * | `int` | Number of generations | Number of iterations per scenario.
+`mutation_rate` * | `float` | Mutation rate | Mutation rate for the NSGA-II algorithm used for recovery rate.
+`crossover_rate` * | `float` | Crossover rate | Crossover rate for the NSGA-II algorithm used for recovery rate.
+`num_cpu` | `int` | Number of CPUs | Number of CPUs used for parallel computations.
Default *1*.
+
+**Input Datasets**
+
+key name | type | name | description
+--- | --- | --- | ---
+`bridges` * | [`ergo:bridges`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:bridges) | Bridge dataset | A bridge dataset.
+`nodes` * | [`ergo:roadNetwork`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:roadNetwork) | Road nodes | A road network dataset.
+`links` * | [`ergo:roadNetwork`](https://incore.ncsa.illinois.edu/semantics/api/types/ergo:roadNetwork) | Road links | A road network dataset.
+`bridge_damage_value` * | [`incore:bridgeDamageValue`](https://incore.ncsa.illinois.edu/semantics/api/types/incore:bridgeDamageValue) | Bridge damages | A bridge damage dataset.
+`unrepaired_bridge` * | [`incore:unrepairedBridge`](https://incore.ncsa.illinois.edu/semantics/api/types/incore:unrepairedBridge) | Unrepaired bridges | An unrepaired bridge dataset.
+`ADT` * | [`incore:ADT`](https://incore.ncsa.illinois.edu/semantics/api/types/incore:ADT) | Bridge traffic | An Average Daily Traffic (ADT) dataset for bridges.
+
+**Output Datasets**
+
+key name | type | name | description
+--- |-----------------------------------------------------------------------------------------------------------------------------------| --- | ---
+`optimal_solution_of_bridge_repair_schedule` * | [`incore:transportationRepairSchedule`](https://incore.ncsa.illinois.edu/semantics/api/types/incore:transportationRepairSchedule) | Repair schedule | A dataset containing results (format: CSV).
+`overall_traffic_flow_recovery_trajectory` * | [`incore:trafficFlowRecovery`](https://incore.ncsa.illinois.edu/semantics/api/types/incore:trafficFlowRecovery) | Recovery trajectory | A dataset containing results (format: CSV).
+
+(* required)
+
+**Execution**
+
+code snippet:
+
+```
+ # Create traffic flow recovery instance
+ traffic_flow_recovery = TrafficFlowRecovery(client)
+
+ # Load input datasets
+ traffic_flow_recovery.load_remote_input_dataset("nodes", nodes)
+ traffic_flow_recovery.load_remote_input_dataset("links", links)
+ traffic_flow_recovery.load_remote_input_dataset('bridges', bridges)
+ traffic_flow_recovery.load_remote_input_dataset('bridge_damage_value', bridge_damage)
+ traffic_flow_recovery.load_remote_input_dataset('unrepaired_bridge', unrepaired)
+ traffic_flow_recovery.load_remote_input_dataset('ADT', ADT_data)
+
+ # Set analysis parameters
+ traffic_flow_recovery.set_parameter("num_cpu", 4)
+ traffic_flow_recovery.set_parameter("pm", 1)
+ traffic_flow_recovery.set_parameter('ini_num_population', 5)
+ traffic_flow_recovery.set_parameter("population_size", 3)
+ traffic_flow_recovery.set_parameter("num_generation", 2)
+ traffic_flow_recovery.set_parameter("mutation_rate", 0.1)
+ traffic_flow_recovery.set_parameter("crossover_rate", 1.0)
+
+ # Run traffic flow recovery analysis
+ traffic_flow_recovery.run_analysis()
+```
+
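+Both outputs can be examined after the run. A minimal sketch, assuming the `traffic_flow_recovery` instance from the snippet above and the output keys listed in the table:
+
+```
+    # Retrieve the optimal bridge repair schedule
+    schedule_df = traffic_flow_recovery.get_output_dataset("optimal_solution_of_bridge_repair_schedule").get_dataframe_from_csv()
+
+    # Retrieve the overall traffic flow recovery trajectory
+    trajectory_df = traffic_flow_recovery.get_output_dataset("overall_traffic_flow_recovery_trajectory").get_dataframe_from_csv()
+    trajectory_df.head()
+```
+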
+full analysis: [traffic_flow_recovery.ipynb](https://github.com/IN-CORE/incore-docs/blob/main/notebooks/traffic_flow_recovery.ipynb)
\ No newline at end of file
diff --git a/manual_jb/content/analyses/transportation_recovery.md b/manual_jb/content/analyses/transportation_recovery.md
index b8bfa50e..b8a84f02 100644
--- a/manual_jb/content/analyses/transportation_recovery.md
+++ b/manual_jb/content/analyses/transportation_recovery.md
@@ -34,10 +34,10 @@ key name | type | name | description
**Output Datasets**
-key name | type | name | description
---- | --- | --- | ---
+key name | type | name | description
+--- |-----------------------------------------------------------------------------------------------------------------------------------| --- | ---
`optimal_solution_of_bridge_repair_schedule` * | [`incore:transportationRepairSchedule`](https://incore.ncsa.illinois.edu/semantics/api/types/incore:transportationRepairSchedule) | Repair schedule | A dataset containing results (format: CSV).
-`overall_transportation_recovery_trajectory` * | [`incore:transportationRecovery`](https://incore.ncsa.illinois.edu/semantics/api/types/incore:transportationRecovery) | Recovery trajectory | A dataset containing results (format: CSV).
+`overall_traffic_flow_recovery_trajectory` * | [`incore:trafficFlowRecovery`](https://incore.ncsa.illinois.edu/semantics/api/types/incore:trafficFlowRecovery) | Recovery trajectory | A dataset containing results (format: CSV).
(* required)
diff --git a/manual_jb/content/faq.md b/manual_jb/content/faq.md
index 90c3a975..365c2473 100644
--- a/manual_jb/content/faq.md
+++ b/manual_jb/content/faq.md
@@ -105,6 +105,7 @@
very explicitly if they occur e.g. package X requires package Y version <5.0. We make sure that pyIncore works with
fresh environment and in this case we recommend re-installing pyIncore.
+
* *Should I use virtual environment for running the pyIncore?*
@@ -118,6 +119,18 @@
Note: Use Anaconda if you do not have full administrative privileges on your computer. It has been reported that
Jupyter Notebook can't be subsequently installed in Miniconda environment. With Anaconda **Jupyter Notebook** is already
pre-installed.
+
+
+
+* *When installing pyIncore, it takes a long time to resolve dependencies. Is there anything I can do?*
+
+ We recommend trying the libmamba solver to improve dependency resolution. When installing pyIncore, use the
+ following command:
+
+ ```
+ conda install -c in-core pyincore --solver=libmamba
+ ```
+
### Running pyIncore
diff --git a/manual_jb/content/getting_started.md b/manual_jb/content/getting_started.md
index 2ee8e97e..6d431220 100644
--- a/manual_jb/content/getting_started.md
+++ b/manual_jb/content/getting_started.md
@@ -67,10 +67,24 @@ If you don't have Miniconda installed, do the following steps.
```
conda install -c in-core pyincore
```
+
+ If you have trouble installing pyincore or it is taking a long time to resolve the dependencies, try using the libmamba solver by running the following command:
+
+ ```
+ conda install -c in-core pyincore --solver=libmamba
+ ```
+
A user can also install **pyIncore-viz** module for which **pyIncore** installs as a dependency:
```
conda install -c in-core pyincore-viz
```
+ Similarly, if you have issues installing pyincore-viz, you can use the libmamba solver flag to resolve the
+ dependencies by running the following command:
+
+ ```
+ conda install -c in-core pyincore-viz --solver=libmamba
+ ```
+
If the installed pyincore or pyincore-viz version is not the latest or lower than the desired one, specify the version number in installation command.
```
conda install -c in-core pyincore=1.14.0 (or your version of choice)
@@ -230,4 +244,4 @@ If you have problems running Notebooks, check our [WIKI questions](https://opens
* The Building analysis Jupyter Notebook is also available at [IN-CORE project](https://github.com/IN-CORE/incore-docs/blob/main/notebooks/bridge_dmg.ipynb) on GitHub.
* Our Slack channel is now open to the community. To get started, go to
[https://in-core.slack.com/](https://in-core.slack.com/). Or, you can grab an invitation here:
-[https://join.slack.com/t/in-core/shared_invite/zt-25zffgnae-h0v8uGjpSli1YYp0Ypr68Q](https://join.slack.com/t/in-core/shared_invite/zt-25zffgnae-h0v8uGjpSli1YYp0Ypr68Q)
\ No newline at end of file
+[https://join.slack.com/t/in-core/shared_invite/zt-25zffgnae-h0v8uGjpSli1YYp0Ypr68Q](https://join.slack.com/t/in-core/shared_invite/zt-25zffgnae-h0v8uGjpSli1YYp0Ypr68Q)
diff --git a/manual_jb/content/notebooks_other.md b/manual_jb/content/notebooks_other.md
index e78e349d..ac63c9d6 100644
--- a/manual_jb/content/notebooks_other.md
+++ b/manual_jb/content/notebooks_other.md
@@ -19,6 +19,7 @@ Note that some Notebooks might not work with the latest version of pyIncore.
[galveston_community_app.ipynb](https://github.com/IN-CORE/incore-docs/blob/main/notebooks/galveston_community_app.ipynb)
[galveston_community_app_retrofit.ipynb](https://github.com/IN-CORE/incore-docs/blob/main/notebooks/galveston_community_app_retrofit.ipynb)
[galveston_cge.ipynb](https://github.com/IN-CORE/incore-docs/blob/main/notebooks/galveston_cge.ipynb)
+[gas_facility_dmg.ipynb](https://github.com/IN-CORE/incore-docs/blob/main/notebooks/gas_facility_dmg.ipynb)
[housing household recovery](https://github.com/IN-CORE/incore-docs/blob/main/notebooks/housing_household_recovery.ipynb)
[housing_recovery.ipynb](https://github.com/IN-CORE/incore-docs/blob/main/notebooks/housing_recovery.ipynb)
[indp.ipynb](https://github.com/IN-CORE/incore-docs/blob/main/notebooks/indp.ipynb)
@@ -31,7 +32,7 @@ Note that some Notebooks might not work with the latest version of pyIncore.
[mean_dmg.ipynb](https://github.com/IN-CORE/incore-docs/blob/main/notebooks/mean_dmg.ipynb)
[multi_retrofit_optimization.ipynb](https://github.com/IN-CORE/incore-docs/blob/main/notebooks/multi_retrofit_optimization.ipynb)
[nci_functionality.ipynb](https://github.com/IN-CORE/incore-docs/blob/main/notebooks/nci_functionality.ipynb)
-[non_structural_building_dmg.ipynb](https://github.com/IN-CORE/incore-docs/blob/main/notebooks/non_structural_building_dmg.ipynb)
+[building_nonstructural_dmg.ipynb](https://github.com/IN-CORE/incore-docs/blob/main/notebooks/building_nonstructural_dmg.ipynb)
[pipeline_dmg.ipynb](https://github.com/IN-CORE/incore-docs/blob/main/notebooks/pipeline_dmg.ipynb)
[pipeline_dmg_w_repair_rate.ipynb](https://github.com/IN-CORE/incore-docs/blob/main/notebooks/pipeline_dmg_w_repair_rate.ipynb)
[pipeline_repair_cost.ipynb](https://github.com/IN-CORE/incore-docs/blob/main/notebooks/pipeline_repair_cost.ipynb)
@@ -48,7 +49,9 @@ Note that some Notebooks might not work with the latest version of pyIncore.
[slc_community_app_ml_cge.ipynb](https://github.com/IN-CORE/incore-docs/blob/main/notebooks/slc_community_app_ml_cge.ipynb)
[slc_community_app_retrofit.ipynb](https://github.com/IN-CORE/incore-docs/blob/main/notebooks/slc_community_app_retrofit.ipynb)
[social_vulnerability.ipynb](https://github.com/IN-CORE/incore-docs/blob/main/notebooks/social_vulnerability.ipynb)
+[social_vulnerability_score.ipynb](https://github.com/IN-CORE/incore-docs/blob/main/notebooks/social_vulnerability_score.ipynb)
[tornadoepn_dmg.ipynb](https://github.com/IN-CORE/incore-docs/blob/main/notebooks/tornadoepn_dmg.ipynb)
+[traffic_flow_recovery.ipynb](https://github.com/IN-CORE/incore-docs/blob/main/notebooks/traffic_flow_recovery.ipynb)
[transportation_recovery.ipynb](https://github.com/IN-CORE/incore-docs/blob/main/notebooks/transportation_recovery.ipynb)
[water_facility_dmg.ipynb](https://github.com/IN-CORE/incore-docs/blob/main/notebooks/water_facility_dmg.ipynb)
[wfn_functionality.ipynb](https://github.com/IN-CORE/incore-docs/blob/main/notebooks/wfn_functionality.ipynb)
diff --git a/manual_jb/content/pyincore/install_pyincore.md b/manual_jb/content/pyincore/install_pyincore.md
index 344e8320..c844f1c9 100644
--- a/manual_jb/content/pyincore/install_pyincore.md
+++ b/manual_jb/content/pyincore/install_pyincore.md
@@ -11,10 +11,25 @@ These steps guides you on how to install both pyIncore and Jupyter Notebooks on
```
conda install -c in-core pyincore
```
+ If you have trouble installing pyincore or it is taking a long time to resolve the dependencies, try using the
+ libmamba solver by running the following command:
+
+ ```
+ conda install -c in-core pyincore --solver=libmamba
+ ```
+
A user can also install **pyIncore-viz** module for which **pyIncore** installs as a dependency:
```
conda install -c in-core pyincore-viz
```
+
+ Similarly, if you have issues installing pyincore-viz, you can use the libmamba solver flag to resolve the
+ dependencies by running the following command:
+
+ ```
+ conda install -c in-core pyincore-viz --solver=libmamba
+ ```
+
If the installed pyincore version is not the latest or lower than the desired one, specify the version number in installation command.
```
conda install -c in-core pyincore=1.14.0 (or your version of choice)
diff --git a/manual_jb/content/pyincore/quick_reference.md b/manual_jb/content/pyincore/quick_reference.md
index 6d08fb12..e4d9f955 100644
--- a/manual_jb/content/pyincore/quick_reference.md
+++ b/manual_jb/content/pyincore/quick_reference.md
@@ -27,6 +27,14 @@
```
conda install -c in-core pyincore-viz
```
+
+ If you have trouble installing pyincore and pyincore-viz or it is taking a long time to resolve the dependencies,
+ try using the libmamba solver by running the following command:
+
+ ```
+ conda install -c in-core pyincore-viz --solver=libmamba
+ ```
+
If the installed pyincore or pyincore-viz version is not the latest or lower than the desired one, specify the version number in installation command.
```
conda install -c in-core pyincore-viz=1.8.3 (or your version of choice)
diff --git a/manual_jb/content/pyincore_data.md b/manual_jb/content/pyincore_data.md
index db20a5c5..e5fcabd1 100644
--- a/manual_jb/content/pyincore_data.md
+++ b/manual_jb/content/pyincore_data.md
@@ -18,6 +18,13 @@ or [Miniconda](https://docs.conda.io/en/latest/miniconda.html).
```
conda install -c in-core pyincore-data
```
+
+ If you have trouble installing pyincore-data or it is taking a long time to resolve the dependencies, try using the
+ libmamba solver by running the following command:
+
+ ```
+ conda install -c in-core pyincore-data --solver=libmamba
+ ```
If the installed pyincore-data version is not the latest or lower than the desired one, specify the version number in installation command.
```
conda install -c in-core pyincore-data=0.5.0 (or your version of choice)
diff --git a/manual_jb/content/pyincore_viz.md b/manual_jb/content/pyincore_viz.md
index 574e9d3b..aeb2eaa1 100644
--- a/manual_jb/content/pyincore_viz.md
+++ b/manual_jb/content/pyincore_viz.md
@@ -32,6 +32,13 @@ or [Miniconda](https://docs.conda.io/en/latest/miniconda.html).
```
conda install -c in-core pyincore-viz
```
+ If you have trouble installing pyincore-viz or it is taking a long time to resolve the dependencies, try using the
+ libmamba solver by running the following command:
+
+ ```
+ conda install -c in-core pyincore-viz --solver=libmamba
+ ```
+
If the installed pyincore-viz version is not the latest or lower than the desired one, specify the version number in installation command.
```
conda install -c in-core pyincore-viz=1.5.0 (or your version of choice)
diff --git a/manual_jb/insert_ga_to_header.py b/manual_jb/insert_ga_to_header.py
new file mode 100644
index 00000000..fedf5e41
--- /dev/null
+++ b/manual_jb/insert_ga_to_header.py
@@ -0,0 +1,46 @@
+import os
+from bs4 import BeautifulSoup
+
+# Directory containing the built HTML files
+build_dir = "content/_build/html"
+
+# Google Analytics code snippet to insert into the HTML files
+ga_code = f"""
+
+"""
+
+# Loop through each HTML file in the build directory
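+# Note: os.listdir() only lists the top level of build_dir, so HTML files in subdirectories are not processed.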
+for filename in os.listdir(build_dir):
+ if filename.endswith(".html"):
+ filepath = os.path.join(build_dir, filename)
+ print(f"Processing file: {filepath}")
+
+ # Read the content of the HTML file
+ with open(filepath, "r", encoding="utf-8") as file:
+ html_content = file.read()
+
+ # Parse HTML content using BeautifulSoup
+ soup = BeautifulSoup(html_content, "html.parser")
+
+        # Find the <head> tag and insert the Google Analytics code before it
+ head_tag = soup.find("head")
+ if head_tag:
+            print(f"Found <head> tag in {filename}:")
+ print("Inserting Google Analytics code...")
+ head_tag.insert(0, BeautifulSoup(ga_code, "html.parser"))
+
+ # Write the modified HTML content back to the file
+ with open(filepath, "w", encoding="utf-8") as file:
+ file.write(str(soup))
+
+print("Google Analytics code insertion completed.")
\ No newline at end of file
diff --git a/notebooks/portfolio_recovery.ipynb b/notebooks/building_cluster_recovery.ipynb
similarity index 52%
rename from notebooks/portfolio_recovery.ipynb
rename to notebooks/building_cluster_recovery.ipynb
index 5c7d4aa8..b770b703 100644
--- a/notebooks/portfolio_recovery.ipynb
+++ b/notebooks/building_cluster_recovery.ipynb
@@ -6,7 +6,7 @@
"metadata": {},
"outputs": [],
"source": [
- "from pyincore.analyses.buildingportfolio import BuildingPortfolioRecoveryAnalysis\n",
+ "from pyincore.analyses.buildingclusterrecovery import BuildingClusterRecovery\n",
"\n",
"from pyincore import IncoreClient"
]
@@ -41,13 +41,13 @@
"metadata": {},
"outputs": [],
"source": [
- "bldg_portfolio_recovery = BuildingPortfolioRecoveryAnalysis(client)\n",
- "bldg_portfolio_recovery.set_parameter(\"uncertainty\", True)\n",
- "bldg_portfolio_recovery.set_parameter(\"sample_size\", 35) # default none. Gets size form input dataset\n",
- "bldg_portfolio_recovery.set_parameter(\"random_sample_size\", 50) # default 10000\n",
- "bldg_portfolio_recovery.set_parameter(\"no_of_weeks\", 100) # default 250\n",
- "bldg_portfolio_recovery.set_parameter(\"result_name\", \"memphis\")\n",
- "# bldg_portfolio_recovery.set_parameter(\"num_cpu\", 1) Parallelization isn't implemented"
+ "bldg_cluster_recovery = BuildingClusterRecovery(client)\n",
+ "bldg_cluster_recovery.set_parameter(\"uncertainty\", True)\n",
+    "bldg_cluster_recovery.set_parameter(\"sample_size\", 35) # default none. Gets size from input dataset\n",
+ "bldg_cluster_recovery.set_parameter(\"random_sample_size\", 50) # default 10000\n",
+ "bldg_cluster_recovery.set_parameter(\"no_of_weeks\", 100) # default 250\n",
+ "bldg_cluster_recovery.set_parameter(\"result_name\", \"memphis\")\n",
+ "# bldg_cluster_recovery.set_parameter(\"num_cpu\", 1) Parallelization isn't implemented"
]
},
{
@@ -56,25 +56,22 @@
"metadata": {},
"outputs": [],
"source": [
- "bldg_portfolio_recovery.load_remote_input_dataset(\"building_data\", bldg_data_dataset)\n",
- "bldg_portfolio_recovery.load_remote_input_dataset(\"occupancy_mapping\", occupancy_dataset)\n",
- "bldg_portfolio_recovery.load_remote_input_dataset(\"building_damage\", bldg_damage_dataset)\n",
- "bldg_portfolio_recovery.load_remote_input_dataset(\"dmg_ratios\", mean_repair_dataset)\n",
- "bldg_portfolio_recovery.load_remote_input_dataset(\"utility\", utility_dataset)\n",
- "bldg_portfolio_recovery.load_remote_input_dataset(\"utility_partial\", utility_partial_dataset)\n",
- "bldg_portfolio_recovery.load_remote_input_dataset(\"coefFL\", coefFL_dataset)"
+ "bldg_cluster_recovery.load_remote_input_dataset(\"building_data\", bldg_data_dataset)\n",
+ "bldg_cluster_recovery.load_remote_input_dataset(\"occupancy_mapping\", occupancy_dataset)\n",
+ "bldg_cluster_recovery.load_remote_input_dataset(\"building_damage\", bldg_damage_dataset)\n",
+ "bldg_cluster_recovery.load_remote_input_dataset(\"dmg_ratios\", mean_repair_dataset)\n",
+ "bldg_cluster_recovery.load_remote_input_dataset(\"utility\", utility_dataset)\n",
+ "bldg_cluster_recovery.load_remote_input_dataset(\"utility_partial\", utility_partial_dataset)\n",
+ "bldg_cluster_recovery.load_remote_input_dataset(\"coefFL\", coefFL_dataset)"
]
},
{
"cell_type": "code",
"execution_count": null,
- "metadata": {
- "scrolled": true
- },
+ "metadata": {},
"outputs": [],
"source": [
- "# Creates two output files building-recovery.csv and portfolio-recovery.csv\n",
- "bldg_portfolio_recovery.run_analysis()"
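+    "# Run the building cluster recovery analysis\n",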
+ "bldg_cluster_recovery.run_analysis()"
]
},
{
@@ -83,7 +80,7 @@
"metadata": {},
"outputs": [],
"source": [
- "bldg_portfolio_recovery.get_output_dataset(\"result\").get_dataframe_from_csv().head()"
+ "bldg_cluster_recovery.get_output_dataset(\"result\").get_dataframe_from_csv().head()"
]
},
{
@@ -110,7 +107,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.11.6"
+ "version": "3.9.19"
}
},
"nbformat": 4,
diff --git a/notebooks/building_nonstructural_dmg.ipynb b/notebooks/building_nonstructural_dmg.ipynb
new file mode 100644
index 00000000..30c70cec
--- /dev/null
+++ b/notebooks/building_nonstructural_dmg.ipynb
@@ -0,0 +1,199 @@
+{
+ "cells": [
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "from pyincore import IncoreClient, FragilityService, MappingSet\n",
+ "from pyincore.analyses.nonstructbuildingdamage import NonStructBuildingDamage, NonStructBuildingUtil"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## set input parameters and input datasets"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "client = IncoreClient()\n",
+ "\n",
+ "# Memphis 7.9 AB-95\n",
+ "hazard_type = \"earthquake\"\n",
+ "hazard_id = \"5b902cb273c3371e1236b36b\"\n",
+ "\n",
+ "# damage ratio \n",
+ "dmg_ratio_id_as = \"5a284f2ec7d30d13bc08207c\"\n",
+ "dmg_ratio_id_ds = \"5a284f2ec7d30d13bc082090\"\n",
+ "dmg_ratio_id_content = \"5a284f2ec7d30d13bc082086\"\n",
+ "\n",
+ "# Shelby County Essential Facilities\n",
+ "building_dataset_id = \"5a284f42c7d30d13bc0821ba\"\n",
+ "\n",
+ "# Default Building Fragility Mapping v1.0\n",
+ "mapping_id = \"5b47b350337d4a3629076f2c\"\n",
+ "fragility_service = FragilityService(client)\n",
+ "mapping_set = MappingSet(fragility_service.get_mapping(mapping_id))"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "### use liquefaction (slow)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Shelby County Liquefaction Susceptibility\n",
+ "use_liquefaction = True\n",
+ "liq_geology_dataset_id = \"5a284f55c7d30d13bc0824ba\""
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "### Acceleration sensitive Non-Structural Damage"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "non_structural_building_dmg_as = NonStructBuildingDamage(client)\n",
+ "\n",
+ "# Load input datasets\n",
+ "non_structural_building_dmg_as.load_remote_input_dataset(\"buildings\", building_dataset_id)\n",
+ "non_structural_building_dmg_as.set_input_dataset(\"dfr3_mapping_set\", mapping_set)\n",
+ "\n",
+ "# Specify the result name\n",
+ "result_name = \"non_structural_building_dmg_result_as\"\n",
+ "\n",
+ "# Set analysis parameters\n",
+ "non_structural_building_dmg_as.set_parameter(\"result_name\", result_name)\n",
+ "non_structural_building_dmg_as.set_parameter(\"hazard_type\", hazard_type)\n",
+ "non_structural_building_dmg_as.set_parameter(\"hazard_id\", hazard_id)\n",
+ "non_structural_building_dmg_as.set_parameter(\"fragility_key\", NonStructBuildingUtil.DEFAULT_FRAGILITY_KEY_AS)\n",
+ "non_structural_building_dmg_as.set_parameter(\"num_cpu\", 4)\n",
+ "non_structural_building_dmg_as.set_parameter(\"use_liquefaction\", use_liquefaction)\n",
+ "non_structural_building_dmg_as.set_parameter(\"liq_geology_dataset_id\", liq_geology_dataset_id)\n",
+ "\n",
+ "# Run analysis\n",
+ "non_structural_building_dmg_as.run_analysis()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
+ "outputs": [],
+ "source": [
+ "# Retrieve result dataset\n",
+ "result_as = non_structural_building_dmg_as.get_output_dataset(\"result\")\n",
+ "\n",
+ "# Convert dataset to Pandas DataFrame\n",
+ "df_as = result_as.get_dataframe_from_csv()\n",
+ "\n",
+ "# Display top 5 rows of output data\n",
+ "df_as.head()"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "### Drift sensitive Non-Structural Damage"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "non_structural_building_dmg_ds = NonStructBuildingDamage(client)\n",
+ "\n",
+ "# Load input datasets\n",
+ "non_structural_building_dmg_ds.load_remote_input_dataset(\"buildings\", building_dataset_id)\n",
+ "non_structural_building_dmg_ds.set_input_dataset(\"dfr3_mapping_set\", mapping_set)\n",
+ "\n",
+ "# Specify the result name\n",
+ "result_name = \"non_structural_building_dmg_result_ds\"\n",
+ "\n",
+ "# Set analysis parameters\n",
+ "non_structural_building_dmg_ds.set_parameter(\"result_name\", result_name)\n",
+ "non_structural_building_dmg_ds.set_parameter(\"hazard_type\", hazard_type)\n",
+ "non_structural_building_dmg_ds.set_parameter(\"hazard_id\", hazard_id)\n",
+ "non_structural_building_dmg_ds.set_parameter(\"fragility_key\", NonStructBuildingUtil.DEFAULT_FRAGILITY_KEY_DS)\n",
+ "non_structural_building_dmg_ds.set_parameter(\"num_cpu\", 4)\n",
+ "non_structural_building_dmg_ds.set_parameter(\"use_liquefaction\", use_liquefaction)\n",
+ "non_structural_building_dmg_ds.set_parameter(\"liq_geology_dataset_id\", liq_geology_dataset_id)\n",
+ "\n",
+ "# Run analysis\n",
+ "non_structural_building_dmg_ds.run_analysis()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Retrieve result dataset\n",
+ "result_ds = non_structural_building_dmg_ds.get_output_dataset(\"result\")\n",
+ "\n",
+ "# Convert dataset to Pandas DataFrame\n",
+ "df_ds = result_ds.get_dataframe_from_csv()\n",
+ "\n",
+ "# Display top 5 rows of output data\n",
+ "df_ds.head()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": []
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 3 (ipykernel)",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.9.19"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 4
+}
diff --git a/notebooks/building_structural_dmg.ipynb b/notebooks/building_structural_dmg.ipynb
new file mode 100644
index 00000000..41b98227
--- /dev/null
+++ b/notebooks/building_structural_dmg.ipynb
@@ -0,0 +1,423 @@
+{
+ "cells": [
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "collapsed": false,
+ "jupyter": {
+ "outputs_hidden": false
+ },
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
+ "outputs": [],
+ "source": [
+ "from pyincore import IncoreClient, FragilityService, MappingSet\n",
+ "from pyincore.analyses.buildingstructuraldamage import BuildingStructuralDamage"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "collapsed": false,
+ "jupyter": {
+ "outputs_hidden": false
+ },
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
+ "outputs": [],
+ "source": [
+ "# Connect to IN-CORE service\n",
+ "client = IncoreClient()"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "### Building damage for Shelby county, TN with New Madrid earthquake."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "collapsed": false,
+ "jupyter": {
+ "outputs_hidden": false
+ },
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
+ "outputs": [],
+ "source": [
+ "# New madrid earthquake using Atkinson Boore 1995\n",
+ "hazard_type = \"earthquake\"\n",
+ "hazard_id = \"5b902cb273c3371e1236b36b\"\n",
+ "\n",
+ "# Building inventory in Shelby county, TN\n",
+ "bldg_dataset_id = \"5a284f0bc7d30d13bc081a28\"\n",
+ "\n",
+ "# Default Building Fragility mapping\n",
+ "mapping_id = \"5b47b350337d4a3629076f2c\""
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "collapsed": false,
+ "jupyter": {
+ "outputs_hidden": false
+ },
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
+ "outputs": [],
+ "source": [
+ "# Create building damage\n",
+ "bldg_dmg = BuildingStructuralDamage(client)\n",
+ "\n",
+ "# Load input dataset\n",
+ "bldg_dmg.load_remote_input_dataset(\"buildings\", bldg_dataset_id)\n",
+ "\n",
+ "# Load fragility mapping\n",
+ "fragility_service = FragilityService(client)\n",
+ "mapping_set = MappingSet(fragility_service.get_mapping(mapping_id))\n",
+ "bldg_dmg.set_input_dataset(\"dfr3_mapping_set\", mapping_set)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "collapsed": false,
+ "jupyter": {
+ "outputs_hidden": false
+ },
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
+ "outputs": [],
+ "source": [
+ "# Specify the result name\n",
+ "result_name = \"memphis_bldg_dmg_result\"\n",
+ "\n",
+ "# Set analysis parameters\n",
+ "bldg_dmg.set_parameter(\"result_name\", result_name)\n",
+ "bldg_dmg.set_parameter(\"hazard_type\", hazard_type)\n",
+ "bldg_dmg.set_parameter(\"hazard_id\", hazard_id)\n",
+ "bldg_dmg.set_parameter(\"num_cpu\", 1)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "collapsed": false,
+ "jupyter": {
+ "outputs_hidden": false
+ },
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
+ "outputs": [],
+ "source": [
+ "# Run building damage analysis\n",
+ "bldg_dmg.run_analysis()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "collapsed": false,
+ "jupyter": {
+ "outputs_hidden": false
+ },
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
+ "outputs": [],
+ "source": [
+ "# Retrieve result dataset\n",
+ "result = bldg_dmg.get_output_dataset(\"ds_result\")\n",
+ "\n",
+ "# Convert dataset to Pandas DataFrame\n",
+ "df = result.get_dataframe_from_csv()\n",
+ "\n",
+ "# Display top 5 rows of output data\n",
+ "df.head()"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "### Building damage for Shelby county, TN with New Madrid earthquake and liquefaction."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Geology dataset, required for liquefaction to provide susceptibility\n",
+ "liq_geology_dataset_id = \"5a284f53c7d30d13bc08249c\""
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Create building damage\n",
+ "bldg_dmg = BuildingStructuralDamage(client)\n",
+ "\n",
+ "# Load input dataset\n",
+ "bldg_dmg.load_remote_input_dataset(\"buildings\", bldg_dataset_id)\n",
+ "\n",
+ "# Load fragility mapping\n",
+ "fragility_service = FragilityService(client)\n",
+ "mapping_set = MappingSet(fragility_service.get_mapping(mapping_id))\n",
+ "bldg_dmg.set_input_dataset(\"dfr3_mapping_set\", mapping_set)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Specify the result name\n",
+ "result_name = \"memphis_bldg_dmg_result-liquefaction\"\n",
+ "\n",
+ "# Set analysis parameters\n",
+ "bldg_dmg.set_parameter(\"result_name\", result_name)\n",
+ "bldg_dmg.set_parameter(\"hazard_type\", hazard_type)\n",
+ "bldg_dmg.set_parameter(\"hazard_id\", hazard_id)\n",
+ "bldg_dmg.set_parameter(\"num_cpu\", 1)\n",
+ "\n",
+ "# Set analysis parameters for liquefaction\n",
+ "bldg_dmg.set_parameter(\"use_liquefaction\", True)\n",
+ "bldg_dmg.set_parameter(\"liquefaction_geology_dataset_id\", liq_geology_dataset_id)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Run building damage analysis\n",
+ "bldg_dmg.run_analysis()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Retrieve result dataset\n",
+ "result = bldg_dmg.get_output_dataset(\"ds_result\")\n",
+ "\n",
+ "# Convert dataset to Pandas DataFrame\n",
+ "df = result.get_dataframe_from_csv()\n",
+ "\n",
+ "# Display top 5 rows of output data\n",
+ "df.head()"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "#### Creating a chart"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "collapsed": false,
+ "jupyter": {
+ "outputs_hidden": false
+ },
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
+ "outputs": [],
+ "source": [
+ "ax = df['DS_3'].hist(bins=20, figsize=[10,5])\n",
+ "ax.set_title(\"complete damage distribution\", fontsize=12)\n",
+ "ax.set_xlabel(\"complete damage value\", fontsize=12)\n",
+ "ax.set_ylabel(\"counts\", fontsize=12)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "collapsed": false,
+ "jupyter": {
+ "outputs_hidden": false
+ },
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
+ "outputs": [],
+ "source": [
+ "ax = df['DS_3'].hist(bins=20, figsize=[10,5])\n",
+ "ax.set_title(\"complete damage distribution\", fontsize=12)\n",
+ "ax.set_xlabel(\"complete damage value\", fontsize=12)\n",
+ "ax.set_ylabel(\"counts\", fontsize=12)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "### Building damage for Seaside, OR with tsunami."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "collapsed": false,
+ "jupyter": {
+ "outputs_hidden": false
+ },
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
+ "outputs": [],
+ "source": [
+ "hazard_type = \"tsunami\"\n",
+ "hazard_id = \"5bc9e25ef7b08533c7e610dc\"\n",
+ "\n",
+ "# Seaside, OR building dataset\n",
+ "bldg_dataset_id = \"5bcf2fcbf242fe047ce79dad\"\n",
+ "\n",
+ "# Tsunami mapping\n",
+ "mapping_id = \"5b48fb1f337d4a478e7bd54d\""
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "collapsed": false,
+ "jupyter": {
+ "outputs_hidden": false
+ },
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
+ "outputs": [],
+ "source": [
+ "# Create Seaside tsunami building damage\n",
+ "bldg_dmg = BuildingStructuralDamage(client)\n",
+ "\n",
+ "# Load input dataset and fragility mapping\n",
+ "bldg_dmg.load_remote_input_dataset(\"buildings\", bldg_dataset_id)\n",
+ "mapping_set = MappingSet(fragility_service.get_mapping(mapping_id))\n",
+ "bldg_dmg.set_input_dataset(\"dfr3_mapping_set\", mapping_set)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "collapsed": false,
+ "jupyter": {
+ "outputs_hidden": false
+ },
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
+ "outputs": [],
+ "source": [
+ "# Specify the result name\n",
+ "result_name = \"seaside_bldg_dmg_result\"\n",
+ "\n",
+ "bldg_dmg.set_parameter(\"result_name\", result_name)\n",
+ "bldg_dmg.set_parameter(\"hazard_type\", hazard_type)\n",
+ "bldg_dmg.set_parameter(\"hazard_id\", hazard_id)\n",
+ "bldg_dmg.set_parameter(\"num_cpu\", 1)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "collapsed": false,
+ "jupyter": {
+ "outputs_hidden": false
+ },
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
+ "outputs": [],
+ "source": [
+ "# Run building damage analysis\n",
+ "bldg_dmg.run_analysis()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "collapsed": false,
+ "jupyter": {
+ "outputs_hidden": false
+ },
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
+ "outputs": [],
+ "source": []
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 3 (ipykernel)",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.9.19"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 4
+}
diff --git a/notebooks/buyout_decision.ipynb b/notebooks/buyout_decision.ipynb
new file mode 100644
index 00000000..264d1503
--- /dev/null
+++ b/notebooks/buyout_decision.ipynb
@@ -0,0 +1,865 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "id": "fbf4b1cf-d087-4b38-8236-faf5c668b2e0",
+ "metadata": {},
+ "source": [
+ "# Buyout Decision Notebook"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "4483f080-48c6-4191-85b2-0109240f9061",
+ "metadata": {},
+ "source": [
+ "## Authors:\n",
+ "- Farinaz Motlagh, Stony Brook University\n",
+ "- Sara Hamideh, Stony Brook University"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "d092027b-4f3c-4e39-883b-7e4969c3efa0",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "import geopandas as gpd\n",
+ "import numpy as np \n",
+ "import matplotlib.pyplot as plt \n",
+ "import matplotlib.image as mpimg\n",
+ "from matplotlib.colors import ListedColormap,LinearSegmentedColormap\n",
+ "import pandas as pd \n",
+ "from osgeo import gdal\n",
+ "import rasterio\n",
+ "from rasterio.features import rasterize\n",
+ "from rasterio.mask import mask\n",
+ "import random\n",
+ "from shapely.geometry import Point\n",
+ "from shapely.wkt import loads\n",
+ "from matplotlib.table import Table\n",
+ "from IPython.display import display, Markdown\n",
+ "from geopy.distance import great_circle\n",
+ "from heapq import nsmallest\n",
+ "from geopy.distance import geodesic\n",
+ "from itertools import combinations\n",
+ "import contextily as ctx\n",
+ "\n",
+ "import warnings\n",
+ "warnings.filterwarnings('ignore')\n",
+ "\n",
+ "from pyincore import IncoreClient, DataService, Dataset, FragilityService, MappingSet\n",
+ "from pyincore.utils.dataprocessutil import DataProcessUtil\n",
+    "# Importing pyIncore analyses:\n",
+ "from pyincore.analyses.buildingdamage import BuildingDamage\n",
+ "from pyincore.analyses.nonstructbuildingdamage import NonStructBuildingDamage\n",
+ "from pyincore.analyses.combinedwindwavesurgebuildingdamage import CombinedWindWaveSurgeBuildingDamage\n",
+ "from pyincore.analyses.populationdislocation import PopulationDislocation\n",
+ "from pyincore.analyses.buyoutdecision import BuyoutDecision"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "866a0f39-a32d-4756-a9d5-c4fb7f1e0d94",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "pd.set_option('display.float_format', lambda x: '%.3f' % x)\n",
+ "pd.set_option('display.max_colwidth', None)\n",
+ "pd.set_option('display.expand_frame_repr', False)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "5e54be3d-1e94-44dc-94d2-8bd6abba19b6",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Connect to IN-CORE Services\n",
+ "client = IncoreClient()\n",
+ "client.clear_cache()\n",
+ "data_service = DataService(client)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "b5ff1910-6b5a-4bfe-87b4-3a1ebfc91e9a",
+ "metadata": {},
+ "source": [
+ "## Data Preparation"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "f7caaecd-ee37-46e2-bbcc-2e17b2128763",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Loading Galveston building inventory\n",
+ "bldg_dataset_id = \"63ff6b135c35c0353d5ed3ac\"\n",
+ "buildings = Dataset.from_data_service(bldg_dataset_id, data_service)\n",
+ "bld_invtry = buildings.get_dataframe_from_shapefile()\n",
+ "bld_invtry.head()\n",
+ "#bld_invtry.shape"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "8d03938d-a737-4bb8-bbac-40b38e6cdec9",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Removing any properties that have NaN values and values less than $1000 in column \"appr_bldg\"\n",
+ "# (the threshold is based on Galveston County's tax assessor data for mobile homes)\n",
+ "bld_invtry = bld_invtry[bld_invtry['appr_bldg'].notna() & (bld_invtry['appr_bldg'] >= 1000)]\n",
+ "#bld_invtry.shape"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "faad88df",
+ "metadata": {},
+ "source": [
+ "## Max damage state from Combined Building Damage Analyses"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "5a108f90",
+ "metadata": {},
+ "source": [
+    "The decision framework selects candidate properties for buyout projects that (1) have been completely damaged by a past hurricane scenario and (2) are projected to be completely damaged by a future 100-year hurricane event. To check the first requirement, we use IN-CORE's combined building damage model hindcast for Hurricane Ike and include the properties that were completely damaged during this event. To check the second requirement, we use the output of IN-CORE's building damage model for a projected future 100-year event. The building damage model is developed from fragility analysis and estimates the probability of four damage states based on the hazard type and the attributes of each building.\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "fd5a4a04",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "def calculate_combined_building_damage(hazard_type, hazard_id, bldg_dataset_id, wind_mapping_id,\n",
+ " surge_wave_mapping_id, flood_mapping_id,result_name):\n",
+ "\n",
+ " # Wind building damage\n",
+ " fragility_service = FragilityService(client)\n",
+ " wind_mapping_set = MappingSet(fragility_service.get_mapping(wind_mapping_id))\n",
+ " w_bldg_dmg = BuildingDamage(client)\n",
+ " w_bldg_dmg.load_remote_input_dataset(\"buildings\", bldg_dataset_id)\n",
+ " w_bldg_dmg.set_input_dataset('dfr3_mapping_set', wind_mapping_set)\n",
+ " w_bldg_dmg.set_parameter(\"result_name\", \"Galveston-wind-dmg\")\n",
+ " w_bldg_dmg.set_parameter(\"hazard_type\", hazard_type)\n",
+ " w_bldg_dmg.set_parameter(\"hazard_id\", hazard_id)\n",
+ " w_bldg_dmg.set_parameter(\"num_cpu\", 8)\n",
+ " w_bldg_dmg.run_analysis()\n",
+ "\n",
+ " # surge-wave building damage\n",
+ " sw_bldg_dmg = BuildingDamage(client)\n",
+ " surge_wave_mapping_set = MappingSet(fragility_service.get_mapping(surge_wave_mapping_id))\n",
+ " sw_bldg_dmg.load_remote_input_dataset(\"buildings\", bldg_dataset_id)\n",
+ " sw_bldg_dmg.set_input_dataset('dfr3_mapping_set', surge_wave_mapping_set)\n",
+ " sw_bldg_dmg.set_parameter(\"result_name\", \"Galveston-sw-dmg\")\n",
+ " sw_bldg_dmg.set_parameter(\"hazard_type\", hazard_type)\n",
+ " sw_bldg_dmg.set_parameter(\"hazard_id\", hazard_id)\n",
+ " sw_bldg_dmg.set_parameter(\"num_cpu\", 8)\n",
+ " sw_bldg_dmg.run_analysis()\n",
+ "\n",
+    "    # Flood building damage (non-structural)\n",
+ " flood_mapping_set = MappingSet(fragility_service.get_mapping(flood_mapping_id))\n",
+ " f_bldg_dmg = NonStructBuildingDamage(client)\n",
+ " f_bldg_dmg.load_remote_input_dataset(\"buildings\", bldg_dataset_id)\n",
+ " f_bldg_dmg.set_input_dataset('dfr3_mapping_set', flood_mapping_set)\n",
+ " f_bldg_dmg.set_parameter(\"result_name\", \"Galveston-flood-dmg\")\n",
+ " f_bldg_dmg.set_parameter(\"fragility_key\", \"Non-Retrofit Fragility ID Code\")\n",
+ " f_bldg_dmg.set_parameter(\"hazard_type\", hazard_type)\n",
+ " f_bldg_dmg.set_parameter(\"hazard_id\", hazard_id)\n",
+ " f_bldg_dmg.set_parameter(\"num_cpu\", 8)\n",
+ " f_bldg_dmg.run_analysis()\n",
+ "\n",
+ " # Running combined damage analysis\n",
+ " surge_wave_damage = sw_bldg_dmg.get_output_dataset(\"ds_result\")\n",
+ " wind_damage = w_bldg_dmg.get_output_dataset(\"ds_result\")\n",
+ " flood_damage = f_bldg_dmg.get_output_dataset(\"result\")\n",
+ " \n",
+ " combined_bldg_dmg = CombinedWindWaveSurgeBuildingDamage(client)\n",
+ " combined_bldg_dmg.set_input_dataset(\"surge_wave_damage\", surge_wave_damage)\n",
+ " combined_bldg_dmg.set_input_dataset(\"wind_damage\", wind_damage)\n",
+ " combined_bldg_dmg.set_input_dataset(\"flood_damage\", flood_damage)\n",
+ " combined_bldg_dmg.set_parameter(\"result_name\", result_name)\n",
+ " combined_bldg_dmg.run_analysis()\n",
+ "\n",
+    "    # Return the combined damage dataset\n",
+    "    combined_dmg = combined_bldg_dmg.get_output_dataset(\"ds_result\")\n",
+    "\n",
+    "    return combined_dmg"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "2de7912e",
+ "metadata": {},
+ "source": [
+ "## Get max damage states"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "11175c51",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+    "# Hurricane Ike (hindcast) combined building damage\n",
+ "\n",
+ "wind_mapping_id = \"62fef3a6cef2881193f2261d\"\n",
+ "surge_wave_mapping_id = \"6303e51bd76c6d0e1f6be080\"\n",
+ "flood_mapping_id = \"62fefd688a30d30dac57bbd7\"\n",
+ "\n",
+ "\n",
+ "hazard_type = \"hurricane\"\n",
+ "hazard_id = \"5fa5a228b6429615aeea4410\"\n",
+ "bldg_dataset_id = \"63ff6b135c35c0353d5ed3ac\"\n",
+ "result_name = \"Galveston-Ike-combined-dmg\"\n",
+ "\n",
+ "combined_dmg_ike = calculate_combined_building_damage(hazard_type, hazard_id, bldg_dataset_id, wind_mapping_id,\n",
+ " surge_wave_mapping_id, flood_mapping_id,result_name)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "55f85190",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+    "# 100-year hurricane event combined building damage\n",
+ "wind_mapping_id = \"62fef3a6cef2881193f2261d\"\n",
+ "surge_wave_mapping_id = \"6303e51bd76c6d0e1f6be080\"\n",
+ "flood_mapping_id = \"62fefd688a30d30dac57bbd7\"\n",
+ "\n",
+ "\n",
+ "hazard_type = \"hurricane\"\n",
+ "hazard_id = \"5fa5a9497e5cdf51ebf1add2\"\n",
+ "bldg_dataset_id = \"63ff6b135c35c0353d5ed3ac\"\n",
+ "result_name = \"Galveston-Ike-100yr-combined-dmg\"\n",
+ "\n",
+ "combined_dmg_ike_100yrs = calculate_combined_building_damage(hazard_type, hazard_id, bldg_dataset_id, wind_mapping_id,\n",
+ " surge_wave_mapping_id, flood_mapping_id,result_name)"
+ ]
+ },
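+  {
+   "cell_type": "markdown",
+   "id": "max-state-check-note",
+   "metadata": {},
+   "source": [
+    "As an illustrative sketch only (the `BuyoutDecision` analysis below applies these criteria internally, so this optional cell assumes the combined `ds_result` datasets carry `DS_0`-`DS_3` probabilities), the highest-probability damage state per building can be derived with `DataProcessUtil` imported above to preview the candidate set: buildings whose `max_state` is `DS_3` (complete damage) in both the Ike hindcast and the 100-year event."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "max-state-check-sketch",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Illustrative sketch (optional): derive each building's maximum damage state from the combined results\n",
+    "past_max = DataProcessUtil.get_max_damage_state(combined_dmg_ike.get_dataframe_from_csv())\n",
+    "future_max = DataProcessUtil.get_max_damage_state(combined_dmg_ike_100yrs.get_dataframe_from_csv())\n",
+    "\n",
+    "# Buildings with complete damage (DS_3) in the Ike hindcast that are also completely damaged in the 100-year event\n",
+    "past_complete = set(past_max.loc[past_max['max_state'] == 'DS_3', 'guid'])\n",
+    "future_complete = set(future_max.loc[future_max['max_state'] == 'DS_3', 'guid'])\n",
+    "len(past_complete & future_complete)"
+   ]
+  },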
+ {
+ "cell_type": "markdown",
+ "id": "71becddf",
+ "metadata": {},
+ "source": [
+ "### Population Dislocation"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "bf0c2ee0",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "hua_id = \"63ff8e895367c2261b4cb2ef\"\n",
+ "\n",
+ "value_loss = \"60354810e379f22e16560dbd\"\n",
+ "bg_data = \"603545f2dcda03378087e708\"\n",
+    "result_name = \"galveston-pop-disl-results_combined_damage\"\n",
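+    "# Fixed random seed so the population dislocation sampling is reproducible\n",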
+ "seed = 1111\n",
+ "pop_dis = PopulationDislocation(client)\n",
+ "\n",
+ "pop_dis.set_input_dataset(\"building_dmg\", combined_dmg_ike)\n",
+ "pop_dis.set_parameter(\"result_name\", result_name)\n",
+ "pop_dis.set_parameter(\"seed\", seed)\n",
+ "\n",
+ "pop_dis.load_remote_input_dataset(\"block_group_data\", bg_data)\n",
+ "pop_dis.load_remote_input_dataset(\"value_loss_param\", value_loss)\n",
+ "pop_dis.load_remote_input_dataset(\"housing_unit_allocation\", hua_id)\n",
+ "\n",
+ "pop_dis.run_analysis()\n",
+ "\n",
+ "population_dislocation_result = pop_dis.get_output_dataset(\"result\")\n",
+ "dislocation = population_dislocation_result.get_dataframe_from_csv(low_memory=False)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "c8910a89",
+ "metadata": {},
+ "source": [
+ "## Buyout Model Analyses"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "efbda1ac",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "fema_buyout_cap = 321291.600\n",
+ "residential_archetypes = [1, 2, 3, 4, 5, 17]\n",
+ "\n",
+ "hua_id = \"63ff8e895367c2261b4cb2ef\"\n",
+ "\n",
+ "buyout_decision = BuyoutDecision(client)\n",
+ "buyout_decision.set_parameter(\"fema_buyout_cap\", fema_buyout_cap)\n",
+ "buyout_decision.set_parameter(\"residential_archetypes\", residential_archetypes)\n",
+ "buyout_decision.set_parameter(\"result_name\", \"galveston_buyout\")\n",
+ "\n",
+ "buyout_decision.load_remote_input_dataset(\"buildings\", bldg_dataset_id)\n",
+ "buyout_decision.load_remote_input_dataset(\"housing_unit_allocation\", hua_id)\n",
+ "buyout_decision.set_input_dataset(\"past_building_damage\", combined_dmg_ike)\n",
+ "buyout_decision.set_input_dataset(\"future_building_damage\", combined_dmg_ike_100yrs)\n",
+ "buyout_decision.set_input_dataset(\"population_dislocation\", population_dislocation_result)\n",
+ "\n",
+ "buyout_decision.run_analysis()\n",
+ "buyout_result = buyout_decision.get_output_dataset(\"result\")\n",
+ "buyout_df = buyout_result.get_dataframe_from_csv()\n",
+ "\n",
+    "# Convert the pandas DataFrame to a GeoDataFrame\n",
+ "buyout_df['geometry'] = buyout_df['geometry'].apply(loads)\n",
+ "buyout_df = gpd.GeoDataFrame(buyout_df, geometry='geometry')"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "cadac669",
+ "metadata": {},
+ "source": [
+ "## Results"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "582dfb9e",
+ "metadata": {},
+ "source": [
+ "### 1) Tenure status"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "0593fc78",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "buyout_df['ownership'] = buyout_df['ownership'].replace({1: 'Owner', 2: 'Renter'})\n",
+ "\n",
+ "owners = (buyout_df['ownership'] == 'Owner').sum()\n",
+ "renters = (buyout_df['ownership'] == 'Renter').sum()\n",
+ "\n",
+ "normal_offer_owners = round(buyout_df.loc[buyout_df['ownership'] == 'Owner', 'housing_unit_appraisal_value'].sum())\n",
+ "normal_offer_renters = round(buyout_df.loc[buyout_df['ownership'] == 'Renter', 'housing_unit_appraisal_value'].sum())\n",
+ "\n",
+ "benefit_owner = buyout_df.loc[(buyout_df['dislocated'] == True) & (buyout_df['ownership'] == 'Owner'), 'dislocated'].sum()\n",
+ "benefit_renter = buyout_df.loc[(buyout_df['dislocated'] == True) & (buyout_df['ownership'] == 'Renter'), 'dislocated'].sum()\n",
+ "\n",
+ "\n",
+ "ownership_summary = {\n",
+ " '': ['# candidate housing units', 'Total purchase offer', 'Potential benefits (# people not dislocated by a simulated 100-year event)',\n",
+ " 'Potential challenges', 'Potential consequences', 'Equity considerations'],\n",
+ " 'Owners': [owners, f\"${normal_offer_owners:,.2f}\", benefit_owner,\n",
+    "                 '(1) Program participation depends on financial standing, place-based attachments, risk perception, flood exposure, family composition, and community ties.<br>'+\n",
+    "                 '(2) Homeowners are less likely to participate if they have a mortgage.<br>'+\n",
+    "                 '(3) Elderly homeowners (with paid-off mortgages) may oppose relocation due to place-based attachments and their unwillingness to take out a second mortgage for a more expensive home.<br>'+\n",
+    "                 '(4) Homeowners tend to decline offers if their property has undergone improvements, as home improvements are not factored into the market value.<br>'+\n",
+    "                 '(5) Lower chance of accepting an offer if cost sharing is required.<br>'+\n",
+    "                 '(6) Lower chance of accepting an offer if their house has undergone repair and reconstruction.<br>'+\n",
+    "                 '(7) Owners of substantially damaged properties may have limited post-disaster rebuilding options. While eminent domain is prohibited in buyout programs, the owners are legally restricted from rebuilding unless they flood-proof the structure or relocate. To avoid such additional expenses, buyout program participation may increase.<br>'+\n",
+    "                 '(8) Homeowners are less inclined to participate if the purchase offer is lower than their expected property value.<br>'+\n",
+    "                 '(9) Homeowners with inadequate property documentation are less likely to participate.', \n",
+    "                 '(1) Possibility of unaffordable housing options upon relocation<br>'+\n",
+    "                 '(2) Losing social network if relocated outside their community', \n",
+    "                 '(1) Sufficient financial incentives can alleviate post-buyout financial burdens and encourage program participation.<br>'+\n",
+    "                 '(2) Up to $22,500 compensation under URA for additional payments such as comparable homes, closing costs, or increased interest costs.<br>'+\n",
+    "                 '(3) Basement coverage through FEMA’s NFIP is limited, and compensation may be needed for contents such as washer, dryer, TV, or food freezer.<br>'+\n",
+    "                 '(4) Consider funding for mental health support of household members who experienced flooding.<br>'+\n",
+    "                 '(5) Fostering relationships with residents and community organizations is vital to establish trust and make buyouts more favorable.<br>'+\n",
+    "                 '(6) Consider conducting longitudinal health studies to assess and address potential physical and mental health implications for participants undergoing the relocation processes in buyout projects.<br>'+\n",
+    "                 '(7) Partnering with community-based organizations and assigning local relocation specialists helps ensure an equitable and supportive buyout process, given their role in navigating and addressing the complexities of relocation.<br>'+\n",
+    "                 '(8) To counter tax base losses from buyouts, program administrators can incentivize owners to relocate within the county but outside floodplains.<br>'+\n",
+    "                 '(9) If a participating household owes a mortgage, FEMA may pay residual funds after they pay the lienholder.<br>'+\n",
+    "                 '(10) Effective risk communication does not always lead to protective actions like participating in a buyout program. Homeowners’ responses depend on assessing benefits and their ability to act. Thus, sharing information on buyout benefits, resources like temporary housing options, and available compensations increases confidence in homeowners’ ability to afford and undergo buyouts.'], \n",
+    "    'Renters': [renters, f\"${normal_offer_renters:,.2f}\", benefit_renter,\n",
+    "                 '(1) Renter-occupied homes are less likely to undergo buyouts.<br>'+\n",
+    "                 '(2) Foreign national renters are ineligible for federal relocation benefits and post-buyout assistance, intensifying challenges for local governments in providing alternative solutions.',\n",
+    "                 '(1) Mandatory relocation<br>'+\n",
+    "                 '(2) Financial burden<br>'+\n",
+    "                 '(3) Higher chance of relocating to areas with equal or greater flood exposure and higher social vulnerability<br>'+\n",
+    "                 '(4) Losing social networks if relocated outside of the community<br>'+\n",
+    "                 '(5) Lower ability to find affordable homes when relocated', \n",
+    "                 '(1) Renters are less likely to carry flood insurance. Local governments may explore strategies to enhance coverage and overall disaster preparedness.<br>'+\n",
+    "                 '(2) Under the URA of 1970, displaced tenants from a property (occupied for 90+ days) can receive a payment (up to $5,250) for renting another property for up to 42 months.<br>'+\n",
+    "                 '(3) Consider funding for mental health support of household members who experienced flooding.<br>'+\n",
+    "                 '(4) Consider conducting longitudinal health studies to assess and address potential physical and mental health implications for participants undergoing the relocation processes in buyout projects.<br>'+\n",
+    "                 '(5) Partnering with community-based organizations and assigning local relocation specialists helps ensure an equitable and supportive buyout process, given their role in navigating and addressing the complexities of relocation.']\n",
+ "}\n",
+ "\n",
+ "\n",
+ "ownership_table = pd.DataFrame(ownership_summary, index=None)\n",
+ "\n",
+ "\n",
+ "ownership_table = ownership_table.style.set_properties(\n",
+ " subset=ownership_table.columns[1:], \n",
+ " **{'text-align': 'left'}\n",
+ ").set_properties(\n",
+ " subset=ownership_table.columns[0], \n",
+ " **{'font-weight': 'bold', 'text-align': 'left'}\n",
+ ").set_table_styles([\n",
+ " {'selector': 'th', 'props': [('text-align', 'center')]}\n",
+ "])\n",
+ "ownership_table"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "e0dfa1e9",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "buyout_df.crs = \"EPSG:4326\"\n",
+ "buyout_df = buyout_df.to_crs(epsg=3857)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "d0087a81",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "color_dict = {\n",
+ " 'Owner': 'blue',\n",
+ " 'Renter': 'red'\n",
+ "}\n",
+ "\n",
+    "# Create a ListedColormap with the colors defined above\n",
+ "colors = [color_dict[ownership] for ownership in buyout_df['ownership'].unique()]\n",
+ "cmap = ListedColormap(colors)\n",
+ "\n",
+ "buyout_df.explore(\n",
+    "    column=\"ownership\", # column with tenure status (Owner/Renter)\n",
+ " tooltip=\"ownership\", \n",
+ " #geometry='geometry',\n",
+ " popup=True,\n",
+ " tiles='OpenStreetMap',\n",
+    "    cmap=cmap, # custom colormap defined above\n",
+ " legend_kwds = {\"caption\": \"Ownership\"},\n",
+ " marker_kwds= {'radius': 4}\n",
+ ")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "12f97ab7",
+ "metadata": {},
+ "source": [
+ "### 2) Owner-occupied homes broken down by owners' race"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "b11da317",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "white = buyout_df.loc[(buyout_df['race'] == 1) & (buyout_df['hispan'] == 0) & (buyout_df['ownership'] == 'Owner'), 'huid'].count()\n",
+ "racial_minority = buyout_df.loc[(buyout_df['race'] >= 2) & (buyout_df['hispan'] == 0) & (buyout_df['ownership'] == 'Owner'), 'huid'].count()\n",
+ "hisp = buyout_df.loc[(buyout_df['hispan'] == 1) & (buyout_df['ownership'] == 'Owner'), 'huid'].count()\n",
+ "\n",
+ "offer_owner_white = round(buyout_df.loc[(buyout_df['race'] == 1) & (buyout_df['hispan'] == 0) & (buyout_df['ownership'] == 'Owner'), 'housing_unit_appraisal_value'].sum())\n",
+ "offer_owner_racial_minority = round(buyout_df.loc[(buyout_df['race'] >= 2) & (buyout_df['hispan'] == 0) & (buyout_df['ownership'] == 'Owner'), 'housing_unit_appraisal_value'].sum())\n",
+ "offer_owner_hispan = round(buyout_df.loc[(buyout_df['hispan'] == 1) & (buyout_df['ownership'] == 'Owner'), 'housing_unit_appraisal_value'].sum())\n",
+ "\n",
+ "benefit_owner_white = buyout_df.loc[(buyout_df['dislocated'] == True) & (buyout_df['ownership'] == 'Owner') & (buyout_df['race'] == 1) & (buyout_df['hispan'] == 0), 'dislocated'].sum()\n",
+ "benefit_owner_racial_minority = buyout_df.loc[(buyout_df['dislocated'] == True) & (buyout_df['ownership'] == 'Owner') & (buyout_df['race'] >= 2) & (buyout_df['hispan'] == 0), 'dislocated'].sum()\n",
+ "benefit_owner_hispan = buyout_df.loc[(buyout_df['dislocated'] == True) & (buyout_df['ownership'] == 'Owner') & (buyout_df['hispan'] == 1), 'dislocated'].sum()\n",
+ "\n",
+ "# NOTE: Equity considerations are similar for all homeowners\n",
+ "tenure_race_summary = {\n",
+ " '': ['# candidate housing units', 'Total purchase offer', 'Potential benefits (# people not dislocated by a simulated 100-year event)',\n",
+ " 'Potential challenges','Potential consequences', 'Equity considerations'],\n",
+ " 'White homeowners': [white, f\"${offer_owner_white:,.2f}\", benefit_owner_white,\n",
+ " '(1) Less likely to participate due to higher flood risk tolerance.',\n",
+    "                 '(1) If they agree to participate, they may prefer relocating to majority-White communities.', \n",
+    "                 '(1) Sufficient financial incentives can alleviate post-buyout financial burdens and encourage program participation.<br>'+\n",
+    "                 '(2) Up to $22,500 compensation under URA for additional payments such as comparable homes, closing costs, or increased interest costs.<br>'+\n",
+    "                 '(3) Basement coverage through FEMA’s NFIP is limited, and compensation may be needed for contents such as washer, dryer, TV, or food freezer.<br>'+\n",
+    "                 '(4) Consider funding for mental health support of household members who experienced flooding.<br>'+\n",
+    "                 '(5) Fostering relationships with residents and community organizations is vital to establish trust and make buyouts more favorable.<br>'+\n",
+    "                 '(6) Consider conducting longitudinal health studies to assess and address potential physical and mental health implications for participants undergoing the relocation processes in buyout projects.<br>'+\n",
+    "                 '(7) Partnering with community-based organizations and assigning local relocation specialists helps ensure an equitable and supportive buyout process, given their role in navigating and addressing the complexities of relocation.<br>'+\n",
+    "                 '(8) To counter tax base losses from buyouts, program administrators can incentivize owners to relocate within the county but outside floodplains.<br>'+\n",
+    "                 '(9) If a participating household owes a mortgage, FEMA may pay residual funds after they pay the lienholder.<br>'+\n",
+    "                 '(10) Effective risk communication does not always lead to protective actions like participating in a buyout program. Homeowners’ responses depend on assessing benefits and their ability to act. Thus, sharing information on buyout benefits, resources like temporary housing options, and available compensations increases confidence in homeowners’ ability to afford and undergo buyouts.'], \n",
+    "    'Racial minority homeowners': [racial_minority, f\"${offer_owner_racial_minority:,.2f}\", benefit_owner_racial_minority,\n",
+    "                 '(1) More prone to relocation due to higher exposure to disasters<br>'+\n",
+    "                 '(2) Reluctance to join the program due to emotional attachment and distrust in government-led programs<br>'+\n",
+    "                 '(3) Properties in Black neighborhoods are valued lower compared to White neighborhoods, posing challenges for finding comparable homes post-buyout.',\n",
+    "                 '(1) Higher chance of relocating to areas with equal or greater flood exposure and higher social vulnerability.<br>'+\n",
+    "                 '(2) Losing social network if relocated outside their community',\n",
+    "                 ' '],\n",
+    "    'Hispanic homeowners': [hisp, f\"${offer_owner_hispan:,.2f}\", benefit_owner_hispan,\n",
+    "                 '(1) More prone to relocation due to higher exposure to disasters<br>'+\n",
+    "                 '(2) Properties in Hispanic neighborhoods are valued lower compared to White neighborhoods, posing challenges for finding comparable homes post-buyout<br>'+\n",
+    "                 '(3) Reluctance to join the program due to emotional attachment and distrust in government-led programs',\n",
+    "                 '(1) Higher chance of relocating to areas with equal or greater flood exposure and higher social vulnerability<br>'+\n",
+ " '(2) Losing social network if relocated outside their community',\n",
+ " ' '],\n",
+ "}\n",
+ "\n",
+ "tenure_race_table = pd.DataFrame(tenure_race_summary, index=None)\n",
+ "\n",
+ "tenure_race_table = tenure_race_table.style.set_properties(\n",
+ " subset=tenure_race_table.columns[1:], \n",
+ " **{'text-align': 'left'}\n",
+ ").set_properties(\n",
+ " subset=tenure_race_table.columns[0], \n",
+ " **{'font-weight': 'bold', 'text-align': 'left'}\n",
+ ").set_table_styles([\n",
+ " {'selector': 'th', 'props': [('text-align', 'center')]}\n",
+ "])\n",
+ "\n",
+ "tenure_race_table"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "c009aae2",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "def categorize_race_tenure(row):\n",
+ " if row['race'] == 1 and row['hispan'] == 0 and row['ownership'] == 'Owner':\n",
+ " return 'White alone homeowners'\n",
+ " elif row['hispan'] == 1 and row['ownership'] == 'Owner':\n",
+ " return 'Hispanic homeowners'\n",
+ " elif row['race'] >= 2 and row['hispan'] == 0 and row['ownership'] == 'Owner':\n",
+ " return 'Racial minority homeowners'\n",
+ " else:\n",
+ " return 'Other'\n",
+ "\n",
+ "\n",
+ "# Apply the function to each row\n",
+ "buyout_df['race_category'] = buyout_df.apply(categorize_race_tenure, axis=1)\n",
+ "\n",
+ "colors = ['red','green','blue']\n",
+ "cmap = ListedColormap(colors)\n",
+ "\n",
+ "buyout_df[buyout_df['race_category'] != 'Other'].explore(\n",
+    "    column=\"race_category\", # column with race/tenure category\n",
+ " tooltip=\"race_category\", \n",
+ " #geometry='geometry',\n",
+ " popup=True,\n",
+ " tiles='OpenStreetMap',\n",
+    "    cmap=cmap, # custom colormap defined above\n",
+ " legend_kwds = {\"caption\": \"Race Category\"},\n",
+ " marker_kwds= {'radius': 4}\n",
+ ")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "8e467423",
+ "metadata": {},
+ "source": [
+ "### 3) Owner-occupied homes broken down by owners' income level"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "175703f9",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "low_middle_income = buyout_df.loc[(buyout_df['hhinc'].isin([1,2,3])) & (buyout_df['ownership'] == 'Owner'), 'huid'].count()\n",
+ "high_income = buyout_df.loc[(buyout_df['hhinc'].isin([4,5])) & (buyout_df['ownership'] == 'Owner'), 'huid'].count()\n",
+ "\n",
+ "offer_owner_low_middle_income = round(buyout_df.loc[(buyout_df['hhinc'].isin([1,2,3])) & (buyout_df['ownership'] == 'Owner'), 'housing_unit_appraisal_value'].sum())\n",
+ "offer_owner_high_income = round(buyout_df.loc[(buyout_df['hhinc'].isin([4,5])) & (buyout_df['ownership'] == 'Owner'), 'housing_unit_appraisal_value'].sum())\n",
+ "\n",
+ "benefit_owner_low_middle_income = buyout_df.loc[(buyout_df['hhinc'].isin([1,2,3])) & (buyout_df['ownership'] == 'Owner'), 'dislocated'].sum()\n",
+ "benefit_owner_high_income = buyout_df.loc[(buyout_df['hhinc'].isin([4,5])) & (buyout_df['ownership'] == 'Owner'), 'dislocated'].sum()\n",
+ "\n",
+ "# NOTE: Equity considerations are similar for all homeowners\n",
+ "income_tenure_summary = {\n",
+ " '': ['# candidate housing units', 'Total purchase offer', 'Potential benefits (# people not dislocated by a simulated 100-year event)',\n",
+ " 'Potential challenges','Potential consequences', 'Equity considerations'],\n",
+ " 'Low and middle income homeowners': [low_middle_income, f\"${offer_owner_low_middle_income:,.2f}\", benefit_owner_low_middle_income,\n",
+    "                 '(1) Higher chance of being targeted for program participation as low-value homes sustain more damage<br>'+\n",
+    "                 '(2) Lower chance of accepting an offer if cost sharing is required<br>'+\n",
+    "                 '(3) May accept a low offer due to little power to negotiate the terms<br>'+\n",
+    "                 '(4) Reluctance to relocate due to limited perceived options, lower awareness, and affordability concerns<br>'+\n",
+    "                 '(5) More prone to relocation due to higher exposure to disasters',\n",
+    "                 '(1) Less likely to find affordable comparable homes when relocated<br>'+\n",
+    "                 '(2) Higher chance of relocating to areas with equal or greater flood exposure and higher social vulnerability<br>'+\n",
+    "                 '(3) Losing social networks if relocated outside of the community<br>'+\n",
+    "                 '(4) Though voluntary, program participation may resemble forced relocation due to new post-disaster policies mandating costly mitigation measures or increased insurance premiums that are unaffordable to households.',\n",
+    "                 '(1) Sufficient financial incentives can alleviate post-buyout financial burdens and encourage program participation.<br>'+\n",
+    "                 '(2) Up to $22,500 compensation under URA for additional payments such as comparable homes, closing costs, or increased interest costs.<br>'+\n",
+    "                 '(3) Basement coverage through FEMA’s NFIP is limited, and compensation may be needed for contents such as washer, dryer, TV, or food freezer.<br>'+\n",
+    "                 '(4) Consider funding for mental health support of household members who experienced flooding.<br>'+\n",
+    "                 '(5) Fostering relationships with residents and community organizations is vital to establish trust and make buyouts more favorable.<br>'+\n",
+    "                 '(6) Consider conducting longitudinal health studies to assess and address potential physical and mental health implications for participants undergoing the relocation processes in buyout projects.<br>'+\n",
+    "                 '(7) Partnering with community-based organizations and assigning local relocation specialists helps ensure an equitable and supportive buyout process, given their role in navigating and addressing the complexities of relocation.<br>'+\n",
+    "                 '(8) To counter tax base losses from buyouts, program administrators can incentivize owners to relocate within the county but outside floodplains.<br>'+\n",
+    "                 '(9) If a participating household owes a mortgage, FEMA may pay residual funds after they pay the lienholder.<br>'+\n",
+    "                 '(10) Effective risk communication does not always lead to protective actions like participating in a buyout program. Homeowners’ responses depend on assessing benefits and their ability to act. Thus, sharing information on buyout benefits, resources like temporary housing options, and available compensations increases confidence in homeowners’ ability to afford and undergo buyouts.'], \n",
+    "    'High income homeowners': [high_income, f\"${offer_owner_high_income:,.2f}\", benefit_owner_high_income,\n",
+    "                 '(1) Higher chance of ineligibility due to owning high-value properties and not meeting the BCA requirements<br>'+\n",
+    "                 '(2) May not receive a buyout offer as municipalities are concerned about the appearance of federal funds assisting wealthy households<br>'+\n",
+    "                 '(3) May decline the offer due to more financial independence and self-efficacy to implement alternative mitigation measures<br>'+\n",
+ " '(4) May decline offers if their property has undergone improvements, as home improvements are not factored into the market value',\n",
+ " '(1) May need to adopt alternative mitigation measures, if not participating',\n",
+ " ' ']\n",
+ "}\n",
+ "\n",
+ "income_tenure_table = pd.DataFrame(income_tenure_summary, index=None)\n",
+ "\n",
+ "income_tenure_table = income_tenure_table.style.set_properties(\n",
+ " subset=income_tenure_table.columns[1:], \n",
+ " **{'text-align': 'left'}\n",
+ ").set_properties(\n",
+ " subset=income_tenure_table.columns[0], \n",
+ " **{'font-weight': 'bold', 'text-align': 'left'}\n",
+ ").set_table_styles([\n",
+ " {'selector': 'th', 'props': [('text-align', 'center')]}\n",
+ "])\n",
+ "\n",
+ "income_tenure_table"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "326511a8",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "def categorize_income(row):\n",
+ " if row['hhinc'] in [1, 2, 3] and row['ownership'] == 'Owner':\n",
+ " return 'Low and middle income homeowners'\n",
+ " elif row['hhinc'] in [4, 5] and row['ownership'] == 'Owner':\n",
+ " return 'High income homeowners'\n",
+ " else:\n",
+ " return 'Other'\n",
+ "\n",
+ "# Apply the function to each row\n",
+ "buyout_df['income_category'] = buyout_df.apply(categorize_income, axis=1)\n",
+ "colors = ['red','blue']\n",
+ "cmap = ListedColormap(colors)\n",
+ "\n",
+ "buyout_df[buyout_df['income_category'] != 'Other'].explore(\n",
+    "    column=\"income_category\", # column with income category\n",
+ " tooltip=\"income_category\", \n",
+ " #geometry='geometry',\n",
+ " popup=True,\n",
+ " tiles='OpenStreetMap',\n",
+ " cmap=cmap,\n",
+ " legend_kwds = {\"caption\": \"Income Group\"},\n",
+ " marker_kwds= {'radius': 4}\n",
+ ")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "843f286f",
+ "metadata": {},
+ "source": [
+ "### 4) Cluster buyout"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "587ca120",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+    "# Calculate the distance from each building to its nearest neighboring building in the inventory\n",
+    "# geometry_col = gpd.GeoSeries.from_wkt(bld_invtry['geometry'])\n",
+    "distance = gpd.GeoDataFrame(geometry=bld_invtry['geometry'])\n",
+    "\n",
+    "# Setting the CRS to WGS 84\n",
+    "distance = distance.set_crs(epsg=4326)\n",
+    "\n",
+    "# Converting the CRS to a projected coordinate system (EPSG:3857, Web Mercator; distances are in meters)\n",
+    "distance = distance.to_crs(epsg=3857)\n",
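+    "# For each building, take the second-smallest distance (the smallest is the zero distance to itself)\n",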
+ "bld_invtry['closest_distance'] = distance.geometry.apply(lambda x: distance.distance(x).nsmallest(2).iloc[1])"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "9c583f34",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "bld_invtry['closest_distance'].describe()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "b8b05de4",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+    "df_cluster = buyout_df"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "e7617d89",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+    "# Set the threshold for identifying clusters: 12 times the standard deviation of the calculated nearest-neighbor distances\n",
+ "cluster_threshold = 12 * np.std(bld_invtry['closest_distance'])\n",
+ "\n",
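+    "# Geodesic distance between two (lat, lon) coordinates, returned in feet\n",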
+ "def identify_clusters(coord1, coord2):\n",
+ " return geodesic(coord1, coord2).feet\n",
+ "df_cluster['cluster'] = ''\n",
+ "\n",
+ "group_counter = 1\n",
+ "\n",
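+    "# Greedy pass: give each not-yet-labeled property a new group, then relabel every later property within cluster_threshold of it\n",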
+ "for i in range(len(df_cluster)):\n",
+ " if df_cluster.at[i, 'cluster'] == '':\n",
+ " df_cluster.at[i, 'cluster'] = f'Group_{group_counter}'\n",
+ " group_counter += 1\n",
+ "\n",
+ " for j in range(i + 1, len(df_cluster)):\n",
+ " coord1 = (df_cluster.at[i, 'y'], df_cluster.at[i, 'x'])\n",
+ " coord2 = (df_cluster.at[j, 'y'], df_cluster.at[j, 'x'])\n",
+ " distance = identify_clusters(coord1, coord2)\n",
+ "\n",
+ " if distance <= cluster_threshold:\n",
+ " df_cluster.at[j, 'cluster'] = df_cluster.at[i, 'cluster']\n",
+ "\n",
+ "pd.set_option('display.float_format', '{:.15f}'.format)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "0c931958",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+    "# Keep only one housing unit per building so that a building with multiple housing units is not counted as a cluster on its own (drop duplicates)\n",
+ "df_cluster = df_cluster.drop_duplicates(subset='guid', keep='first')"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "9e6c9c83",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+    "# Remove properties that are not in a cluster by dropping rows whose \"cluster\" label appears only once\n",
+ "df_cluster = df_cluster[df_cluster.groupby('cluster')['cluster'].transform('count') > 1]\n",
+ "#df_cluster.shape"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "7a469a14",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "unique_clusters = df_cluster['cluster'].unique()\n",
+ "mapping_dict = {group: f'Cluster {str(i + 1).zfill(2)}' for i, group in enumerate(unique_clusters)}\n",
+ "\n",
+ "\n",
+ "df_cluster['cluster_label'] = df_cluster['cluster'].map(mapping_dict)\n",
+ "df_cluster = gpd.GeoDataFrame(df_cluster, geometry='geometry')\n",
+ "\n",
+ "df_cluster.explore(\n",
+    "    column=\"cluster_label\", # column with cluster label\n",
+ " tooltip=\"cluster_label\", \n",
+ " #geometry='geometry',\n",
+ " popup=True,\n",
+ " tiles='OpenStreetMap',\n",
+    "    cmap=\"Paired\", # use the \"Paired\" matplotlib colormap\n",
+ " legend_kwds = {\"caption\": \"Clusters\"},\n",
+ " marker_kwds= {'radius': 4}\n",
+ ")"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "5e146e57",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "cluster_buyout_summary = {\n",
+ " 'Potential advantages of cluster buyout': [\n",
+    "        '(1) Incentivizes participation if other neighbors are relocating<br>'+\n",
+    "        '(2) Losing tax base is a known issue of buyouts. Recreational areas developed through cluster buyouts, such as parks, help increase property values and improve physical and mental health<br>'+\n",
+    "        '(3) Boosts tourism and recreation by creating new spaces<br>'+\n",
+    "        '(4) Helps restore ecological values and enhance ecosystem services such as improved water quality and reduced risks of future hazards<br>'+\n",
+    "        '(5) Cluster buyouts align with FEMA’s open space management objectives. While certain communities have repurposed checkerboard-patterned lots for recreation, these lots generally limit open space development opportunities.'\n",
+ " ]\n",
+ "}\n",
+ "\n",
+ "cluster_buyout_table = pd.DataFrame(cluster_buyout_summary, index=None)\n",
+ "\n",
+ "cluster_buyout_table = cluster_buyout_table.style.set_properties(\n",
+ " subset=cluster_buyout_table.columns, \n",
+ " **{'text-align': 'left'}\n",
+ ").set_table_styles([\n",
+ " {'selector': 'th', 'props': [('text-align', 'center')]}\n",
+ "])\n",
+ "\n",
+ "cluster_buyout_table"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "477ee2a3-e3a0-42d3-ad7b-bd368169670a",
+ "metadata": {},
+ "outputs": [],
+ "source": []
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 3 (ipykernel)",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.9.19"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}
diff --git a/notebooks/combined_wind_wave_surge_building_dmg.ipynb b/notebooks/combined_wind_wave_surge_building_dmg.ipynb
index 108a0925..176b9633 100644
--- a/notebooks/combined_wind_wave_surge_building_dmg.ipynb
+++ b/notebooks/combined_wind_wave_surge_building_dmg.ipynb
@@ -13,7 +13,8 @@
"source": [
"from pyincore import IncoreClient, FragilityService, MappingSet, Dataset\n",
"from pyincore.analyses.combinedwindwavesurgebuildingdamage import CombinedWindWaveSurgeBuildingDamage\n",
- "from pyincore.analyses.buildingdamage import BuildingDamage"
+ "from pyincore.analyses.buildingdamage import BuildingDamage\n",
+ "from pyincore.analyses.nonstructbuildingdamage import NonStructBuildingDamage"
]
},
{
@@ -184,10 +185,11 @@
"outputs": [],
"source": [
"# flood building damage\n",
- "f_bldg_dmg = BuildingDamage(client)\n",
+ "f_bldg_dmg = NonStructBuildingDamage(client)\n",
"f_bldg_dmg.load_remote_input_dataset(\"buildings\", bldg_dataset_id)\n",
"f_bldg_dmg.set_input_dataset('dfr3_mapping_set', mapping_set)\n",
"f_bldg_dmg.set_parameter(\"result_name\", \"Galveston-flood-dmg\")\n",
+ "f_bldg_dmg.set_parameter(\"fragility_key\", \"Non-Retrofit Fragility ID Code\")\n",
"f_bldg_dmg.set_parameter(\"hazard_type\", hazard_type)\n",
"f_bldg_dmg.set_parameter(\"hazard_id\", hazard_id)\n",
"f_bldg_dmg.set_parameter(\"num_cpu\", 8)\n",
@@ -219,7 +221,7 @@
"source": [
"surge_wave_damage = sw_bldg_dmg.get_output_dataset(\"ds_result\")\n",
"wind_damage = w_bldg_dmg.get_output_dataset(\"ds_result\")\n",
- "flood_damage = f_bldg_dmg.get_output_dataset(\"ds_result\")"
+ "flood_damage = f_bldg_dmg.get_output_dataset(\"result\")"
]
},
{
@@ -279,9 +281,9 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.8.16"
+ "version": "3.9.19"
}
},
"nbformat": 4,
"nbformat_minor": 5
-}
\ No newline at end of file
+}
diff --git a/notebooks/combined_wind_wave_surge_building_loss.ipynb b/notebooks/combined_wind_wave_surge_building_loss.ipynb
index bbf205c0..96637dfc 100644
--- a/notebooks/combined_wind_wave_surge_building_loss.ipynb
+++ b/notebooks/combined_wind_wave_surge_building_loss.ipynb
@@ -13,7 +13,8 @@
"source": [
"from pyincore import IncoreClient, FragilityService, MappingSet, Dataset\n",
"from pyincore.analyses.combinedwindwavesurgebuildingloss import CombinedWindWaveSurgeBuildingLoss\n",
- "from pyincore.analyses.buildingdamage import BuildingDamage"
+ "from pyincore.analyses.buildingdamage import BuildingDamage\n",
+ "from pyincore.analyses.nonstructbuildingdamage import NonStructBuildingDamage"
]
},
{
@@ -184,10 +185,11 @@
"outputs": [],
"source": [
"# flood building damage\n",
- "f_bldg_dmg = BuildingDamage(client)\n",
+ "f_bldg_dmg = NonStructBuildingDamage(client)\n",
"f_bldg_dmg.load_remote_input_dataset(\"buildings\", bldg_dataset_id)\n",
"f_bldg_dmg.set_input_dataset('dfr3_mapping_set', mapping_set)\n",
"f_bldg_dmg.set_parameter(\"result_name\", \"Galveston-flood-dmg\")\n",
+ "f_bldg_dmg.set_parameter(\"fragility_key\", \"Non-Retrofit Fragility ID Code\")\n",
"f_bldg_dmg.set_parameter(\"hazard_type\", hazard_type)\n",
"f_bldg_dmg.set_parameter(\"hazard_id\", hazard_id)\n",
"f_bldg_dmg.set_parameter(\"num_cpu\", 8)\n",
@@ -219,7 +221,7 @@
"source": [
"surge_wave_damage = sw_bldg_dmg.get_output_dataset(\"ds_result\")\n",
"wind_damage = w_bldg_dmg.get_output_dataset(\"ds_result\")\n",
- "flood_damage = f_bldg_dmg.get_output_dataset(\"ds_result\")"
+ "flood_damage = f_bldg_dmg.get_output_dataset(\"result\")"
]
},
{
@@ -295,7 +297,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.8.10"
+ "version": "3.9.19"
}
},
"nbformat": 4,
diff --git a/notebooks/galveston_community_app.ipynb b/notebooks/galveston_community_app.ipynb
index 1596fdeb..46a4bff1 100644
--- a/notebooks/galveston_community_app.ipynb
+++ b/notebooks/galveston_community_app.ipynb
@@ -37,6 +37,7 @@
"\n",
"# importing pyIncone analyses:\n",
"from pyincore.analyses.buildingdamage import BuildingDamage\n",
+ "from pyincore.analyses.nonstructbuildingdamage import NonStructBuildingDamage\n",
"from pyincore.analyses.buildingfunctionality import BuildingFunctionality\n",
"from pyincore.analyses.combinedwindwavesurgebuildingdamage import (\n",
" CombinedWindWaveSurgeBuildingDamage,\n",
@@ -192,10 +193,11 @@
"outputs": [],
"source": [
"# flood building damage\n",
- "f_bldg_dmg = BuildingDamage(client)\n",
+ "f_bldg_dmg = NonStructBuildingDamage(client)\n",
"f_bldg_dmg.load_remote_input_dataset(\"buildings\", bldg_dataset_id)\n",
"f_bldg_dmg.set_input_dataset(\"dfr3_mapping_set\", mapping_set)\n",
"f_bldg_dmg.set_parameter(\"result_name\", \"Galveston-flood-dmg\")\n",
+ "f_bldg_dmg.set_parameter(\"fragility_key\", \"Non-Retrofit Fragility ID Code\")\n",
"f_bldg_dmg.set_parameter(\"hazard_type\", hazard_type)\n",
"f_bldg_dmg.set_parameter(\"hazard_id\", hazard_id)\n",
"f_bldg_dmg.set_parameter(\"num_cpu\", 8)\n",
@@ -219,7 +221,7 @@
"source": [
"surge_wave_damage = sw_bldg_dmg.get_output_dataset(\"ds_result\")\n",
"wind_damage = w_bldg_dmg.get_output_dataset(\"ds_result\")\n",
- "flood_damage = f_bldg_dmg.get_output_dataset(\"ds_result\")"
+ "flood_damage = f_bldg_dmg.get_output_dataset(\"result\")"
]
},
{
@@ -793,7 +795,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.12.2"
+ "version": "3.9.19"
}
},
"nbformat": 4,
diff --git a/notebooks/galveston_community_app_retrofit.ipynb b/notebooks/galveston_community_app_retrofit.ipynb
index 80bd551e..5f151235 100644
--- a/notebooks/galveston_community_app_retrofit.ipynb
+++ b/notebooks/galveston_community_app_retrofit.ipynb
@@ -37,6 +37,7 @@
"\n",
"# importing pyIncone analyses:\n",
"from pyincore.analyses.buildingdamage import BuildingDamage\n",
+ "from pyincore.analyses.nonstructbuildingdamage import NonStructBuildingDamage\n",
"from pyincore.analyses.buildingfunctionality import BuildingFunctionality\n",
"from pyincore.analyses.combinedwindwavesurgebuildingdamage import (\n",
" CombinedWindWaveSurgeBuildingDamage,\n",
@@ -188,7 +189,7 @@
"outputs": [],
"source": [
"# flood building damage\n",
- "f_bldg_dmg_retrofit = BuildingDamage(client)\n",
+ "f_bldg_dmg_retrofit = NonStructBuildingDamage(client)\n",
"f_bldg_dmg_retrofit.load_remote_input_dataset(\"buildings\", bldg_dataset_id)\n",
"\n",
"# Flood mapping\n",
@@ -198,6 +199,7 @@
"f_bldg_dmg_retrofit.load_remote_input_dataset(\"retrofit_strategy\", retrofit_plan)\n",
"f_bldg_dmg_retrofit.set_input_dataset('dfr3_mapping_set', mapping_set)\n",
"f_bldg_dmg_retrofit.set_parameter(\"result_name\", \"Galveston-w-retrofit-flood-dmg\")\n",
+ "f_bldg_dmg_retrofit.set_parameter(\"fragility_key\", \"Non-Retrofit Fragility ID Code\")\n",
"f_bldg_dmg_retrofit.set_parameter(\"hazard_type\", hazard_type)\n",
"f_bldg_dmg_retrofit.set_parameter(\"hazard_id\", hazard_id)\n",
"f_bldg_dmg_retrofit.set_parameter(\"num_cpu\", 8)\n",
@@ -221,7 +223,7 @@
"source": [
"surge_wave_damage_r = sw_bldg_dmg_retrofit.get_output_dataset(\"ds_result\")\n",
"wind_damage = w_bldg_dmg.get_output_dataset(\"ds_result\") # no change\n",
- "flood_damage_r = f_bldg_dmg_retrofit.get_output_dataset(\"ds_result\")"
+ "flood_damage_r = f_bldg_dmg_retrofit.get_output_dataset(\"result\")"
]
},
{
@@ -800,7 +802,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.9.18"
+ "version": "3.9.19"
}
},
"nbformat": 4,
diff --git a/notebooks/gas_facility_dmg.ipynb b/notebooks/gas_facility_dmg.ipynb
new file mode 100644
index 00000000..a8d35ed1
--- /dev/null
+++ b/notebooks/gas_facility_dmg.ipynb
@@ -0,0 +1,293 @@
+{
+ "cells": [
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "collapsed": false,
+ "jupyter": {
+ "outputs_hidden": false
+ },
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
+ "outputs": [],
+ "source": [
+ "from pyincore import IncoreClient, FragilityService, MappingSet\n",
+ "from pyincore.analyses.gasfacilitydamage import GasFacilityDamage"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "collapsed": false,
+ "jupyter": {
+ "outputs_hidden": false
+ },
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
+ "outputs": [],
+ "source": [
+ "# Connect to IN-CORE service\n",
+ "client = IncoreClient()"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+    "### Gas Facility damage for Shelby County, TN with New Madrid earthquake."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "collapsed": false,
+ "jupyter": {
+ "outputs_hidden": false
+ },
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
+ "outputs": [],
+ "source": [
+    "# New Madrid earthquake using the Atkinson and Boore (1995) attenuation model\n",
+ "hazard_type = \"earthquake\"\n",
+ "hazard_id = \"5b902cb273c3371e1236b36b\"\n",
+ "\n",
+    "# Gas facility inventory in Shelby County, TN\n",
+ "gas_facility_id = \"5a284f26c7d30d13bc081bb8\"\n",
+ "\n",
+ "# Gas Facility Fragility mapping\n",
+ "mapping_id = \"5b47c292337d4a38568f8386\""
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "collapsed": false,
+ "jupyter": {
+ "outputs_hidden": false
+ },
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
+ "outputs": [],
+ "source": [
+ "# Create gas facility damage\n",
+ "gas_facility_dmg = GasFacilityDamage(client)\n",
+ "\n",
+ "# Load input dataset\n",
+ "gas_facility_dmg.load_remote_input_dataset(\"gas_facilities\", gas_facility_id)\n",
+ "\n",
+ "# Load fragility mapping\n",
+ "fragility_service = FragilityService(client)\n",
+ "mapping_set = MappingSet(fragility_service.get_mapping(mapping_id))\n",
+ "gas_facility_dmg.set_input_dataset(\"dfr3_mapping_set\", mapping_set)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "collapsed": false,
+ "jupyter": {
+ "outputs_hidden": false
+ },
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
+ "outputs": [],
+ "source": [
+ "# Specify the result name\n",
+ "result_name = \"memphis_gas_facility_result\"\n",
+ "\n",
+ "# Set analysis parameters\n",
+ "gas_facility_dmg.set_parameter(\"result_name\", result_name)\n",
+ "gas_facility_dmg.set_parameter(\"hazard_type\", hazard_type)\n",
+ "gas_facility_dmg.set_parameter(\"hazard_id\", hazard_id)\n",
+ "gas_facility_dmg.set_parameter(\"num_cpu\", 1)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "collapsed": false,
+ "jupyter": {
+ "outputs_hidden": false
+ },
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
+ "outputs": [],
+ "source": [
+ "# Run gas facility damage analysis\n",
+ "gas_facility_dmg.run_analysis()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "collapsed": false,
+ "jupyter": {
+ "outputs_hidden": false
+ },
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
+ "outputs": [],
+ "source": [
+ "# Retrieve result dataset\n",
+ "result = gas_facility_dmg.get_output_dataset(\"result\")\n",
+ "\n",
+ "# Convert dataset to Pandas DataFrame\n",
+ "df = result.get_dataframe_from_csv()\n",
+ "\n",
+ "# Display top 5 rows of output data\n",
+ "df.head()"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+    "### Gas Facility damage for Shelby County, TN with New Madrid earthquake and liquefaction."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+    "# Geology dataset providing liquefaction susceptibility (required when liquefaction is enabled)\n",
+ "liq_geology_dataset_id = \"5a284f53c7d30d13bc08249c\""
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Create gas facility damage\n",
+ "gas_facility_dmg = GasFacilityDamage(client)\n",
+ "\n",
+ "# Load input dataset\n",
+ "gas_facility_dmg.load_remote_input_dataset(\"gas_facilities\", gas_facility_id)\n",
+ "\n",
+ "# Load fragility mapping\n",
+ "fragility_service = FragilityService(client)\n",
+ "mapping_set = MappingSet(fragility_service.get_mapping(mapping_id))\n",
+ "gas_facility_dmg.set_input_dataset(\"dfr3_mapping_set\", mapping_set)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Specify the result name\n",
+ "result_name = \"memphis_gas_facility_dmg_result-liquefaction\"\n",
+ "\n",
+ "# Set analysis parameters\n",
+ "gas_facility_dmg.set_parameter(\"result_name\", result_name)\n",
+ "gas_facility_dmg.set_parameter(\"hazard_type\", hazard_type)\n",
+ "gas_facility_dmg.set_parameter(\"hazard_id\", hazard_id)\n",
+ "gas_facility_dmg.set_parameter(\"num_cpu\", 1)\n",
+ "\n",
+ "# Set analysis parameters for liquefaction\n",
+ "gas_facility_dmg.set_parameter(\"use_liquefaction\", True)\n",
+ "gas_facility_dmg.set_parameter(\"liquefaction_geology_dataset_id\", liq_geology_dataset_id)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Run gas facility damage analysis\n",
+ "gas_facility_dmg.run_analysis()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Retrieve result dataset\n",
+ "result = gas_facility_dmg.get_output_dataset(\"result\")\n",
+ "\n",
+ "# Convert dataset to Pandas DataFrame\n",
+ "df = result.get_dataframe_from_csv()\n",
+ "\n",
+ "# Display top 5 rows of output data\n",
+ "df.head()"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "#### Creating a chart"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "collapsed": false,
+ "jupyter": {
+ "outputs_hidden": false
+ },
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
+ "outputs": [],
+ "source": [
+ "ax = df['DS_4'].hist(bins=20, figsize=[10,5])\n",
+    "ax.set_title(\"Complete damage (DS_4) distribution\", fontsize=12)\n",
+    "ax.set_xlabel(\"Probability of complete damage\", fontsize=12)\n",
+    "ax.set_ylabel(\"Count\", fontsize=12)"
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 3 (ipykernel)",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.9.19"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 4
+}
diff --git a/notebooks/mc_failure_prob.ipynb b/notebooks/mc_failure_prob.ipynb
deleted file mode 100644
index e1bcbf87..00000000
--- a/notebooks/mc_failure_prob.ipynb
+++ /dev/null
@@ -1,482 +0,0 @@
-{
- "cells": [
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "scrolled": true
- },
- "outputs": [],
- "source": [
- "from pyincore import IncoreClient, FragilityService, MappingSet\n",
- "from pyincore.analyses.montecarlofailureprobability import MonteCarloFailureProbability\n",
- "from pyincore.analyses.buildingdamage import BuildingDamage\n",
- "from pyincore.analyses.waterfacilitydamage import WaterFacilityDamage"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "## Monte Carlo failure probability analysis"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "client = IncoreClient()"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "### Chaining with Building damage analysis"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "bldg_dmg = BuildingDamage(client)\n",
- "# Memphis hospitals\n",
- "bldg_dmg.load_remote_input_dataset(\"buildings\", \"5a284f0bc7d30d13bc081a28\")\n",
- "\n",
- "# Load fragility mapping\n",
- "fragility_service = FragilityService(client)\n",
- "mapping_set = MappingSet(fragility_service.get_mapping(\"5b47b350337d4a3629076f2c\"))\n",
- "bldg_dmg.set_input_dataset(\"dfr3_mapping_set\", mapping_set)"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "pycharm": {
- "name": "#%%\n"
- }
- },
- "outputs": [],
- "source": [
- "bldg_dmg.set_parameter(\"hazard_type\", \"earthquake\")\n",
- "# Memphis 7.9 AB-95, New madrid Atkinson and Boore 1995 model based hazard\n",
- "bldg_dmg.set_parameter(\"hazard_id\", \"5b902cb273c3371e1236b36b\")\n",
- "bldg_dmg.set_parameter(\"num_cpu\", 1)\n",
- "\n",
- "bldg_dmg.set_parameter(\"result_name\", \"memphis_hospitals_damage\")"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "pycharm": {
- "name": "#%%\n"
- }
- },
- "outputs": [],
- "source": [
- "bldg_dmg.run_analysis()"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "building_dmg_result = bldg_dmg.get_output_dataset('ds_result')\n",
- "\n",
- "mc = MonteCarloFailureProbability(client)\n",
- "mc.set_input_dataset(\"damage\", building_dmg_result)\n",
- "mc.set_parameter(\"result_name\", \"bldg_mc\")\n",
- "mc.set_parameter(\"num_cpu\", 8)\n",
- "mc.set_parameter(\"num_samples\", 10)\n",
- "mc.set_parameter(\"damage_interval_keys\", [\"DS_0\", \"DS_1\", \"DS_2\", \"DS_3\"])\n",
- "mc.set_parameter(\"failure_state_keys\", [\"DS_1\", \"DS_2\", \"DS_3\"])"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "mc.run_analysis()"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "result_fs = mc.get_output_dataset(\"sample_failure_state\").get_dataframe_from_csv()\n",
- "result_fs.head()"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "### Chaining with water facility damage"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "wf_dmg = WaterFacilityDamage(client)\n",
- "# Memphis water facility\n",
- "wf_dmg.load_remote_input_dataset(\"water_facilities\", \"5a284f2ac7d30d13bc081e52\")\n",
- "\n",
- "# Load fragility mapping\n",
- "mapping_set = MappingSet(fragility_service.get_mapping(\"5b47c383337d4a387669d592\"))\n",
- "wf_dmg.set_input_dataset(\"dfr3_mapping_set\", mapping_set)"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "pycharm": {
- "name": "#%%\n"
- }
- },
- "outputs": [],
- "source": [
- "wf_dmg.set_parameter(\"hazard_type\", \"earthquake\")\n",
- "# Memphis 7.9 AB-95, New madrid Atkinson and Boore 1995 model based hazard\n",
- "wf_dmg.set_parameter(\"hazard_id\", \"5b902cb273c3371e1236b36b\")\n",
- "wf_dmg.set_parameter(\"fragility_key\", \"pga\")\n",
- "wf_dmg.set_parameter(\"use_liquefaction\", True)\n",
- "wf_dmg.set_parameter(\"liquefaction_geology_dataset_id\", \"5a284f53c7d30d13bc08249c\")\n",
- "wf_dmg.set_parameter(\"liquefaction_fragility_key\", \"pgd\")\n",
- "wf_dmg.set_parameter(\"use_hazard_uncertainty\", False)\n",
- "wf_dmg.set_parameter(\"num_cpu\", 1)\n",
- "\n",
- "wf_dmg.set_parameter(\"result_name\", \"wf_dmg_results\")"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 11,
- "metadata": {
- "pycharm": {
- "name": "#%%\n"
- }
- },
- "outputs": [
- {
- "data": {
- "text/plain": [
- "True"
- ]
- },
- "execution_count": 11,
- "metadata": {},
- "output_type": "execute_result"
- }
- ],
- "source": [
- "wf_dmg.run_analysis()"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 12,
- "metadata": {},
- "outputs": [
- {
- "data": {
- "text/plain": [
- "True"
- ]
- },
- "execution_count": 12,
- "metadata": {},
- "output_type": "execute_result"
- }
- ],
- "source": [
- "water_facility_dmg_result = wf_dmg.get_output_dataset('result')\n",
- "\n",
- "mc = MonteCarloFailureProbability(client)\n",
- "mc.set_input_dataset(\"damage\", water_facility_dmg_result)\n",
- "mc.set_parameter(\"num_cpu\", 8)\n",
- "mc.set_parameter(\"num_samples\", 10)\n",
- "mc.set_parameter(\"damage_interval_keys\", [\"DS_0\", \"DS_1\", \"DS_2\", \"DS_3\", \"DS_4\"])\n",
- "mc.set_parameter(\"failure_state_keys\", [\"DS_1\", \"DS_2\", \"DS_3\", \"DS_4\"])\n",
- "\n",
- "mc.set_parameter(\"result_name\", \"water_mc\")"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 13,
- "metadata": {
- "pycharm": {
- "name": "#%%\n"
- }
- },
- "outputs": [
- {
- "data": {
- "text/plain": [
- "True"
- ]
- },
- "execution_count": 13,
- "metadata": {},
- "output_type": "execute_result"
- }
- ],
- "source": [
- "mc.run_analysis()"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 14,
- "metadata": {},
- "outputs": [
- {
- "data": {
- "text/html": [
-        "[HTML rendering of the DataFrame omitted; see the text/plain output below]"
- ],
- "text/plain": [
- " id failure\n",
- "0 NaN 0,0,0,0,0,0,0,0,0,0\n",
- "1 NaN 0,0,0,0,0,0,0,0,0,0\n",
- "2 NaN 0,0,0,0,0,0,0,0,0,0\n",
- "3 NaN 0,0,1,0,0,0,0,0,0,0\n",
- "4 NaN 0,0,0,0,0,0,0,0,0,0"
- ]
- },
- "execution_count": 14,
- "metadata": {},
- "output_type": "execute_result"
- }
- ],
- "source": [
- "result_fs = mc.get_output_dataset(\"sample_failure_state\")\n",
- "df_fs = result_fs.get_dataframe_from_csv()\n",
- "df_fs.head()"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 15,
- "metadata": {},
- "outputs": [
- {
- "data": {
- "text/html": [
-        "[HTML rendering of the DataFrame omitted; see the text/plain output below]"
- "text/plain": [
- " LS_0 LS_1 LS_2 LS_3 DS_0 DS_1 DS_2 \\\n",
- "0 0.987977 0.912770 0.765555 0.680618 0.012023 0.075208 0.147215 \n",
- "1 0.992818 0.941305 0.826592 0.751147 0.007182 0.051512 0.114713 \n",
- "2 0.992745 0.940960 0.826160 0.751004 0.007255 0.051785 0.114800 \n",
- "3 0.991617 0.935806 0.819960 0.749020 0.008383 0.055810 0.115847 \n",
- "4 0.991851 0.936846 0.821175 0.749399 0.008149 0.055005 0.115672 \n",
- "\n",
- " DS_3 DS_4 failure_probability \n",
- "0 0.084937 0.680618 1.0 \n",
- "1 0.075445 0.751147 1.0 \n",
- "2 0.075156 0.751004 1.0 \n",
- "3 0.070939 0.749020 0.9 \n",
- "4 0.071775 0.749399 1.0 "
- ]
- },
- "execution_count": 15,
- "metadata": {},
- "output_type": "execute_result"
- }
- ],
- "source": [
- "result_fp = mc.get_output_dataset(\"failure_probability\")\n",
- "df_fp = result_fp.get_dataframe_from_csv()\n",
- "df_fp.head()"
- ]
- }
- ],
- "metadata": {
- "kernelspec": {
- "display_name": "Python 3",
- "language": "python",
- "name": "python3"
- },
- "language_info": {
- "codemirror_mode": {
- "name": "ipython",
- "version": 3
- },
- "file_extension": ".py",
- "mimetype": "text/x-python",
- "name": "python",
- "nbconvert_exporter": "python",
- "pygments_lexer": "ipython3",
- "version": "3.7.10"
- }
- },
- "nbformat": 4,
- "nbformat_minor": 4
-}
diff --git a/notebooks/mc_limit_state_prob.ipynb b/notebooks/mc_limit_state_prob.ipynb
new file mode 100644
index 00000000..4727624a
--- /dev/null
+++ b/notebooks/mc_limit_state_prob.ipynb
@@ -0,0 +1,256 @@
+{
+ "cells": [
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "scrolled": true
+ },
+ "outputs": [],
+ "source": [
+ "from pyincore import IncoreClient, FragilityService, MappingSet\n",
+ "from pyincore.analyses.montecarlolimitstateprobability import MonteCarloLimitStateProbability\n",
+ "from pyincore.analyses.buildingdamage import BuildingDamage\n",
+ "from pyincore.analyses.waterfacilitydamage import WaterFacilityDamage"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Monte Carlo limit state probability analysis"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "client = IncoreClient()"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "### Chaining with Building damage analysis"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "bldg_dmg = BuildingDamage(client)\n",
+ "# Memphis hospitals\n",
+ "bldg_dmg.load_remote_input_dataset(\"buildings\", \"5a284f0bc7d30d13bc081a28\")\n",
+ "\n",
+ "# Load fragility mapping\n",
+ "fragility_service = FragilityService(client)\n",
+ "mapping_set = MappingSet(fragility_service.get_mapping(\"5b47b350337d4a3629076f2c\"))\n",
+ "bldg_dmg.set_input_dataset(\"dfr3_mapping_set\", mapping_set)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
+ "outputs": [],
+ "source": [
+ "bldg_dmg.set_parameter(\"hazard_type\", \"earthquake\")\n",
+ "# Memphis 7.9 AB-95, New madrid Atkinson and Boore 1995 model based hazard\n",
+ "bldg_dmg.set_parameter(\"hazard_id\", \"5b902cb273c3371e1236b36b\")\n",
+ "bldg_dmg.set_parameter(\"num_cpu\", 1)\n",
+ "\n",
+ "bldg_dmg.set_parameter(\"result_name\", \"memphis_hospitals_damage\")"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
+ "outputs": [],
+ "source": [
+ "bldg_dmg.run_analysis()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "building_dmg_result = bldg_dmg.get_output_dataset('ds_result')\n",
+ "\n",
+ "mc = MonteCarloLimitStateProbability(client)\n",
+ "mc.set_input_dataset(\"damage\", building_dmg_result)\n",
+ "mc.set_parameter(\"result_name\", \"bldg_mc\")\n",
+ "mc.set_parameter(\"num_cpu\", 8)\n",
+ "mc.set_parameter(\"num_samples\", 10)\n",
+ "mc.set_parameter(\"damage_interval_keys\", [\"DS_0\", \"DS_1\", \"DS_2\", \"DS_3\"])\n",
+ "mc.set_parameter(\"failure_state_keys\", [\"DS_1\", \"DS_2\", \"DS_3\"])"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "mc.run_analysis()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "result_fs = mc.get_output_dataset(\"sample_failure_state\").get_dataframe_from_csv()\n",
+ "result_fs.head()"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "### Chaining with water facility damage"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "wf_dmg = WaterFacilityDamage(client)\n",
+ "# Memphis water facility\n",
+ "wf_dmg.load_remote_input_dataset(\"water_facilities\", \"5a284f2ac7d30d13bc081e52\")\n",
+ "\n",
+ "# Load fragility mapping\n",
+ "mapping_set = MappingSet(fragility_service.get_mapping(\"5b47c383337d4a387669d592\"))\n",
+ "wf_dmg.set_input_dataset(\"dfr3_mapping_set\", mapping_set)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
+ "outputs": [],
+ "source": [
+ "wf_dmg.set_parameter(\"hazard_type\", \"earthquake\")\n",
+ "# Memphis 7.9 AB-95, New madrid Atkinson and Boore 1995 model based hazard\n",
+ "wf_dmg.set_parameter(\"hazard_id\", \"5b902cb273c3371e1236b36b\")\n",
+ "wf_dmg.set_parameter(\"fragility_key\", \"pga\")\n",
+ "wf_dmg.set_parameter(\"use_liquefaction\", True)\n",
+ "wf_dmg.set_parameter(\"liquefaction_geology_dataset_id\", \"5a284f53c7d30d13bc08249c\")\n",
+ "wf_dmg.set_parameter(\"liquefaction_fragility_key\", \"pgd\")\n",
+ "wf_dmg.set_parameter(\"use_hazard_uncertainty\", False)\n",
+ "wf_dmg.set_parameter(\"num_cpu\", 1)\n",
+ "\n",
+ "wf_dmg.set_parameter(\"result_name\", \"wf_dmg_results\")"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
+ "outputs": [],
+ "source": [
+ "wf_dmg.run_analysis()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "water_facility_dmg_result = wf_dmg.get_output_dataset('result')\n",
+ "\n",
+ "mc = MonteCarloLimitStateProbability(client)\n",
+ "mc.set_input_dataset(\"damage\", water_facility_dmg_result)\n",
+ "mc.set_parameter(\"num_cpu\", 8)\n",
+ "mc.set_parameter(\"num_samples\", 10)\n",
+ "mc.set_parameter(\"damage_interval_keys\", [\"DS_0\", \"DS_1\", \"DS_2\", \"DS_3\", \"DS_4\"])\n",
+ "mc.set_parameter(\"failure_state_keys\", [\"DS_1\", \"DS_2\", \"DS_3\", \"DS_4\"])\n",
+ "\n",
+ "mc.set_parameter(\"result_name\", \"water_mc\")"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
+ "outputs": [],
+ "source": [
+ "mc.run_analysis()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "result_fs = mc.get_output_dataset(\"sample_failure_state\")\n",
+ "df_fs = result_fs.get_dataframe_from_csv()\n",
+ "df_fs.head()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "result_fp = mc.get_output_dataset(\"failure_probability\")\n",
+ "df_fp = result_fp.get_dataframe_from_csv()\n",
+ "df_fp.head()"
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 3 (ipykernel)",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.9.19"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 4
+}
diff --git a/notebooks/mean_dmg.ipynb b/notebooks/mean_dmg.ipynb
index 68bb892c..03ceae14 100644
--- a/notebooks/mean_dmg.ipynb
+++ b/notebooks/mean_dmg.ipynb
@@ -330,7 +330,7 @@
"md.set_input_dataset(\"damage\", non_structural_building_damage_result)\n",
"md.load_remote_input_dataset(\"dmg_ratios\", \"5a284f2ec7d30d13bc08207c\")\n",
"md.set_parameter(\"result_name\", \"non_structural_mean_damage_AS\")\n",
- "md.set_parameter(\"damage_interval_keys\", [\"AS_DS_0\", \"AS_DS_1\", \"AS_DS_2\", \"AS_DS_3\"])\n",
+ "md.set_parameter(\"damage_interval_keys\", [\"DS_0\", \"DS_1\", \"DS_2\", \"DS_3\"])\n",
"md.set_parameter(\"num_cpu\", 1)\n",
"md.run_analysis()\n"
]
@@ -352,7 +352,7 @@
"md.set_input_dataset(\"damage\", non_structural_building_damage_result)\n",
"md.load_remote_input_dataset(\"dmg_ratios\", \"5a284f2ec7d30d13bc082090\")\n",
"md.set_parameter(\"result_name\", \"non_structural_mean_damage_DS\")\n",
- "md.set_parameter(\"damage_interval_keys\", [\"DS_DS_0\", \"DS_DS_1\", \"DS_DS_2\", \"DS_DS_3\"])\n",
+ "md.set_parameter(\"damage_interval_keys\", [\"DS_0\", \"DS_1\", \"DS_2\", \"DS_3\"])\n",
"md.set_parameter(\"num_cpu\", 1)\n",
"md.run_analysis()\n"
]
@@ -374,7 +374,7 @@
"md.set_input_dataset(\"damage\", non_structural_building_damage_result)\n",
"md.load_remote_input_dataset(\"dmg_ratios\", \"5a284f2ec7d30d13bc082086\")\n",
"md.set_parameter(\"result_name\", \"non_structural_mean_damage_Content\")\n",
- "md.set_parameter(\"damage_interval_keys\", [\"AS_DS_0\", \"AS_DS_1\", \"AS_DS_2\", \"AS_DS_3\"])\n",
+ "md.set_parameter(\"damage_interval_keys\", [\"DS_0\", \"DS_1\", \"DS_2\", \"DS_3\"])\n",
"md.set_parameter(\"num_cpu\", 1)\n",
"md.run_analysis()\n"
]
@@ -398,7 +398,7 @@
],
"metadata": {
"kernelspec": {
- "display_name": "Python 3",
+ "display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
@@ -412,9 +412,9 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.8.10"
+ "version": "3.9.19"
}
},
"nbformat": 4,
"nbformat_minor": 4
-}
\ No newline at end of file
+}
diff --git a/notebooks/non_structural_building_dmg.ipynb b/notebooks/non_structural_building_dmg.ipynb
deleted file mode 100644
index 7e996960..00000000
--- a/notebooks/non_structural_building_dmg.ipynb
+++ /dev/null
@@ -1,185 +0,0 @@
-{
- "cells": [
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "from pyincore import IncoreClient, FragilityService, MappingSet\n",
- "from pyincore.analyses.nonstructbuildingdamage import NonStructBuildingDamage"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "## set input parameters and input datasets"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "client = IncoreClient()\n",
- "\n",
- "# Memphis 7.9 AB-95\n",
- "hazard_type = \"earthquake\"\n",
- "hazard_id = \"5b902cb273c3371e1236b36b\"\n",
- "\n",
- "# damage ratio \n",
- "dmg_ratio_id_as = \"5a284f2ec7d30d13bc08207c\"\n",
- "dmg_ratio_id_ds = \"5a284f2ec7d30d13bc082090\"\n",
- "dmg_ratio_id_content = \"5a284f2ec7d30d13bc082086\"\n",
- "\n",
- "# Shelby County Essential Facilities\n",
- "building_dataset_id = \"5a284f42c7d30d13bc0821ba\"\n",
- "\n",
- "# Default Building Fragility Mapping v1.0\n",
- "mapping_id = \"5b47b350337d4a3629076f2c\""
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "## set object and fetch datasets based on parameters"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "non_structural_building_dmg = NonStructBuildingDamage(client)\n",
- "\n",
- "# Load input datasets\n",
- "non_structural_building_dmg.load_remote_input_dataset(\"buildings\", building_dataset_id)\n",
- "\n",
- "# Load fragility mapping\n",
- "fragility_service = FragilityService(client)\n",
- "mapping_set = MappingSet(fragility_service.get_mapping(mapping_id))\n",
- "non_structural_building_dmg.set_input_dataset(\"dfr3_mapping_set\", mapping_set)"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "pycharm": {
- "name": "#%%\n"
- }
- },
- "outputs": [],
- "source": [
- "# Specify the result name\n",
- "result_name = \"non_structural_building_dmg_result\"\n",
- "\n",
- "# Set analysis parameters\n",
- "non_structural_building_dmg.set_parameter(\"result_name\", result_name)\n",
- "non_structural_building_dmg.set_parameter(\"hazard_type\", hazard_type)\n",
- "non_structural_building_dmg.set_parameter(\"hazard_id\", hazard_id)\n",
- "non_structural_building_dmg.set_parameter(\"num_cpu\", 4)"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "### use liquefaction (slow)"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "# Shelby County Liquefaction Susceptibility\n",
- "use_liquefaction = True\n",
- "liq_geology_dataset_id = \"5a284f55c7d30d13bc0824ba\"\n",
- "\n",
- "non_structural_building_dmg.set_parameter(\"use_liquefaction\", use_liquefaction)\n",
- "non_structural_building_dmg.set_parameter(\"liq_geology_dataset_id\", liq_geology_dataset_id)"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "## Run analysis"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "non_structural_building_dmg.run_analysis()"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "pycharm": {
- "name": "#%%\n"
- }
- },
- "outputs": [],
- "source": [
- "# Retrieve result dataset\n",
- "result = non_structural_building_dmg.get_output_dataset(\"result\")\n",
- "\n",
- "# Convert dataset to Pandas DataFrame\n",
- "df = result.get_dataframe_from_csv()"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "## Visualize"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "pycharm": {
- "name": "#%%\n"
- }
- },
- "outputs": [],
- "source": [
- "# Display top 5 rows of output data\n",
- "df.head()"
- ]
- }
- ],
- "metadata": {
- "kernelspec": {
- "display_name": "Python 3",
- "language": "python",
- "name": "python3"
- },
- "language_info": {
- "codemirror_mode": {
- "name": "ipython",
- "version": 3
- },
- "file_extension": ".py",
- "mimetype": "text/x-python",
- "name": "python",
- "nbconvert_exporter": "python",
- "pygments_lexer": "ipython3",
- "version": "3.7.10"
- }
- },
- "nbformat": 4,
- "nbformat_minor": 4
-}
diff --git a/notebooks/social_vulnerability_score.ipynb b/notebooks/social_vulnerability_score.ipynb
new file mode 100644
index 00000000..82261774
--- /dev/null
+++ b/notebooks/social_vulnerability_score.ipynb
@@ -0,0 +1,308 @@
+{
+ "cells": [
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "pycharm": {
+ "is_executing": true
+ }
+ },
+ "outputs": [],
+ "source": [
+ "import pandas as pd\n",
+ "import numpy as np\n",
+ "\n",
+ "from pyincore import IncoreClient\n",
+ "from pyincore.analyses.socialvulnerabilityscore import SocialVulnerabilityScore\n",
+ "from pyincore_data.censusutil import CensusUtil\n",
+ "from pyincore import Dataset"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "pycharm": {
+ "is_executing": true,
+ "name": "#%%\n"
+ }
+ },
+ "source": [
+ "# Social vulnerability score analysis for Douglas, Kansas census tracts (2019)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "pycharm": {
+ "is_executing": true
+ }
+ },
+ "outputs": [],
+ "source": [
+ "# Parameters\n",
+ "state = \"kansas\"\n",
+ "county = \"douglas\"\n",
+ "year = 2019"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# get fips code to use fetch census data\n",
+ "fips = CensusUtil.get_fips_by_state_county(state=state, county=county)\n",
+ "state_code = fips[:2]\n",
+ "county_code = fips[2:]\n",
+ "fips"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "### construct the Social vulnerability demographic factors"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "def demographic_factors(state_number, county_number, year, geo_type=\"tract:*\"):\n",
+ "\n",
+ " api_json_1, df_1, ds_1 = CensusUtil.get_census_data(state=state_code, county=county_code, year=year,\n",
+ " data_source=\"acs/acs5\",\n",
+ " columns=\"GEO_ID,B03002_001E,B03002_003E\",\n",
+ " geo_type=geo_type)\n",
+ " df_1[\"factor_white_nonHispanic\"] = df_1[[\"B03002_001E\",\"B03002_003E\"]].astype(int).apply(lambda row: row[\"B03002_003E\"]/row[\"B03002_001E\"], axis = 1)\n",
+ "\n",
+ " api_json_2, df_2, ds_2 = CensusUtil.get_census_data(state=state_code, county=county_code, year=year,\n",
+ " data_source=\"acs/acs5\",\n",
+ " columns=\"B25003_001E,B25003_002E\",\n",
+ " geo_type=geo_type)\n",
+ " df_2[\"factor_owner_occupied\"] = df_2.astype(int).apply(lambda row: row[\"B25003_002E\"]/row[\"B25003_001E\"], axis = 1)\n",
+ " \n",
+ " api_json_3, df_3, ds_3 = CensusUtil.get_census_data(state=state_code, \n",
+ " county=county_code, \n",
+ " year=year,\n",
+ " data_source=\"acs/acs5\",\n",
+ " columns=\"B17021_001E,B17021_002E\",\n",
+ " geo_type=geo_type)\n",
+ " df_3[\"factor_earning_higher_than_national_poverty_rate\"] = df_3.astype(int).apply(lambda row: 1-row[\"B17021_002E\"]/row[\"B17021_001E\"], axis = 1)\n",
+ " \n",
+ " api_json_4, df_4, ds_4 = CensusUtil.get_census_data(state=state_code, \n",
+ " county=county_code, \n",
+ " year=year,\n",
+ " data_source=\"acs/acs5\", \n",
+ " columns=\"B15003_001E,B15003_017E,B15003_018E,B15003_019E,B15003_020E,B15003_021E,B15003_022E,B15003_023E,B15003_024E,B15003_025E\",\n",
+ " geo_type=geo_type)\n",
+ " df_4[\"factor_over_25_with_high_school_diploma_or_higher\"] = df_4.astype(int).apply(lambda row: (row[\"B15003_017E\"] \n",
+ " + row[\"B15003_018E\"] \n",
+ " + row[\"B15003_019E\"] \n",
+ " + row[\"B15003_020E\"] \n",
+ " + row[\"B15003_021E\"] \n",
+ " + row[\"B15003_022E\"] \n",
+ " + row[\"B15003_023E\"] \n",
+ " + row[\"B15003_024E\"] \n",
+ " + row[\"B15003_025E\"])/row[\"B15003_001E\"], axis = 1)\n",
+ "\n",
+ " if geo_type == 'tract:*':\n",
+ " api_json_5, df_5, ds_5 = CensusUtil.get_census_data(state=state_code, \n",
+ " county=county_code, \n",
+ " year=year,\n",
+ " data_source=\"acs/acs5\",\n",
+ " columns=\"B18101_001E,B18101_011E,B18101_014E,B18101_030E,B18101_033E\",\n",
+ " geo_type=geo_type)\n",
+ " df_5[\"factor_without_disability_age_18_to_65\"] = df_5.astype(int).apply(lambda row: (row[\"B18101_011E\"] + row[\"B18101_014E\"] + row[\"B18101_030E\"] + row[\"B18101_033E\"])/row[\"B18101_001E\"], axis = 1)\n",
+ " \n",
+ " elif geo_type == 'block%20group:*':\n",
+ " api_json_5, df_5, ds_5 = CensusUtil.get_census_data(state=state_code, \n",
+ " county=county_code, \n",
+ " year=year,\n",
+ " data_source=\"acs/acs5\",\n",
+ " columns=\"B01003_001E,C21007_006E,C21007_009E,C21007_013E,C21007_016E\",\n",
+ " geo_type=geo_type)\n",
+ "\n",
+ " df_5['factor_without_disability_age_18_to_65'] = df_5.astype(int).apply(lambda row: (row['C21007_006E']+\n",
+ " row['C21007_006E']+\n",
+ " row['C21007_009E']+\n",
+ " row['C21007_013E'])\n",
+ " /row['C21007_016E'], axis = 1)\n",
+ "\n",
+ " df_t = pd.concat([df_1[[\"GEO_ID\",\"factor_white_nonHispanic\"]],\n",
+ " df_2[\"factor_owner_occupied\"],\n",
+ " df_3[\"factor_earning_higher_than_national_poverty_rate\"], \n",
+ " df_4[\"factor_over_25_with_high_school_diploma_or_higher\"],\n",
+ " df_5[\"factor_without_disability_age_18_to_65\"]], \n",
+ " axis=1, join='inner')\n",
+ " \n",
+ " # extract FIPS from geo id\n",
+ " df_t[\"FIPS\"] = df_t.apply(lambda row: row[\"GEO_ID\"].split(\"US\")[1], axis = 1)\n",
+ " \n",
+ " return df_t"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "def national_ave_values (year, data_source=\"acs/acs5\"):\n",
+ " api_json_1, nav1, ds_1 = CensusUtil.get_census_data(state=\"*\", county=None, year=year, data_source=data_source,\n",
+ " columns=\"B03002_001E,B03002_003E\",geo_type=None)\n",
+ " nav1 = nav1.astype(int)\n",
+ " nav1_avg ={\"feature\": \"NAV-1: White, nonHispanic\", \n",
+ " \"average\": nav1['B03002_003E'].sum()/ nav1['B03002_001E'].sum()}\n",
+ "\n",
+ " api_json_2, nav2, ds_2 = CensusUtil.get_census_data(state=\"*\", county=None, year=year, data_source=data_source,\n",
+ " columns=\"B25003_001E,B25003_002E\",geo_type=None)\n",
+ " nav2 = nav2.astype(int)\n",
+ " nav2_avg = {\"feature\": \"NAV-2: Home Owners\", \n",
+ " \"average\": nav2['B25003_002E'].sum()/nav2['B25003_001E'].sum()}\n",
+ "\n",
+ " api_json_3, nav3, ds_3 = CensusUtil.get_census_data(state=\"*\", county=None, year=year, data_source=data_source,\n",
+ " columns=\"B17021_001E,B17021_002E\",geo_type=None)\n",
+ " nav3 = nav3.astype(int)\n",
+ " nav3_avg = {\"feature\": \"NAV-3: earning higher than national poverty rate\", \n",
+ " \"average\": 1-nav3['B17021_002E'].sum()/nav3['B17021_001E'].sum()}\n",
+ "\n",
+ " api_json_4, nav4, ds_4 = CensusUtil.get_census_data(state=\"*\", \n",
+ " county=None, \n",
+ " year=year,\n",
+ " data_source=\"acs/acs5\",\n",
+ " columns=\"B15003_001E,B15003_017E,B15003_018E,B15003_019E,B15003_020E,B15003_021E,B15003_022E,B15003_023E,B15003_024E,B15003_025E\",\n",
+ " geo_type=None)\n",
+ " nav4 = nav4.astype(int)\n",
+ " nav4['temp'] = nav4.apply(lambda row: row['B15003_017E']+row['B15003_018E']+row['B15003_019E']+\n",
+ " row['B15003_020E']+row['B15003_021E']+row['B15003_022E']+row['B15003_023E']+\n",
+ " row['B15003_024E']+row['B15003_025E'], axis = 1)\n",
+ " nav4_avg = {\"feature\": 'NAV-4: over 25 with high school diploma or higher', \n",
+ " \"average\": nav4['temp'].sum()/nav4['B15003_001E'].sum()}\n",
+ "\n",
+ " api_json_5, nav5, ds_5 = CensusUtil.get_census_data(state=\"*\", county=None, year=year, data_source=data_source,\n",
+ " columns=\"B18101_001E,B18101_011E,B18101_014E,B18101_030E,B18101_033E\",\n",
+ " geo_type=None)\n",
+ " nav5 = nav5.astype(int)\n",
+ " nav5['temp'] = nav5.apply(lambda row: row['B18101_011E']+row['B18101_014E']+row['B18101_030E']+row['B18101_033E'], axis = 1)\n",
+ " nav5_avg = {\"feature\": 'NAV-5: without disability age 18 to 65', \n",
+ " \"average\": nav5[\"temp\"].sum()/nav5[\"B18101_001E\"].sum()}\n",
+ " \n",
+ " navs = [nav1_avg, nav2_avg, nav3_avg, nav4_avg, nav5_avg]\n",
+ " \n",
+ " return navs"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "navs = national_ave_values(year=2019)\n",
+ "national_vulnerability_feature_averages = Dataset.from_csv_data(navs, name=\"national_vulnerability_feature_averages.csv\",\n",
+ " data_type=\"incore:socialVulnerabilityFeatureAverages\")"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "geo_type = \"tract:*\"\n",
+ "social_vunlnerability_dem_factors_df = demographic_factors(state_code, county_code, year=2019, geo_type=geo_type)\n",
+ "social_vunlnerability_dem_factors = Dataset.from_dataframe(social_vunlnerability_dem_factors_df, \n",
+ " name=\"social_vunlnerability_dem_factors\",\n",
+ " data_type=\"incore:socialVulnerabilityDemFactors\")"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "pycharm": {
+ "is_executing": true
+ }
+ },
+ "outputs": [],
+ "source": [
+ "# Configure pyincore access and the analysis with the information above\n",
+ "\n",
+ "client = IncoreClient()\n",
+ "social_vulnerability_score = SocialVulnerabilityScore(client)\n",
+ "\n",
+ "social_vulnerability_score.set_parameter(\"result_name\", \"social_vulnerabilty_score\")\n",
+ "social_vulnerability_score.set_input_dataset(\"national_vulnerability_feature_averages\", national_vulnerability_feature_averages)\n",
+ "social_vulnerability_score.set_input_dataset(\"social_vulnerability_demographic_factors\", social_vunlnerability_dem_factors)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "pycharm": {
+ "is_executing": true
+ }
+ },
+ "outputs": [],
+ "source": [
+ "# Run social vulnerability damage analysis\n",
+ "result = social_vulnerability_score.run_analysis()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "pycharm": {
+ "is_executing": true
+ }
+ },
+ "outputs": [],
+ "source": [
+ "# Retrieve result dataset\n",
+ "result = social_vulnerability_score.get_output_dataset(\"sv_result\")\n",
+ "\n",
+ "# Convert dataset to Pandas DataFrame\n",
+ "df = result.get_dataframe_from_csv()\n",
+ "\n",
+ "# Display top 5 rows of output data\n",
+ "df.head()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": []
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 3 (ipykernel)",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.10.0"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 1
+}
diff --git a/notebooks/traffic_flow_recovery.ipynb b/notebooks/traffic_flow_recovery.ipynb
new file mode 100644
index 00000000..d326df70
--- /dev/null
+++ b/notebooks/traffic_flow_recovery.ipynb
@@ -0,0 +1,137 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Shelby County Traffic Flow Recovery Model"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "import json\n",
+ "import pandas as pd\n",
+ "from pyincore import IncoreClient, InventoryDataset, Dataset\n",
+ "from pyincore.analyses.trafficflowrecovery import TrafficFlowRecovery"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Calculate Traffic Flow Recovery"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "client = IncoreClient()\n",
+ "traffic_flow_recovery = TrafficFlowRecovery(client)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "nodes = \"603d37ec34f29a7fa4211fc4\"\n",
+ "traffic_flow_recovery.load_remote_input_dataset(\"nodes\", nodes)\n",
+ "\n",
+ "links = \"5c5de25ec5c0e488fc035613\"\n",
+ "traffic_flow_recovery.load_remote_input_dataset(\"links\", links)\n",
+ "\n",
+ "bridges = \"5a284f2dc7d30d13bc082040\"\n",
+ "traffic_flow_recovery.load_remote_input_dataset('bridges', bridges)\n",
+ "\n",
+ "bridge_damage = \"5c5ddff0c5c0e488fc0355df\"\n",
+ "traffic_flow_recovery.load_remote_input_dataset('bridge_damage_value', bridge_damage)\n",
+ "\n",
+ "unrepaired = \"5c5de0c5c5c0e488fc0355eb\"\n",
+ "traffic_flow_recovery.load_remote_input_dataset('unrepaired_bridge', unrepaired)\n",
+ "\n",
+ "ADT_data = \"5c5dde00c5c0e488fc032d7f\"\n",
+ "traffic_flow_recovery.load_remote_input_dataset('ADT', ADT_data)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "traffic_flow_recovery.set_parameter(\"num_cpu\", 4)\n",
+ "traffic_flow_recovery.set_parameter(\"pm\", 1)\n",
+ "traffic_flow_recovery.set_parameter('ini_num_population', 5)\n",
+ "traffic_flow_recovery.set_parameter(\"population_size\", 3)\n",
+ "traffic_flow_recovery.set_parameter(\"num_generation\", 2)\n",
+ "traffic_flow_recovery.set_parameter(\"mutation_rate\", 0.1)\n",
+ "traffic_flow_recovery.set_parameter(\"crossover_rate\", 1.0)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "traffic_flow_recovery.run_analysis()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "traffic_flow_recovery.get_output_datasets()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "traffic_flow_recovery.get_output_dataset('optimal_solution_of_bridge_repair_schedule').get_dataframe_from_csv()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "traffic_flow_recovery.get_output_dataset('overall_traffic_flow_recovery_trajectory').get_dataframe_from_csv()"
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 3 (ipykernel)",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.10.0"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 4
+}
diff --git a/notebooks/transportation_recovery.ipynb b/notebooks/transportation_recovery.ipynb
index a020c64b..a1ff4756 100644
--- a/notebooks/transportation_recovery.ipynb
+++ b/notebooks/transportation_recovery.ipynb
@@ -109,13 +109,20 @@
"metadata": {},
"outputs": [],
"source": [
- "transportation_recovery.get_output_dataset('overall_transportation_recovery_trajectory').get_dataframe_from_csv()"
+ "transportation_recovery.get_output_dataset('overall_traffic_flow_recovery_trajectory').get_dataframe_from_csv()"
]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": []
}
],
"metadata": {
"kernelspec": {
- "display_name": "Python 3",
+ "display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
@@ -129,7 +136,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.8.8"
+ "version": "3.10.0"
}
},
"nbformat": 4,
diff --git a/restapi/Dockerfile b/restapi/Dockerfile
index 907adbea..77cb7b8b 100644
--- a/restapi/Dockerfile
+++ b/restapi/Dockerfile
@@ -5,5 +5,6 @@ RUN apt-get -qq update && apt-get -qq install git
RUN mkdir /usr/share/nginx/html/doc && \
mkdir /usr/share/nginx/html/doc/api
COPY index.html /usr/share/nginx/html/doc/api/.
+COPY config /usr/share/nginx/html/doc/api/config
COPY api.conf /etc/nginx/conf.d/default.conf
diff --git a/restapi/config/config.json b/restapi/config/config.json
new file mode 100644
index 00000000..6a59f319
--- /dev/null
+++ b/restapi/config/config.json
@@ -0,0 +1,3 @@
+{
+ "GA_KEY": "Test-Google-Analytics-Key-Replace-Me"
+}
diff --git a/restapi/config/googleAnalytics.js b/restapi/config/googleAnalytics.js
new file mode 100644
index 00000000..87a6dc45
--- /dev/null
+++ b/restapi/config/googleAnalytics.js
@@ -0,0 +1,31 @@
+// analytics.js
+(function() {
+ // Fetch the runtime configuration
+ fetch('config/config.json')
+ .then(response => {
+ if (!response.ok) {
+ throw new Error('Configuration file not found');
+ }
+ return response.json();
+ })
+ .then(config => {
+ if (!config.GA_KEY) {
+ throw new Error('GA_KEY is missing in the configuration');
+ }
+
+ // Create the script tag for Google Tag Manager
+ const scriptTag = document.createElement('script');
+ scriptTag.async = true;
+ scriptTag.src = `https://www.googletagmanager.com/gtag/js?id=${config.GA_KEY}`;
+ document.head.appendChild(scriptTag);
+
+ // Initialize Google Analytics
+ window.dataLayer = window.dataLayer || [];
+
+ function gtag() { dataLayer.push(arguments); }
+
+ gtag('js', new Date());
+ gtag('config', config.GA_KEY);
+ })
+ .catch(error => console.warn('GA setup skipped:', error.message));
+})();
diff --git a/restapi/index.html b/restapi/index.html
index 2528e41c..59c32a08 100644
--- a/restapi/index.html
+++ b/restapi/index.html
@@ -1,6 +1,17 @@
+