Merge branch 'develop' into fix-requirementes

longshuicy authored Feb 12, 2024
2 parents 4f4b1c7 + 3d23c3f commit 48bd96e
Showing 42 changed files with 918 additions and 171 deletions.
30 changes: 30 additions & 0 deletions CHANGELOG.md
@@ -7,8 +7,38 @@ and this project adheres to [Semantic Versioning](http://semver.org/).

## [Unreleased]

### Fixed
- Refactor tornadoepndamage to use hazardDatasets [#495](https://github.com/IN-CORE/pyincore/issues/495)

## [1.16.0] - 2024-02-07

### Added
- Create GUID field in geopackage file [#478](https://github.com/IN-CORE/pyincore/issues/478)

### Changed
- Tornado and Earthquake model [#474](https://github.com/IN-CORE/pyincore/issues/474)
- Disable methods that interact with services when in offline mode [#458](https://github.com/IN-CORE/pyincore/issues/458)

### Fixed
- Fix semantics search pytest by switching to an existing search term


## [1.15.1] - 2023-12-20

### Fixed
- Fix NCI Functionality [#463](https://github.com/IN-CORE/pyincore/issues/463)


## [1.15.0] - 2023-12-13

### Added
- Add hazard models to documentation [#448](https://github.com/IN-CORE/pyincore/issues/448)

### Changed
- Upgrade python version from 3.6 to 3.9 [#447](https://github.com/IN-CORE/pyincore/issues/447)
- Enable offline mode for pyincore [#455](https://github.com/IN-CORE/pyincore/issues/455)
- Update MCS analysis to output only required columns for `failure_probability` [#401](https://github.com/IN-CORE/pyincore/issues/401)
- Update CommercialBuildingRecovery to input damage results as a required dataset [#460](https://github.com/IN-CORE/pyincore/issues/460)


## [1.14.0] - 2023-11-08
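The two offline-mode entries above (#455, #458) change how a client is constructed and which service-backed methods are callable. A minimal sketch of offline use, assuming local files stand in for remote dataset IDs (the file name and data type below are illustrative):

```python
from pyincore import IncoreClient, Dataset
from pyincore.analyses.buildingdamage import BuildingDamage

# Offline mode skips authentication; methods that talk to IN-CORE services
# are disabled, so inputs must come from local files.
client = IncoreClient(offline=True)

bldg_dmg = BuildingDamage(client)
# Illustrative local file and data type.
buildings = Dataset.from_file("buildings.shp", data_type="ergo:buildingInventoryVer7")
bldg_dmg.set_input_dataset("buildings", buildings)
```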
4 changes: 2 additions & 2 deletions docs/source/conf.py
@@ -33,9 +33,9 @@
author = ''

# The short X.Y version
-version = '1.14'
+version = '1.16'
# The full version, including alpha/beta/rc tags
-release = '1.14.0'
+release = '1.16.0'

# -- General configuration ---------------------------------------------------

55 changes: 55 additions & 0 deletions docs/source/modules.rst
@@ -346,6 +346,51 @@ analyses/wfnfunctionality
models
^^^^^^

models/hazard/earthquake
========================
.. autoclass:: models.earthquake.Earthquake
:members:

models/hazard/flood
===================
.. autoclass:: models.flood.Flood
:members:

models/hazard/hazard
====================
.. autoclass:: models.hazard.Hazard
:members:

models/hazard/hazarddataset
===========================
.. autoclass:: models.hazarddataset.HazardDataset
:members:
.. autoclass:: models.hazarddataset.HurricaneDataset
:members:
.. autoclass:: models.hazarddataset.EarthquakeDataset
:members:
.. autoclass:: models.hazarddataset.TsunamiDataset
:members:
.. autoclass:: models.hazarddataset.TornadoDataset
:members:
.. autoclass:: models.hazarddataset.FloodDataset
:members:

models/hazard/hurricane
=======================
.. autoclass:: models.hurricane.Hurricane
:members:

models/hazard/tornado
=====================
.. autoclass:: models.tornado.Tornado
:members:

models/hazard/tsunami
=====================
.. autoclass:: models.tsunami.Tsunami
:members:

models/dfr3curve.py
===================
.. autoclass:: models.dfr3curve.DFR3Curve
@@ -366,6 +411,11 @@ models/mappingset
.. autoclass:: models.mappingset.MappingSet
:members:

models/networkdataset
=====================
.. autoclass:: models.networkdataset.NetworkDataset
:members:

models/repaircurveset
=====================
.. autoclass:: models.repaircurveset.RepairCurveSet
@@ -376,6 +426,11 @@ models/restorationcurveset
.. autoclass:: models.restorationcurveset.RestorationCurveSet
:members:

models/units
==========================
.. autoclass:: models.units.Units
:members:

utilities
^^^^^^^^^

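The hazard model pages above document the classes introduced in #474. A sketch of how such a model is typically built, assuming the `from_hazard_service`, `from_json_file`, and per-dataset `from_file` methods described in the pyincore docs (the import path, IDs, and file names are illustrative):

```python
from pyincore import IncoreClient, HazardService
from pyincore.models.tornado import Tornado  # import path assumed

client = IncoreClient()
hazardsvc = HazardService(client)

# Remote: hydrate the model and its hazardDatasets from the hazard service.
tornado = Tornado.from_hazard_service("5d07cbeab9219c065b080930", hazardsvc)

# Local: build the same model from a JSON definition, then attach the
# underlying shapefile to the first hazard dataset.
tornado = Tornado.from_json_file("tornado_dataset.json")
tornado.hazardDatasets[0].from_file("tornado_path.shp",
                                    data_type="incore:tornadoWindfield")
```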
1 change: 1 addition & 0 deletions environment.yml
@@ -21,3 +21,4 @@ dependencies:
- rtree>=1.1.0
- scipy>=1.11.3
- shapely>=2.0.2
+- openssl<=3.2.0
pyincore/analyses/commercialbuildingrecovery/commercialbuildingrecovery.py
@@ -67,19 +67,20 @@ def run(self):
sample_damage_states = self.get_input_dataset("sample_damage_states").get_dataframe_from_csv(low_memory=False)
mcs_failure = self.get_input_dataset("mcs_failure").get_dataframe_from_csv(low_memory=False)
redi_delay_factors = self.get_input_dataset("delay_factors").get_dataframe_from_csv(low_memory=False)
+building_dmg = self.get_input_dataset("building_dmg").get_dataframe_from_csv(low_memory=False)

# Returns dataframe
total_delay, recovery, time_stepping_recovery = self.commercial_recovery(buildings, sample_damage_states,
mcs_failure, redi_delay_factors,
-num_samples)
+building_dmg, num_samples)
self.set_result_csv_data("total_delay", total_delay, result_name + "_delay", "dataframe")
self.set_result_csv_data("recovery", recovery, result_name + "_recovery", "dataframe")
self.set_result_csv_data("time_stepping_recovery", time_stepping_recovery,
result_name + "_time_stepping_recovery", "dataframe")

return True

-def commercial_recovery(self, buildings, sample_damage_states, mcs_failure, redi_delay_factors, num_samples):
+def commercial_recovery(self, buildings, sample_damage_states, mcs_failure, redi_delay_factors, building_dmg, num_samples):
"""
Calculates commercial building recovery for buildings
@@ -88,6 +89,7 @@ def commercial_recovery(self, buildings, sample_damage_states, mcs_failure, redi
sample_damage_states (pd.DataFrame): Sample damage states
redi_delay_factors (pd.DataFrame): Delay factors based on REDi framework
mcs_failure (pd.DataFrame): Building inventory failure probabilities
+building_dmg (pd.DataFrame): Building damage states
num_samples (int): number of sample scenarios to use
Returns:
@@ -97,7 +99,7 @@ def commercial_recovery(self, buildings, sample_damage_states, mcs_failure, redi

start_total_delay = time.process_time()
total_delay = CommercialBuildingRecovery.total_delay(buildings, sample_damage_states, mcs_failure,
-redi_delay_factors, num_samples)
+redi_delay_factors, building_dmg, num_samples)
end_total_delay = time.process_time()
print("Finished executing total_delay() in " + str(end_total_delay - start_total_delay) + " secs")

@@ -116,7 +118,7 @@ def commercial_recovery(self, buildings, sample_damage_states, mcs_failure, redi
return total_delay, recovery, time_stepping_recovery

@staticmethod
-def total_delay(buildings, sample_damage_states, mcs_failure, redi_delay_factors, num_samples):
+def total_delay(buildings, sample_damage_states, mcs_failure, redi_delay_factors, damage, num_samples):
""" Calculates total delay by combining financial delay and other factors from REDi framework
Args:
@@ -125,14 +127,15 @@ def total_delay(buildings, sample_damage_states, mcs_failure, redi_delay_factors
mcs_failure (pd.DataFrame): Building inventory failure probabilities
redi_delay_factors (pd.DataFrame): Delay impeding factors such as post-disaster inspection, insurance claim,
financing, and government permit based on building's damage state.
+damage (pd.DataFrame): Damage states for building structural damage
num_samples (int): number of sample scenarios to use
Returns:
pd.DataFrame: Total delay time of all impeding factors from REDi framework.
"""

# Obtain the commercial buildings in damage
-damage = mcs_failure[mcs_failure['haz_expose'] == 'yes']
+damage = mcs_failure[damage['haz_expose'] == 'yes']
commercial = []
commercial_archetypes = [6, 7, 8, 15, 16, 18, 19]
for i, b in enumerate(buildings):
@@ -398,6 +401,12 @@ def get_spec(self):
'description': 'Delay impeding factors such as post-disaster inspection, insurance claim, '
'and government permit based on building\'s damage state. Provided by REDi framework',
'type': ['incore:buildingRecoveryFactors']
+},
+{
+'id': 'building_dmg',
+'required': True,
+'description': 'damage result that has damage intervals',
+'type': ['ergo:buildingDamageVer6']
}
],
'output_datasets': [
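With this diff, `building_dmg` becomes a required input of CommercialBuildingRecovery. A hedged sketch of a full invocation under the usual BaseAnalysis workflow (all dataset IDs are placeholders):

```python
from pyincore import IncoreClient
from pyincore.analyses.commercialbuildingrecovery import CommercialBuildingRecovery

client = IncoreClient()
recovery = CommercialBuildingRecovery(client)

# Placeholder dataset IDs; 'building_dmg' is the newly required input.
recovery.load_remote_input_dataset("buildings", "<building inventory id>")
recovery.load_remote_input_dataset("sample_damage_states", "<sample damage states id>")
recovery.load_remote_input_dataset("mcs_failure", "<mcs failure probability id>")
recovery.load_remote_input_dataset("delay_factors", "<redi delay factors id>")
recovery.load_remote_input_dataset("building_dmg", "<building damage id>")

recovery.set_parameter("result_name", "commercial_recovery")
recovery.set_parameter("num_samples", 10)
recovery.run_analysis()
```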
pyincore/analyses/montecarlofailureprobability/montecarlofailureprobability.py
@@ -265,7 +265,7 @@ def monte_carlo_failure_probability(self, dmg, damage_interval_keys,

# failure probability
fp_result = collections.OrderedDict()
-fp_result.update(dmg)
+fp_result['guid'] = dmg['guid']

ds_sample = self.sample_damage_interval(dmg, damage_interval_keys,
num_samples, seed)
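The one-line change above implements changelog item #401: the failure-probability output no longer copies every column of the damage row, only the `guid`. A small illustration of the difference (row contents are made up):

```python
import collections

dmg = {"guid": "8d22fef9", "LS_0": 0.85, "LS_1": 0.40, "haz_expose": "yes"}

fp_result = collections.OrderedDict()
fp_result["guid"] = dmg["guid"]  # new: carry over only the guid
# fp_result.update(dmg)          # old: copied every damage column

print(list(fp_result.keys()))    # ['guid']
```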
22 changes: 16 additions & 6 deletions pyincore/analyses/ncifunctionality/ncifunctionality.py
@@ -5,7 +5,6 @@
# and is available at https://www.mozilla.org/en-US/MPL/2.0/
from pyincore import BaseAnalysis, NetworkDataset
from pyincore.utils.networkutil import NetworkUtil
-from numpy.linalg import inv
from typing import List
from scipy import stats
import networkx as nx
@@ -75,12 +74,15 @@ def run(self):
wds_dmg_results = self.get_input_dataset('wds_dmg_results').get_dataframe_from_csv()
wds_inventory_rest_map = self.get_input_dataset('wds_inventory_rest_map').get_dataframe_from_csv()

+# Load limit state probabilities and damage states for each electric power facility
+epf_damage = self.get_input_dataset('epf_damage').get_dataframe_from_csv()

epf_cascading_functionality = self.nci_functionality(discretized_days, epf_network_nodes, epf_network_links,
wds_network_nodes, wds_network_links,
epf_wds_intdp_table, wds_epf_intdp_table,
epf_subst_failure_results, epf_inventory_rest_map,
epf_time_results, wds_dmg_results, wds_inventory_rest_map,
-wds_time_results)
+wds_time_results, epf_damage)

result_name = self.get_parameter("result_name")
self.set_result_csv_data("epf_cascading_functionality", epf_cascading_functionality, name=result_name,
@@ -91,7 +93,7 @@ def run(self):
def nci_functionality(self, discretized_days, epf_network_nodes, epf_network_links, wds_network_nodes,
wds_network_links, epf_wds_intdp_table, wds_epf_intdp_table, epf_subst_failure_results,
epf_inventory_rest_map, epf_time_results, wds_dmg_results, wds_inventory_rest_map,
-wds_time_results):
+wds_time_results, epf_damage):
"""Compute EPF and WDS cascading functionality outcomes
Args:
@@ -108,14 +110,15 @@ def nci_functionality(self, discretized_days, epf_network_nodes, epf_network_lin
wds_dmg_results (pd.DataFrame): damage results for WDS network
wds_inventory_rest_map (pd.DataFrame): inventory restoration map for WDS network
wds_time_results (pd.DataFrame): time results for WDS network
+epf_damage (pd.DataFrame): limit state probabilities and damage states for each guid
Returns:
(pd.DataFrame, pd.DataFrame): results for EPF and WDS networks
"""

# Compute updated EPF and WDS node information
efp_nodes_updated = self.update_epf_discretized_func(epf_network_nodes, epf_subst_failure_results,
-epf_inventory_rest_map, epf_time_results)
+epf_inventory_rest_map, epf_time_results, epf_damage)

wds_nodes_updated = self.update_wds_discretized_func(wds_network_nodes, wds_dmg_results,
wds_inventory_rest_map, wds_time_results)
@@ -125,7 +128,6 @@ def nci_functionality(self, discretized_days, epf_network_nodes, epf_network_lin

# Generate the functionality data
df_functionality_nodes = pd.concat([efp_nodes_updated, wds_nodes_updated], ignore_index=True)
-df_functionality_nodes = pd.concat([efp_nodes_updated, wds_nodes_updated], ignore_index=True)

# Create each individual graph
g_epf = NetworkUtil.create_network_graph_from_dataframes(epf_network_nodes, epf_network_links)
@@ -156,13 +158,15 @@ def nci_functionality(self, discretized_days, epf_network_nodes, epf_network_lin

@staticmethod
def update_epf_discretized_func(epf_nodes, epf_subst_failure_results, epf_inventory_restoration_map,
-epf_time_results):
+epf_time_results, epf_damage):
epf_time_results = epf_time_results.loc[
(epf_time_results['time'] == 1) | (epf_time_results['time'] == 3) | (epf_time_results['time'] == 7) | (
epf_time_results['time'] == 30) | (epf_time_results['time'] == 90)]
epf_time_results.insert(2, 'PF_00', list(
np.ones(len(epf_time_results)))) # PF_00, PF_0, PF_1, PF_2, PF_3 ---> DS_0, DS_1, DS_2, DS_3, DS_4

+epf_subst_failure_results = pd.merge(epf_damage, epf_subst_failure_results, on='guid', how='outer')

epf_nodes_updated = pd.merge(epf_nodes[['nodenwid', 'utilfcltyc', 'guid']], epf_subst_failure_results[
['guid', 'DS_0', 'DS_1', 'DS_2', 'DS_3', 'DS_4', 'failure_probability']], on='guid', how='outer')

@@ -387,6 +391,12 @@ def get_spec(self):
'required': True,
'description': 'A csv file recording repair time for WDS per class and limit state',
'type': ['incore:waterFacilityRestorationTime']
+},
+{
+'id': 'epf_damage',
+'required': True,
+'description': 'A csv file with limit state probabilities and damage states for each electric power facility',
+'type': ['incore:epfDamageVer3']
}
],
'output_datasets': [
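The new `epf_damage` input is joined to the substation failure results before the EPF nodes are updated. A minimal pandas illustration of that outer merge (frames and values are made up; the real inputs carry full DS_0..DS_4 intervals keyed by facility guid):

```python
import pandas as pd

epf_damage = pd.DataFrame(
    {"guid": ["a1", "b2"], "DS_0": [0.10, 0.05], "DS_1": [0.20, 0.15]})
epf_subst_failure_results = pd.DataFrame(
    {"guid": ["a1", "c3"], "failure_probability": [0.30, 0.45]})

# how='outer' keeps facilities present in either frame; columns missing on
# one side become NaN instead of the row being dropped.
merged = pd.merge(epf_damage, epf_subst_failure_results, on="guid", how="outer")
print(merged)
```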
2 changes: 1 addition & 1 deletion pyincore/analyses/tornadoepndamage/tornadoepndamage.py
@@ -97,7 +97,7 @@ def run(self):
tornado_id = tornado.id

tornado_metadata = self.hazardsvc.get_tornado_hazard_metadata(tornado_id)
-self.load_remote_input_dataset("tornado", tornado_metadata["datasetId"])
+self.load_remote_input_dataset("tornado", tornado_metadata["hazardDatasets"][0].get("datasetId"))
tornado_dataset = self.get_input_dataset("tornado").get_inventory_reader()
ds_results, damage_results = self.get_damage(network_dataset, tornado_dataset, tornado_id)

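This is the #495 fix from the changelog: tornado hazard metadata now nests its dataset reference under `hazardDatasets` instead of exposing a top-level `datasetId`. A sketch of the shape the new code expects (values are illustrative):

```python
# Illustrative metadata as returned by get_tornado_hazard_metadata().
tornado_metadata = {
    "id": "5d07cbeab9219c065b080930",
    "hazardDatasets": [
        {"datasetId": "5d07cbeab9219c065b080931", "demandType": "wind"},
    ],
}

# Old code read tornado_metadata["datasetId"], which no longer exists at the
# top level; the ID now lives on the first hazard dataset entry.
dataset_id = tornado_metadata["hazardDatasets"][0].get("datasetId")
```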