From 1f4b382853c8b5fe7d86204dca2ad39d88592523 Mon Sep 17 00:00:00 2001 From: Rashmil Panchani <32737711+Rashmil-1999@users.noreply.github.com> Date: Fri, 1 Dec 2023 22:19:18 +0530 Subject: [PATCH 01/14] 401 mcs remove redundant damage columns in the failure probability output (#457) * update MCS to output only required columns for failure_probability * add changelog entry --- CHANGELOG.md | 1 + .../montecarlofailureprobability.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 7a4d1122c..fd43b8338 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -9,6 +9,7 @@ and this project adheres to [Semantic Versioning](http://semver.org/). ### Changed - Upgrade python version from 3.6 to 3.9 [#447](https://github.com/IN-CORE/pyincore/issues/447) +- Update MCS analysis to output only required columns for `failure_probability` [#401](https://github.com/IN-CORE/pyincore/issues/401) ## [1.14.0] - 2023-11-08 diff --git a/pyincore/analyses/montecarlofailureprobability/montecarlofailureprobability.py b/pyincore/analyses/montecarlofailureprobability/montecarlofailureprobability.py index 53ffa4626..2ae4235a5 100644 --- a/pyincore/analyses/montecarlofailureprobability/montecarlofailureprobability.py +++ b/pyincore/analyses/montecarlofailureprobability/montecarlofailureprobability.py @@ -265,7 +265,7 @@ def monte_carlo_failure_probability(self, dmg, damage_interval_keys, # failure probability fp_result = collections.OrderedDict() - fp_result.update(dmg) + fp_result['guid'] = dmg['guid'] ds_sample = self.sample_damage_interval(dmg, damage_interval_keys, num_samples, seed) From e16f96359449a4274ba1b44fead71699babcb501 Mon Sep 17 00:00:00 2001 From: Chen Wang Date: Fri, 1 Dec 2023 10:53:23 -0600 Subject: [PATCH 02/14] 448 new hazard objects are not defined in modulesrst (#449) * add missing models * changelog --- CHANGELOG.md | 3 +++ docs/source/modules.rst | 55 +++++++++++++++++++++++++++++++++++++++++ 2 files changed, 58 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index fd43b8338..8cdcb0fc3 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,9 @@ and this project adheres to [Semantic Versioning](http://semver.org/). ## [Unreleased] +### Added +- Add hazard models to documentation [#448](https://github.com/IN-CORE/pyincore/issues/448) + ### Changed - Upgrade python version from 3.6 to 3.9 [#447](https://github.com/IN-CORE/pyincore/issues/447) - Update MCS analysis to output only required columns for `failure_probability` [#401](https://github.com/IN-CORE/pyincore/issues/401) diff --git a/docs/source/modules.rst b/docs/source/modules.rst index 87376b066..e30bed62f 100644 --- a/docs/source/modules.rst +++ b/docs/source/modules.rst @@ -346,6 +346,51 @@ analyses/wfnfunctionality models ^^^^^^ +models/hazard/earthquake +=================== +.. autoclass:: models.earthquake.Earthquake + :members: + +models/hazard/flood +=================== +.. autoclass:: models.flood.Flood + :members: + +models/hazard/hazard +=================== +.. autoclass:: models.hazard.Hazard + :members: + +models/hazard/hazarddataset +=========================== +.. autoclass:: models.hazarddataset.HazardDataset + :members: +.. autoclass:: models.hazarddataset.HurricaneDataset + :members: +.. autoclass:: models.hazarddataset.EarthquakeDataset + :members: +.. autoclass:: models.hazarddataset.TsunamiDataset + :members: +.. autoclass:: models.hazarddataset.TornadoDataset + :members: +.. 
autoclass:: models.hazarddataset.FloodDataset + :members: + +models/hazard/hurricane +======================= +.. autoclass:: models.hurricane.Hurricane + :members: + +models/hazard/tornado +===================== +.. autoclass:: models.tornado.Tornado + :members: + +models/hazard/tsunami +===================== +.. autoclass:: models.tsunami.Tsunami + :members: + models/dfr3curve.py =================== .. autoclass:: models.dfr3curve.DFR3Curve @@ -366,6 +411,11 @@ models/mappingset .. autoclass:: models.mappingset.MappingSet :members: +models/networkdataset +================= +.. autoclass:: models.networkdataset.NetworkDataset + :members: + models/repaircurveset ===================== .. autoclass:: models.repaircurveset.RepairCurveSet @@ -376,6 +426,11 @@ models/restorationcurveset .. autoclass:: models.restorationcurveset.RestorationCurveSet :members: +models/units +========================== +.. autoclass:: models.units.Units + :members: + utilities ^^^^^^^^^ From f058e5e9d6cfd5f01bfe2ec8e6c2fb60c1154c7d Mon Sep 17 00:00:00 2001 From: Chen Wang Date: Thu, 7 Dec 2023 15:46:26 -0600 Subject: [PATCH 03/14] 455 update get allowed demands method to return fixed information if offline (#456) * enable offline damage analysis * add correct setting for online/offline * changelog entry * typo * remove not used classes * update docstrings * put allowed demand type in a new class hazardconstant * have to put it outside hazard model to avoid circular dependency --- CHANGELOG.md | 1 + pyincore/client.py | 79 ++-- pyincore/hazardservice.py | 343 ++++++++++++++++-- pyincore/models/hazard/earthquake.py | 2 +- .../joplin_commercial_bldg_v6_sample.cpg | 1 + .../joplin_commercial_bldg_v6_sample.dbf | Bin 0 -> 19705 bytes .../joplin_commercial_bldg_v6_sample.prj | 1 + .../joplin_commercial_bldg_v6_sample.shp | Bin 0 -> 520 bytes .../joplin_commercial_bldg_v6_sample.shx | Bin 0 -> 220 bytes .../fragility_archetype_6.json | 79 ++++ .../fragility_archetype_7.json | 81 +++++ .../test_buildingdamage_offline.py | 74 ++++ 12 files changed, 605 insertions(+), 56 deletions(-) create mode 100644 tests/data/building/joplin_commercial_bldg_v6_sample.cpg create mode 100644 tests/data/building/joplin_commercial_bldg_v6_sample.dbf create mode 100644 tests/data/building/joplin_commercial_bldg_v6_sample.prj create mode 100644 tests/data/building/joplin_commercial_bldg_v6_sample.shp create mode 100644 tests/data/building/joplin_commercial_bldg_v6_sample.shx create mode 100644 tests/data/fragility_curves/fragility_archetype_6.json create mode 100644 tests/data/fragility_curves/fragility_archetype_7.json create mode 100644 tests/pyincore/analyses/buildingdamage/test_buildingdamage_offline.py diff --git a/CHANGELOG.md b/CHANGELOG.md index 8cdcb0fc3..4d65eae95 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -12,6 +12,7 @@ and this project adheres to [Semantic Versioning](http://semver.org/). ### Changed - Upgrade python version from 3.6 to 3.9 [#447](https://github.com/IN-CORE/pyincore/issues/447) +- Enable offline mode for pyincore [#455](https://github.com/IN-CORE/pyincore/issues/455) - Update MCS analysis to output only required columns for `failure_probability` [#401](https://github.com/IN-CORE/pyincore/issues/401) diff --git a/pyincore/client.py b/pyincore/client.py index 05ebeb9e7..980c80f38 100644 --- a/pyincore/client.py +++ b/pyincore/client.py @@ -163,51 +163,62 @@ def delete(self, url: str, timeout=(30, 600), **kwargs): class IncoreClient(Client): - """IN-CORE service client class. 
It contains token and service root url. + """IN-CORE service client class. It contains token and service root url.""" - Args: - service_url (str): Service url. - token_file_name (str): Path to file containing the authorization token. - - """ + def __init__(self, service_url: str = None, token_file_name: str = None, offline: bool = False): + """ - def __init__(self, service_url: str = None, token_file_name: str = None): + Args: + service_url (str): Service url. + token_file_name (str): Path to file containing the authorization token. + offline (bool): Flag to indicate offline mode or not. + """ super().__init__() - if service_url is None or len(service_url.strip()) == 0: - service_url = pyglobals.INCORE_API_PROD_URL - self.service_url = service_url - self.token_url = urllib.parse.urljoin(self.service_url, pyglobals.KEYCLOAK_AUTH_PATH) + self.offline = offline + + if not offline: + if service_url is None or len(service_url.strip()) == 0: + service_url = pyglobals.INCORE_API_PROD_URL + self.service_url = service_url + self.token_url = urllib.parse.urljoin(self.service_url, pyglobals.KEYCLOAK_AUTH_PATH) - # hashlib requires bytes array for hash operations - byte_url_string = str.encode(self.service_url) - self.hashed_service_url = hashlib.sha256(byte_url_string).hexdigest() + # hashlib requires bytes array for hash operations + byte_url_string = str.encode(self.service_url) + self.hashed_service_url = hashlib.sha256(byte_url_string).hexdigest() - self.create_service_json_entry() + self.create_service_json_entry() - # construct local directory and filename - cache_data = pyglobals.PYINCORE_USER_DATA_CACHE - if not os.path.exists(cache_data): - os.makedirs(cache_data) + # construct local directory and filename + cache_data = pyglobals.PYINCORE_USER_DATA_CACHE + if not os.path.exists(cache_data): + os.makedirs(cache_data) - self.hashed_svc_data_dir = os.path.join(cache_data, self.hashed_service_url) + self.hashed_svc_data_dir = os.path.join(cache_data, self.hashed_service_url) - if not os.path.exists(self.hashed_svc_data_dir): - os.makedirs(self.hashed_svc_data_dir) + if not os.path.exists(self.hashed_svc_data_dir): + os.makedirs(self.hashed_svc_data_dir) - # store the token file in the respective repository's directory - if token_file_name is None or len(token_file_name.strip()) == 0: - token_file_name = "." + self.hashed_service_url + "_token" - self.token_file = os.path.join(pyglobals.PYINCORE_USER_CACHE, token_file_name) + # store the token file in the respective repository's directory + if token_file_name is None or len(token_file_name.strip()) == 0: + token_file_name = "." 
+ self.hashed_service_url + "_token" + self.token_file = os.path.join(pyglobals.PYINCORE_USER_CACHE, token_file_name) - authorization = self.retrieve_token_from_file() - if authorization is not None: - self.session.headers["Authorization"] = authorization - print("Connection successful to IN-CORE services.", "pyIncore version detected:", pyglobals.PACKAGE_VERSION) + authorization = self.retrieve_token_from_file() + if authorization is not None: + self.session.headers["Authorization"] = authorization + print("Connection successful to IN-CORE services.", "pyIncore version detected:", pyglobals.PACKAGE_VERSION) + else: + if self.login(): + print("Connection successful to IN-CORE services.", "pyIncore version detected:", + pyglobals.PACKAGE_VERSION) else: - if self.login(): - print("Connection successful to IN-CORE services.", "pyIncore version detected:", - pyglobals.PACKAGE_VERSION) + self.service_url = "" + self.token_url = "" + self.hashed_service_url = "" + self.hashed_svc_data_dir = "" + self.token_file = "" + print("You are working with the offline version of IN-CORE.", "pyIncore version detected:", pyglobals.PACKAGE_VERSION) def login(self): for attempt in range(pyglobals.MAX_LOGIN_ATTEMPTS): @@ -415,4 +426,4 @@ def __init__(self, service_url: str = None, username: str = None): self.session.headers["x-auth-userinfo"] = pyglobals.INCORE_LDAP_TEST_USER_INFO else: user_info = "{\"preferred_username\": \"" + username + "\"}" - self.session.headers["x-auth-userinfo"] = user_info \ No newline at end of file + self.session.headers["x-auth-userinfo"] = user_info diff --git a/pyincore/hazardservice.py b/pyincore/hazardservice.py index f5f6190c3..3bf0d88cf 100644 --- a/pyincore/hazardservice.py +++ b/pyincore/hazardservice.py @@ -6,19 +6,16 @@ import json -import os from typing import List from urllib.parse import urljoin import numpy -import requests import pyincore.globals as pyglobals from pyincore.utils import return_http_response +from pyincore import IncoreClient logger = pyglobals.LOGGER -from pyincore import IncoreClient - class HazardService: """Hazard service client @@ -1042,21 +1039,325 @@ def search_hurricanewf(self, text: str, skip: int = None, limit: int = None, tim # TODO replace this with API endpoint in the future def get_allowed_demands(self, hazard_type, timeout=(30, 600), **kwargs): - if hazard_type == 'earthquake': - url = urljoin(self.base_earthquake_url, "demands") - elif hazard_type == 'tornado': - url = urljoin(self.base_tornado_url, "demands") - elif hazard_type == 'tsunami': - url = urljoin(self.base_tsunami_url, "demands") - elif hazard_type == 'hurricane': - url = urljoin(self.base_hurricane_url, "demands") - elif hazard_type == 'hurricaneWindfield': - url = urljoin(self.base_hurricanewf_url, "demands") - elif hazard_type == 'flood': - url = urljoin(self.base_flood_url, "demands") + if self.client.offline: + if hazard_type in HazardConstant.DEFAULT_ALLOWED_DEMANDS.keys(): + return HazardConstant.DEFAULT_ALLOWED_DEMANDS.get(hazard_type) + else: + raise ValueError("Unknown hazard type!") else: - raise ValueError("Unknown hazard type!") - - r = self.client.get(url, timeout=timeout, **kwargs) - return return_http_response(r).json() - + if hazard_type == 'earthquake': + url = urljoin(self.base_earthquake_url, "demands") + elif hazard_type == 'tornado': + url = urljoin(self.base_tornado_url, "demands") + elif hazard_type == 'tsunami': + url = urljoin(self.base_tsunami_url, "demands") + elif hazard_type == 'hurricane': + url = urljoin(self.base_hurricane_url, "demands") + 
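# Usage sketch (illustrative, not part of the diff): when the client was built with
# IncoreClient(offline=True), the offline branch at the top of this method serves the
# demand definitions from HazardConstant.DEFAULT_ALLOWED_DEMANDS without any HTTP call:
#
#     hazardsvc = HazardService(IncoreClient(offline=True))
#     demands = hazardsvc.get_allowed_demands("tornado")
#     # -> [{"demand_type": "wind", "demand_unit": ["mps", "mph"], ...}]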
elif hazard_type == 'hurricaneWindfield': + url = urljoin(self.base_hurricanewf_url, "demands") + elif hazard_type == 'flood': + url = urljoin(self.base_flood_url, "demands") + else: + raise ValueError("Unknown hazard type!") + + r = self.client.get(url, timeout=timeout, **kwargs) + return return_http_response(r).json() + + +class HazardConstant: + + """HazardConstant class to hold all the constants related to hazard.""" + DEFAULT_ALLOWED_DEMANDS = { + "earthquake": [ + { + "demand_type": "pga", + "demand_unit": [ + "g", + "in/sec^2", + "m/sec^2" + ], + "description": "Peak ground acceleration" + }, + { + "demand_type": "pgv", + "demand_unit": [ + "in/s", + "cm/s" + ], + "description": "Peak ground velocity" + }, + { + "demand_type": "pgd", + "demand_unit": [ + "in", + "ft", + "m" + ], + "description": "Peak ground displacement" + }, + { + "demand_type": "sa", + "demand_unit": [ + "g", + "in/sec^2", + "m/sec^2" + ], + "description": "Spectral acceleration" + }, + { + "demand_type": "sd", + "demand_unit": [ + "in", + "ft", + "m", + "cm" + ], + "description": "Spectral displacement" + }, + { + "demand_type": "sv", + "demand_unit": [ + "cm/s", + "in/s" + ], + "description": "Spectral Velocity" + } + ], + "tsunami": [ + { + "demand_type": "Hmax", + "demand_unit": [ + "ft", + "m" + ], + "description": "Onshore: maximum tsunami height above local ground level overland. Offshore: " + "maximum tsunami height taken crest to trough" + }, + { + "demand_type": "Vmax", + "demand_unit": [ + "mph", + "kph", + "ft/sec", + "m/sec" + ], + "description": "Maximum near-coast or overland water velocity due to tsunami" + }, + { + "demand_type": "Mmax", + "demand_unit": [ + "m^3/s^2", + "ft^3/s^2" + ], + "description": "" + } + ], + "flood": [ + { + "demand_type": "inundationDepth", + "demand_unit": [ + "ft", + "m" + ], + "description": "Depth of the water surface relative to local ground level" + }, + { + "demand_type": "waterSurfaceElevation", + "demand_unit": [ + "ft", + "m" + ], + "description": "Elevation of the water surface above reference datum (e.g. NAVD88, mean sea level)" + } + ], + "tornado": [ + { + "demand_type": "wind", + "demand_unit": [ + "mps", + "mph" + ], + "description": "Defined as a wind velocity below" + } + ], + "hurricaneWindfield": [ + { + "demand_type": "3s", + "demand_unit": [ + "kph", + "mph", + "kt" + ], + "description": "Typically, reported at 10 m above local ground or sea level" + }, + { + "demand_type": "60s", + "demand_unit": [ + "kph", + "mph", + "kt" + ], + "description": "Typically, reported at 10 m above local ground or sea level" + } + ], + "hurricane": [ + { + "demand_type": "waveHeight", + "demand_unit": [ + "ft", + "m", + "in", + "cm" + ], + "description": " Height of wave measured crest to trough. Characteristic wave height is typically the " + "average of one third highest waves for a random sea." + }, + { + "demand_type": "surgeLevel", + "demand_unit": [ + "ft", + "m", + "in", + "cm" + ], + "description": "Elevation of the water surface above reference datum (e.g. 
NAVD88, mean sea level)" + }, + { + "demand_type": "inundationDuration", + "demand_unit": [ + "hr", + "min", + "s" + ], + "description": "Time that inundation depth exceeds a critical threshold for a given storm" + }, + { + "demand_type": "inundationDepth", + "demand_unit": [ + "ft", + "m", + "in", + "cm" + ], + "description": "Depth of the water surface relative to local ground level" + }, + { + "demand_type": "wavePeriod", + "demand_unit": [ + "s", + "hr", + "min" + ], + "description": "Time between wave crests. Characteristic wave period is typically the inverse of the " + "spectral peak frequency for a random sea" + }, + { + "demand_type": "waveDirection", + "demand_unit": [ + "deg", + "rad" + ], + "description": "Principle wave direction associated with the characteristic wave height and period" + }, + { + "demand_type": "waterVelocity", + "demand_unit": [ + "ft/s", + "m/s", + "in/s" + ], + "description": "" + }, + { + "demand_type": "windVelocity", + "demand_unit": [ + "ft/s", + "m/s", + "m/sec", + "in/s" + ], + "description": "" + } + ], + "earthquake+tsunami": [ + { + "demand_type": "pga", + "demand_unit": [ + "g", + "in/sec^2", + "m/sec^2" + ], + "description": "Peak ground acceleration" + }, + { + "demand_type": "pgv", + "demand_unit": [ + "in/s", + "cm/s" + ], + "description": "Peak ground velocity" + }, + { + "demand_type": "pgd", + "demand_unit": [ + "in", + "ft", + "m" + ], + "description": "Peak ground displacement" + }, + { + "demand_type": "sa", + "demand_unit": [ + "g", + "in/sec^2", + "m/sec^2" + ], + "description": "Spectral acceleration" + }, + { + "demand_type": "sd", + "demand_unit": [ + "in", + "ft", + "m", + "cm" + ], + "description": "Spectral displacement" + }, + { + "demand_type": "sv", + "demand_unit": [ + "cm/s", + "in/s" + ], + "description": "Spectral Velocity" + }, + { + "demand_type": "Hmax", + "demand_unit": [ + "ft", + "m" + ], + "description": "Onshore: maximum tsunami height above local ground level overland. 
Offshore: maximum tsunami height taken crest to trough" + }, + { + "demand_type": "Vmax", + "demand_unit": [ + "mph", + "kph", + "ft/sec", + "m/sec" + ], + "description": "Maximum near-coast or overland water velocity due to tsunami" + }, + { + "demand_type": "Mmax", + "demand_unit": [ + "m^3/s^2", + "ft^3/s^2" + ], + "description": "" + } + ] + } diff --git a/pyincore/models/hazard/earthquake.py b/pyincore/models/hazard/earthquake.py index 66ef1b7ec..1e9affd61 100644 --- a/pyincore/models/hazard/earthquake.py +++ b/pyincore/models/hazard/earthquake.py @@ -3,7 +3,7 @@ # This program and the accompanying materials are made available under the # terms of the Mozilla Public License v2.0 which accompanies this distribution, # and is available at https://www.mozilla.org/en-US/MPL/2.0/ -from pyincore import HazardService, Dataset +from pyincore import HazardService from pyincore.models.hazard.hazard import Hazard from pyincore.models.hazard.hazarddataset import EarthquakeDataset diff --git a/tests/data/building/joplin_commercial_bldg_v6_sample.cpg b/tests/data/building/joplin_commercial_bldg_v6_sample.cpg new file mode 100644 index 000000000..cd89cb975 --- /dev/null +++ b/tests/data/building/joplin_commercial_bldg_v6_sample.cpg @@ -0,0 +1 @@ +ISO-8859-1 \ No newline at end of file diff --git a/tests/data/building/joplin_commercial_bldg_v6_sample.dbf b/tests/data/building/joplin_commercial_bldg_v6_sample.dbf new file mode 100644 index 0000000000000000000000000000000000000000..d81682dd9b39f3a71e9fcbb672e8865bd4448e50 GIT binary patch literal 19705 zcmeHPON;Er6&{FpCIqqySp+Xaa7^k_@5cobOfcDnA$Q}fhLThocW6&fdU|Xw`KA6d zCSOVYkh)5fQFn{m0}@P2I;T|6Q(v7r=hXb_zn}l%H#avo-+cD-XSgNTcL&Np;^XgP z;?DK2!*iDJ*X8$N^YJm<+%){(9?~~YkAA(VTH4>@?Y4d1|2}Qj&*kP0Ikev&m;W*N z^9*?E5P|M*WoG_~8c@3?+X|GBGIt4(ZAwtwyq39|jO zD$Ce-WPg+GpUAJm#xIun{D1KOUajwtYM}gLz4Fxy{B?L--jz?W0onRve75X7{{abn zn_K?x7gUMtD@wP z*NR9c6_JWcVr%A!tdx?{GF=M!0pcmR-AZ^;=4is$Iu51(5SF3bgzAer_|5Y=JdLhN z|AX7r;$idGa{cu6x3?eu{$2{M|5Pu+vif?zs=r^Z9`3(n^e^z~Jh@ z0BO6D@3{8BJS^kyQD(Rg>o4VjdsxJC-2D=sWA4}QEOWPb?`p(4^=_*2#918;#9i8!99d zBJMMj(KolCaw;i{TToocA(JfmZjdlE-Qow>^6Fzj8A?Ajz25-TEY)WFNk+d--GXb& zKk1;+z(y+pX@#;@8_EG}z_dbYPIOV0Oj|0w6%p7pQYEFGOb`~^oE(4|Y<)6xqYV`N zV>zCIZ5m)xOlqc;gj$W|XRCcQz^09J!ohX{3)*&z+L3{S#-^aE!OS)-uxWKH5T9y# zcKQI1T=Nw4`$eX5pys&bPK8VzhEGrp>8MyL?5u@#IHubd`DRVgQHBMM)}tteM| zyL2aIo2uVwUk8y)*h- zV2kECsA^nM4sbhXjt@<%SSE9h9}Cb>`l;!iKER`fWwK8>|843PTwDH0kDUN)qJ$}= zB&-&&v6(^;=dC47)Jn2~)l@}|jYZ>EQrgJ`4zXl8V8cYR89G-Ad(>k&o`G!|V1o$_ zI-AyTC&B!`wWUR1vyQ?~N~M+ziQ(TGaA?mT072EO{@*Dv3ahD=SGrJTVHF2W!F^^*kEJ3}MPRcqy6g(Mz+JM8GXVKAtagdKg06%}HM|+vrU5oM4-kRNEdnZ>>ILWu z8{=~k-w3@e)!4Q+wg_x7jt$$W4vFbBNX%b{+uOU}vBlfjg6GY*!0xFfq;=+j)SR0NZGd z&9R^bwjKz}H3-XL4bZ*Xa1VRYsnSfc^I+ob zeVri7XpPOvl2%F=1bYl0weN=iGOpq0r18^h&JqaMU~PPa-pTbnX2?G4Lxp@0v+9FtkQWnL4}R zJ7AK{acsL2`7bJEFEU@Ju_=Z?HpHwU+ym}|?XWjCHvDf7oD9Jb;T%e8a_s;c5G|go zipTO28GSurXhOyun3Jz|s}JRKDSE#}2I$|Xod0&In!c5Mk{%lkY^H*lmcHFl|rU&u%!6c5*Swf^oSI`)(C@2ww#b+gM-BheYUY((#H1t F{{h-K7B&C? 
literal 0 HcmV?d00001 diff --git a/tests/data/building/joplin_commercial_bldg_v6_sample.prj b/tests/data/building/joplin_commercial_bldg_v6_sample.prj new file mode 100644 index 000000000..f45cbadf0 --- /dev/null +++ b/tests/data/building/joplin_commercial_bldg_v6_sample.prj @@ -0,0 +1 @@ +GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137.0,298.257223563]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]] \ No newline at end of file diff --git a/tests/data/building/joplin_commercial_bldg_v6_sample.shp b/tests/data/building/joplin_commercial_bldg_v6_sample.shp new file mode 100644 index 0000000000000000000000000000000000000000..a209a85c8368a277bef7ed47c000f07b7ba7ac5b GIT binary patch literal 520 zcmZQzQ0HR63K&^lFf%YP0_9>>|4(RK6n@~15$lSJZB7m|e={|nofUpyvF%m0)NUsS ztlCipLB?=_h?9Qr)|^`ye&FQU<(=UjP7XjJCLjjO-CNt|kOh?cBRnNZ1|kP^0Yr}b z$4MP2pc{-2+{E1pkdt`q2fyOG4rZ*5dc9Yspj#fT_#mhav5y-j=fNqvRtxAq%f)~1b3){JU~-%aXTxU#{kKy} ze7SF@lLJtk7bX|FefM)lpnZ2Q)*M!V*vAKxo1?p69Vk2}c}%GI2$AE5$))+r^E?5% MrT36QxzqQP7X7FGc}%_6@Fl`?NznZZYKxq y+EGM}fIN^r4p7<;O2 0" + ], + "expression": "scipy.stats.norm.cdf((math.log(wind) - (3.625))/(0.11))" + } + ] + }, + { + "curveParameters": null, + "description": "legacy - StandardFragilityCurve - Extensive", + "returnType": { + "description": "LS_1", + "type": "Limit State", + "unit": "" + }, + "rules": [ + { + "condition": [ + "wind > 0" + ], + "expression": "scipy.stats.norm.cdf((math.log(wind) - (3.895))/(0.11))" + } + ] + }, + { + "curveParameters": null, + "description": "legacy - StandardFragilityCurve - Complete", + "returnType": { + "description": "LS_2", + "type": "Limit State", + "unit": "" + }, + "rules": [ + { + "condition": [ + "wind > 0" + ], + "expression": "scipy.stats.norm.cdf((math.log(wind) - (4.075))/(0.21))" + } + ] + } + ], + "hazardType": "tornado", + "inventoryType": "building", + "paperReference": null, + "resultType": "Limit State", + "resultUnit": null +} diff --git a/tests/data/fragility_curves/fragility_archetype_7.json b/tests/data/fragility_curves/fragility_archetype_7.json new file mode 100644 index 000000000..81b1643f3 --- /dev/null +++ b/tests/data/fragility_curves/fragility_archetype_7.json @@ -0,0 +1,81 @@ +{ + "description": "Light industrial", + "authors": [ + "M. Memari N. Attary H. Masoomi J.W. van de Lindt S.F. Pilkington M.R. Ameri & H. 
Mahmoud" + ], + "paperReference": null, + "resultUnit": null, + "resultType": "Limit State", + "hazardType": "tornado", + "inventoryType": "building", + "creator": "incore", + "owner": "incore", + "curveParameters": [ + { + "name": "wind", + "unit": "mps", + "description": "wind value from hazard service", + "fullName": "wind", + "expression": null + } + ], + "demandTypes": [ + "wind" + ], + "demandUnits": [ + "mps" + ], + "fragilityCurves": [ + { + "description": "legacy - StandardFragilityCurve - Moderate", + "rules": [ + { + "condition": [ + "wind > 0" + ], + "expression": "scipy.stats.norm.cdf((math.log(wind) - (3.695))/(0.1))" + } + ], + "returnType": { + "type": "Limit State", + "unit": "", + "description": "LS_0" + }, + "curveParameters": null + }, + { + "description": "legacy - StandardFragilityCurve - Extensive", + "rules": [ + { + "condition": [ + "wind > 0" + ], + "expression": "scipy.stats.norm.cdf((math.log(wind) - (3.785))/(0.1))" + } + ], + "returnType": { + "type": "Limit State", + "unit": "", + "description": "LS_1" + }, + "curveParameters": null + }, + { + "description": "legacy - StandardFragilityCurve - Complete", + "rules": [ + { + "condition": [ + "wind > 0" + ], + "expression": "scipy.stats.norm.cdf((math.log(wind) - (3.865))/(0.1))" + } + ], + "returnType": { + "type": "Limit State", + "unit": "", + "description": "LS_2" + }, + "curveParameters": null + } + ] +} diff --git a/tests/pyincore/analyses/buildingdamage/test_buildingdamage_offline.py b/tests/pyincore/analyses/buildingdamage/test_buildingdamage_offline.py new file mode 100644 index 000000000..1fb46cd0c --- /dev/null +++ b/tests/pyincore/analyses/buildingdamage/test_buildingdamage_offline.py @@ -0,0 +1,74 @@ +import os + +from pyincore import IncoreClient, FragilityCurveSet, MappingSet, Tornado, Dataset, Mapping +from pyincore.analyses.buildingdamage import BuildingDamage +import pyincore.globals as pyglobals + + +def run_with_base_class(): + client = IncoreClient(offline=True) + # client = IncoreClient(pyglobals.INCORE_API_DEV_URL, offline=False) + # client.clear_cache() + + # building + buildings = Dataset.from_file(os.path.join(pyglobals.TEST_DATA_DIR, + "building/joplin_commercial_bldg_v6_sample.shp"), + data_type="ergo:buildingInventoryVer6") + + # tornado + tornado = Tornado.from_json_file(os.path.join(pyglobals.TEST_DATA_DIR, "tornado_dataset.json")) + tornado.hazardDatasets[0].from_file((os.path.join(pyglobals.TEST_DATA_DIR, "joplin_tornado/joplin_path_wgs84.shp")), + data_type="incore:tornadoWindfield") + # dfr3 + fragility_archetype_6 = FragilityCurveSet.from_json_file(os.path.join(pyglobals.TEST_DATA_DIR, + "fragility_curves/fragility_archetype_6.json")) + fragility_archetype_7 = FragilityCurveSet.from_json_file(os.path.join(pyglobals.TEST_DATA_DIR, + "fragility_curves/fragility_archetype_7.json")) + + fragility_entry_archetype_6 = {"Non-Retrofit Fragility ID Code": fragility_archetype_6} + fragility_rules_archetype_6 = {"OR": ["int archetype EQUALS 6"]} + fragility_mapping_archetype_6 = Mapping(fragility_entry_archetype_6, fragility_rules_archetype_6) + fragility_entry_archetype_7 = {"Non-Retrofit Fragility ID Code": fragility_archetype_7} + fragility_rules_archetype_7 = {"OR": ["int archetype EQUALS 7"]} + fragility_mapping_archetype_7 = Mapping(fragility_entry_archetype_7, fragility_rules_archetype_7) + + fragility_mapping_set_definition = { + "id": "N/A", + "name": "local joplin tornado fragility mapping object", + "hazardType": "tornado", + "inventoryType": "building", + 'mappings': [ + 
fragility_mapping_archetype_6, + fragility_mapping_archetype_7, + ], + "mappingType": "fragility" + } + + fragility_mapping_set = MappingSet(fragility_mapping_set_definition) + + # Building Damage + # Create building damage + bldg_dmg = BuildingDamage(client) + + # Load input dataset + bldg_dmg.set_input_dataset("buildings", buildings) + + # Load fragility mapping + bldg_dmg.set_input_dataset("dfr3_mapping_set", fragility_mapping_set) + + # Set hazard + bldg_dmg.set_input_hazard("hazard", tornado) + + # Set analysis parameters + result_folder = "offline" + # result_folder = "online" + if not os.path.exists(result_folder): + os.mkdir(result_folder) + result_name = os.path.join(result_folder, "joplin_tornado_commercial_bldg_dmg") + bldg_dmg.set_parameter("result_name", result_name) + bldg_dmg.set_parameter("num_cpu", 2) + bldg_dmg.run_analysis() + + +if __name__ == '__main__': + run_with_base_class() From 077ff8210c1d23d46a54bc5eb4116700ec31498a Mon Sep 17 00:00:00 2001 From: Vismayak Mohanarajan Date: Fri, 8 Dec 2023 11:40:38 -0600 Subject: [PATCH 04/14] Update to accept damage results as input (#461) * Update to accept damage results as input * Update CHANGELOG.md * Update dataset key --- CHANGELOG.md | 1 + .../commercialbuildingrecovery.py | 19 ++++++++++++++----- .../test_commercialbuildingrecovery.py | 3 +++ 3 files changed, 18 insertions(+), 5 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 4d65eae95..22b209930 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -14,6 +14,7 @@ and this project adheres to [Semantic Versioning](http://semver.org/). - Upgrade python version from 3.6 to 3.9 [#447](https://github.com/IN-CORE/pyincore/issues/447) - Enable offline mode for pyincore [#455](https://github.com/IN-CORE/pyincore/issues/455) - Update MCS analysis to output only required columns for `failure_probability` [#401](https://github.com/IN-CORE/pyincore/issues/401) +- Update CommercialBuildingRecovery to input damage results as a required dataset [#460](https://github.com/IN-CORE/pyincore/issues/460) ## [1.14.0] - 2023-11-08 diff --git a/pyincore/analyses/commercialbuildingrecovery/commercialbuildingrecovery.py b/pyincore/analyses/commercialbuildingrecovery/commercialbuildingrecovery.py index 8a1e3f757..4776ef4af 100644 --- a/pyincore/analyses/commercialbuildingrecovery/commercialbuildingrecovery.py +++ b/pyincore/analyses/commercialbuildingrecovery/commercialbuildingrecovery.py @@ -67,11 +67,12 @@ def run(self): sample_damage_states = self.get_input_dataset("sample_damage_states").get_dataframe_from_csv(low_memory=False) mcs_failure = self.get_input_dataset("mcs_failure").get_dataframe_from_csv(low_memory=False) redi_delay_factors = self.get_input_dataset("delay_factors").get_dataframe_from_csv(low_memory=False) + building_dmg = self.get_input_dataset("building_dmg").get_dataframe_from_csv(low_memory=False) # Returns dataframe total_delay, recovery, time_stepping_recovery = self.commercial_recovery(buildings, sample_damage_states, mcs_failure, redi_delay_factors, - num_samples) + building_dmg, num_samples) self.set_result_csv_data("total_delay", total_delay, result_name + "_delay", "dataframe") self.set_result_csv_data("recovery", recovery, result_name + "_recovery", "dataframe") self.set_result_csv_data("time_stepping_recovery", time_stepping_recovery, @@ -79,7 +80,7 @@ def run(self): return True - def commercial_recovery(self, buildings, sample_damage_states, mcs_failure, redi_delay_factors, num_samples): + def commercial_recovery(self, buildings, sample_damage_states, 
mcs_failure, redi_delay_factors, building_dmg, num_samples): """ Calculates commercial building recovery for buildings @@ -88,6 +89,7 @@ def commercial_recovery(self, buildings, sample_damage_states, mcs_failure, redi sample_damage_states (pd.DataFrame): Sample damage states redi_delay_factors (pd.DataFrame): Delay factors based on REDi framework mcs_failure (pd.DataFrame): Building inventory failure probabilities + building_dmg (pd.DataFrame): Building damage states num_samples (int): number of sample scenarios to use Returns: @@ -97,7 +99,7 @@ def commercial_recovery(self, buildings, sample_damage_states, mcs_failure, redi start_total_delay = time.process_time() total_delay = CommercialBuildingRecovery.total_delay(buildings, sample_damage_states, mcs_failure, - redi_delay_factors, num_samples) + redi_delay_factors, building_dmg, num_samples) end_total_delay = time.process_time() print("Finished executing total_delay() in " + str(end_total_delay - start_total_delay) + " secs") @@ -116,7 +118,7 @@ def commercial_recovery(self, buildings, sample_damage_states, mcs_failure, redi return total_delay, recovery, time_stepping_recovery @staticmethod - def total_delay(buildings, sample_damage_states, mcs_failure, redi_delay_factors, num_samples): + def total_delay(buildings, sample_damage_states, mcs_failure, redi_delay_factors, damage, num_samples): """ Calculates total delay by combining financial delay and other factors from REDi framework Args: @@ -125,6 +127,7 @@ def total_delay(buildings, sample_damage_states, mcs_failure, redi_delay_factors mcs_failure (pd.DataFrame): Building inventory failure probabilities redi_delay_factors (pd.DataFrame): Delay impeding factors such as post-disaster inspection, insurance claim, financing, and government permit based on building's damage state. + damage (pd.DataFrame): Damage states for building structural damage num_samples (int): number of sample scenarios to use Returns: @@ -132,7 +135,7 @@ def total_delay(buildings, sample_damage_states, mcs_failure, redi_delay_factors """ # Obtain the commercial buildings in damage - damage = mcs_failure[mcs_failure['haz_expose'] == 'yes'] + damage = mcs_failure[damage['haz_expose'] == 'yes'] commercial = [] commercial_archetypes = [6, 7, 8, 15, 16, 18, 19] for i, b in enumerate(buildings): @@ -398,6 +401,12 @@ def get_spec(self): 'description': 'Delay impeding factors such as post-disaster inspection, insurance claim, ' 'and government permit based on building\'s damage state. 
Provided by REDi framework', 'type': ['incore:buildingRecoveryFactors'] + }, + { + 'id': 'building_dmg', + 'required': True, + 'description': 'damage result that has damage intervals', + 'type': ['ergo:buildingDamageVer6'] } ], 'output_datasets': [ diff --git a/tests/pyincore/analyses/commercialbuildingrecovery/test_commercialbuildingrecovery.py b/tests/pyincore/analyses/commercialbuildingrecovery/test_commercialbuildingrecovery.py index fd2c239bd..aa16837dd 100644 --- a/tests/pyincore/analyses/commercialbuildingrecovery/test_commercialbuildingrecovery.py +++ b/tests/pyincore/analyses/commercialbuildingrecovery/test_commercialbuildingrecovery.py @@ -24,11 +24,14 @@ def run_with_base_class(): sample_damage_states = "64ee146456b25759cfc599ac" # 10 samples 28k buildings - MCS output format mcs_failure = '64ee144256b25759cfc599a5' delay_factors = "64ee10e756b25759cfc53243" + bld_dmg = '65723c3f9bc3c806024c69b0' # Load input datasets com_recovery.load_remote_input_dataset("sample_damage_states", sample_damage_states) com_recovery.load_remote_input_dataset("mcs_failure", mcs_failure) com_recovery.load_remote_input_dataset("delay_factors", delay_factors) + com_recovery.load_remote_input_dataset("building_dmg", bld_dmg) + # Input parameters From 123bb89bf73cc619b8b4644e9b446cc616c76c77 Mon Sep 17 00:00:00 2001 From: Chen Wang Date: Fri, 8 Dec 2023 11:45:14 -0600 Subject: [PATCH 05/14] changelog and bump the version up --- CHANGELOG.md | 3 ++- docs/source/conf.py | 4 ++-- pyincore/globals.py | 2 +- setup.py | 2 +- 4 files changed, 6 insertions(+), 5 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 22b209930..69f6111dc 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,7 +5,8 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](http://keepachangelog.com/) and this project adheres to [Semantic Versioning](http://semver.org/). 
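A minimal wiring sketch for the change in patch 04 above (illustrative only; the dataset IDs are the sample ones from the updated test, the import path is assumed to follow the same pattern as the building damage test, and the remaining inputs and parameters are omitted):

from pyincore import IncoreClient
from pyincore.analyses.commercialbuildingrecovery import CommercialBuildingRecovery

client = IncoreClient()
com_recovery = CommercialBuildingRecovery(client)
# The three original MCS-related inputs...
com_recovery.load_remote_input_dataset("sample_damage_states", "64ee146456b25759cfc599ac")
com_recovery.load_remote_input_dataset("mcs_failure", "64ee144256b25759cfc599a5")
com_recovery.load_remote_input_dataset("delay_factors", "64ee10e756b25759cfc53243")
# ...plus the building damage result that this patch makes a required dataset.
com_recovery.load_remote_input_dataset("building_dmg", "65723c3f9bc3c806024c69b0")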
-## [Unreleased] + +## [1.15.0] - 2023-12-13 ### Added - Add hazard models to documentation [#448](https://github.com/IN-CORE/pyincore/issues/448) diff --git a/docs/source/conf.py b/docs/source/conf.py index d94eb4b88..31562aae0 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -33,9 +33,9 @@ author = '' # The short X.Y version -version = '1.14' +version = '1.15' # The full version, including alpha/beta/rc tags -release = '1.14.0' +release = '1.15.0' # -- General configuration --------------------------------------------------- diff --git a/pyincore/globals.py b/pyincore/globals.py index 1525caf6d..da7a9868b 100644 --- a/pyincore/globals.py +++ b/pyincore/globals.py @@ -10,7 +10,7 @@ import os import shutil -PACKAGE_VERSION = "1.14.0" +PACKAGE_VERSION = "1.15.0" INCORE_API_PROD_URL = "https://incore.ncsa.illinois.edu" INCORE_API_DEV_URL = "https://incore-dev.ncsa.illinois.edu" diff --git a/setup.py b/setup.py index a8b306b51..ba415e1a0 100644 --- a/setup.py +++ b/setup.py @@ -7,7 +7,7 @@ from setuptools import setup, find_packages # version number of pyincore -version = '1.14.0' +version = '1.15.0' with open("README.rst", encoding="utf-8") as f: readme = f.read() From 9be06eb79d0a9d6d16e3c9a7a278d35c277647e6 Mon Sep 17 00:00:00 2001 From: Ya-Lan Yang <63822845+ylyangtw@users.noreply.github.com> Date: Fri, 15 Dec 2023 15:18:19 -0600 Subject: [PATCH 06/14] 463 fix nci functionality (#465) * added epf_damage as input dataset * Update CHANGELOG.md * remove unused imports and redundant line --- CHANGELOG.md | 5 +++++ .../ncifunctionality/ncifunctionality.py | 22 ++++++++++++++----- .../ncifunctionality/test_ncifunctionality.py | 1 + 3 files changed, 22 insertions(+), 6 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 69f6111dc..818f681f4 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,6 +5,11 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](http://keepachangelog.com/) and this project adheres to [Semantic Versioning](http://semver.org/). 
+## [Unreleased] + +### Fixed +- Fix NCI Functionality [#463](https://github.com/IN-CORE/pyincore/issues/463) + ## [1.15.0] - 2023-12-13 diff --git a/pyincore/analyses/ncifunctionality/ncifunctionality.py b/pyincore/analyses/ncifunctionality/ncifunctionality.py index 55639b4df..65d6abe1d 100644 --- a/pyincore/analyses/ncifunctionality/ncifunctionality.py +++ b/pyincore/analyses/ncifunctionality/ncifunctionality.py @@ -5,7 +5,6 @@ # and is available at https://www.mozilla.org/en-US/MPL/2.0/ from pyincore import BaseAnalysis, NetworkDataset from pyincore.utils.networkutil import NetworkUtil -from numpy.linalg import inv from typing import List from scipy import stats import networkx as nx @@ -75,12 +74,15 @@ def run(self): wds_dmg_results = self.get_input_dataset('wds_dmg_results').get_dataframe_from_csv() wds_inventory_rest_map = self.get_input_dataset('wds_inventory_rest_map').get_dataframe_from_csv() + # Load limit state probabilities and damage states for each electric power facility + epf_damage = self.get_input_dataset('epf_damage').get_dataframe_from_csv() + epf_cascading_functionality = self.nci_functionality(discretized_days, epf_network_nodes, epf_network_links, wds_network_nodes, wds_network_links, epf_wds_intdp_table, wds_epf_intdp_table, epf_subst_failure_results, epf_inventory_rest_map, epf_time_results, wds_dmg_results, wds_inventory_rest_map, - wds_time_results) + wds_time_results, epf_damage) result_name = self.get_parameter("result_name") self.set_result_csv_data("epf_cascading_functionality", epf_cascading_functionality, name=result_name, @@ -91,7 +93,7 @@ def run(self): def nci_functionality(self, discretized_days, epf_network_nodes, epf_network_links, wds_network_nodes, wds_network_links, epf_wds_intdp_table, wds_epf_intdp_table, epf_subst_failure_results, epf_inventory_rest_map, epf_time_results, wds_dmg_results, wds_inventory_rest_map, - wds_time_results): + wds_time_results, epf_damage): """Compute EPF and WDS cascading functionality outcomes Args: @@ -108,6 +110,7 @@ def nci_functionality(self, discretized_days, epf_network_nodes, epf_network_lin wds_dmg_results (pd.DataFrame): damage results for WDS network wds_inventory_rest_map (pd.DataFrame): inventory restoration map for WDS network wds_time_results (pd.DataFrame): time results for WDS network + epf_damage (pd.DataFrame): limit state probabilities and damage states for each guid Returns: (pd.DataFrame, pd.DataFrame): results for EPF and WDS networks @@ -115,7 +118,7 @@ def nci_functionality(self, discretized_days, epf_network_nodes, epf_network_lin # Compute updated EPF and WDS node information efp_nodes_updated = self.update_epf_discretized_func(epf_network_nodes, epf_subst_failure_results, - epf_inventory_rest_map, epf_time_results) + epf_inventory_rest_map, epf_time_results, epf_damage) wds_nodes_updated = self.update_wds_discretized_func(wds_network_nodes, wds_dmg_results, wds_inventory_rest_map, wds_time_results) @@ -125,7 +128,6 @@ def nci_functionality(self, discretized_days, epf_network_nodes, epf_network_lin # Generate the functionality data df_functionality_nodes = pd.concat([efp_nodes_updated, wds_nodes_updated], ignore_index=True) - df_functionality_nodes = pd.concat([efp_nodes_updated, wds_nodes_updated], ignore_index=True) # Create each individual graph g_epf = NetworkUtil.create_network_graph_from_dataframes(epf_network_nodes, epf_network_links) @@ -156,13 +158,15 @@ def nci_functionality(self, discretized_days, epf_network_nodes, epf_network_lin @staticmethod def 
update_epf_discretized_func(epf_nodes, epf_subst_failure_results, epf_inventory_restoration_map, - epf_time_results): + epf_time_results, epf_damage): epf_time_results = epf_time_results.loc[ (epf_time_results['time'] == 1) | (epf_time_results['time'] == 3) | (epf_time_results['time'] == 7) | ( epf_time_results['time'] == 30) | (epf_time_results['time'] == 90)] epf_time_results.insert(2, 'PF_00', list( np.ones(len(epf_time_results)))) # PF_00, PF_0, PF_1, PF_2, PF_3 ---> DS_0, DS_1, DS_2, DS_3, DS_4 + epf_subst_failure_results = pd.merge(epf_damage, epf_subst_failure_results, on='guid', how='outer') + epf_nodes_updated = pd.merge(epf_nodes[['nodenwid', 'utilfcltyc', 'guid']], epf_subst_failure_results[ ['guid', 'DS_0', 'DS_1', 'DS_2', 'DS_3', 'DS_4', 'failure_probability']], on='guid', how='outer') @@ -387,6 +391,12 @@ def get_spec(self): 'required': True, 'description': 'A csv file recording repair time for WDS per class and limit state', 'type': ['incore:waterFacilityRestorationTime'] + }, + { + 'id': 'epf_damage', + 'required': True, + 'description': 'A csv file with limit state probabilities and damage states for each electric power facility', + 'type': ['incore:epfDamageVer3'] } ], 'output_datasets': [ diff --git a/tests/pyincore/analyses/ncifunctionality/test_ncifunctionality.py b/tests/pyincore/analyses/ncifunctionality/test_ncifunctionality.py index b939ac8be..d54e3ed21 100644 --- a/tests/pyincore/analyses/ncifunctionality/test_ncifunctionality.py +++ b/tests/pyincore/analyses/ncifunctionality/test_ncifunctionality.py @@ -138,6 +138,7 @@ def run_with_base_class(): nic_func.set_input_dataset("wds_dmg_results", wds_dmg_results) nic_func.set_input_dataset("wds_inventory_rest_map", wds_inventory_rest_map) nic_func.set_input_dataset("wds_time_results", wds_time_results) + nic_func.set_input_dataset("epf_damage", substation_dmg_result) nic_func.run_analysis() From ea14d106b8fa8eae5d216f085912dd59679ce9f6 Mon Sep 17 00:00:00 2001 From: Chen Wang Date: Fri, 15 Dec 2023 15:24:31 -0600 Subject: [PATCH 07/14] bump version up add changelog entry --- CHANGELOG.md | 3 ++- docs/source/conf.py | 2 +- pyincore/globals.py | 2 +- setup.py | 2 +- 4 files changed, 5 insertions(+), 4 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 818f681f4..63852e210 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,7 +5,8 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](http://keepachangelog.com/) and this project adheres to [Semantic Versioning](http://semver.org/). 
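The crux of the NCI fix in patch 06 above is an outer join of the new epf_damage table into the substation failure results before node functionality is updated. A self-contained pandas sketch of that join, with made-up values rather than real pyincore output:

import pandas as pd

epf_damage = pd.DataFrame({
    "guid": ["a1", "b2"],
    "DS_0": [0.6, 0.2], "DS_1": [0.2, 0.3], "DS_2": [0.1, 0.3],
    "DS_3": [0.07, 0.15], "DS_4": [0.03, 0.05],
})
epf_subst_failure_results = pd.DataFrame({
    "guid": ["a1", "b2"],
    "failure_probability": [0.12, 0.48],
})

# how="outer" keeps every facility even when a guid is missing from one side.
merged = pd.merge(epf_damage, epf_subst_failure_results, on="guid", how="outer")
print(merged[["guid", "DS_0", "DS_4", "failure_probability"]])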
-## [Unreleased] + +## [1.15.1] - 2023-12-20 ### Fixed - Fix NCI Functionality [#463](https://github.com/IN-CORE/pyincore/issues/463) diff --git a/docs/source/conf.py b/docs/source/conf.py index 31562aae0..565bb1010 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -35,7 +35,7 @@ # The short X.Y version version = '1.15' # The full version, including alpha/beta/rc tags -release = '1.15.0' +release = '1.15.1' # -- General configuration --------------------------------------------------- diff --git a/pyincore/globals.py b/pyincore/globals.py index da7a9868b..cda468a99 100644 --- a/pyincore/globals.py +++ b/pyincore/globals.py @@ -10,7 +10,7 @@ import os import shutil -PACKAGE_VERSION = "1.15.0" +PACKAGE_VERSION = "1.15.1" INCORE_API_PROD_URL = "https://incore.ncsa.illinois.edu" INCORE_API_DEV_URL = "https://incore-dev.ncsa.illinois.edu" diff --git a/setup.py b/setup.py index ba415e1a0..7976b0cb7 100644 --- a/setup.py +++ b/setup.py @@ -7,7 +7,7 @@ from setuptools import setup, find_packages # version number of pyincore -version = '1.15.0' +version = '1.15.1' with open("README.rst", encoding="utf-8") as f: readme = f.read() From 755d44ef1c4ccc4bdade4d121df939765eeeaa7d Mon Sep 17 00:00:00 2001 From: Chen Wang Date: Thu, 25 Jan 2024 12:00:23 -0600 Subject: [PATCH 08/14] Update tornado/eq model and fix pytest (#480) * change to different semantic search term * changelog * fix tornado * fix model based eq * update changelog --- CHANGELOG.md | 8 +++++ .../tornadoepndamage/tornadoepndamage.py | 2 +- pyincore/models/hazard/tornado.py | 16 +++------ tests/data/eq-dataset.json | 2 +- tests/data/tornado.json | 11 ++++-- tests/data/tornado_dataset.json | 9 ++++- tests/data/tornado_model.json | 9 ++++- tests/pyincore/models/test_hazard.py | 2 +- tests/pyincore/test_analysis.py | 35 ------------------- tests/pyincore/test_semanticservice.py | 2 +- 10 files changed, 42 insertions(+), 54 deletions(-) delete mode 100644 tests/pyincore/test_analysis.py diff --git a/CHANGELOG.md b/CHANGELOG.md index 63852e210..a6bdee892 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,6 +5,14 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](http://keepachangelog.com/) and this project adheres to [Semantic Versioning](http://semver.org/). 
+## [Unreleased] + +### Changed +- Tornado and Earthquake model [#474](https://github.com/IN-CORE/pyincore/issues/474) + +### Fixed +- Fix semantics search pytest by switching to an existing search term + ## [1.15.1] - 2023-12-20 diff --git a/pyincore/analyses/tornadoepndamage/tornadoepndamage.py b/pyincore/analyses/tornadoepndamage/tornadoepndamage.py index c952d470b..5a6a62505 100644 --- a/pyincore/analyses/tornadoepndamage/tornadoepndamage.py +++ b/pyincore/analyses/tornadoepndamage/tornadoepndamage.py @@ -97,7 +97,7 @@ def run(self): tornado_id = tornado.id tornado_metadata = self.hazardsvc.get_tornado_hazard_metadata(tornado_id) - self.load_remote_input_dataset("tornado", tornado_metadata["datasetId"]) + self.load_remote_input_dataset("tornado", tornado_metadata["hazardDataset"][0].get("datasetId")) tornado_dataset = self.get_input_dataset("tornado").get_inventory_reader() ds_results, damage_results = self.get_damage(network_dataset, tornado_dataset, tornado_id) diff --git a/pyincore/models/hazard/tornado.py b/pyincore/models/hazard/tornado.py index ef56cf967..b82fc4d7c 100644 --- a/pyincore/models/hazard/tornado.py +++ b/pyincore/models/hazard/tornado.py @@ -27,23 +27,17 @@ class Tornado(Hazard): "xxx" ], "date": "2020-08-14T16:22:32+0000", - "datasetId": "xxx" + "hazardDatasets":[] """ def __init__(self, metadata, ef_rating_field="ef_rating", ef_wind_speed=(65, 86, 111, 136, 166, 200), max_wind_speed=250.0): super().__init__(metadata) self.tornado_type = metadata["tornadoType"] if "tornadoType" in metadata else "" - # tornado has very different shape than other hazards - if self.tornado_type == "dataset": - self.hazardDatasets = [ - TornadoDataset({"threshold": metadata["threshold"] if "threshold" in metadata else None, - "demandType": "wind", - "demandUnits": metadata["thresholdUnit"] if "thresholdUnit" in metadata else "mph", - "datasetId": metadata["datasetId"] if "datasetId" in metadata else ""}) - ] - else: - self.hazardDatasets = [] + self.hazardDatasets = [] + if "hazardDatasets" in metadata: + for hazardDataset in metadata["hazardDatasets"]: + self.hazardDatasets.append(TornadoDataset(hazardDataset)) self.hazard_type = "tornado" self.EF_RATING_FIELD = ef_rating_field self.EF_WIND_SPEED = ef_wind_speed diff --git a/tests/data/eq-dataset.json b/tests/data/eq-dataset.json index 0400b97ac..9d4845983 100644 --- a/tests/data/eq-dataset.json +++ b/tests/data/eq-dataset.json @@ -28,4 +28,4 @@ } } ] -} \ No newline at end of file +} diff --git a/tests/data/tornado.json b/tests/data/tornado.json index 2d308bad1..6cb5a07e7 100644 --- a/tests/data/tornado.json +++ b/tests/data/tornado.json @@ -16,5 +16,12 @@ ], "windSpeedMethod": "1", "numSimulations": "1" - } -} \ No newline at end of file + }, + "hazardDatasets": [ + { + "demandType": "wind", + "demandUnits": "mph", + "threshold": null + } + ] +} diff --git a/tests/data/tornado_dataset.json b/tests/data/tornado_dataset.json index fa1fee515..3f3bf7353 100644 --- a/tests/data/tornado_dataset.json +++ b/tests/data/tornado_dataset.json @@ -3,5 +3,12 @@ "description": "Joplin tornado hazard", "tornadoType": "dataset", "threshold": null, - "thresholdUnit": "mph" + "thresholdUnit": "mph", + "hazardDatasets": [ + { + "demandType": "wind", + "demandUnits": "mph", + "threshold": null + } + ] } diff --git a/tests/data/tornado_model.json b/tests/data/tornado_model.json index f7daeaac4..ee99dd0c2 100644 --- a/tests/data/tornado_model.json +++ b/tests/data/tornado_model.json @@ -16,5 +16,12 @@ ], "windSpeedMethod": "1", "numSimulations": "1" - } + 
}, + "hazardDatasets": [ + { + "demandType": "wind", + "demandUnits": "mph", + "threshold": null + } + ] } diff --git a/tests/pyincore/models/test_hazard.py b/tests/pyincore/models/test_hazard.py index c1d0f0c4d..ad0d719da 100644 --- a/tests/pyincore/models/test_hazard.py +++ b/tests/pyincore/models/test_hazard.py @@ -129,7 +129,7 @@ def test_create_tsunami_from_local(): def test_create_eq_from_remote(): eq = Earthquake.from_hazard_service("5b902cb273c3371e1236b36b", hazardsvc) - assert len(eq.hazardDatasets) == 0 # test model based eq compatibility + assert len(eq.hazardDatasets) == 1 # test model based eq compatibility payload = [ { diff --git a/tests/pyincore/test_analysis.py b/tests/pyincore/test_analysis.py deleted file mode 100644 index dbb9a09d9..000000000 --- a/tests/pyincore/test_analysis.py +++ /dev/null @@ -1,35 +0,0 @@ -# Copyright (c) 2019 University of Illinois and others. All rights reserved. -# -# This program and the accompanying materials are made available under the -# terms of the Mozilla Public License v2.0 which accompanies this distribution, -# and is available at https://www.mozilla.org/en-US/MPL/2.0/ - -import pytest - -from pyincore import globals as pyglobals -from pyincore.analyses.tornadoepndamage.tornadoepndamage import \ - TornadoEpnDamage -from pyincore import IncoreClient - - -@pytest.fixture -def datasvc(): - return pytest.datasvc - - -def test_tornado_epn_damage_analysis(datasvc): - client = IncoreClient(pyglobals.INCORE_API_DEV_URL) - - ted = TornadoEpnDamage(client) - - epn_network_id = "62719fc857f1d94b047447e6" - tornado_id = '5df913b83494fe000861a743' - - ted.load_remote_input_dataset("epn_network", epn_network_id) - - result_name = "tornado_dmg_result" - ted.set_parameter("result_name", result_name) - ted.set_parameter('tornado_id', tornado_id) - ted.set_parameter('seed', 1001) - - ted.run_analysis() diff --git a/tests/pyincore/test_semanticservice.py b/tests/pyincore/test_semanticservice.py index e8f40eb49..f76b502e4 100644 --- a/tests/pyincore/test_semanticservice.py +++ b/tests/pyincore/test_semanticservice.py @@ -64,7 +64,7 @@ def test_get_semantic_type_by_name(semanticsvc): def test_search_semantic_types(semanticsvc): - search_term_exists = "wildFireDamageRaster" + search_term_exists = "buildingDamage" search_term_not_exists = "asdwerueidj" # search for term that should find an entry semantic_types = semanticsvc.search_semantic_type(search_term_exists) From 81e79245c9f2eebeaab9414e5a7f9a25ceacd855 Mon Sep 17 00:00:00 2001 From: YONG WOOK KIM Date: Tue, 30 Jan 2024 14:23:46 -0600 Subject: [PATCH 09/14] added guid field insertion method for geopackage file (#479) * added guid to geopackage file * delete unnecessary files * fixed typo --- CHANGELOG.md | 2 ++ pyincore/utils/geoutil.py | 73 +++++++++++++++++++++++---------------- 2 files changed, 45 insertions(+), 30 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index a6bdee892..d7cbb06fb 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,6 +6,8 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/) and this project adheres to [Semantic Versioning](http://semver.org/). 
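With patch 08 above, dataset-based tornadoes no longer read a top-level datasetId; instead, every entry under hazardDatasets becomes a TornadoDataset regardless of tornadoType. A standalone restatement of that parsing step (the stand-in class below is hypothetical; the real one lives in pyincore.models.hazard.hazarddataset):

class TornadoDatasetStandIn:  # hypothetical stand-in for pyincore's TornadoDataset
    def __init__(self, metadata):
        self.demand_type = metadata.get("demandType")
        self.demand_units = metadata.get("demandUnits")
        self.threshold = metadata.get("threshold")

metadata = {  # shaped like tests/data/tornado_dataset.json after this patch
    "name": "Joplin tornado hazard",
    "tornadoType": "dataset",
    "hazardDatasets": [
        {"demandType": "wind", "demandUnits": "mph", "threshold": None},
    ],
}

hazard_datasets = [TornadoDatasetStandIn(h) for h in metadata.get("hazardDatasets", [])]
assert len(hazard_datasets) == 1  # mirrors the updated assertion in test_hazard.py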
## [Unreleased] ### Added - Create GUID field in geopackage file [#478](https://github.com/IN-CORE/pyincore/issues/478) ### Changed - Tornado and Earthquake model [#474](https://github.com/IN-CORE/pyincore/issues/474) diff --git a/pyincore/utils/geoutil.py b/pyincore/utils/geoutil.py index 7d7440c64..71489dea3 100644 --- a/pyincore/utils/geoutil.py +++ b/pyincore/utils/geoutil.py @@ -9,13 +9,14 @@ from scipy.spatial import KDTree import sys import pyproj +import geopandas as gpd from rtree import index from shapely.geometry import shape, Point, MultiLineString, LineString import fiona import uuid -import copy +import os logging.basicConfig(stream=sys.stderr, level=logging.INFO) @@ -228,49 +229,61 @@ def create_rtree_index(inshp): return idx @staticmethod - def add_guid(inshp_filename, outshp_filename): - """Add guid to shapefile + def add_guid(infile, outfile): + """Add uuid to shapefile or geopackage Args: - inshp_filename (str): Full path and filename of Input Shapefile - outshp_filename (str): Full path and filename of Ouptut shapefile + infile (str): Full path and filename of Input file + outfile (str): Full path and filename of Output file Returns: bool: A success or fail to add guid. """ - # TODO: # - need to handle when there is existing GUID # - need to handle when there is existing GUID and some missing guid for some rows # - need to handle when input and output are same - shape_property_list = [] - schema = None - incrs = None + # check if the input file is a shapefile or geopackage + is_success = False + is_shapefile = False + is_geopackage = False - try: - infile = fiona.open(inshp_filename) - incrs = infile.crs - # create list of each shapefile entry - schema = infile.schema.copy() - schema['properties']['guid'] = 'str:30' - for in_feature in infile: - # build shape feature - tmp_feature = copy.deepcopy(in_feature) - tmp_feature['properties']['guid'] = str(uuid.uuid4()) - shape_property_list.append(tmp_feature) - except: - logging.exception("Error reading/processing feature %s:", inshp_filename) + if infile.lower().endswith('.shp'): + is_shapefile = True + elif infile.lower().endswith('.gpkg'): + is_geopackage = True + else: + logging.error("Error: Input file format is not supported.") + print("Error: Input file format is not supported.") return False + # get the filename without extension so it can be used for layer name + outfile_name = os.path.splitext(os.path.basename(outfile))[0] + + if is_shapefile: + gdf = gpd.read_file(infile) + gdf['guid'] = gdf.apply(lambda x: str(uuid.uuid4()), axis=1) + gdf.to_file(f"{outfile}", driver='ESRI Shapefile') + is_success = True + elif is_geopackage: + if GeoUtil.is_vector_gpkg(infile): + gdf = gpd.read_file(infile) + gdf['guid'] = gdf.apply(lambda x: str(uuid.uuid4()), axis=1) + gdf.to_file(outfile, layer=outfile_name, driver='GPKG') + is_success = True + else: + logging.error("Error: The GeoPackage contains raster data, which is not supported.") + print("Error: The GeoPackage contains raster data, which is not supported.") + return False + + return is_success + + @staticmethod + def is_vector_gpkg(filepath): try: - with fiona.open(outshp_filename, 'w', crs=incrs, driver='ESRI Shapefile', schema=schema) as output: - for i in range(len(shape_property_list)): - new_feature = shape_property_list[i] - output.write(new_feature) - except: - logging.exception("Error writing features %s:", outshp_filename) + with fiona.open(filepath) as src: + return src.schema['geometry'] is not None + except fiona.errors.DriverError: return False - - return 
True From d2eb42f0a812a728fbac616f7dab58d5c55c7a96 Mon Sep 17 00:00:00 2001 From: Chen Wang Date: Tue, 30 Jan 2024 17:50:34 -0600 Subject: [PATCH 10/14] 458 check offline mode and disable method interact with services (#476) * use decorator * update decorator; fix typo * changelog * lift one check * add decorator to data services * fix unused import and pep8 * more unused import --- CHANGELOG.md | 6 +++++ pyincore/dataservice.py | 20 ++++++++++++-- pyincore/decorators.py | 11 ++++++++ pyincore/dfr3service.py | 15 ++++++++--- pyincore/fragilityservice.py | 2 ++ pyincore/hazardservice.py | 48 ++++++++++++++++++++++++++++++++-- pyincore/repairservice.py | 2 ++ pyincore/restorationservice.py | 5 ++-- pyincore/semanticservice.py | 8 +++--- pyincore/spaceservice.py | 12 ++++++++- 10 files changed, 115 insertions(+), 14 deletions(-) create mode 100644 pyincore/decorators.py diff --git a/CHANGELOG.md b/CHANGELOG.md index d7cbb06fb..32746a505 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -16,6 +16,12 @@ and this project adheres to [Semantic Versioning](http://semver.org/). - Fix semantics search pytest by switching to an existing search term +## [Unreleased] + +### Changed +- Disable methods that interact with services if in offline mode [#458](https://github.com/IN-CORE/pyincore/issues/458) + + ## [1.15.1] - 2023-12-20 ### Fixed diff --git a/pyincore/dataservice.py b/pyincore/dataservice.py index d0c685959..819d96cc1 100644 --- a/pyincore/dataservice.py +++ b/pyincore/dataservice.py @@ -14,9 +14,9 @@ import pyincore.globals as pyglobals from pyincore import IncoreClient +from pyincore.decorators import forbid_offline from pyincore.utils import return_http_response from urllib.parse import urljoin -import requests logger = pyglobals.LOGGER @@ -36,7 +36,7 @@ def __init__(self, client: IncoreClient): self.base_earthquake_url = urljoin(client.service_url, 'hazard/api/earthquakes/') self.base_tornado_url = urljoin(client.service_url, 'hazard/api/tornadoes/') - + @forbid_offline def get_dataset_metadata(self, dataset_id: str, timeout=(30, 600), **kwargs): """Retrieve metadata from data service. Dataset API endpoint is called. @@ -53,6 +53,7 @@ def get_dataset_metadata(self, dataset_id: str, timeout=(30, 600), **kwargs): r = self.client.get(url, timeout=timeout, **kwargs) return return_http_response(r).json() + @forbid_offline def get_dataset_files_metadata(self, dataset_id: str, timeout=(30, 600), **kwargs): """Retrieve metadata of all files associated with the dataset. Files API endpoint is called. @@ -69,6 +70,7 @@ def get_dataset_files_metadata(self, dataset_id: str, timeout=(30, 600), **kwarg r = self.client.get(url, timeout=timeout, **kwargs) return return_http_response(r).json() + @forbid_offline def get_dataset_file_metadata(self, dataset_id: str, file_id: str, timeout=(30, 600), **kwargs): """Retrieve metadata of all files associated with the dataset. Files API endpoint is called. @@ -87,6 +89,7 @@ def get_dataset_file_metadata(self, dataset_id: str, file_id: str, timeout=(30, r = self.client.get(url, timeout=timeout, **kwargs) return return_http_response(r).json() + @forbid_offline def get_dataset_blob(self, dataset_id: str, join=None, timeout=(30, 600), **kwargs): """Retrieve a blob of the dataset. Blob API endpoint is called. 
@@ -128,6 +131,7 @@ def get_dataset_blob(self, dataset_id: str, join=None, timeout=(30, 600), **kwar else: return local_filename + @forbid_offline def download_dataset_blob(self, cache_data_dir: str, dataset_id: str, join=None, timeout=(30, 600), **kwargs): # construct url for file download url = urljoin(self.base_url, dataset_id + '/blob') @@ -155,6 +159,7 @@ def download_dataset_blob(self, cache_data_dir: str, dataset_id: str, join=None, return local_filename + @forbid_offline def get_datasets(self, datatype: str = None, title: str = None, creator: str = None, skip: int = None, limit: int = None, space: str = None, timeout=(30, 600), **kwargs): """Function to get datasets. Blob API endpoint is called. @@ -193,6 +198,7 @@ def get_datasets(self, datatype: str = None, title: str = None, creator: str = N # need to handle there is no datasets return return_http_response(r).json() + @forbid_offline def create_dataset(self, properties: dict, timeout=(30, 600), **kwargs): """Create datasets. Post API endpoint is called. @@ -210,6 +216,7 @@ def create_dataset(self, properties: dict, timeout=(30, 600), **kwargs): r = self.client.post(url, timeout=timeout, **kwargs) return return_http_response(r).json() + @forbid_offline def update_dataset(self, dataset_id, property_name: str, property_value: str, timeout=(30, 600), **kwargs): """Update dataset. Put API endpoint is called. @@ -232,6 +239,7 @@ def update_dataset(self, dataset_id, property_name: str, r = self.client.put(url, timeout=timeout, **kwargs) return return_http_response(r).json() + @forbid_offline def add_files_to_dataset(self, dataset_id: str, filepaths: list, timeout=(30, 600), **kwargs): """Add files to the dataset. Post API endpoint is called. @@ -260,6 +268,7 @@ def add_files_to_dataset(self, dataset_id: str, filepaths: list, timeout=(30, 60 return return_http_response(r).json() + @forbid_offline def add_files_to_network_dataset(self, dataset_id: str, filepaths: list, nodename: str, linkname: str, graphname: str, timeout=(30, 600), **kwargs): """Add files to the network dataset. Post API endpoint is called. @@ -303,6 +312,7 @@ def add_files_to_network_dataset(self, dataset_id: str, filepaths: list, return return_http_response(r).json() + @forbid_offline def delete_dataset(self, dataset_id: str, timeout=(30, 600), **kwargs): """Delete dataset. Delete API endpoint is called. @@ -319,6 +329,7 @@ def delete_dataset(self, dataset_id: str, timeout=(30, 600), **kwargs): r = self.client.delete(url, timeout=timeout, **kwargs) return return_http_response(r).json() + @forbid_offline def get_files(self, timeout=(30, 600), **kwargs): """Get all files. Files API endpoint is called. Args: @@ -334,6 +345,7 @@ def get_files(self, timeout=(30, 600), **kwargs): r = self.client.get(url, timeout=timeout, **kwargs) return return_http_response(r).json() + @forbid_offline def get_file_metadata(self, file_id: str, timeout=(30, 600), **kwargs): """Function to retrieve metadata of a file defined by id. Files API endpoint is called. @@ -350,6 +362,7 @@ def get_file_metadata(self, file_id: str, timeout=(30, 600), **kwargs): r = self.client.get(url, timeout=timeout, **kwargs) return return_http_response(r).json() + @forbid_offline def get_file_blob(self, file_id: str, timeout=(30, 600), **kwargs): """Function to retrieve a blob of the file. Blob API endpoint is called. 
@@ -412,6 +425,7 @@ def unzip_dataset(self, local_filename: str):
         zip_ref.close()
         return foldername
 
+    @forbid_offline
     def get_shpfile_from_service(self, fileid, dirname, timeout=(30, 600), **kwargs):
         """Function to obtain a shape file from Data service.
 
@@ -443,6 +457,7 @@ def get_shpfile_from_service(self, fileid, dirname, timeout=(30, 600), **kwargs)
 
         return filename
 
+    @forbid_offline
     def get_tornado_dataset_id_from_service(self, fileid, timeout=(30, 600), **kwargs):
         """Function to obtain a tornado dataset Id from Data service.
 
@@ -461,6 +476,7 @@ def get_tornado_dataset_id_from_service(self, fileid, timeout=(30, 600), **kwarg
 
         return return_http_response(r).json()["tornadoDatasetId"]
 
+    @forbid_offline
     def search_datasets(self, text: str, skip: int = None, limit: int = None, timeout=(30, 600), **kwargs):
         """Function to search datasets.
 
diff --git a/pyincore/decorators.py b/pyincore/decorators.py
new file mode 100644
index 000000000..212e40373
--- /dev/null
+++ b/pyincore/decorators.py
@@ -0,0 +1,11 @@
+def forbid_offline(func):
+    """
+    Custom decorator that forbids a method from interacting with a remote service in offline mode.
+    Raises a ValueError when the wrapped method is called while the client is offline.
+    """
+    def wrapper(self, *args, **kwargs):
+        if self.client.offline:
+            raise ValueError("Service is not available in offline mode.")
+        return func(self, *args, **kwargs)
+
+    return wrapper
diff --git a/pyincore/dfr3service.py b/pyincore/dfr3service.py
index 9f078108a..c82108048 100644
--- a/pyincore/dfr3service.py
+++ b/pyincore/dfr3service.py
@@ -8,11 +8,9 @@
 import re
 from urllib.parse import urljoin
 from typing import Dict
-import requests
 
 import pyincore.globals as pyglobals
-
-logger = pyglobals.LOGGER
+from pyincore.decorators import forbid_offline
 
 from pyincore import IncoreClient
 from pyincore.models.fragilitycurveset import FragilityCurveSet
@@ -21,6 +19,8 @@
 from pyincore.models.mappingset import MappingSet
 from pyincore.utils import return_http_response
 
+logger = pyglobals.LOGGER
+
 # add more types if needed
 known_types = {
     "java.lang.String": "str",
@@ -73,7 +73,7 @@ def __init__(self, client: IncoreClient):
         self.client = client
         self.base_mapping_url = urljoin(client.service_url, 'dfr3/api/mappings/')
 
-
+    @forbid_offline
     def get_dfr3_set(self, dfr3_id: str, timeout=(30, 600), **kwargs):
         """Get specific DFR3 set.
 
@@ -91,6 +91,7 @@ def get_dfr3_set(self, dfr3_id: str, timeout=(30, 600), **kwargs):
 
         return return_http_response(r).json()
 
+    @forbid_offline
     def delete_dfr3_set(self, dfr3_id: str, timeout=(30, 600), **kwargs):
         """Delete specific DFR3 set.
         Args:
@@ -132,6 +133,7 @@ def batch_get_dfr3_set(self, dfr3_id_lists: list):
 
         return batch_dfr3_sets
 
+    @forbid_offline
     def search_dfr3_sets(self, text: str, skip: int = None, limit: int = None, timeout=(30, 600), **kwargs):
         """Search DFR3 sets based on a specific text.
 
@@ -156,6 +158,7 @@ def search_dfr3_sets(self, text: str, skip: int = None, limit: int = None, timeo
         r = self.client.get(url, params=payload, timeout=timeout, **kwargs)
         return return_http_response(r).json()
 
+    @forbid_offline
     def create_dfr3_set(self, dfr3_set: dict, timeout=(30, 600), **kwargs):
         """Create DFR3 set on the server. POST API endpoint call.
 
@@ -495,6 +498,7 @@ def extract_inventory_class(rules):
         else:
             raise ValueError("boolean " + boolean + " not supported!")
 
+    @forbid_offline
     def create_mapping(self, mapping_set: dict, timeout=(30, 600), **kwargs):
         """Create DFR3 mapping on the server. POST API endpoint call.
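For readers skimming the patch, here is a minimal, self-contained sketch of how the new `forbid_offline` decorator behaves once applied to a service method. `StubClient` and `StubService` are hypothetical stand-ins for `IncoreClient` and a service class such as `DataService`; they are not pyincore classes.

```python
# Standalone sketch of the forbid_offline pattern introduced above.
# StubClient/StubService are illustrative stand-ins, not pyincore code.

def forbid_offline(func):
    def wrapper(self, *args, **kwargs):
        # Every decorated service method checks the shared client flag first.
        if self.client.offline:
            raise ValueError("Service is not available in offline mode.")
        return func(self, *args, **kwargs)
    return wrapper


class StubClient:
    def __init__(self, offline=False):
        self.offline = offline


class StubService:
    def __init__(self, client):
        self.client = client

    @forbid_offline
    def get_dataset_metadata(self, dataset_id):
        return {"id": dataset_id}  # stands in for the real HTTP round trip


offline_service = StubService(StubClient(offline=True))
try:
    offline_service.get_dataset_metadata("some-dataset-id")
except ValueError as err:
    print(err)  # Service is not available in offline mode.
```

One design note: as written, the inner `wrapper` does not use `functools.wraps(func)`, so decorated methods lose their original `__name__` and docstring; adding `@functools.wraps(func)` above `wrapper` would preserve them.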
@@ -513,6 +517,7 @@ def create_mapping(self, mapping_set: dict, timeout=(30, 600), **kwargs): return return_http_response(r).json() + @forbid_offline def get_mappings(self, hazard_type: str = None, inventory_type: str = None, mapping_type: str = None, creator: str = None, space: str = None, skip: int = None, limit: int = None, timeout=(30, 600), **kwargs): @@ -556,6 +561,7 @@ def get_mappings(self, hazard_type: str = None, inventory_type: str = None, mapp return return_http_response(r).json() + @forbid_offline def get_mapping(self, mapping_id, timeout=(30, 600), **kwargs): """Get specific inventory mapping. @@ -573,6 +579,7 @@ def get_mapping(self, mapping_id, timeout=(30, 600), **kwargs): return return_http_response(r).json() + @forbid_offline def delete_mapping(self, mapping_id, timeout=(30, 600), **kwargs): """delete specific inventory mappings. diff --git a/pyincore/fragilityservice.py b/pyincore/fragilityservice.py index e2604df84..4b90ecafa 100644 --- a/pyincore/fragilityservice.py +++ b/pyincore/fragilityservice.py @@ -8,6 +8,7 @@ import urllib from pyincore import IncoreClient +from pyincore.decorators import forbid_offline from pyincore.dfr3service import Dfr3Service from pyincore.utils import return_http_response @@ -26,6 +27,7 @@ def __init__(self, client: IncoreClient): super(FragilityService, self).__init__(client) + @forbid_offline def get_dfr3_sets(self, demand_type: str = None, hazard_type: str = None, inventory_type: str = None, author: str = None, legacy_id: str = None, diff --git a/pyincore/hazardservice.py b/pyincore/hazardservice.py index 3bf0d88cf..0029821d5 100644 --- a/pyincore/hazardservice.py +++ b/pyincore/hazardservice.py @@ -11,6 +11,7 @@ import numpy import pyincore.globals as pyglobals +from pyincore.decorators import forbid_offline from pyincore.utils import return_http_response from pyincore import IncoreClient @@ -39,6 +40,7 @@ def __init__(self, client: IncoreClient): 'hazard/api/hurricaneWindfields/') self.base_flood_url = urljoin(client.service_url, 'hazard/api/floods/') + @forbid_offline def get_earthquake_hazard_metadata_list(self, skip: int = None, limit: int = None, space: str = None, timeout: tuple = (30, 600), **kwargs): """Retrieve earthquake metadata list from hazard service. Hazard API endpoint is called. @@ -67,6 +69,7 @@ def get_earthquake_hazard_metadata_list(self, skip: int = None, limit: int = Non return return_http_response(r).json() + @forbid_offline def get_earthquake_hazard_metadata(self, hazard_id: str, timeout=(30, 600), **kwargs): """Retrieve earthquake metadata from hazard service. Hazard API endpoint is called. @@ -84,6 +87,7 @@ def get_earthquake_hazard_metadata(self, hazard_id: str, timeout=(30, 600), **kw return return_http_response(r).json() + @forbid_offline def get_earthquake_hazard_value_set(self, hazard_id: str, demand_type: str, demand_unit: str, bbox, grid_spacing: float, @@ -131,6 +135,7 @@ def get_earthquake_hazard_value_set(self, hazard_id: str, demand_type: str, return x, y, hazard_val + @forbid_offline def post_earthquake_hazard_values(self, hazard_id: str, payload: list, amplify_hazard=True, timeout=(30, 600), **kwargs): """ Retrieve bulk hurricane hazard values from the Hazard service. 
@@ -157,10 +162,11 @@ def post_earthquake_hazard_values(self, hazard_id: str, payload: list, amplify_h """ url = urljoin(self.base_earthquake_url, hazard_id + "/values") kwargs = {"files": {('points', json.dumps(payload)), ('amplifyHazard', json.dumps(amplify_hazard))}} - r = self.client.post(url, **kwargs) + r = self.client.post(url, timeout=timeout, **kwargs) return return_http_response(r).json() + @forbid_offline def get_liquefaction_values(self, hazard_id: str, geology_dataset_id: str, demand_unit: str, points: List, timeout=(30, 600), **kwargs): """Retrieve earthquake liquefaction values. @@ -185,6 +191,7 @@ def get_liquefaction_values(self, hazard_id: str, geology_dataset_id: str, response = r.json() return response + @forbid_offline def post_liquefaction_values(self, hazard_id: str, geology_dataset_id: str, payload: list, timeout=(30, 600), **kwargs): """ Retrieve bulk earthquake liquefaction hazard values from the Hazard service. @@ -204,6 +211,7 @@ def post_liquefaction_values(self, hazard_id: str, geology_dataset_id: str, payl return return_http_response(r).json() + @forbid_offline def get_soil_amplification_value(self, method: str, dataset_id: str, site_lat: float, site_long: float, demand_type: str, hazard: float, @@ -238,6 +246,7 @@ def get_soil_amplification_value(self, method: str, dataset_id: str, # TODO get_slope_amplification_value needed to be implemented on the server side # def get_slope_amplification_value(self) + @forbid_offline def get_supported_earthquake_models(self, timeout=(30, 600), **kwargs): """Retrieve suported earthquake models. @@ -254,6 +263,7 @@ def get_supported_earthquake_models(self, timeout=(30, 600), **kwargs): return return_http_response(r).json() + @forbid_offline def create_earthquake(self, eq_json, file_paths: List = [], timeout=(30, 600), **kwargs): """Create earthquake on the server. POST API endpoint is called. @@ -277,6 +287,7 @@ def create_earthquake(self, eq_json, file_paths: List = [], timeout=(30, 600), * r = self.client.post(url, timeout=timeout, **kwargs) return return_http_response(r).json() + @forbid_offline def delete_earthquake(self, hazard_id: str, timeout=(30, 600), **kwargs): """Delete an earthquake by it's id, and it's associated datasets @@ -293,6 +304,7 @@ def delete_earthquake(self, hazard_id: str, timeout=(30, 600), **kwargs): r = self.client.delete(url, timeout=timeout, **kwargs) return return_http_response(r).json() + @forbid_offline def search_earthquakes(self, text: str, skip: int = None, limit: int = None, timeout=(30, 600), **kwargs): """Search earthquakes. 
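Worth calling out from the hunk above: `post_earthquake_hazard_values` previously accepted a `timeout` argument but never forwarded it to `client.post`, so callers' timeouts were silently ignored. Below is a minimal sketch of the bug and the fix; `fake_post` is a hypothetical stand-in for the client's POST helper, not pyincore code.

```python
# fake_post is an illustrative stand-in for IncoreClient.post.
def fake_post(url, timeout=(30, 600), **kwargs):
    return {"url": url, "timeout": timeout}

def post_values_buggy(url, timeout=(30, 600), **kwargs):
    # Bug: `timeout` is accepted but dropped, so the default always wins.
    return fake_post(url, **kwargs)

def post_values_fixed(url, timeout=(30, 600), **kwargs):
    # Fix (mirrors the diff above): forward the caller's timeout explicitly.
    return fake_post(url, timeout=timeout, **kwargs)

print(post_values_buggy("hazard/api/earthquakes", timeout=(5, 60))["timeout"])  # (30, 600)
print(post_values_fixed("hazard/api/earthquakes", timeout=(5, 60))["timeout"])  # (5, 60)
```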
@@ -318,6 +330,7 @@ def search_earthquakes(self, text: str, skip: int = None, limit: int = None, tim return return_http_response(r).json() + @forbid_offline def get_earthquake_aleatory_uncertainty(self, hazard_id: str, demand_type: str, timeout=(30, 600), **kwargs): """ Gets aleatory uncertainty for an earthquake @@ -337,6 +350,7 @@ def get_earthquake_aleatory_uncertainty(self, hazard_id: str, demand_type: str, r = self.client.get(url, params=payload, timeout=timeout, **kwargs) return return_http_response(r).json() + @forbid_offline def get_earthquake_variance(self, hazard_id: str, variance_type: str, demand_type: str, demand_unit: str, points: List, timeout=(30, 600), **kwargs): """Gets total and epistemic variance for a model based earthquake @@ -360,6 +374,7 @@ def get_earthquake_variance(self, hazard_id: str, variance_type: str, demand_typ r = self.client.get(url, params=payload, timeout=timeout, **kwargs) return return_http_response(r).json() + @forbid_offline def get_tornado_hazard_metadata_list(self, skip: int = None, limit: int = None, space: str = None, timeout=(30, 600), **kwargs): """Retrieve tornado metadata list from hazard service. Hazard API endpoint is called. @@ -388,6 +403,7 @@ def get_tornado_hazard_metadata_list(self, skip: int = None, limit: int = None, return return_http_response(r).json() + @forbid_offline def get_tornado_hazard_metadata(self, hazard_id: str, timeout=(30, 600), **kwargs): """Retrieve tornado metadata list from hazard service. Hazard API endpoint is called. @@ -405,6 +421,7 @@ def get_tornado_hazard_metadata(self, hazard_id: str, timeout=(30, 600), **kwarg return return_http_response(r).json() + @forbid_offline def post_tornado_hazard_values(self, hazard_id: str, payload: list, seed=None, timeout=(30, 600), **kwargs): """ Retrieve bulk tornado hazard values from the Hazard service. @@ -427,6 +444,7 @@ def post_tornado_hazard_values(self, hazard_id: str, payload: list, seed=None, t return return_http_response(r).json() + @forbid_offline def create_tornado_scenario(self, tornado_json, file_paths: List = [], timeout=(30, 600), **kwargs): """Create tornado on the server. POST API endpoint is called. @@ -450,6 +468,7 @@ def create_tornado_scenario(self, tornado_json, file_paths: List = [], timeout=( r = self.client.post(url, timeout=timeout, **kwargs) return return_http_response(r).json() + @forbid_offline def delete_tornado(self, hazard_id: str, timeout=(30, 600), **kwargs): """Delete a tornado by it's id, and it's associated datasets @@ -466,6 +485,7 @@ def delete_tornado(self, hazard_id: str, timeout=(30, 600), **kwargs): r = self.client.delete(url, timeout=timeout, **kwargs) return return_http_response(r).json() + @forbid_offline def search_tornadoes(self, text: str, skip: int = None, limit: int = None, timeout=(30, 600), **kwargs): """Search tornadoes. @@ -491,6 +511,7 @@ def search_tornadoes(self, text: str, skip: int = None, limit: int = None, timeo return return_http_response(r).json() + @forbid_offline def get_tsunami_hazard_metadata_list(self, skip: int = None, limit: int = None, space: str = None, timeout=(30, 600), **kwargs): """Retrieve tsunami metadata list from hazard service. Hazard API endpoint is called. @@ -519,6 +540,7 @@ def get_tsunami_hazard_metadata_list(self, skip: int = None, limit: int = None, return return_http_response(r).json() + @forbid_offline def get_tsunami_hazard_metadata(self, hazard_id: str, timeout=(30, 600), **kwargs): """Retrieve tsunami metadata list from hazard service. Hazard API endpoint is called. 
@@ -536,6 +558,7 @@ def get_tsunami_hazard_metadata(self, hazard_id: str, timeout=(30, 600), **kwarg return return_http_response(r).json() + @forbid_offline def post_tsunami_hazard_values(self, hazard_id: str, payload: list, timeout=(30, 600), **kwargs): """ Retrieve bulk tsunami hazard values from the Hazard service. @@ -554,6 +577,7 @@ def post_tsunami_hazard_values(self, hazard_id: str, payload: list, timeout=(30, return return_http_response(r).json() + @forbid_offline def create_tsunami_hazard(self, tsunami_json, file_paths: List, timeout=(30, 600), **kwargs): """Create tsunami on the server. POST API endpoint is called. @@ -577,6 +601,7 @@ def create_tsunami_hazard(self, tsunami_json, file_paths: List, timeout=(30, 600 r = self.client.post(url, timeout=timeout, **kwargs) return return_http_response(r).json() + @forbid_offline def delete_tsunami(self, hazard_id: str, timeout=(30, 600), **kwargs): """Delete a tsunami by it's id, and it's associated datasets @@ -593,6 +618,7 @@ def delete_tsunami(self, hazard_id: str, timeout=(30, 600), **kwargs): r = self.client.delete(url, timeout=timeout, **kwargs) return return_http_response(r).json() + @forbid_offline def search_tsunamis(self, text: str, skip: int = None, limit: int = None, timeout=(30, 600), **kwargs): """Search tsunamis. @@ -618,6 +644,7 @@ def search_tsunamis(self, text: str, skip: int = None, limit: int = None, timeou return return_http_response(r).json() + @forbid_offline def create_hurricane(self, hurricane_json, file_paths: List, timeout=(30, 600), **kwargs): """Create hurricanes on the server. POST API endpoint is called. @@ -641,6 +668,7 @@ def create_hurricane(self, hurricane_json, file_paths: List, timeout=(30, 600), return return_http_response(r).json() + @forbid_offline def get_hurricane_metadata_list(self, skip: int = None, limit: int = None, space: str = None, timeout=(30, 600), **kwargs): """Retrieve hurricane metadata list from hazard service. Hazard API endpoint is called. @@ -670,6 +698,7 @@ def get_hurricane_metadata_list(self, skip: int = None, limit: int = None, space return return_http_response(r).json() + @forbid_offline def get_hurricane_metadata(self, hazard_id, timeout=(30, 600), **kwargs): """Retrieve hurricane metadata list from hazard service. Hazard API endpoint is called. @@ -687,6 +716,7 @@ def get_hurricane_metadata(self, hazard_id, timeout=(30, 600), **kwargs): return return_http_response(r).json() + @forbid_offline def post_hurricane_hazard_values(self, hazard_id: str, payload: list, timeout=(30, 600), **kwargs): """ Retrieve bulk hurricane hazard values from the Hazard service. @@ -703,6 +733,7 @@ def post_hurricane_hazard_values(self, hazard_id: str, payload: list, timeout=(3 return return_http_response(r).json() + @forbid_offline def delete_hurricane(self, hazard_id: str, timeout=(30, 600), **kwargs): """Delete a hurricane by it's id, and it's associated datasets @@ -719,6 +750,7 @@ def delete_hurricane(self, hazard_id: str, timeout=(30, 600), **kwargs): r = self.client.delete(url, timeout=timeout, **kwargs) return return_http_response(r).json() + @forbid_offline def search_hurricanes(self, text: str, skip: int = None, limit: int = None, timeout=(30, 600), **kwargs): """Search hurricanes. @@ -743,6 +775,7 @@ def search_hurricanes(self, text: str, skip: int = None, limit: int = None, time return return_http_response(r).json() + @forbid_offline def create_flood(self, flood_json, file_paths: List, timeout=(30, 600), **kwargs): """Create floods on the server. POST API endpoint is called. 
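The `post_*_hazard_values` methods decorated in this patch all share the same bulk-request shape: a list of point dictionaries serialized with `json.dumps` and sent as a multipart `points` field, as the earthquake variant earlier in the diff shows. The sketch below assumes the `demands`/`units`/`loc` payload pattern used in pyincore's hazard-values documentation; the field values are examples only, not canonical.

```python
import json

# Illustrative bulk-values payload; demands/units/loc values are examples only.
payload = [
    {"demands": ["waveHeight"], "units": ["m"], "loc": "46.006,-123.935"},
    {"demands": ["waveHeight"], "units": ["m"], "loc": "46.007,-123.969"},
]

# The earthquake variant shown earlier wraps the payload like this before POSTing:
kwargs = {"files": {("points", json.dumps(payload))}}
print(kwargs["files"])
```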
@@ -763,6 +796,7 @@ def create_flood(self, flood_json, file_paths: List, timeout=(30, 600), **kwargs return return_http_response(r).json() + @forbid_offline def get_flood_metadata_list(self, skip: int = None, limit: int = None, space: str = None, timeout=(30, 600), **kwargs): """Retrieve flood metadata list from hazard service. Hazard API endpoint is called. @@ -792,6 +826,7 @@ def get_flood_metadata_list(self, skip: int = None, limit: int = None, space: st return return_http_response(r).json() + @forbid_offline def get_flood_metadata(self, hazard_id, timeout=(30, 600), **kwargs): """Retrieve flood metadata list from hazard service. Hazard API endpoint is called. @@ -808,6 +843,7 @@ def get_flood_metadata(self, hazard_id, timeout=(30, 600), **kwargs): return return_http_response(r).json() + @forbid_offline def post_flood_hazard_values(self, hazard_id: str, payload: list, timeout=(30, 600), **kwargs): """ Retrieve bulk flood hazard values from the Hazard service. @@ -826,6 +862,7 @@ def post_flood_hazard_values(self, hazard_id: str, payload: list, timeout=(30, 6 return return_http_response(r).json() + @forbid_offline def delete_flood(self, hazard_id: str, timeout=(30, 600), **kwargs): """Delete a flood by it's id, and it's associated datasets @@ -842,6 +879,7 @@ def delete_flood(self, hazard_id: str, timeout=(30, 600), **kwargs): r = self.client.delete(url, timeout=timeout, **kwargs) return return_http_response(r).json() + @forbid_offline def search_floods(self, text: str, skip: int = None, limit: int = None, timeout=(30, 600), **kwargs): """Search floods. @@ -867,6 +905,7 @@ def search_floods(self, text: str, skip: int = None, limit: int = None, timeout= return return_http_response(r).json() + @forbid_offline def create_hurricane_windfield(self, hurr_wf_inputs, timeout=(30, 10800), **kwargs): """Create wind fields on the server. POST API endpoint is called. @@ -887,6 +926,7 @@ def create_hurricane_windfield(self, hurr_wf_inputs, timeout=(30, 10800), **kwar return return_http_response(r).json() + @forbid_offline def get_hurricanewf_metadata_list(self, coast: str = None, category: int = None, skip: int = None, limit: int = None, space: str = None, timeout=(30, 600), **kwargs): """Retrieve hurricane metadata list from hazard service. Hazard API endpoint is called. @@ -922,6 +962,7 @@ def get_hurricanewf_metadata_list(self, coast: str = None, category: int = None, return return_http_response(r).json() + @forbid_offline def get_hurricanewf_metadata(self, hazard_id, timeout=(30, 600), **kwargs): """Retrieve hurricane metadata list from hazard service. Hazard API endpoint is called. 
@@ -939,6 +980,7 @@ def get_hurricanewf_metadata(self, hazard_id, timeout=(30, 600), **kwargs): return return_http_response(r).json() + @forbid_offline def post_hurricanewf_hazard_values(self, hazard_id: str, payload: list, elevation: int, roughness: float, timeout=(30, 600), **kwargs): @@ -963,6 +1005,7 @@ def post_hurricanewf_hazard_values(self, hazard_id: str, payload: list, elevatio return return_http_response(r).json() + @forbid_offline def get_hurricanewf_json(self, coast: str, category: int, trans_d: float, land_fall_loc: int, demand_type: str, demand_unit: str, resolution: int = 6, grid_points: int = 80, rf_method: str = "circular", timeout=(30, 600), **kwargs): @@ -996,6 +1039,7 @@ def get_hurricanewf_json(self, coast: str, category: int, trans_d: float, land_f return return_http_response(r).json() + @forbid_offline def delete_hurricanewf(self, hazard_id: str, timeout=(30, 600), **kwargs): """Delete a hurricane windfield by it's id, and it's associated datasets @@ -1012,6 +1056,7 @@ def delete_hurricanewf(self, hazard_id: str, timeout=(30, 600), **kwargs): r = self.client.delete(url, timeout=timeout, **kwargs) return return_http_response(r).json() + @forbid_offline def search_hurricanewf(self, text: str, skip: int = None, limit: int = None, timeout=(30, 600), **kwargs): """Search hurricanes. @@ -1037,7 +1082,6 @@ def search_hurricanewf(self, text: str, skip: int = None, limit: int = None, tim return return_http_response(r).json() - # TODO replace this with API endpoint in the future def get_allowed_demands(self, hazard_type, timeout=(30, 600), **kwargs): if self.client.offline: if hazard_type in HazardConstant.DEFAULT_ALLOWED_DEMANDS.keys(): diff --git a/pyincore/repairservice.py b/pyincore/repairservice.py index 29dc7c63d..da7780408 100644 --- a/pyincore/repairservice.py +++ b/pyincore/repairservice.py @@ -8,6 +8,7 @@ import urllib from pyincore import IncoreClient +from pyincore.decorators import forbid_offline from pyincore.dfr3service import Dfr3Service @@ -26,6 +27,7 @@ def __init__(self, client: IncoreClient): super(RepairService, self).__init__(client) + @forbid_offline def get_dfr3_sets(self, hazard_type: str = None, inventory_type: str = None, author: str = None, creator: str = None, space: str = None, skip: int = None, limit: int = None, timeout=(30, 600), **kwargs): diff --git a/pyincore/restorationservice.py b/pyincore/restorationservice.py index 0e5d215ad..279592872 100644 --- a/pyincore/restorationservice.py +++ b/pyincore/restorationservice.py @@ -8,6 +8,7 @@ from urllib.parse import urljoin from pyincore import IncoreClient +from pyincore.decorators import forbid_offline from pyincore.dfr3service import Dfr3Service from pyincore.utils import return_http_response @@ -22,11 +23,11 @@ class RestorationService(Dfr3Service): def __init__(self, client: IncoreClient): self.client = client - self.base_dfr3_url = urljoin(client.service_url, - 'dfr3/api/restorations/') + self.base_dfr3_url = urljoin(client.service_url, 'dfr3/api/restorations/') super(RestorationService, self).__init__(client) + @forbid_offline def get_dfr3_sets(self, hazard_type: str = None, inventory_type: str = None, author: str = None, creator: str = None, space: str = None, skip: int = None, limit: int = None, timeout=(30, 600), **kwargs): diff --git a/pyincore/semanticservice.py b/pyincore/semanticservice.py index bc147fb92..372758778 100644 --- a/pyincore/semanticservice.py +++ b/pyincore/semanticservice.py @@ -6,14 +6,13 @@ import json -import os -from typing import Tuple, Union +from typing import 
Tuple import pyincore.globals as pyglobals from pyincore import IncoreClient +from pyincore.decorators import forbid_offline from pyincore.utils import return_http_response from urllib.parse import urljoin -import requests logger = pyglobals.LOGGER @@ -33,6 +32,7 @@ def __init__(self, client: IncoreClient): self.base_url = urljoin(client.service_url, "semantics/api/types") + @forbid_offline def get_all_semantic_types( self, hyperlink: bool = False, @@ -83,6 +83,7 @@ def get_all_semantic_types( return data + @forbid_offline def get_semantic_type_by_name( self, type_name: str, @@ -116,6 +117,7 @@ def get_semantic_type_by_name( return data + @forbid_offline def search_semantic_type( self, query: str, timeout: Tuple[int, int] = (30, 600), **kwargs ) -> list: diff --git a/pyincore/spaceservice.py b/pyincore/spaceservice.py index e5dd632eb..38d5b6a81 100644 --- a/pyincore/spaceservice.py +++ b/pyincore/spaceservice.py @@ -7,8 +7,8 @@ from urllib.parse import urljoin from pyincore import IncoreClient import pyincore.globals as pyglobals +from pyincore.decorators import forbid_offline from pyincore.utils import return_http_response -import requests logger = pyglobals.LOGGER @@ -25,6 +25,7 @@ def __init__(self, client: IncoreClient): self.client = client self.base_space_url = urljoin(client.service_url, "space/api/spaces/") + @forbid_offline def create_space(self, space_json, timeout=(30, 600), **kwargs): """Creates a Space. @@ -43,6 +44,7 @@ def create_space(self, space_json, timeout=(30, 600), **kwargs): r = self.client.post(url, timeout=timeout, **kwargs) return return_http_response(r).json() + @forbid_offline def get_spaces(self, dataset_id: str = None, timeout=(30, 600), **kwargs): """Retrieve a Space with the dataset. @@ -64,6 +66,7 @@ def get_spaces(self, dataset_id: str = None, timeout=(30, 600), **kwargs): return return_http_response(r).json() + @forbid_offline def get_space_by_id(self, space_id: str, timeout=(30, 600), **kwargs): """Get space information. @@ -79,6 +82,7 @@ def get_space_by_id(self, space_id: str, timeout=(30, 600), **kwargs): return return_http_response(r).json() + @forbid_offline def get_space_by_name(self, space_name: str, timeout=(30, 600), **kwargs): """Get space information by querying the name of space. @@ -94,6 +98,7 @@ def get_space_by_name(self, space_name: str, timeout=(30, 600), **kwargs): r = self.client.get(self.base_space_url, params={"name": space_name}, timeout=timeout, **kwargs) return return_http_response(r).json() + @forbid_offline def update_space(self, space_id: str, space_json, timeout=(30, 600), **kwargs): """Updates a Space. @@ -113,6 +118,7 @@ def update_space(self, space_id: str, space_json, timeout=(30, 600), **kwargs): r = self.client.put(url, timeout=timeout, **kwargs) return return_http_response(r).json() + @forbid_offline def add_to_space_by_name(self, space_name: str, dataset_id: str): """Add dataset to a space by using space name and dataset id. @@ -130,6 +136,7 @@ def add_to_space_by_name(self, space_name: str, dataset_id: str): return response + @forbid_offline def remove_from_space_by_name(self, space_name: str, dataset_id: str): """Remove dataset from a space by using space name and dataset id. 
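Stepping back to the `get_allowed_demands` change in `hazardservice.py` above: it is the one service method deliberately left usable offline, falling back to the hard-coded `HazardConstant.DEFAULT_ALLOWED_DEMANDS` table instead of being wrapped with `@forbid_offline`. A simplified sketch of that fallback pattern follows; the dictionary contents and the error behavior for unknown hazard types are invented placeholders, not the real constants.

```python
# Simplified offline-fallback sketch; demand lists are placeholders only.
DEFAULT_ALLOWED_DEMANDS = {
    "earthquake": ["PGA", "PGV"],
    "tornado": ["wind"],
}

def get_allowed_demands(hazard_type, offline, fetch_remote):
    if offline:
        if hazard_type in DEFAULT_ALLOWED_DEMANDS:
            return DEFAULT_ALLOWED_DEMANDS[hazard_type]
        raise ValueError(f"Unknown hazard type in offline mode: {hazard_type}")
    # Online: defer to the hazard service endpoint.
    return fetch_remote(hazard_type)

print(get_allowed_demands("tornado", offline=True, fetch_remote=None))  # ['wind']
```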
@@ -147,6 +154,7 @@ def remove_from_space_by_name(self, space_name: str, dataset_id: str): return response + @forbid_offline def remove_dataset_from_space(self, space_id: str, dataset_id: str, timeout=(30, 600), **kwargs): """Remove dataset from the space using dataset id and space id @@ -165,6 +173,7 @@ def remove_dataset_from_space(self, space_id: str, dataset_id: str, timeout=(30, r = self.client.delete(url, timeout=timeout, **kwargs) return return_http_response(r).json() + @forbid_offline def add_dataset_to_space(self, space_id: str, dataset_id: str, timeout=(30, 600), **kwargs): """Add member to a Space. @@ -183,6 +192,7 @@ def add_dataset_to_space(self, space_id: str, dataset_id: str, timeout=(30, 600) r = self.client.post(url, timeout=timeout, **kwargs) return return_http_response(r).json() + @forbid_offline def grant_privileges_to_space(self, space_id: str, privileges_json, timeout=(30, 600), **kwargs): """Updates a Space. From 4f02e2fd3cff7b6141e18604157e717a1d94626a Mon Sep 17 00:00:00 2001 From: Ya-Lan Yang <63822845+ylyangtw@users.noreply.github.com> Date: Thu, 1 Feb 2024 08:17:57 -0600 Subject: [PATCH 11/14] Fix pyincore github action (#483) * test * fix * fix --- environment.yml | 3 ++- requirements.min | 1 + requirements.txt | 2 +- setup.py | 1 + 4 files changed, 5 insertions(+), 2 deletions(-) diff --git a/environment.yml b/environment.yml index 6bf884c9b..6eb840f32 100644 --- a/environment.yml +++ b/environment.yml @@ -19,4 +19,5 @@ dependencies: - requests>=2.31.0 - rtree>=1.1.0 - scipy>=1.11.3 - - shapely>=2.0.2 \ No newline at end of file + - shapely>=2.0.2 + - openssl<=3.2.0 \ No newline at end of file diff --git a/requirements.min b/requirements.min index a5a7e0be9..24624a10b 100644 --- a/requirements.min +++ b/requirements.min @@ -16,3 +16,4 @@ requests>=2.31.0 rtree>=1.1.0 scipy>=1.11.3 shapely>=2.0.2 +openssl<=3.2.0 diff --git a/requirements.txt b/requirements.txt index 67803f1a7..1c769cd14 100644 --- a/requirements.txt +++ b/requirements.txt @@ -14,4 +14,4 @@ rasterio>=1.3.9 requests>=2.31.0 rtree>=1.1.0 scipy>=1.11.3 -shapely>=2.0.2 +shapely>=2.0.2 \ No newline at end of file diff --git a/setup.py b/setup.py index 7976b0cb7..fbf10497e 100644 --- a/setup.py +++ b/setup.py @@ -67,6 +67,7 @@ 'rtree>=1.1.0', 'scipy>=1.11.3', 'shapely>=2.0.2', + 'openssl<=3.2.0' ], extras_require={ From f2caefa1a728733f2c488ba9c699bc117c6c958d Mon Sep 17 00:00:00 2001 From: Chen Wang Date: Thu, 1 Feb 2024 10:56:53 -0600 Subject: [PATCH 12/14] release branch --- CHANGELOG.md | 11 ++++------- docs/source/conf.py | 4 ++-- pyincore/globals.py | 2 +- setup.py | 2 +- 4 files changed, 8 insertions(+), 11 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 32746a505..fd1640e20 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,23 +5,20 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](http://keepachangelog.com/) and this project adheres to [Semantic Versioning](http://semver.org/). 
-## [Unreleased]
+
+## [1.16.0] - 2024-02-07
+
 ### Added
 - Create GUID field in geopackage file [#478](https://github.com/IN-CORE/pyincore/issues/478)
 
 ### Changed
 - Tornado and Earthquake model [#474](https://github.com/IN-CORE/pyincore/issues/474)
+- Disable methods that interact with services when in offline mode [#458](https://github.com/IN-CORE/pyincore/issues/458)
 
 ### Fixed
 - Fix semantics search pytest by switching to an existing search term
 
-## [Unreleased]
-
-### Changed
-- Disable methods that interact with services when in offline mode [#458](https://github.com/IN-CORE/pyincore/issues/458)
-
-
 ## [1.15.1] - 2023-12-20
 
 ### Fixed
diff --git a/docs/source/conf.py b/docs/source/conf.py
index 565bb1010..b44bd34f8 100644
--- a/docs/source/conf.py
+++ b/docs/source/conf.py
@@ -33,9 +33,9 @@
 author = ''
 
 # The short X.Y version
-version = '1.15'
+version = '1.16'
 # The full version, including alpha/beta/rc tags
-release = '1.15.1'
+release = '1.16.0'
 
 
 # -- General configuration ---------------------------------------------------
diff --git a/pyincore/globals.py b/pyincore/globals.py
index cda468a99..c1e21126f 100644
--- a/pyincore/globals.py
+++ b/pyincore/globals.py
@@ -10,7 +10,7 @@
 import os
 import shutil
 
-PACKAGE_VERSION = "1.15.1"
+PACKAGE_VERSION = "1.16.0"
 
 INCORE_API_PROD_URL = "https://incore.ncsa.illinois.edu"
 INCORE_API_DEV_URL = "https://incore-dev.ncsa.illinois.edu"
diff --git a/setup.py b/setup.py
index fbf10497e..0e8b3373c 100644
--- a/setup.py
+++ b/setup.py
@@ -7,7 +7,7 @@
 from setuptools import setup, find_packages
 
 # version number of pyincore
-version = '1.15.1'
+version = '1.16.0'
 
 with open("README.rst", encoding="utf-8") as f:
     readme = f.read()

From b744cd6290fc79f2dba93035c6400096a892a7e4 Mon Sep 17 00:00:00 2001
From: Ya-Lan Yang
Date: Fri, 9 Feb 2024 09:22:37 +0800
Subject: [PATCH 13/14] Refactor tornadoepndamage

---
 pyincore/analyses/tornadoepndamage/tornadoepndamage.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pyincore/analyses/tornadoepndamage/tornadoepndamage.py b/pyincore/analyses/tornadoepndamage/tornadoepndamage.py
index 5a6a62505..57b1aabbe 100644
--- a/pyincore/analyses/tornadoepndamage/tornadoepndamage.py
+++ b/pyincore/analyses/tornadoepndamage/tornadoepndamage.py
@@ -97,7 +97,7 @@ def run(self):
         tornado_id = tornado.id
         tornado_metadata = self.hazardsvc.get_tornado_hazard_metadata(tornado_id)
 
-        self.load_remote_input_dataset("tornado", tornado_metadata["hazardDataset"][0].get("datasetId"))
+        self.load_remote_input_dataset("tornado", tornado_metadata["hazardDatasets"][0].get("datasetId"))
         tornado_dataset = self.get_input_dataset("tornado").get_inventory_reader()
 
         ds_results, damage_results = self.get_damage(network_dataset, tornado_dataset, tornado_id)

From b94c6e72d76773eb616fd83f083ec61d2e569912 Mon Sep 17 00:00:00 2001
From: Ya-Lan Yang
Date: Fri, 9 Feb 2024 10:36:46 +0800
Subject: [PATCH 14/14] Update CHANGELOG.md

---
 CHANGELOG.md | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index fd1640e20..5a146014a 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -5,6 +5,10 @@ All notable changes to this project will be documented in this file.
 
 The format is based on [Keep a Changelog](http://keepachangelog.com/)
 and this project adheres to [Semantic Versioning](http://semver.org/).
 
+## [Unreleased]
+
+### Fixed
+- Refactor tornadoepndamage to use hazardDatasets [#495](https://github.com/IN-CORE/pyincore/issues/495)
 
 ## [1.16.0] - 2024-02-07
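Finally, the `tornadoepndamage` fix in PATCH 13 is a plain key rename: tornado metadata returned by the Hazard service keys its datasets under `hazardDatasets` (plural), while the analysis was still reading the old `hazardDataset` key. A small defensive accessor like the sketch below makes that kind of rename fail with a clear message instead of a bare `KeyError`; the helper, sample metadata, and error behavior are illustrative, not pyincore code.

```python
# Illustrative helper; the sample metadata and error handling are not pyincore code.
def first_tornado_dataset_id(tornado_metadata):
    datasets = tornado_metadata.get("hazardDatasets", [])
    if not datasets:
        raise ValueError("Tornado metadata contains no entries under 'hazardDatasets'.")
    return datasets[0].get("datasetId")

meta = {"hazardDatasets": [{"datasetId": "sample-tornado-dataset-id"}]}
print(first_tornado_dataset_id(meta))  # sample-tornado-dataset-id
```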