Skip to content

Commit

Permalink
v2.12.0 (#1224)
Browse files Browse the repository at this point in the history
  • Loading branch information
pl-buiquang authored Jan 19, 2023
2 parents 16a71c1 + 0507f33 commit 7b8218d
Show file tree
Hide file tree
Showing 127 changed files with 5,902 additions and 2,028 deletions.
2 changes: 1 addition & 1 deletion antarest/__init__.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
__version__ = "2.11.1"
__version__ = "2.12.0"

from pathlib import Path

Expand Down
45 changes: 43 additions & 2 deletions antarest/core/exceptions.py
Original file line number Diff line number Diff line change
Expand Up @@ -64,10 +64,10 @@ def __init__(self, message: str) -> None:


class StudyTypeUnsupported(HTTPException):
def __init__(self, uuid: str, type: str) -> None:
def __init__(self, uuid: str, type_: str) -> None:
super().__init__(
HTTPStatus.UNPROCESSABLE_ENTITY,
f"Study {uuid} with type {type} not recognized",
f"Study {uuid} with type {type_} not recognized",
)


Expand Down Expand Up @@ -168,10 +168,51 @@ def __init__(self) -> None:
super().__init__(HTTPStatus.NOT_FOUND)


class LayerNotAllowedToBeDeleted(HTTPException):
    """HTTP 417 raised when a client tries to delete a protected layer.

    Used e.g. to protect the built-in layer ``"0"`` ("All") from deletion.
    """

    def __init__(self) -> None:
        super().__init__(HTTPStatus.EXPECTATION_FAILED)


class StudyOutputNotFoundError(Exception):
    """Raised when a study output (simulation result) cannot be located."""


class AreaNotFound(HTTPException):
    """HTTP 404 raised when one or more area identifiers cannot be resolved.

    The detail message is pluralized according to how many ids are given.
    """

    def __init__(self, *area_ids: str) -> None:
        n = len(area_ids)
        quoted = ", ".join(f"'{a}'" for a in area_ids)
        if n == 0:
            detail = "All areas are found"
        elif n == 1:
            detail = f"{n} area is not found: {quoted}"
        else:
            detail = f"{n} areas are not found: {quoted}"
        super().__init__(HTTPStatus.NOT_FOUND, detail)


class DistrictNotFound(HTTPException):
    """HTTP 404 raised when one or more district identifiers cannot be resolved.

    The detail message is pluralized according to how many ids are given.
    """

    def __init__(self, *district_ids: str) -> None:
        n = len(district_ids)
        quoted = ", ".join(f"'{d}'" for d in district_ids)
        if n == 0:
            detail = "All districts are found"
        elif n == 1:
            detail = f"{n} district is not found: {quoted}"
        else:
            detail = f"{n} districts are not found: {quoted}"
        super().__init__(HTTPStatus.NOT_FOUND, detail)


class DistrictAlreadyExist(HTTPException):
    """HTTP 409 raised when trying to create districts whose ids already exist.

    The detail message is pluralized according to how many ids are given.
    """

    # Fixes vs. original: add the missing `-> None` annotation (every sibling
    # exception class in this module has it) and correct the grammar of the
    # singular message ("district already exist" -> "exists").
    def __init__(self, *district_ids: str) -> None:
        count = len(district_ids)
        ids = ", ".join(f"'{a}'" for a in district_ids)
        msg = {
            0: "No district already exist",
            1: f"{count} district already exists: {ids}",
            2: f"{count} districts already exist: {ids}",
        }[min(count, 2)]
        super().__init__(HTTPStatus.CONFLICT, msg)


class BadEditInstructionException(HTTPException):
    """HTTP 400 raised when an edit instruction sent by a client is invalid.

    :param message: human-readable description forwarded as the error detail.
    """

    def __init__(self, message: str) -> None:
        super().__init__(HTTPStatus.BAD_REQUEST, message)
4 changes: 2 additions & 2 deletions antarest/core/roles.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,8 +9,8 @@ class RoleType(enum.Enum):
"""

ADMIN = 40
RUNNER = 30
WRITER = 20
WRITER = 30
RUNNER = 20
READER = 10

def is_higher_or_equals(self, other: "RoleType") -> bool:
Expand Down
16 changes: 16 additions & 0 deletions antarest/core/utils/dict.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
import copy
from typing import Dict, Any


def merge_deep(a: Dict[Any, Any], b: Dict[Any, Any]) -> Dict[Any, Any]:
    """Return a new dict holding the recursive merge of ``b`` into ``a``.

    Entries from ``b`` win; when both sides map the same key to a dict the
    merge recurses instead of overwriting. Neither argument is mutated, and
    every value placed in the result is a deep copy.
    """
    merged = copy.deepcopy(a)
    for key, incoming in b.items():
        existing = merged.get(key)
        if isinstance(existing, dict) and isinstance(incoming, dict):
            merged[key] = merge_deep(existing, incoming)
        else:
            merged[key] = copy.deepcopy(incoming)
    return merged
8 changes: 6 additions & 2 deletions antarest/launcher/adapters/slurm_launcher/slurm_launcher.py
Original file line number Diff line number Diff line change
Expand Up @@ -288,15 +288,19 @@ def _import_xpansion_result(self, job_id: str, xpansion_mode: str) -> None:
logger.info(
"Unzipping zipped output for xpansion result storage"
)
unzip(
unzipped_output_path = (
self.local_workspace
/ STUDIES_OUTPUT_DIR_NAME
/ job_id
/ "output"
/ output_path.name[:-4],
/ output_path.name[:-4]
)
unzip(
unzipped_output_path,
output_path,
remove_source_zip=True,
)
output_path = unzipped_output_path

if (output_path / "updated_links").exists():
logger.warning("Skipping updated links")
Expand Down
16 changes: 13 additions & 3 deletions antarest/launcher/extensions/adequacy_patch/extension.py
Original file line number Diff line number Diff line change
Expand Up @@ -83,11 +83,11 @@ def prepare_study_for_adq_patch(
]
original_area_enabled: Dict[str, bool] = {}
original_link_enabled: Dict[str, bool] = {}
study.tree.save(
True, ["settings", "generaldata", "general", "year-by-year"]
year_by_year_active = study.tree.get(
["settings", "generaldata", "general", "year-by-year"]
)
study.tree.save(
True, ["settings", "generaldata", "output", "synthesis"]
True, ["settings", "generaldata", "general", "year-by-year"]
)
for area_id, area in study.config.areas.items():
# areas
Expand Down Expand Up @@ -144,6 +144,16 @@ def prepare_study_for_adq_patch(
"w",
) as fh:
yaml.dump(original_link_enabled, fh)
if year_by_year_active:
with open(
study.config.study_path
/ "user"
/ "adequacypatch"
/ "year-by-year-active",
"w",
) as fh:
fh.write("True")

return original_area_enabled

def before_import_hook(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -44,6 +44,7 @@ remove_data <- function(path_prefix, data_type, data_list, include_id) {
unlink(file.path(paste0(c(item_data, "id-hourly.txt"), collapse="/")))
}
unlink(file.path(paste0(c(item_data, "details-hourly.txt"), collapse="/")))
unlink(file.path(paste0(c(item_data, "details-res-hourly.txt"), collapse="/")))
if (length(list.files(file.path(item_data))) == 0) {
unlink(file.path(item_data))
}
Expand All @@ -57,18 +58,21 @@ for (output in list.files("output")) {
if (file.exists(output_data)) {
# mc ind
mc_data <- paste(c(output_data, "mc-ind"), collapse="/")
if (file.exists(mc_data)) {
if (!file.exists("user/adequacypatch/year-by-year-active")) {
unlink(mc_data, recursive=TRUE)
}
else if (file.exists(mc_data)) {
for (mc_year in list.files(file.path(mc_data))) {
remove_data(c(mc_data, mc_year), "areas", areas, FALSE)
remove_data(c(mc_data, mc_year), "links", links, FALSE)
}
}
# mc all
mc_data <- paste(c(output_data, "mc-all"), collapse="/")
if (file.exists(mc_data)) {
remove_data(mc_data, "areas", areas, TRUE)
remove_data(mc_data, "links", links, TRUE)
}
# mc_data <- paste(c(output_data, "mc-all"), collapse="/")
# if (file.exists(mc_data)) {
# remove_data(mc_data, "areas", areas, TRUE)
# remove_data(mc_data, "links", links, TRUE)
# }
}
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -45,6 +45,7 @@ remove_data <- function(path_prefix, data_type, data_list, include_id) {
unlink(file.path(paste0(c(item_data, "id-hourly.txt"), collapse="/")))
}
unlink(file.path(paste0(c(item_data, "details-hourly.txt"), collapse="/")))
unlink(file.path(paste0(c(item_data, "details-res-hourly.txt"), collapse="/")))
if (length(list.files(file.path(item_data))) == 0) {
unlink(file.path(item_data))
}
Expand All @@ -58,19 +59,23 @@ for (output in list.files("output")) {
if (file.exists(output_data)) {
# mc ind
mc_data <- paste(c(output_data, "mc-ind"), collapse="/")
if (file.exists(mc_data)) {
if (!file.exists("user/adequacypatch/year-by-year-active")) {
unlink(mc_data, recursive=TRUE)
}
else if (file.exists(mc_data)) {
for (mc_year in list.files(file.path(mc_data))) {
remove_data(c(mc_data, mc_year), "areas", areas, FALSE)
remove_data(c(mc_data, mc_year), "links", links, FALSE)
}
}
# mc all
mc_data <- paste(c(output_data, "mc-all"), collapse="/")
if (file.exists(mc_data)) {
remove_data(mc_data, "areas", areas, TRUE)
remove_data(mc_data, "links", links, TRUE)
}
# mc_data <- paste(c(output_data, "mc-all"), collapse="/")
# if (file.exists(mc_data)) {
# remove_data(mc_data, "areas", areas, TRUE)
# remove_data(mc_data, "links", links, TRUE)
# }
}
}
}


23 changes: 20 additions & 3 deletions antarest/study/business/area_management.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@

from pydantic import BaseModel

from antarest.core.exceptions import LayerNotFound
from antarest.core.exceptions import LayerNotFound, LayerNotAllowedToBeDeleted
from antarest.study.business.utils import execute_or_add_commands
from antarest.study.model import (
RawStudy,
Expand Down Expand Up @@ -133,6 +133,8 @@ def get_all_areas(
def get_all_areas_ui_info(self, study: RawStudy) -> Dict[str, Any]:
storage_service = self.storage_service.get_storage(study)
file_study = storage_service.get_raw(study)
if len(file_study.config.areas.keys()) == 0:
return {}
areas_ui = file_study.tree.get(
["input", "areas", ",".join(file_study.config.areas.keys()), "ui"]
)
Expand All @@ -154,8 +156,17 @@ def get_layers(self, study: RawStudy) -> List[LayerInfoDTO]:
storage_service = self.storage_service.get_storage(study)
file_study = storage_service.get_raw(study)
layers = file_study.tree.get(["layers", "layers", "layers"])
areas_ui = file_study.tree.get(
["input", "areas", ",".join(file_study.config.areas.keys()), "ui"]
areas_ui = (
file_study.tree.get(
[
"input",
"areas",
",".join(file_study.config.areas.keys()),
"ui",
]
)
if len(file_study.config.areas)
else {}
)
if len(layers) == 0:
layers["0"] = "All"
Expand All @@ -168,6 +179,8 @@ def get_layers(self, study: RawStudy) -> List[LayerInfoDTO]:
for area in areas_ui
if str(layer)
in AreaManager._get_area_layers(areas_ui, area)
# the layer 0 always display all areas
or str(layer) == "0"
],
)
for layer in layers
Expand Down Expand Up @@ -294,7 +307,11 @@ def create_layer(self, study: RawStudy, layer_name: str) -> str:

def remove_layer(self, study: RawStudy, layer_id: str) -> None:
file_study = self.storage_service.get_storage(study).get_raw(study)
if layer_id == "0":
raise LayerNotAllowedToBeDeleted
layers = file_study.tree.get(["layers", "layers", "layers"])
# remove all areas from the layer since this info is stored in area data...
self.update_layer_areas(study, layer_id, [])
command = UpdateConfig(
target=f"layers/layers/layers",
data={
Expand Down
Loading

0 comments on commit 7b8218d

Please sign in to comment.