Commit ecc9a64 (v2.14.2)

laurent-laporte-pro authored and skamril committed Jun 13, 2023
2 parents abe216f + e699391
Showing 61 changed files with 2,557 additions and 1,503 deletions.

12 changes: 6 additions & 6 deletions .github/workflows/deploy.yml
@@ -3,15 +3,15 @@ on:
push:
branches:
- 'master'

- 'hotfix/**'

jobs:
binary:
runs-on: ${{ matrix.os }}
strategy:
max-parallel: 3
matrix:
os: [windows-latest, ubuntu-20.04]
os: [ windows-latest, ubuntu-20.04 ]

steps:
- name: Checkout github repo (+ download lfs dependencies)
@@ -59,17 +59,17 @@ jobs:
if: matrix.os == 'windows-latest'
run: |
git log -1 HEAD --format=%H > .\resources\commit_id
pyinstaller -F antarest\worker\archive_worker_service.py -n AntaresWebWorker --add-data ".\resources;.\resources"
pyinstaller -F antarest\worker\archive_worker_service.py -n AntaresWebWorker --add-data ".\resources;.\resources" --hidden-import 'pythonjsonlogger.jsonlogger'
- name: Generate Linux worker binary
if: matrix.os == 'ubuntu-20.04'
run: |
git log -1 HEAD --format=%H > .\resources\commit_id
pyinstaller -F antarest/worker/archive_worker_service.py -n AntaresWebWorker --add-data resources:resources
git log -1 HEAD --format=%H > ./resources/commit_id
pyinstaller -F antarest/worker/archive_worker_service.py -n AntaresWebWorker --add-data resources:resources --hidden-import 'pythonjsonlogger.jsonlogger'
- name: Packaging
run: bash ./package_antares_web.sh
working-directory: scripts
- name: install changelog requirements
run : npm install -g auto-changelog
run: npm install -g auto-changelog
- name: generate changelog file
run: |
auto-changelog -l false --hide-empty-releases
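A note on the new flags: PyInstaller only follows imports it can see statically, and a JSON formatter that is referenced purely by its dotted path in a logging configuration is resolved at runtime, so the module would otherwise be missing from the frozen worker binary. The Python sketch below is a hedged illustration of that pattern, not this repository's actual logging configuration.

    # Illustration only: the formatter class is resolved from a string at runtime,
    # so PyInstaller never sees an `import pythonjsonlogger` statement and needs
    # --hidden-import 'pythonjsonlogger.jsonlogger' to bundle the module.
    import logging
    import logging.config

    logging.config.dictConfig({
        "version": 1,
        "formatters": {
            "json": {"()": "pythonjsonlogger.jsonlogger.JsonFormatter"},
        },
        "handlers": {
            "console": {"class": "logging.StreamHandler", "formatter": "json"},
        },
        "root": {"level": "INFO", "handlers": ["console"]},
    })

    logging.getLogger(__name__).info("packaged worker started")
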
16 changes: 13 additions & 3 deletions AntaresWebLinux.spec
@@ -1,13 +1,23 @@
# -*- mode: python ; coding: utf-8 -*-

from pathlib import Path

block_cipher = None

antares_web_server_a = Analysis(['antarest/gui.py'],
# We need to analyze all alembic files to be sure the migration phase works fine
migrations_dir = Path('alembic/versions')
migration_files = [str(f) for f in migrations_dir.iterdir() if f.is_file() and f.suffix == '.py']

antares_web_server_a = Analysis(['antarest/gui.py', 'alembic/env.py'] + migration_files,
pathex=[],
binaries=[('./alembic.ini', './alembic.ini')],
datas=[('./resources', './resources'), ('./alembic', './alembic')],
hiddenimports=['cmath', 'antarest.dbmodel', 'plyer.platforms.linux', 'plyer.platforms.linux.notification'],
hiddenimports=[
'cmath',
'antarest.dbmodel',
'plyer.platforms.linux',
'plyer.platforms.linux.notification',
'pythonjsonlogger.jsonlogger',
],
hookspath=['extra-hooks'],
hooksconfig={},
runtime_hooks=[],
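The comment in the spec states the intent; the reason the migration scripts have to be fed to Analysis explicitly is that alembic discovers them by scanning the versions directory at runtime instead of importing them, so they never appear in PyInstaller's import graph. A hedged sketch of such a runtime upgrade call follows (whether AntaresWeb invokes alembic exactly this way is an assumption):

    # Sketch: alembic loads alembic/versions/*.py dynamically from script_location,
    # which is why the spec lists them as extra Analysis inputs.
    from alembic import command
    from alembic.config import Config

    cfg = Config("alembic.ini")   # bundled through binaries=[('./alembic.ini', './alembic.ini')]
    command.upgrade(cfg, "head")  # executes the version scripts found on disk at run time
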
16 changes: 13 additions & 3 deletions AntaresWebWin.spec
@@ -1,13 +1,23 @@
# -*- mode: python ; coding: utf-8 -*-

from pathlib import Path

block_cipher = None

antares_web_server_a = Analysis(['antarest/gui.py'],
# We need to analyze all alembic files to be sure the migration phase works fine
migrations_dir = Path('alembic/versions')
migration_files = [str(f) for f in migrations_dir.iterdir() if f.is_file() and f.suffix == '.py']

antares_web_server_a = Analysis(['antarest/gui.py', 'alembic/env.py'] + migration_files,
pathex=[],
binaries=[('./alembic.ini', './alembic.ini')],
datas=[('./resources', './resources'), ('./alembic', './alembic')],
hiddenimports=['cmath', 'antarest.dbmodel', 'plyer.platforms.win', 'plyer.platforms.win.notification'],
hiddenimports=[
'cmath',
'antarest.dbmodel',
'plyer.platforms.win',
'plyer.platforms.win.notification',
'pythonjsonlogger.jsonlogger',
],
hookspath=['extra-hooks'],
hooksconfig={},
runtime_hooks=[],
4 changes: 2 additions & 2 deletions antarest/__init__.py
@@ -7,9 +7,9 @@

# Standard project metadata

__version__ = "2.14.1"
__version__ = "2.14.2"
__author__ = "RTE, Antares Web Team"
__date__ = "2023-05-15"
__date__ = "2023-06-12"
# noinspection SpellCheckingInspection
__credits__ = "(c) Réseau de Transport de l’Électricité (RTE)"

6 changes: 3 additions & 3 deletions antarest/core/exceptions.py
@@ -183,8 +183,8 @@ def __init__(self, *area_ids: str) -> None:
ids = ", ".join(f"'{a}'" for a in area_ids)
msg = {
0: "Allocation data is found",
1: f"Allocation data for area {area_ids} is not found",
2: f"Allocation data for areas {area_ids} is not found",
1: f"Allocation data for area {ids} is not found",
2: f"Allocation data for areas {ids} is not found",
}[min(count, 2)]
super().__init__(HTTPStatus.NOT_FOUND, msg)

@@ -227,7 +227,7 @@ def __init__(self, *district_ids: str):

class BadEditInstructionException(HTTPException):
def __init__(self, message: str) -> None:
super().__init__(HTTPStatus.BAD_REQUEST, message)
super().__init__(HTTPStatus.UNPROCESSABLE_ENTITY, message)


class CannotScanInternalWorkspace(HTTPException):
29 changes: 25 additions & 4 deletions antarest/core/interfaces/eventbus.py
@@ -52,19 +52,40 @@ class Event(BaseModel):
channel: str = ""


EventListener = Callable[[Event], Awaitable[None]]


class IEventBus(ABC):
"""
Interface for the event bus.
The event bus provides 2 communication mechanisms:
- a broadcasting mechanism, where events are pushed to all
registered listeners
- a message queue mechanism: a message can be pushed to
a specified queue. Only consumers registered for that
queue will be called to handle those messages.
"""

@abstractmethod
def push(self, event: Event) -> None:
"""
Pushes an event to registered listeners.
"""
pass

@abstractmethod
def queue(self, event: Event, queue: str) -> None:
"""
Queues an event at the end of the specified queue.
"""
pass

@abstractmethod
def add_queue_consumer(
self, listener: Callable[[Event], Awaitable[None]], queue: str
) -> str:
def add_queue_consumer(self, listener: EventListener, queue: str) -> str:
"""
Adds a consumer for events on the specified queue.
"""
pass

@abstractmethod
@@ -74,7 +95,7 @@ def remove_queue_consumer(self, listener_id: str) -> None:
@abstractmethod
def add_listener(
self,
listener: Callable[[Event], Awaitable[None]],
listener: EventListener,
type_filter: Optional[List[EventType]] = None,
) -> str:
"""
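For context, a hedged usage sketch of the interface above: an async callable matching the new EventListener alias is registered as a queue consumer, so only messages pushed to that queue reach it, while broadcast listeners registered with add_listener() are unaffected. The queue name and the surrounding wiring are assumptions, not code from this commit.

    from antarest.core.interfaces.eventbus import Event, IEventBus

    async def on_worker_message(event: Event) -> None:
        # Invoked once per message consumed from the queue.
        print(f"handling event on channel {event.channel!r}")

    def register_consumer(bus: IEventBus) -> str:
        # "worker_queue" is an illustrative name; the returned id can later be
        # passed to remove_queue_consumer().
        return bus.add_queue_consumer(on_worker_message, "worker_queue")
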
26 changes: 19 additions & 7 deletions antarest/core/interfaces/service.py
@@ -3,16 +3,28 @@


class IService(ABC):
def __init__(self) -> None:
self.thread = threading.Thread(
target=self._loop,
name=self.__class__.__name__,
daemon=True,
)
"""
A base class for long running processing services.
Processing may be started either in a background thread or in current thread.
Implementations must implement the `_loop` method.
"""

def start(self, threaded: bool = True) -> None:
"""
Starts the processing loop.
Args:
threaded: if True, the loop is started in a daemon thread,
else in this thread
"""
if threaded:
self.thread.start()
thread = threading.Thread(
target=self._loop,
name=self.__class__.__name__,
daemon=True,
)
thread.start()
else:
self._loop()

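A minimal sketch of how the reworked base class is meant to be used, assuming only what the diff shows (subclasses supply `_loop`, callers pick a threaded or blocking start); the service below is illustrative, not part of the code base.

    import time

    from antarest.core.interfaces.service import IService

    class HeartbeatService(IService):
        def _loop(self) -> None:
            # Runs forever; started as a daemon thread it will not block shutdown.
            while True:
                print("still alive")
                time.sleep(60)

    service = HeartbeatService()
    service.start()                  # background daemon thread (threaded=True by default)
    # service.start(threaded=False)  # or run the loop in the current thread
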
@@ -124,6 +124,7 @@ def stop_reading_output() -> bool:
stderr=subprocess.STDOUT,
universal_newlines=True,
encoding="utf-8",
shell=True,
)
self.job_id_to_study_id[str(uuid)] = (
study_uuid,
121 changes: 53 additions & 68 deletions antarest/matrixstore/business/matrix_editor.py
@@ -1,10 +1,7 @@
import operator
from enum import Enum
from typing import Optional, Any, List, cast, Tuple
from typing import Any, List, Optional, Tuple, Dict

import numpy as np
import pandas as pd # type:ignore
from pydantic import BaseModel, validator
from pydantic import BaseModel, validator, Field, root_validator


class MatrixSlice(BaseModel):
@@ -17,87 +14,75 @@ class MatrixSlice(BaseModel):

@validator("row_to", always=True)
def set_row_to(cls, v: Optional[int], values: Any) -> int:
if v is None:
return cast(int, values["row_from"])
return v
return values["row_from"] if v is None else v # type: ignore

@validator("column_to", always=True)
def set_column_to(cls, v: Optional[int], values: Any) -> int:
if v is None:
return cast(int, values["column_from"])
return v
return values["column_from"] if v is None else v # type: ignore


class Operator(Enum):
ADD = "+"
SUB = "-"
MUL = "*"
DIV = "/"
ABS = "ABS"
EQ = "="
OPERATIONS = {
"+": operator.add,
"-": operator.sub,
"*": operator.mul,
"/": operator.truediv,
"ABS": lambda x, y: abs(x),
"=": lambda x, y: y,
}


class Operation(BaseModel):
operation: Operator
operation: str = Field(regex=r"[=/*+-]|ABS")
value: float

# noinspection SpellCheckingInspection
def compute(self, x: Any, use_coords: bool = False) -> Any:
def set_series(x): # type:ignore
if use_coords:
return OPERATIONS[self.operation](x, self.value) # type: ignore
if self.operation == "=":
x.loc[~x.isnull()] = self.value
return x
return OPERATIONS[self.operation](x, self.value) # type: ignore

operation_dict = {
Operator.ADD: operator.add,
Operator.SUB: operator.sub,
Operator.MUL: operator.mul,
Operator.DIV: operator.truediv,
Operator.ABS: lambda x, y: abs(x),
Operator.EQ: lambda x, y: set_series(x), # type:ignore
}
if not use_coords:
operation_dict[Operator.EQ] = lambda x, y: set_series(
x
) # type:ignore
else:
operation_dict[Operator.EQ] = lambda x, y: y

return operation_dict[self.operation](x, self.value) # type: ignore
def __str__(self) -> str:
return f"['{self.operation}' {self.value}]"


class MatrixEditInstructionDTO(BaseModel):
slices: Optional[List[MatrixSlice]] = None
coordinates: Optional[List[Tuple[int, int]]] = None
operation: Operation


class MatrixEditor:
@staticmethod
def update_matrix_content_with_slices(
matrix_data: pd.DataFrame,
slices: List[MatrixSlice],
operation: Operation,
) -> pd.DataFrame:
mask = pd.DataFrame(np.zeros(matrix_data.shape), dtype=bool)

for matrix_slice in slices:
mask.loc[
matrix_slice.row_from : matrix_slice.row_to,
matrix_slice.column_from : matrix_slice.column_to,
] = True

new_matrix_data = matrix_data.where(mask).apply(operation.compute)
new_matrix_data[new_matrix_data.isnull()] = matrix_data

return new_matrix_data.astype(matrix_data.dtypes)

@staticmethod
def update_matrix_content_with_coordinates(
df: pd.DataFrame,
coordinates: List[Tuple[int, int]],
operation: Operation,
) -> pd.DataFrame:
for row, column in coordinates:
df.iat[row, column] = operation.compute(
df.iat[row, column], use_coords=True
)
return df.astype(df.dtypes)
@root_validator(pre=True)
def check_slice_coordinates(cls, values: Dict[str, Any]) -> Dict[str, Any]:
"""
Validates the 'slices' and 'coordinates' fields.
Args:
values: The input values for validation.
Returns:
The validated values.
Raises:
ValueError: If both 'slices' and 'coordinates' are `None`.
ValueError: If 'slices' and 'coordinates' are both defined.
"""
slices = values.get("slices")
coordinates = values.get("coordinates")
# fmt: off
if slices is None and coordinates is None:
raise ValueError("At least 'slices' or 'coordinates' must be defined.")
if slices is not None and coordinates is not None:
raise ValueError("Only 'slices' or 'coordinates' could be defined, but not both.")
# fmt: on
return values

def __str__(self) -> str:
# fmt: off
if self.slices:
return f"slices={self.slices}, operation={self.operation}"
elif self.coordinates:
return f"coordinates={self.coordinates}, operation={self.operation}"
# fmt: on
raise NotImplementedError
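
To make the new root validator concrete, here is a hedged sketch of constructing the DTO (field values are illustrative): exactly one of 'slices' or 'coordinates' must be provided, otherwise pydantic surfaces the ValueError raised in check_slice_coordinates as a validation error.

    from antarest.matrixstore.business.matrix_editor import (
        MatrixEditInstructionDTO,
        Operation,
    )

    # Valid: only 'coordinates' is given, so the root validator passes.
    instruction = MatrixEditInstructionDTO(
        coordinates=[(0, 0), (2, 3)],
        operation=Operation(operation="*", value=2.0),
    )
    print(instruction)  # coordinates=[(0, 0), (2, 3)], operation=['*' 2.0]

    # Invalid: neither 'slices' nor 'coordinates' is given.
    try:
        MatrixEditInstructionDTO(operation=Operation(operation="+", value=1.0))
    except ValueError as exc:
        # pydantic wraps: At least 'slices' or 'coordinates' must be defined.
        print(exc)
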
2 changes: 1 addition & 1 deletion antarest/matrixstore/main.py
@@ -40,7 +40,7 @@ def build_matrix_service(
"""
if service is None:
repo = MatrixRepository()
content = MatrixContentRepository(config)
content = MatrixContentRepository(config.storage.matrixstore)
dataset_repo = MatrixDataSetRepository()

service = MatrixService(