Merge pull request #232 from ndem0/v2
Refactoring for pre- and post-processing, new design

Co-authored-by: MMRROOO <mrowkamaad@gmail.com>
ndem0 and MMRROOO authored Feb 7, 2024
2 parents 677794b + 03e0344 commit a88a799
Showing 38 changed files with 1,565 additions and 269 deletions.
19 changes: 6 additions & 13 deletions ezyrb/__init__.py
@@ -1,23 +1,16 @@
"""EZyRB package"""

__all__ = [
'Database', 'Reduction', 'POD', 'Approximation', 'RBF', 'Linear', 'GPR',
'Database', 'Snapshot', 'Reduction', 'POD', 'Approximation', 'RBF', 'Linear', 'GPR',
'ANN', 'KNeighborsRegressor', 'RadiusNeighborsRegressor', 'AE',
'ReducedOrderModel', 'PODAE', 'RegularGrid'
]

from .meta import *
from .database import Database
from .reduction import Reduction
from .pod import POD
from .ae import AE
from .pod_ae import PODAE
from .approximation import Approximation
from .rbf import RBF
from .linear import Linear
from .regular_grid import RegularGrid
from .gpr import GPR
from .snapshot import Snapshot
from .parameter import Parameter
from .reducedordermodel import ReducedOrderModel
from .ann import ANN
from .kneighbors_regressor import KNeighborsRegressor
from .radius_neighbors_regressor import RadiusNeighborsRegressor
from .reduction import *
from .approximation import *
from .regular_grid import RegularGrid
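A quick smoke test of the reorganized import surface might look like the sketch below; it is based only on the updated __all__ above, and the exact set of package-level re-exports may differ.

# Smoke test of the reorganized public surface (sketch, names taken from __all__).
from ezyrb import (
    Database, Snapshot, POD, AE, PODAE, RBF, Linear, GPR, ANN,
    KNeighborsRegressor, RadiusNeighborsRegressor, ReducedOrderModel, RegularGrid,
)

for cls in (Database, Snapshot, POD, GPR, ReducedOrderModel):
    print(cls.__module__, cls.__name__)   # classes now re-exported at package level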
14 changes: 14 additions & 0 deletions ezyrb/approximation/__init__.py
@@ -0,0 +1,14 @@
"""EZyRB package"""

__all__ = [
'Approximation', 'RBF', 'Linear', 'GPR',
'ANN', 'KNeighborsRegressor', 'RadiusNeighborsRegressor'
]

from .approximation import Approximation
from .rbf import RBF
from .linear import Linear
from .gpr import GPR
from .ann import ANN
from .kneighbors_regressor import KNeighborsRegressor
from .radius_neighbors_regressor import RadiusNeighborsRegressor
File renamed without changes.
File renamed without changes.
32 changes: 16 additions & 16 deletions ezyrb/gpr.py → ezyrb/approximation/gpr.py
@@ -16,7 +16,14 @@ class GPR(Approximation):
arranged by row.
:cvar numpy.ndarray Y_sample: the array containing the output values,
arranged by row.
:cvar GPy.models.GPRegression model: the regression model.
:cvar sklearn.gaussian_process.GaussianProcessRegressor model: the
regression model.
:cvar sklearn.gaussian_process.kernels.Kernel kern: kernel object from
sklearn.
:cvar bool normalizer: whether to normalize `values` or not. Defaults to
True.
:cvar int optimization_restart: number of restarts for the optimization.
Defaults to 20.
:Example:
@@ -30,28 +37,21 @@
>>> print(np.allclose(y, y_pred))
"""
def __init__(self):
def __init__(self, kern=None, normalizer=True, optimization_restart=20):

self.X_sample = None
self.Y_sample = None
self.kern = kern
self.normalizer = normalizer # TODO: avoid normalizer inside GPR class
self.optimization_restart = optimization_restart
self.model = None

def fit(self,
points,
values,
kern=None,
normalizer=True,
optimization_restart=20):
def fit(self, points, values):
"""
Construct the regression given `points` and `values`.
:param array_like points: the coordinates of the points.
:param array_like values: the values in the points.
:param sklearn.gaussian_process.kernels.Kernel kern: kernel object from
sklearn.
:param bool normalizer: whether to normalize `values` or not.
Defaults to True.
:param int optimization_restart: number of restarts for the
optimization. Defaults to 20.
"""
self.X_sample = np.array(points)
self.Y_sample = np.array(values)
@@ -61,8 +61,8 @@ def fit(self,
self.Y_sample = self.Y_sample.reshape(-1, 1)

self.model = GaussianProcessRegressor(
kernel=kern, n_restarts_optimizer=optimization_restart,
normalize_y=normalizer)
kernel=self.kern, n_restarts_optimizer=self.optimization_restart,
normalize_y=self.normalizer)
self.model.fit(self.X_sample, self.Y_sample)

def predict(self, new_points, return_variance=False):
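With the kernel and optimizer options now set in the constructor instead of fit(), usage might look like the following sketch; the sklearn RBF kernel and the tolerance are assumptions for illustration, not part of the diff.

import numpy as np
from sklearn.gaussian_process.kernels import RBF as SklearnRBF
from ezyrb import GPR

x = np.random.uniform(-1, 1, size=(20, 1))
y = np.sin(4 * x)

gpr = GPR(kern=SklearnRBF(), normalizer=True, optimization_restart=10)
gpr.fit(x, y)                 # options are no longer passed to fit()
y_pred = gpr.predict(x)
print(np.allclose(y, y_pred, atol=1e-3))   # interpolation at the training points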
File renamed without changes.
5 changes: 4 additions & 1 deletion ezyrb/linear.py → ezyrb/approximation/linear.py
@@ -41,7 +41,10 @@ def fit(self, points, values):

if as_np_array.ndim == 1 or (as_np_array.ndim == 2
and as_np_array.shape[1] == 1):
self.interpolator = interp1d(as_np_array, values, axis=0)

self.interpolator = interp1d(as_np_array, values, axis=0,
bounds_error=False,
fill_value=self.fill_value)
else:
self.interpolator = LinearNDInterp(points,
values,
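The effect of the two added keywords can be sketched directly with scipy, using the same arguments Linear.fit now forwards to interp1d; fill_value=-1.0 below is only an illustrative value.

import numpy as np
from scipy.interpolate import interp1d

x = np.array([0.0, 1.0, 2.0])
y = np.array([0.0, 10.0, 20.0])

f = interp1d(x, y, axis=0, bounds_error=False, fill_value=-1.0)
print(f(0.5))   # 5.0  -> ordinary linear interpolation inside the range
print(f(5.0))   # -1.0 -> out-of-range queries return fill_value instead of raising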
File renamed without changes.
File renamed without changes.
File renamed without changes.
186 changes: 84 additions & 102 deletions ezyrb/database.py
@@ -2,6 +2,9 @@

import numpy as np

from .parameter import Parameter
from .snapshot import Snapshot

class Database():
"""
Database class
@@ -14,66 +17,35 @@ class Database():
None meaning no scaling.
:param array_like space: the input spatial data
"""
def __init__(self,
parameters=None,
snapshots=None,
scaler_parameters=None,
scaler_snapshots=None,
space=None):
self._parameters = None
self._snapshots = None
self._space = None
self.scaler_parameters = scaler_parameters
self.scaler_snapshots = scaler_snapshots

# if only parameters or snapshots are provided
if (parameters is None) ^ (snapshots is None):
raise RuntimeError(
'Parameters and Snapshots are not both provided')

if space is not None and snapshots is None:
raise RuntimeError(
'Snapshot data is not provided with Spatial data')

if parameters is not None and snapshots is not None:
if space is not None:
self.add(parameters, snapshots, space)
else:
self.add(parameters, snapshots)
def __init__(self, parameters=None, snapshots=None):
self._pairs = []

if parameters is None and snapshots is None:
return

@property
def parameters(self):
"""
The matrix containing the input parameters (by row).
:rtype: numpy.ndarray
"""
if self.scaler_parameters:
return self.scaler_parameters.fit_transform(self._parameters)
if len(parameters) != len(snapshots):
raise ValueError

return self._parameters
for param, snap in zip(parameters, snapshots):
self.add(Parameter(param), Snapshot(snap))

@property
def snapshots(self):
def parameters_matrix(self):
"""
The matrix containing the snapshots (by row).
The matrix containing the input parameters (by row).
:rtype: numpy.ndarray
"""
if self.scaler_snapshots:
return self.scaler_snapshots.fit_transform(self._snapshots)

return self._snapshots
return np.asarray([pair[0].values for pair in self._pairs])

@property
def space(self):
def snapshots_matrix(self):
"""
The matrix containing spatial information (by row).
The matrix containing the snapshots (by row).
:rtype: numpy.ndarray
"""
return self._space
return np.asarray([pair[1].flattened for pair in self._pairs])

def __getitem__(self, val):
"""
@@ -83,75 +55,85 @@ def __getitem__(self, val):
.. warning:: The new parameters and snapshots are a view of the
original Database.
"""
if isinstance(val, int):
if self._space is None:
return Database(np.reshape(self._parameters[val],
(1,len(self._parameters[val]))),
np.reshape(self._snapshots[val],
(1,len(self._snapshots[val]))),
self.scaler_parameters,
self.scaler_snapshots)

return Database(np.reshape(self._parameters[val],
(1,len(self._parameters[val]))),
np.reshape(self._snapshots[val],
(1,len(self._snapshots[val]))),
self.scaler_parameters,
self.scaler_snapshots,
np.reshape(self._space[val],
(1,len(self._space[val]))))

if self._space is None:
return Database(self._parameters[val],
self._snapshots[val],
self.scaler_parameters,
self.scaler_snapshots)

return Database(self._parameters[val],
self._snapshots[val],
self.scaler_parameters,
self.scaler_snapshots,
self._space[val])
if isinstance(val, np.ndarray):
view = Database()
for p, s in np.asarray(self._pairs)[val]:
view.add(p, s)
elif isinstance(val, (int, slice)):
view = Database()
view._pairs = self._pairs[val]
return view

def __len__(self):
"""
This method returns the number of snapshots.
:rtype: int
"""
return len(self._snapshots)
return len(self._pairs)

def __str__(self):
""" Print minimal info about the Database """
return str(self.parameters_matrix)

def add(self, parameters, snapshots, space=None):
def add(self, parameter, snapshot):
"""
Add (by row) new sets of snapshots and parameters to the original
database.
:param array_like parameters: the parameters to add.
:param array_like snapshots: the snapshots to add.
:param Parameter parameter: the parameter to add.
:param Snapshot snapshot: the snapshot to add.
"""
if len(parameters) != len(snapshots):
raise RuntimeError(
'Different number of parameters and snapshots.')

if self._space is not None and space is None:
raise RuntimeError('No Spatial Value given')

if (self._space is not None) or (space is not None):
if space.shape != snapshots.shape:
raise RuntimeError(
'shape of space and snapshots are different.')

if self._parameters is None and self._snapshots is None:
self._parameters = parameters
self._snapshots = snapshots
if self._space is None:
self._space = space
elif self._space is None:
self._parameters = np.vstack([self._parameters, parameters])
self._snapshots = np.vstack([self._snapshots, snapshots])
else:
self._parameters = np.vstack([self._parameters, parameters])
self._snapshots = np.vstack([self._snapshots, snapshots])
self._space = np.vstack([self._space, space])
if not isinstance(parameter, Parameter):
raise ValueError

if not isinstance(snapshot, Snapshot):
raise ValueError

self._pairs.append((parameter, snapshot))

return self


def split(self, chunks, seed=None):
"""
>>> db = Database(...)
>>> train, test = db.split([0.8, 0.2]) # ratio
>>> train, test = db.split([80, 20]) # n snapshots
"""
if all(isinstance(n, int) for n in chunks):
if sum(chunks) != len(self):
raise ValueError('chunk elements are inconsistent')

ids = [
j for j, chunk in enumerate(chunks)
for i in range(chunk)
]
np.random.shuffle(ids)


elif all(isinstance(n, float) for n in chunks):
if not np.isclose(sum(chunks), 1.):
raise ValueError('chunk elements are inconsistent')

cum_chunks = np.cumsum(chunks)
cum_chunks = np.insert(cum_chunks, 0, 0.0)
ids = np.ones(len(self)) * -1.
tmp = np.random.uniform(0, 1, size=len(self))
for i in range(len(cum_chunks)-1):
is_between = np.logical_and(
tmp >= cum_chunks[i], tmp < cum_chunks[i+1])
ids[is_between] = i

else:
raise ValueError

new_database = [Database() for _ in range(len(chunks))]
for i, chunk in enumerate(chunks):
chunk_ids = np.array(ids) == i
for p, s in np.asarray(self._pairs)[chunk_ids]:
new_database[i].add(p, s)

return new_database
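Putting the refactored pieces together, a minimal usage sketch of the new Database, based only on the constructor, the matrix properties and split() shown above, could be:

import numpy as np
from ezyrb import Database

params = np.random.uniform(size=(10, 2))    # one 1-D parameter vector per row
snaps = np.random.uniform(size=(10, 50))    # one snapshot per row

db = Database(params, snaps)                # rows are wrapped into Parameter/Snapshot pairs
print(len(db), db.parameters_matrix.shape, db.snapshots_matrix.shape)

train, test = db.split([8, 2])              # integer chunks give exact sizes
print(len(train), len(test))                # 8 2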
18 changes: 18 additions & 0 deletions ezyrb/parameter.py
@@ -0,0 +1,18 @@
""" Module for parameter object """
import numpy as np

class Parameter:

def __init__(self, values):
self.values = values

@property
def values(self):
""" Get the snapshot values. """
return self._values

@values.setter
def values(self, new_values):
if np.asarray(new_values).ndim != 1:
raise ValueError('only 1D arrays are usable as parameters.')
self._values = new_values
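The setter above rejects anything that is not one-dimensional; a quick sketch of that behaviour, assuming the package-level re-export of Parameter shown in ezyrb/__init__.py:

import numpy as np
from ezyrb import Parameter

p = Parameter(np.array([0.1, 0.2]))      # fine: a 1-D parameter vector
print(p.values)

try:
    Parameter(np.array([[0.1, 0.2]]))    # 2-D input is rejected by the setter
except ValueError as err:
    print(err)                           # explains that only 1-D arrays are accepted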
13 changes: 13 additions & 0 deletions ezyrb/plugin/__init__.py
@@ -0,0 +1,13 @@
""" Plugins submodule """

__all__ = [
'Plugin',
'DatabaseScaler',
'ShiftSnapshots',
'AutomaticShiftSnapshots',
]

from .scaler import DatabaseScaler
from .plugin import Plugin
from .shift import ShiftSnapshots
from .automatic_shift import AutomaticShiftSnapshots
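The plugin implementations are collapsed in this view, so only an import-level check of the names listed in __all__ can be sketched here.

# Import check for the new plugin submodule (names taken from __all__ above;
# the plugin APIs themselves are not shown in this diff).
from ezyrb.plugin import (
    Plugin,
    DatabaseScaler,
    ShiftSnapshots,
    AutomaticShiftSnapshots,
)

print([cls.__name__ for cls in (Plugin, DatabaseScaler, ShiftSnapshots, AutomaticShiftSnapshots)])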
(diff for the remaining changed files not shown)
