Commit

WIP update to functional programming for optimizers
mpvanderschelling committed Nov 11, 2024
1 parent 163f5c6 commit 8375257
Showing 9 changed files with 118 additions and 118 deletions.
12 changes: 7 additions & 5 deletions src/f3dasm/_src/experimentdata/experimentdata.py
@@ -1367,7 +1367,7 @@ def _run_cluster_parallel(
     # Optimization
     # =========================================================================

-    def optimize(self, optimizer: Optimizer | str,
+    def optimize(self, optimizer: Optimizer | str | Callable,
                  data_generator: DataGenerator | str,
                  iterations: int,
                  kwargs: Optional[Dict[str, Any]] = None,
@@ -1382,7 +1382,7 @@ def optimize(self, optimizer: Optimizer | str,
         Parameters
         ----------
-        optimizer : Optimizer | str
+        optimizer : Optimizer | str | Callable
             Optimizer object
         data_generator : DataGenerator | str
             DataGenerator object
@@ -1441,7 +1441,9 @@ def optimize(self, optimizer: Optimizer | str,
         # Create the optimizer object if a string reference is passed
         if isinstance(_optimizer, str):
             _optimizer: Optimizer = _optimizer_factory(
-                _optimizer, self.domain, hyperparameters)
+                optimizer=_optimizer, domain=self.domain,
+                data_generator=data_generator,
+                hyperparameters=hyperparameters)

         # Create the sampler object if a string reference is passed
         if isinstance(sampler, str):
@@ -1560,12 +1562,12 @@ def _iterate(self, optimizer: Optimizer, data_generator: DataGenerator,

         optimizer._check_number_of_datapoints()

-        optimizer._construct_model(data_generator)
+        optimizer.init()

         for _ in range(number_of_updates(
                 iterations,
                 population=optimizer._population)):
-            new_samples = optimizer.update_step(data_generator)
+            new_samples = optimizer.update_step()

             # If new_samples is a tuple of input_data and output_data
             if isinstance(new_samples, tuple):
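The net effect of these changes: optimize() also accepts a Callable, the data generator is passed down to the optimizer factory, and _iterate() drives the optimizer through the new init()/update_step() pair instead of _construct_model(data_generator)/update_step(data_generator). A minimal sketch of the updated call site; the ExperimentData instance, the import path, and the 'ackley' data generator name are assumptions for illustration:

from f3dasm.optimization import random_search

# experiment_data: an existing ExperimentData instance with initial samples
experiment_data.optimize(
    optimizer=random_search(seed=42),  # Callable-based spec, new in this commit
    data_generator='ackley',           # resolved to a DataGenerator internally
    iterations=100,
)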
22 changes: 11 additions & 11 deletions src/f3dasm/_src/optimization/__init__.py
@@ -5,12 +5,12 @@
 # =============================================================================

 # Standard
-from typing import List
+from typing import Callable, List

 # Local
-from .numpy_implementations import RandomSearch
+from .numpy_implementations import random_search
 from .optimizer import Optimizer
-from .scipy_implementations import CG, LBFGSB, NelderMead
+from .scipy_implementations import cg, lbfgsb, nelder_mead

 # Authorship & Credits
 # =============================================================================
@@ -22,21 +22,21 @@
 # =============================================================================

 # List of available optimizers
-_OPTIMIZERS: List[Optimizer] = [RandomSearch, CG, LBFGSB, NelderMead]
+_OPTIMIZERS: List[Callable] = [
+    cg, lbfgsb, nelder_mead, random_search]


 __all__ = [
-    'CG',
-    'LBFGSB',
-    'NelderMead',
-    'Optimizer',
-    'RandomSearch',
     '_OPTIMIZERS',
     'find_optimizer',
+    'random_search',
+    'cg',
+    'lbfgsb',
+    'nelder_mead',
+    'Optimizer',
 ]


-def find_optimizer(query: str) -> Optimizer:
+def find_optimizer(query: str) -> Callable:
     """Find a optimizer from the f3dasm.optimizer submodule

     Parameters
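Since the registry now stores factory functions rather than Optimizer classes, find_optimizer yields a callable whose result still needs the runtime context. A sketch of the intended flow (the matching logic inside find_optimizer is unchanged in this excerpt):

from f3dasm._src.optimization import find_optimizer

factory = find_optimizer('nelder_mead')  # returns the nelder_mead function
spec = factory()                         # captures hyperparameters only;
                                         # domain/data generator come via spec.init()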
13 changes: 8 additions & 5 deletions src/f3dasm/_src/optimization/adapters/scipy_implementations.py
@@ -28,11 +28,14 @@


 class _SciPyOptimizer(Optimizer):
+    require_gradients: bool = False
     type: str = 'scipy'

-    def __init__(self, domain: Domain, method: str, **hyperparameters):
+    def __init__(self, domain: Domain, data_generator: DataGenerator,
+                 algorithm: str, **hyperparameters):
         self.domain = domain
-        self.method = method
+        self.data_generator = data_generator
+        self.algorithm = algorithm
         self.options = {**hyperparameters}

     def _callback(self, xk: np.ndarray, *args, **kwargs) -> None:
@@ -63,14 +66,14 @@ def fun(x):
             _, y = sample.to_numpy()
             return float(y)

-        self.options['maxiter'] = iterations
-
         if not hasattr(data_generator, 'dfdx'):
             data_generator.dfdx = None

+        self.options['maxiter'] = iterations
+
         minimize(
             fun=fun,
-            method=self.method,
+            method=self.algorithm,
             jac=data_generator.dfdx,
             x0=self.data.get_n_best_output(1).to_numpy()[0].ravel(),
             callback=self._callback,
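The lowercase cg, lbfgsb and nelder_mead factories imported by __init__.py are not shown on this page; by analogy with random_search (next file), each presumably wraps _SciPyOptimizer in an OptimizerTuple and names the SciPy method to use. A sketch under that assumption, not the committed code:

def cg(**hyperparameters) -> OptimizerTuple:
    # 'CG' ends up in scipy.optimize.minimize(method=...) via self.algorithm
    return OptimizerTuple(
        base_class=_SciPyOptimizer,
        algorithm='CG',
        hyperparameters=hyperparameters,
    )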
21 changes: 10 additions & 11 deletions src/f3dasm/_src/optimization/numpy_implementations.py
@@ -6,15 +6,14 @@
 # =============================================================================

 # Standard
-from typing import List, Optional, Tuple
+from typing import Optional, Tuple

 # Third-party core
 import numpy as np

 # Locals
-from ..datageneration.datagenerator import DataGenerator
 from ..design.domain import Domain
-from .optimizer import Optimizer
+from .optimizer import Optimizer, OptimizerTuple

 # Authorship & Credits
 # =============================================================================
@@ -33,14 +32,9 @@ class RandomSearch(Optimizer):
     def __init__(self, domain: Domain, seed: Optional[int] = None, **kwargs):
         self.domain = domain
         self.seed = seed
-        self._set_algorithm()
-
-    def _set_algorithm(self):
         self.algorithm = np.random.default_rng(self.seed)

-    def update_step(
-        self, data_generator: DataGenerator
-    ) -> Tuple[np.ndarray, np.ndarray]:
+    def update_step(self) -> Tuple[np.ndarray, np.ndarray]:
         x_new = np.atleast_2d(
             [
                 self.algorithm.uniform(
@@ -53,5 +47,10 @@ def update_step(self) -> Tuple[np.ndarray, np.ndarray]:
         # return the data
         return x_new, None

-    def _get_info(self) -> List[str]:
-        return ['Fast', 'Single-Solution']
+
+def random_search(seed: Optional[int] = None, **kwargs) -> OptimizerTuple:
+    return OptimizerTuple(
+        base_class=RandomSearch,
+        algorithm=None,
+        hyperparameters={'seed': seed}
+    )
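random_search no longer instantiates RandomSearch; it returns an OptimizerTuple so construction can be deferred until the domain and data generator are known. A minimal usage sketch, assuming domain and data_generator objects are already in scope:

spec = random_search(seed=42)           # no RandomSearch object built yet
optimizer = spec.init(domain=domain,    # deferred construction with runtime context
                      data_generator=data_generator)
x_new, y_new = optimizer.update_step()  # proposes new samples; y_new is None here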
44 changes: 19 additions & 25 deletions src/f3dasm/_src/optimization/optimizer.py
@@ -8,14 +8,16 @@
 from __future__ import annotations

 # Standard
-from typing import ClassVar, Iterable, List, Protocol, Tuple
+from typing import (Callable, ClassVar, Iterable, List, NamedTuple, Protocol,
+                    Tuple, Type)

 # Third-party core
 import numpy as np
 import pandas as pd

 # Locals
 from ..datageneration.datagenerator import DataGenerator
+from ..design.domain import Domain

 # Authorship & Credits
 # =============================================================================
@@ -120,15 +122,10 @@ def _population(self) -> int:
     # Public Methods
     # =========================================================================

-    def update_step(self, data_generator: DataGenerator) -> ExperimentData:
+    def update_step(self) -> ExperimentData:
         """Update step of the optimizer. Needs to be implemented
         by the child class

-        Parameters
-        ----------
-        data_generator : DataGenerator
-            data generator object to calculate the objective value
-
         Returns
         -------
         ExperimentData
@@ -158,23 +155,6 @@ def _set_algorithm(self):
         reset the algorithm to its initial state."""
         ...

-    def _construct_model(self, data_generator: DataGenerator):
-        """
-        Method that is called before the optimization starts. This method can
-        be used to construct a model based on the available data or a specific
-        data generator.
-
-        Parameters
-        ----------
-        data_generator : DataGenerator
-            DataGenerator object
-
-        Note
-        ----
-        When this method is not implemented, the method will do nothing.
-        """
-        ...
-
     def _check_number_of_datapoints(self):
         """
         Check if the number of datapoints is sufficient for the
@@ -212,7 +192,7 @@ def _reset(self, data: ExperimentData):
         - The algorithm is set to its initial state (self._set_algorithm)
         """
         self._set_data(data)
-        self._set_algorithm()
+        self.init()

     def _set_data(self, data: ExperimentData):
         """Set the data attribute to the given data
@@ -233,3 +213,17 @@ def _get_info(self) -> List[str]:
             List of characteristics of the optimizer
         """
         return []
+
+    def init(self):
+        ...
+
+
+class OptimizerTuple(NamedTuple):
+    base_class: Type[Optimizer]
+    algorithm: Callable
+    hyperparameters: dict
+
+    def init(self, domain: Domain, data_generator: DataGenerator) -> Optimizer:
+        return self.base_class(
+            domain=domain, data_generator=data_generator,
+            algorithm=self.algorithm, **self.hyperparameters)
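OptimizerTuple separates what to run (base class, backend algorithm, hyperparameters) from where to run it (domain, data generator), which is what lets plain functions such as random_search stand in for Optimizer subclasses. A user-defined optimizer could follow the same pattern; MyOptimizer and my_optimizer here are hypothetical:

class MyOptimizer(Optimizer):
    def __init__(self, domain: Domain, data_generator: DataGenerator,
                 algorithm=None, step_size: float = 0.1, **kwargs):
        self.domain = domain
        self.data_generator = data_generator
        self.algorithm = algorithm
        self.step_size = step_size


def my_optimizer(step_size: float = 0.1) -> OptimizerTuple:
    return OptimizerTuple(base_class=MyOptimizer, algorithm=None,
                          hyperparameters={'step_size': step_size})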
24 changes: 18 additions & 6 deletions src/f3dasm/_src/optimization/optimizer_factory.py
@@ -8,6 +8,7 @@
 from typing import Any, Dict, Optional

 # Local
+from ..datageneration import DataGenerator
 from ..design.domain import Domain
 from . import _OPTIMIZERS
 from .optimizer import Optimizer
@@ -38,7 +39,7 @@


 def _optimizer_factory(
-        optimizer: str, domain: Domain,
+        optimizer: str, domain: Domain, data_generator: DataGenerator,
         hyperparameters: Optional[Dict[str, Any]] = None) -> Optimizer:
     """Factory function for optimizers
@@ -49,6 +50,8 @@ def _optimizer_factory(
         Name of the optimizer to use
     domain : Domain
         Domain of the design space
+    data_generator : DataGenerator
+        Data generator instance
     hyperparameters : dict, optional
         Hyperparameters for the optimizer
@@ -68,12 +71,21 @@ def _optimizer_factory(
     if hyperparameters is None:
         hyperparameters = {}

-    filtered_name = optimizer.lower().replace(
-        ' ', '').replace('-', '').replace('_', '')
+    if isinstance(optimizer, Optimizer):
+        return optimizer

-    if filtered_name in OPTIMIZER_MAPPING:
-        return OPTIMIZER_MAPPING[filtered_name](
-            domain=domain, **hyperparameters)
+    elif isinstance(optimizer, str):
+
+        filtered_name = optimizer.lower().replace(
+            ' ', '').replace('-', '').replace('_', '')
+
+        if filtered_name in OPTIMIZER_MAPPING:
+            return OPTIMIZER_MAPPING[filtered_name](**hyperparameters).init(
+                domain=domain, data_generator=data_generator)
+
+    # check if optimizer is a function
+    elif callable(optimizer):
+        return optimizer.init(domain=domain, data_generator=data_generator)

     else:
         raise KeyError(f"Unknown optimizer: {optimizer}")
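The factory now dispatches on the type of optimizer: Optimizer instances pass through untouched, strings are normalized (lowercased, with spaces, hyphens and underscores stripped) before the mapping lookup, and callables are finalized through their init() with the runtime context. A sketch of the string path; the domain and data_generator objects, and the presence of 'lbfgsb' in OPTIMIZER_MAPPING, are assumptions:

optimizer = _optimizer_factory(
    optimizer='L-BFGS-B',           # normalized to 'lbfgsb' before the lookup
    domain=domain,
    data_generator=data_generator,  # newly threaded through to the optimizer
)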
