pip install sedna
Sedna is a simple interface to Orion's hyperparameter search algorithms: no storage, no setup required, and you keep full control over the optimization process. Supported algorithms:
- Random Search
- Grid Search
- Hyperband
- ASHA
- BOHB
- DEHB
- Population Based Training (PBT)
- Population Based Bandits (PB2)
- TPE
- Ax
- Evolution-ES
- MOFA
- Nevergrad
- HEBO
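The algorithm is chosen by name when building the optimizer. A minimal sketch; "hyperband" is the only name confirmed by the examples below, and the other entries in the list are assumed to be selected by analogous name strings:

from sedna.core.space import fidelity, get_space, hyperparameter, uniform
from sedna.core.hunt import Optimize

@hyperparameter(epoch=fidelity(2, 10, base=2), a=uniform(0, 1), b=uniform(1, 2))
def fun(epoch, a, b):
    return (a + b) / epoch

# swap the name string to pick a different algorithm from the list above
opt = Optimize("hyperband", get_space(fun), max_trials=10)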
Define the search space by decorating the objective function:

@hyperparameter(a=uniform(0, 1), b=uniform(1, 2))
def objective(a, b):
    return a + b
Or with type annotations:

def objective(a: uniform(0, 1), b: uniform(1, 2)) -> float:
    return a + b
Or as a dataclass:

from dataclasses import dataclass

@dataclass
class MySpace:
    a: uniform(0, 1) = 0
    b: uniform(1, 2) = 1
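Whichever form you use, the search space is retrieved with get_space. A minimal sketch; get_space on a decorated function is confirmed by the example below, while applying it to the annotated function or the dataclass is an assumption:

from sedna.core.space import get_space

space = get_space(objective)  # decorated or annotated function
space = get_space(MySpace)    # dataclass form: assumed to work the same way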
A complete example, optimizing a toy function with Hyperband:

from sedna.core.space import fidelity, get_space, hyperparameter, uniform
from sedna.core.hunt import Optimize

# epoch is the fidelity (budget) dimension used by Hyperband
@hyperparameter(epoch=fidelity(2, 10, base=2), a=uniform(0, 1), b=uniform(1, 2))
def fun(epoch, a, b):
    return (a + b) / epoch

def main():
    space = get_space(fun)
    opt = Optimize("hyperband", space, max_trials=10)

    while not opt.is_done():
        # ask the optimizer for new trials to evaluate
        samples = opt.suggest(2)
        for sample in samples:
            result = fun(**sample.params)
            # report the result back to the optimizer
            opt.observe(sample, result)
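A minimal entry point to run the example as a script:

if __name__ == "__main__":
    main()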
The same loop can be distributed on a cluster with submitit:

import submitit

from sedna.core.space import fidelity, get_space, hyperparameter, uniform
from sedna.core.hunt import Optimize

@hyperparameter(epoch=fidelity(2, 10, base=2), a=uniform(0, 1), b=uniform(1, 2))
def fun(epoch, a, b):
    return (a + b) / epoch

def main(njob):
    executor = submitit.AutoExecutor(folder="log_test")
    executor.update_parameters(timeout_min=1, slurm_partition="dev")

    space = get_space(fun)
    opt = Optimize("hyperband", space, max_trials=10)

    while not opt.is_done():
        samples = opt.suggest(njob)

        # submit one job per suggested trial
        futures = []
        for sample in samples:
            job = executor.submit(fun, **sample.params)
            futures.append((sample, job))

        # wait for each job and report its result
        for sample, job in futures:
            result = job.result()
            opt.observe(sample, result)
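And likewise for the distributed version; the number of parallel jobs here is arbitrary:

if __name__ == "__main__":
    main(njob=2)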