Skip to content

Commit

Permalink
Refactor, reduce tolerance
Browse files Browse the repository at this point in the history
  • Loading branch information
dweindl committed Sep 30, 2024
1 parent 221191d commit 36c2038
Show file tree
Hide file tree
Showing 2 changed files with 47 additions and 31 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -374,7 +374,7 @@ int SuiteSparse_divcomplex
// be done via the SUITESPARSE_TIME macro, defined below:
#define SUITESPARSE_TIMER_ENABLED
#define SUITESPARSE_HAVE_CLOCK_GETTIME
#define SUITESPARSE_CONFIG_TIMER clock_gettime
#define SUITESPARSE_CONFIG_TIMER omp_get_wtime
#if defined ( SUITESPARSE_TIMER_ENABLED )
#if defined ( _OPENMP )
// Avoid indirection through the library if the compilation unit
Expand Down
76 changes: 46 additions & 30 deletions tests/benchmark-models/test_petab_benchmark.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,11 +9,8 @@
import pytest
from amici.petab.petab_import import import_petab_problem
import benchmark_models_petab


# Absolute and relative tolerances for finite difference gradient checks.
ATOL: float = 1e-3
RTOL: float = 1e-2
from collections import defaultdict
from dataclasses import dataclass, field

repo_root = Path(__file__).parent.parent.parent

Expand Down Expand Up @@ -42,9 +39,45 @@
debug_path.mkdir(exist_ok=True, parents=True)


@dataclass
class GradientCheckSettings:
    """Per-model tolerances and step sizes for gradient checks.

    Instances are created on demand via ``defaultdict(GradientCheckSettings)``,
    so the defaults here apply to any model without explicit overrides.
    """

    # Absolute and relative tolerances for simulation
    atol_sim: float = 1e-12
    rtol_sim: float = 1e-12
    # Absolute and relative tolerances for finite difference gradient checks.
    atol_check: float = 1e-4
    rtol_check: float = 1e-2
    # Step sizes for finite difference gradient checks.
    # NOTE: This must be a proper dataclass field with a per-instance default
    # (default_factory), not a bare class attribute: a class-level list would
    # be shared across all instances, so a per-model
    # ``settings[model].step_sizes.insert(...)`` would silently change the
    # step sizes for every model.
    step_sizes: list[float] = field(
        default_factory=lambda: [
            1e-1,
            5e-2,
            1e-2,
            1e-3,
            1e-4,
            1e-5,
        ]
    )


# Per-model gradient-check settings; models not listed here get the
# defaults through the defaultdict factory.
settings = defaultdict(GradientCheckSettings)
for _looser_tol_model in ("Smith_BMCSystBiol2013", "Oliveira_NatCommun2021"):
    settings[_looser_tol_model] = GradientCheckSettings(
        atol_sim=1e-10,
        rtol_sim=1e-10,
    )
settings["Okuonghae_ChaosSolitonsFractals2020"] = GradientCheckSettings(
    atol_sim=1e-14,
    rtol_sim=1e-14,
)
# This model additionally needs a larger finite-difference step size
# prepended to the candidates.
settings["Okuonghae_ChaosSolitonsFractals2020"].step_sizes.insert(0, 0.2)


# until fiddy is updated
@pytest.mark.filterwarnings(
"ignore:Importing amici.petab_objective is deprecated.:DeprecationWarning"
"ignore:Importing amici.petab_objective is deprecated.:DeprecationWarning",
r"ignore:.*petab\.v1.*instead.*:DeprecationWarning",
)
@pytest.mark.filterwarnings("ignore:divide by zero encountered in log10")
@pytest.mark.parametrize("scale", (True, False))
Expand Down Expand Up @@ -78,24 +111,16 @@ def test_benchmark_gradient(model, scale):
petab_problem.x_free_ids
]
parameter_ids = list(parameter_df_free.index)
cur_settings = settings[model]

# Setup AMICI objects.
amici_model = import_petab_problem(
petab_problem,
model_output_dir=benchmark_outdir / model,
)
amici_solver = amici_model.getSolver()
amici_solver.setAbsoluteTolerance(1e-12)
amici_solver.setRelativeTolerance(1e-12)
if model in (
"Smith_BMCSystBiol2013",
"Oliveira_NatCommun2021",
):
amici_solver.setAbsoluteTolerance(1e-10)
amici_solver.setRelativeTolerance(1e-10)
elif model in ("Okuonghae_ChaosSolitonsFractals2020",):
amici_solver.setAbsoluteTolerance(1e-14)
amici_solver.setRelativeTolerance(1e-14)
amici_solver.setAbsoluteTolerance(cur_settings.atol_sim)
amici_solver.setRelativeTolerance(cur_settings.rtol_sim)
amici_solver.setMaxSteps(int(1e5))

if model in ("Brannmark_JBC2010",):
Expand Down Expand Up @@ -132,21 +157,10 @@ def test_benchmark_gradient(model, scale):

expected_derivative = amici_derivative(point)

sizes = [
1e-1,
5e-2,
1e-2,
1e-3,
1e-4,
1e-5,
]
if model in ("Okuonghae_ChaosSolitonsFractals2020",):
sizes.insert(0, 0.2)

derivative = get_derivative(
function=amici_function,
point=point,
sizes=sizes,
sizes=cur_settings.step_sizes,
direction_ids=parameter_ids,
method_ids=[MethodId.CENTRAL, MethodId.FORWARD, MethodId.BACKWARD],
success_checker=Consistency(atol=1e-5, rtol=1e-1),
Expand All @@ -160,7 +174,9 @@ def test_benchmark_gradient(model, scale):
expectation=expected_derivative,
point=point,
)
success = check(rtol=RTOL, atol=ATOL)
success = check(
rtol=cur_settings.rtol_check, atol=cur_settings.atol_check
)

if debug:
df = pd.DataFrame(
Expand Down

0 comments on commit 36c2038

Please sign in to comment.