CI: Prettify gradient check output
Make the result more informative and more readable.
dweindl committed Oct 2, 2024
1 parent 8bd63f3 commit 2e14e4a
Showing 1 changed file with 6 additions and 2 deletions.
8 changes: 6 additions & 2 deletions tests/benchmark-models/test_petab_benchmark.py
@@ -19,6 +19,7 @@
 from fiddy.extensions.amici import simulate_petab_to_cached_functions
 from fiddy.success import Consistency
 
+
 repo_root = Path(__file__).parent.parent.parent
 
 # reuse compiled models from test_benchmark_collection.sh
@@ -110,7 +111,11 @@ def assert_gradient_check_success(
     if check_result.success is True:
         return
 
-    raise AssertionError(f"Gradient check failed:\n{check_result.df}")
+    df = check_result.df
+    df["abs_diff"] = np.abs(df["expectation"] - df["test"])
+    df["rel_diff"] = df["abs_diff"] / np.abs(df["expectation"])
+    with pd.option_context("display.max_columns", None, "display.width", None):
+        raise AssertionError(f"Gradient check failed:\n{df}")
 
 
 @pytest.mark.filterwarnings(
@@ -201,7 +206,6 @@ def test_benchmark_gradient(model, scale, sensitivity_method, request):
         # cache=not debug,
         cache=False,
     )
-
     noise_level = 0.1
     np.random.seed(cur_settings.rng_seed)
 
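For illustration, the pattern introduced in this commit can be reproduced outside the test suite. The sketch below assumes a comparison table with "expectation" and "test" columns, matching the column names of fiddy's check_result.df used above; the parameter names and sample values are purely hypothetical.

import numpy as np
import pandas as pd

# Hypothetical stand-in for fiddy's check_result.df:
# expected (finite-difference) vs. tested (computed) gradient entries.
df = pd.DataFrame(
    {
        "parameter": ["k1", "k2", "k3"],
        "expectation": [1.0e-2, -3.5, 2.0],
        "test": [1.1e-2, -3.5001, 2.0],
    }
)

# Derived columns, as in the diff above.
df["abs_diff"] = np.abs(df["expectation"] - df["test"])
df["rel_diff"] = df["abs_diff"] / np.abs(df["expectation"])

# Lift pandas' display truncation so the full table ends up in the message.
with pd.option_context("display.max_columns", None, "display.width", None):
    print(f"Gradient check failed:\n{df}")

Note that the f-string is rendered while the option context is active, so the widened display settings apply to the DataFrame's repr even though, in the test itself, the AssertionError propagates out of the with block.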
