diff --git a/python/sdist/amici/petab_objective.py b/python/sdist/amici/petab_objective.py
index 0e54fb022d..7a18d1943b 100644
--- a/python/sdist/amici/petab_objective.py
+++ b/python/sdist/amici/petab_objective.py
@@ -7,6 +7,7 @@
     DeprecationWarning,
 )
 
+from .petab.parameter_mapping import create_parameter_mapping  # noqa: F401
 from .petab.simulations import create_edatas  # noqa: F401
 from .petab.simulations import (  # noqa: F401
     aggregate_sllh,
diff --git a/tests/benchmark-models/test_petab_benchmark.py b/tests/benchmark-models/test_petab_benchmark.py
index af9dfc9a88..d0a783e2c4 100755
--- a/tests/benchmark-models/test_petab_benchmark.py
+++ b/tests/benchmark-models/test_petab_benchmark.py
@@ -8,10 +8,6 @@
 import petab
 import pytest
 from amici.petab.petab_import import import_petab_problem
-from fiddy import MethodId, get_derivative
-from fiddy.derivative_check import NumpyIsCloseDerivativeCheck
-from fiddy.extensions.amici import simulate_petab_to_cached_functions
-from fiddy.success import Consistency
 
 # Absolute and relative tolerances for finite difference gradient checks.
 ATOL: float = 1e-3
@@ -53,12 +49,17 @@
 
 # until fiddy is updated
 @pytest.mark.filterwarnings(
-    "ignore:Importing amici.petab_objective is deprecated.*:DeprecationWarning"
+    "ignore:Importing amici.petab_objective is deprecated.:DeprecationWarning"
 )
 @pytest.mark.filterwarnings("ignore:divide by zero encountered in log10")
 @pytest.mark.parametrize("scale", (True, False))
 @pytest.mark.parametrize("model", models)
 def test_benchmark_gradient(model, scale):
+    from fiddy import MethodId, get_derivative
+    from fiddy.derivative_check import NumpyIsCloseDerivativeCheck
+    from fiddy.extensions.amici import simulate_petab_to_cached_functions
+    from fiddy.success import Consistency
+
     if not scale and model in (
         "Smith_BMCSystBiol2013",
         "Brannmark_JBC2010",