diff --git a/CHANGELOG.rst b/CHANGELOG.rst index c69ec6607..e65d84e6d 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -6,6 +6,25 @@ Release notes .......... +0.3.3 (2023-10-19) +------------------- + +* Visualize: + * Get optimization result by id (#1116) +* Storage: + * Allow "{id}" in history storage filename (#1118) +* Objective: + * Adjusted PEtab.jl syntax to the new release (#1128, #1131) + * Documentation on PEtab importer updated (#1126) +* Ensembles: + * Additional option for cutoff calculation (#1124) + * Ensembles from optimization endpoints now only take free parameters (#1130) +* General: + * Added How to Cite (#1125) + * Additional summary option (#1134) + * Speed up base tests (#1127) + + 0.3.2 (2023-10-02) ------------------- diff --git a/README.md b/README.md index 58ef9ca52..e6f00ba30 100644 --- a/README.md +++ b/README.md @@ -59,8 +59,29 @@ We are happy about any contributions. For more information on how to contribute to pyPESTO check out +## Publications + +**Citeable DOI for the latest pyPESTO release:** +[![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.2553546.svg)](https://doi.org/10.5281/zenodo.2553546) + +There is a list of [publications using pyPESTO](https://pypesto.readthedocs.io/en/latest/references.html). +If you used pyPESTO in your work, we are happy to include +your project; please let us know via a GitHub issue. + +When using pyPESTO in your project, please cite +* Schälte, Y., Fröhlich, F., Jost, P. J., Vanhoefer, J., Pathirana, D., Stapor, P., + Lakrisenko, P., Wang, D., Raimúndez, E., Merkt, S., Schmiester, L., Städter, P., + Grein, S., Dudkin, E., Doresic, D., Weindl, D., & Hasenauer, J. (2023). pyPESTO: A + modular and scalable tool for parameter estimation for dynamic models [(arXiv:2305.01821)](https://doi.org/10.48550/arXiv.2305.01821). + +When presenting work that employs pyPESTO, feel free to use one of the icons in +[doc/logo/](https://github.com/ICB-DCM/pyPESTO/tree/main/doc/logo): + +<p align="center">
+  <a href="https://raw.githubusercontent.com/ICB-DCM/pyPESTO/master/doc/logo/logo.png">
+    <img src="https://raw.githubusercontent.com/ICB-DCM/pyPESTO/master/doc/logo/logo.png" height="75" alt="pyPESTO logo">
+  </a>
+</p> ## References -[**PESTO**](https://github.com/ICB-DCM/PESTO/): -Parameter estimation toolbox for MATLAB. Development is discontinued, but PESTO -comes with additional features waiting to be ported to pyPESTO. +pyPESTO supersedes [**PESTO**](https://github.com/ICB-DCM/PESTO/), a parameter estimation +toolbox for MATLAB, whose development is discontinued. diff --git a/doc/example/conversion_reaction/PEtabJl_module.jl b/doc/example/conversion_reaction/PEtabJl_module.jl index 155ba776b..f1bac1935 100644 --- a/doc/example/conversion_reaction/PEtabJl_module.jl +++ b/doc/example/conversion_reaction/PEtabJl_module.jl @@ -5,15 +5,15 @@ using Sundials using PEtab pathYaml = "/Users/pauljonasjost/Documents/GitHub_Folders/pyPESTO/test/julia/../../doc/example/conversion_reaction/conversion_reaction.yaml" -petabModel = readPEtabModel(pathYaml, verbose=true) +petabModel = PEtabModel(pathYaml, verbose=true) -# A full list of options for createPEtabODEProblem can be found at https://sebapersson.github.io/PEtab.jl/dev/API_choosen/#PEtab.setupPEtabODEProblem -petabProblem = createPEtabODEProblem( +# A full list of options for PEtabODEProblem can be found at https://sebapersson.github.io/PEtab.jl/stable/API_choosen/ +petabProblem = PEtabODEProblem( petabModel, - odeSolverOptions=ODESolverOptions(Rodas5P(), abstol=1e-08, reltol=1e-08, maxiters=Int64(1e4)), - gradientMethod=:ForwardDiff, - hessianMethod=:ForwardDiff, - sparseJacobian=nothing, + ode_solver=ODESolver(Rodas5P(), abstol=1e-08, reltol=1e-08, maxiters=Int64(1e4)), + gradient_method=:ForwardDiff, + hessian_method=:ForwardDiff, + sparse_jacobian=nothing, verbose=true ) diff --git a/doc/how_to_cite.rst b/doc/how_to_cite.rst new file mode 100644 index 000000000..53054abd8 --- /dev/null +++ b/doc/how_to_cite.rst @@ -0,0 +1,28 @@ +How to cite pyPESTO +=================== + +**Citeable DOI for the latest pyPESTO release:** + +.. image:: https://zenodo.org/badge/DOI/10.5281/zenodo.2553546.svg + :target: https://doi.org/10.5281/zenodo.2553546 + :alt: pyPESTO release DOI + + +There is a list of `publications using pyPESTO <https://pypesto.readthedocs.io/en/latest/references.html>`_. +If you used pyPESTO in your work, we are happy to include +your project; please let us know via a GitHub issue. + +When using pyPESTO in your project, please cite + +- Schälte, Y., Fröhlich, F., Jost, P. J., Vanhoefer, J., Pathirana, D., Stapor, P., + Lakrisenko, P., Wang, D., Raimúndez, E., Merkt, S., Schmiester, L., Städter, P., + Grein, S., Dudkin, E., Doresic, D., Weindl, D., & Hasenauer, J. (2023). pyPESTO: A + modular and scalable tool for parameter estimation for dynamic models `arXiv:2305.01821 <https://doi.org/10.48550/arXiv.2305.01821>`_. + +When presenting work that employs pyPESTO, feel free to use one of the icons in +`doc/logo/ <https://github.com/ICB-DCM/pyPESTO/tree/main/doc/logo>`_: + +.. image:: https://raw.githubusercontent.com/ICB-DCM/pyPESTO/master/doc/logo/logo.png + :target: https://raw.githubusercontent.com/ICB-DCM/pyPESTO/master/doc/logo/logo.png + :height: 75 + :alt: pyPESTO logo diff --git a/doc/index.rst b/doc/index.rst index 062f4c034..5035172d0 100644 --- a/doc/index.rst +++ b/doc/index.rst @@ -45,6 +45,7 @@ pyPESTO - Parameter EStimation TOolbox for python references contact license + how_to_cite logo diff --git a/doc/references.rst b/doc/references.rst index 79742222b..c8cb861c0 100644 --- a/doc/references.rst +++ b/doc/references.rst @@ -1,7 +1,7 @@ -Publications using pypesto +Publications using pyPESTO ========================== -pypesto was used in the following publications: +pyPESTO was used in the following publications: ..
bibliography:: using_pypesto.bib :list: enumerated diff --git a/pypesto/C.py b/pypesto/C.py index 61f4eb449..c5b65452a 100644 --- a/pypesto/C.py +++ b/pypesto/C.py @@ -35,6 +35,9 @@ ENSEMBLE_TYPE = 'ensemble_type' PREDICTIONS = 'predictions' +SIMULTANEOUS = 'simultaneous' +POINTWISE = 'pointwise' + LOWER_BOUND = 'lower_bound' UPPER_BOUND = 'upper_bound' PREEQUILIBRATION_CONDITION_ID = 'preequilibrationConditionId' diff --git a/pypesto/ensemble/ensemble.py b/pypesto/ensemble/ensemble.py index 178c1c2b1..d3bd07687 100644 --- a/pypesto/ensemble/ensemble.py +++ b/pypesto/ensemble/ensemble.py @@ -29,12 +29,14 @@ OUTPUT, OUTPUT_SENSI, PERCENTILE, + POINTWISE, PREDICTION_ARRAYS, PREDICTION_ID, PREDICTION_RESULTS, PREDICTION_SUMMARY, PREDICTIONS, PREDICTOR, + SIMULTANEOUS, STANDARD_DEVIATION, SUMMARY, TIMEPOINTS, @@ -668,7 +670,7 @@ def from_optimization_endpoints( # did not reach maximum size and the next value is still # lower than the cutoff value if start['fval'] <= abs_cutoff and len(x_vectors) < max_size: - x_vectors.append(start['x']) + x_vectors.append(start['x'][result.problem.x_free_indices]) # the vector tag will be a -1 to indicate it is the last step vector_tags.append((int(start['id']), -1)) @@ -1204,7 +1206,11 @@ def get_percentile_label(percentile: Union[float, int, str]) -> str: return f'{PERCENTILE} {percentile}' -def calculate_cutoff(result: Result, percentile: float = 0.95): +def calculate_cutoff( + result: Result, + percentile: float = 0.95, + cr_option: str = SIMULTANEOUS, +): """ Calculate the cutoff of the ensemble. @@ -1220,6 +1226,10 @@ def calculate_cutoff(result: Result, percentile: float = 0.95): The percentile of the chi^2 distribution. Between 0 and 100. Higher values will result in a more lax cutoff. If the value is greater than 100, the cutoff will be returned as np.inf. + cr_option: + The type of confidence region, which determines the degree of freedom of + the chi^2 distribution for the cutoff value. It can take 'simultaneous' or + 'pointwise'. Returns ------- @@ -1230,10 +1240,19 @@ def calculate_cutoff(result: Result, percentile: float = 0.95): f"percentile={percentile} is too large. Choose " f"0<=percentile<=100." ) + if cr_option not in [SIMULTANEOUS, POINTWISE]: + raise ValueError( + "Confidence region must be either simultaneous or pointwise." 
+ ) + # optimal point as base: fval_opt = result.optimize_result[0].fval - # degrees of freedom is equal to the number of parameters - df = result.problem.dim - range = chi2.ppf(q=percentile / 100, df=df) + if cr_option == SIMULTANEOUS: + # degrees of freedom is equal to the number of parameters + df = result.problem.dim + elif cr_option == POINTWISE: + # degrees of freedom is equal to 1 + df = 1 + range = chi2.ppf(q=percentile / 100, df=df) return fval_opt + range diff --git a/pypesto/objective/julia/petabJl.py b/pypesto/objective/julia/petabJl.py index 344c78f71..bceca4a60 100644 --- a/pypesto/objective/julia/petabJl.py +++ b/pypesto/objective/julia/petabJl.py @@ -61,10 +61,10 @@ def __init__( self.petab_jl_problem = petab_jl_problem # get functions - fun = self.petab_jl_problem.computeCost - grad = self.petab_jl_problem.computeGradient - hess = self.petab_jl_problem.computeHessian - x_names = np.asarray(self.petab_jl_problem.θ_estNames) + fun = self.petab_jl_problem.compute_cost + grad = self.petab_jl_problem.compute_gradient + hess = self.petab_jl_problem.compute_hessian + x_names = np.asarray(self.petab_jl_problem.θ_names) # call the super super super constructor super(JuliaObjective, self).__init__( @@ -102,10 +102,10 @@ def __setstate__(self, state): self.petab_jl_problem = petab_jl_problem # get functions - fun = self.petab_jl_problem.computeCost - grad = self.petab_jl_problem.computeGradient - hess = self.petab_jl_problem.computeHessian - x_names = np.asarray(self.petab_jl_problem.θ_estNames) + fun = self.petab_jl_problem.compute_cost + grad = self.petab_jl_problem.compute_gradient + hess = self.petab_jl_problem.compute_hessian + x_names = np.asarray(self.petab_jl_problem.θ_names) # call the super super constructor super(JuliaObjective, self).__init__(fun, grad, hess, x_names) diff --git a/pypesto/objective/julia/petab_jl_importer.py b/pypesto/objective/julia/petab_jl_importer.py index 3b8f2c1d2..06aa8bdf1 100644 --- a/pypesto/objective/julia/petab_jl_importer.py +++ b/pypesto/objective/julia/petab_jl_importer.py @@ -50,10 +50,10 @@ def __init__( @staticmethod def from_yaml( yaml_file: str, - odeSolverOptions: Optional[dict] = None, - gradientMethod: Optional[str] = None, - hessianMethod: Optional[str] = None, - sparseJacobian: Optional[bool] = None, + ode_solver_options: Optional[dict] = None, + gradient_method: Optional[str] = None, + hessian_method: Optional[str] = None, + sparse_jacobian: Optional[bool] = None, verbose: Optional[bool] = None, directory: Optional[str] = None, ) -> PetabJlImporter: @@ -67,11 +67,11 @@ def from_yaml( ---------- yaml_file: The yaml file of the PEtab problem - odeSolverOptions: + ode_solver_options: Dictionary like options for the ode solver in julia - gradientMethod, hessianMethod: + gradient_method, hessian_method: Julia methods to compute gradient and hessian - sparseJacobian: + sparse_jacobian: Whether to compute sparse Jacobians verbose: Whether to have a more informative log. @@ -81,10 +81,10 @@ def from_yaml( """ # get default values options = _get_default_options( - odeSolverOptions=odeSolverOptions, - gradientMethod=gradientMethod, - hessianMethod=hessianMethod, - sparseJacobian=sparseJacobian, + ode_solver_options=ode_solver_options, + gradient_method=gradient_method, + hessian_method=hessian_method, + sparse_jacobian=sparse_jacobian, verbose=verbose, ) @@ -166,8 +166,8 @@ def create_problem( multistart optimization. 
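For readers updating existing scripts, a minimal, hypothetical sketch of the renamed snake_case importer interface above: the YAML filename is a placeholder, and running it requires a working Julia installation with PEtab.jl, set up as in the example module earlier in this diff.

```python
import pypesto.optimize
from pypesto.objective.julia.petab_jl_importer import PetabJlImporter

# "conversion_reaction.yaml" is a placeholder for any PEtab problem file
importer = PetabJlImporter.from_yaml(
    "conversion_reaction.yaml",
    gradient_method="ForwardDiff",
    hessian_method="ForwardDiff",
)

# builds a pypesto.Problem whose bounds come from the Julia PEtabODEProblem;
# the importer writes a Julia module similar to PEtabJl_module.jl shown earlier
problem = importer.create_problem()

# short multistart optimization on the Julia-backed objective
result = pypesto.optimize.minimize(problem, n_starts=2, progress_bar=False)
```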
""" obj = self.create_objective(precompile=precompile) - lb = np.asarray(self.petab_jl_problem.lowerBounds) - ub = np.asarray(self.petab_jl_problem.upperBounds) + lb = np.asarray(self.petab_jl_problem.lower_bounds) + ub = np.asarray(self.petab_jl_problem.upper_bounds) return Problem( objective=obj, @@ -181,10 +181,10 @@ def create_problem( def _get_default_options( - odeSolverOptions: Union[dict, None] = None, - gradientMethod: Union[str, None] = None, - hessianMethod: Union[str, None] = None, - sparseJacobian: Union[str, None] = None, + ode_solver_options: Union[dict, None] = None, + gradient_method: Union[str, None] = None, + hessian_method: Union[str, None] = None, + sparse_jacobian: Union[str, None] = None, verbose: Union[str, None] = None, ) -> dict: """ @@ -194,13 +194,13 @@ def _get_default_options( Parameters ---------- - odeSolverOptions: + ode_solver_options: Options for the ODE solver. - gradientMethod: + gradient_method: Method for gradient calculation. - hessianMethod: + hessian_method: Method for hessian calculation. - sparseJacobian: + sparse_jacobian: Whether the jacobian should be sparse. verbose: Whether to print verbose output. @@ -211,51 +211,51 @@ def _get_default_options( The options. """ # get default values - if odeSolverOptions is None: - odeSolverOptions = { + if ode_solver_options is None: + ode_solver_options = { "solver": "Rodas5P", "abstol": 1e-8, "reltol": 1e-8, "maxiters": "Int64(1e4)", } - if not odeSolverOptions["solver"].endswith("()"): - odeSolverOptions["solver"] += "()" # add parentheses - if gradientMethod is None: - gradientMethod = "nothing" - if hessianMethod is None: - hessianMethod = "nothing" - if sparseJacobian is None: - sparseJacobian = "nothing" + if not ode_solver_options["solver"].endswith("()"): + ode_solver_options["solver"] += "()" # add parentheses + if gradient_method is None: + gradient_method = "nothing" + if hessian_method is None: + hessian_method = "nothing" + if sparse_jacobian is None: + sparse_jacobian = "nothing" if verbose is None: verbose = "true" - # check values for gradientMethod and hessianMethod + # check values for gradient_method and hessian_method allowed_gradient_methods = [ "ForwardDiff", "ForwardEquations", "Adjoint", "Zygote", ] - if gradientMethod not in allowed_gradient_methods: + if gradient_method not in allowed_gradient_methods: logger.warning( - f"gradientMethod {gradientMethod} is not in " + f"gradient_method {gradient_method} is not in " f"{allowed_gradient_methods}. Defaulting to ForwardDiff." ) - gradientMethod = "ForwardDiff" + gradient_method = "ForwardDiff" allowed_hessian_methods = ["ForwardDiff", "BlocForwardDiff", "GaussNewton"] - if hessianMethod not in allowed_hessian_methods: + if hessian_method not in allowed_hessian_methods: logger.warning( - f"hessianMethod {hessianMethod} is not in " + f"hessian_method {hessian_method} is not in " f"{allowed_hessian_methods}. Defaulting to ForwardDiff." 
) - hessianMethod = "ForwardDiff" + hessian_method = "ForwardDiff" # fill options options = { - "odeSolverOptions": odeSolverOptions, - "gradientMethod": gradientMethod, - "hessianMethod": hessianMethod, - "sparseJacobian": sparseJacobian, + "ode_solver_options": ode_solver_options, + "gradient_method": gradient_method, + "hessian_method": hessian_method, + "sparse_jacobian": sparse_jacobian, "verbose": verbose, } return options @@ -293,7 +293,7 @@ def _write_julia_file( "PEtab.jl/dev/API_choosen/#PEtab.setupPEtabODEProblem" ) odeSolvOpt_str = ", ".join( - [f"{k}={v}" for k, v in options["odeSolverOptions"].items()] + [f"{k}={v}" for k, v in options["ode_solver_options"].items()] ) # delete "solver=" from string odeSolvOpt_str = odeSolvOpt_str.replace("solver=", "") @@ -304,15 +304,15 @@ def _write_julia_file( f"using Sundials\n" f"using PEtab\n\n" f"pathYaml = \"{yaml_file}\"\n" - f"petabModel = readPEtabModel(pathYaml, verbose=true)\n\n" - f"# A full list of options for createPEtabODEProblem can be " + f"petabModel = PEtabModel(pathYaml, verbose=true)\n\n" + f"# A full list of options for PEtabODEProblem can be " f"found at {link_to_options}\n" - f"petabProblem = createPEtabODEProblem(\n\t" + f"petabProblem = PEtabODEProblem(\n\t" f"petabModel,\n\t" - f"odeSolverOptions=ODESolverOptions({odeSolvOpt_str}),\n\t" - f"gradientMethod=:{options['gradientMethod']},\n\t" - f"hessianMethod=:{options['hessianMethod']},\n\t" - f"sparseJacobian={options['sparseJacobian']},\n\t" + f"ode_solver=ODESolver({odeSolvOpt_str}),\n\t" + f"gradient_method=:{options['gradient_method']},\n\t" + f"hessian_method=:{options['hessian_method']},\n\t" + f"sparse_jacobian={options['sparse_jacobian']},\n\t" f"verbose={options['verbose']}\n)\n\nend\n" ) # write file diff --git a/pypesto/optimize/util.py b/pypesto/optimize/util.py index a620ae37d..4e2e86383 100644 --- a/pypesto/optimize/util.py +++ b/pypesto/optimize/util.py @@ -61,9 +61,16 @@ def preprocess_hdf5_history( return False # create directory with same name as original file stem - template_path = ( - path.parent / path.stem / (path.stem + "_{id}" + path.suffix) - ) + if "{id}" in path.stem: + template_path = ( + path.parent + / path.stem.replace("{id}", "") + / (path.stem + path.suffix) + ) + else: + template_path = ( + path.parent / path.stem / (path.stem + "_{id}" + path.suffix) + ) template_path.parent.mkdir(parents=True, exist_ok=True) # set history file to template path history_options.storage_file = str(template_path) @@ -92,6 +99,8 @@ def postprocess_hdf5_history( History options used in the optimization. """ # create hdf5 file that gathers the others within history group + if "{id}" in storage_file: + storage_file = storage_file.replace("{id}", "") with h5py.File(storage_file, mode='w') as f: # create file and group f.require_group("history") diff --git a/pypesto/petab/importer.py b/pypesto/petab/importer.py index fd1e812be..5c536128b 100644 --- a/pypesto/petab/importer.py +++ b/pypesto/petab/importer.py @@ -61,11 +61,15 @@ class PetabImporter(AmiciObjectBuilder): """ - Importer for Petab files. + Importer for PEtab files. Create an `amici.Model`, an `objective.AmiciObjective` or a - `pypesto.Problem` from Petab files. - """ + `pypesto.Problem` from PEtab files. The created objective function is a + negative log-likelihood function and can thus be negative. The actual + form of the likelihood depends on the noise model specified in the provided PEtab problem. 
+ For more information, see + [the PEtab documentation](https://petab.readthedocs.io/en/latest/documentation_data_format.html#noise-distributions) + """ # noqa MODEL_BASE_DIR = "amici_models" diff --git a/pypesto/result/optimize.py b/pypesto/result/optimize.py index 6f38a4fba..30d08454d 100644 --- a/pypesto/result/optimize.py +++ b/pypesto/result/optimize.py @@ -126,7 +126,7 @@ def __getattr__(self, key): __setattr__ = dict.__setitem__ __delattr__ = dict.__delitem__ - def summary(self, full: bool = False) -> str: + def summary(self, full: bool = False, show_hess: bool = True) -> str: """ Get summary of the object. @@ -134,6 +134,8 @@ def summary(self, full: bool = False) -> str: ---------- full: If True, print full vectors including fixed parameters. + show_hess: + If True, display the Hessian of the result. Returns ------- @@ -164,7 +166,7 @@ def summary(self, full: bool = False) -> str: f"* final gradient value: " f"{self.grad if full else self.grad[self.free_indices]}\n" ) - if self.hess is not None: + if self.hess is not None and show_hess: hess = self.hess if not full: hess = self.hess[np.ix_(self.free_indices, self.free_indices)] @@ -239,6 +241,7 @@ def summary( disp_best: bool = True, disp_worst: bool = False, full: bool = False, + show_hess: bool = True, ) -> str: """ Get summary of the object. @@ -251,6 +254,8 @@ def summary( Whether to display a detailed summary of the worst run. full: If True, print full vectors including fixed parameters. + show_hess: + If True, display the Hessian of the OptimizerResult. """ if len(self) == 0: return "## Optimization Result \n\n*empty*\n" @@ -295,7 +300,8 @@ def summary( ) if disp_best: summary += ( - f"\nA summary of the best run:\n\n{self[0].summary(full)}" + f"\nA summary of the best run:\n\n" + f"{self[0].summary(full, show_hess=show_hess)}" ) if disp_worst: summary += ( @@ -406,3 +412,11 @@ def get_for_key(self, key) -> list: "releases." ) return [res[key] for res in self.list] + + def get_by_id(self, ores_id: str): + """Get OptimizationResult with the specified id.""" + for res in self.list: + if res.id == ores_id: + return res + else: + raise ValueError(f"no optimization result with id={ores_id}") diff --git a/pypesto/result/result.py b/pypesto/result/result.py index a2ba81dac..e85bbcde4 100644 --- a/pypesto/result/result.py +++ b/pypesto/result/result.py @@ -36,7 +36,7 @@ def __init__( self.profile_result = profile_result or ProfileResult() self.sample_result = sample_result or SampleResult() - def summary(self, full: bool = False) -> str: + def summary(self, full: bool = False, show_hess: bool = True) -> str: """ Get summary of the object. @@ -44,5 +44,7 @@ def summary(self, full: bool = False) -> str: ---------- full: If True, print full vectors including fixed parameters. + show_hess: + If True, display the Hessian of the OptimizeResult. 
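To make the intended use of the new `show_hess` flag and the `get_by_id` accessor concrete, a short hypothetical sketch: it assumes `result` is a `pypesto.Result` returned by `pypesto.optimize.minimize`, and the id value `"0"` is only an example (ids are stored as strings).

```python
# omit the (potentially large) Hessian block from the textual summary
print(result.summary(full=False, show_hess=False))

# look up a single optimizer run by its id; raises ValueError for unknown ids
run = result.optimize_result.get_by_id("0")
print(run.fval, run.exitflag)
```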
""" - return self.optimize_result.summary(full=full) + return self.optimize_result.summary(full=full, show_hess=show_hess) diff --git a/pypesto/version.py b/pypesto/version.py index f9aa3e110..e19434e2e 100644 --- a/pypesto/version.py +++ b/pypesto/version.py @@ -1 +1 @@ -__version__ = "0.3.2" +__version__ = "0.3.3" diff --git a/test/base/test_engine.py b/test/base/test_engine.py index c3e5979e5..d4d045c20 100644 --- a/test/base/test_engine.py +++ b/test/base/test_engine.py @@ -36,12 +36,12 @@ def _test_basic(engine): optimizer = pypesto.optimize.ScipyOptimizer(options={'maxiter': 10}) result = pypesto.optimize.minimize( problem=problem, - n_starts=5, + n_starts=2, engine=engine, optimizer=optimizer, progress_bar=False, ) - assert len(result.optimize_result) == 5 + assert len(result.optimize_result) == 2 def test_petab(): @@ -59,7 +59,9 @@ def test_petab(): def _test_petab(engine): petab_importer = pypesto.petab.PetabImporter.from_yaml( os.path.join( - models.MODELS_DIR, "Zheng_PNAS2012", "Zheng_PNAS2012.yaml" + models.MODELS_DIR, + "Boehm_JProteomeRes2014", + "Boehm_JProteomeRes2014.yaml", ) ) objective = petab_importer.create_objective() @@ -79,7 +81,9 @@ def test_deepcopy_objective(): """Test copying objectives (needed for MultiProcessEngine).""" petab_importer = pypesto.petab.PetabImporter.from_yaml( os.path.join( - models.MODELS_DIR, "Zheng_PNAS2012", "Zheng_PNAS2012.yaml" + models.MODELS_DIR, + "Boehm_JProteomeRes2014", + "Boehm_JProteomeRes2014.yaml", ) ) objective = petab_importer.create_objective() @@ -114,7 +118,9 @@ def test_pickle_objective(): """Test serializing objectives (needed for MultiThreadEngine).""" petab_importer = pypesto.petab.PetabImporter.from_yaml( os.path.join( - models.MODELS_DIR, "Zheng_PNAS2012", "Zheng_PNAS2012.yaml" + models.MODELS_DIR, + "Boehm_JProteomeRes2014", + "Boehm_JProteomeRes2014.yaml", ) ) objective = petab_importer.create_objective() diff --git a/tox.ini b/tox.ini index 9477f90f0..89af0a9c1 100644 --- a/tox.ini +++ b/tox.ini @@ -53,10 +53,10 @@ deps = git+https://github.com/Benchmarking-Initiative/Benchmark-Models-PEtab.git@master\#subdirectory=src/python commands = pytest --cov=pypesto --cov-report=xml --cov-append \ - test/base \ - test/profile \ - test/sample \ - test/visualize \ + test/base --durations=0 \ + test/profile --durations=0 \ + test/sample --durations=0 \ + test/visualize --durations=0 \ -s description = Test basic functionality