From 48019e2122512f340d87b587078b8d8293e36e97 Mon Sep 17 00:00:00 2001
From: Paul Jonas Jost <70631928+PaulJonasJost@users.noreply.github.com>
Date: Wed, 25 Oct 2023 15:18:12 +0200
Subject: [PATCH] Add warning if a fixed parameter is supposed to be profiled
 (#1155)

* Replaced logger.warn with logger.warning, as warn is deprecated.

* Added a warning if a fixed parameter is requested for profiling.

---------

Co-authored-by: Daniel Weindl
---
 pypesto/history/optimizer.py  | 2 +-
 pypesto/optimize/optimizer.py | 8 ++++----
 pypesto/profile/profile.py    | 4 ++++
 3 files changed, 9 insertions(+), 5 deletions(-)

diff --git a/pypesto/history/optimizer.py b/pypesto/history/optimizer.py
index a4876b111..46524a0c1 100644
--- a/pypesto/history/optimizer.py
+++ b/pypesto/history/optimizer.py
@@ -134,7 +134,7 @@ def finalize(self, message: str = None, exitflag: int = None):
             and not allclose(result[X], self.x_min)
         ):
             # issue a warning, as if this happens, then something may be wrong
-            logger.warn(
+            logger.warning(
                 f"History has a better point {fval} than the current best "
                 "point {self.fval_min}."
             )
diff --git a/pypesto/optimize/optimizer.py b/pypesto/optimize/optimizer.py
index f1d446b40..9060f3434 100644
--- a/pypesto/optimize/optimizer.py
+++ b/pypesto/optimize/optimizer.py
@@ -608,7 +608,7 @@ def get_default_options(self):
     def check_x0_support(self, x_guesses: np.ndarray = None) -> bool:
         """Check whether optimizer supports x0."""
         if x_guesses is not None and x_guesses.size > 0:
-            logger.warn("The Dlib optimizer does not support x0.")
+            logger.warning("The Dlib optimizer does not support x0.")
 
         return False
 
@@ -666,7 +666,7 @@ def is_least_squares(self):
     def check_x0_support(self, x_guesses: np.ndarray = None) -> bool:
         """Check whether optimizer supports x0."""
         if x_guesses is not None and x_guesses.size > 0:
-            logger.warn("The pyswarm optimizer does not support x0.")
+            logger.warning("The pyswarm optimizer does not support x0.")
 
         return False
 
@@ -940,7 +940,7 @@ def is_least_squares(self):
     def check_x0_support(self, x_guesses: np.ndarray = None) -> bool:
         """Check whether optimizer supports x0."""
         if x_guesses is not None and x_guesses.size > 0:
-            logger.warn("The pyswarms optimizer does not support x0.")
+            logger.warning("The pyswarms optimizer does not support x0.")
 
         return False
 
@@ -1186,7 +1186,7 @@ def check_x0_support(self, x_guesses: np.ndarray = None) -> bool:
             nlopt.GN_DIRECT_L_RAND_NOSCAL,
         ):
             if x_guesses is not None and x_guesses.size > 0:
-                logger.warn(
+                logger.warning(
                     f"The NLopt optimizer method {self.method} does "
                     "not support x0."
                 )
diff --git a/pypesto/profile/profile.py b/pypesto/profile/profile.py
index 056c21a79..30d1b303e 100644
--- a/pypesto/profile/profile.py
+++ b/pypesto/profile/profile.py
@@ -135,6 +135,10 @@ def create_next_guess(
     for i_par in profile_index:
         # only compute profiles for free parameters
        if i_par in problem.x_fixed_indices:
+            # log a warning
+            logger.warning(
+                f"Parameter {i_par} is fixed and will not be profiled."
+            )
             continue
 
         current_profile = result.profile_result.get_profiler_result(
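
Background on the logging change: in the Python standard library, Logger.warn is a
deprecated alias of Logger.warning, which is why all call sites in this patch switch to
logger.warning. A minimal, self-contained sketch using only the standard library (the
logger name and message are illustrative, not pypesto code):

    import logging

    logging.basicConfig(level=logging.WARNING)
    logger = logging.getLogger("pypesto.history.optimizer")

    # Deprecated alias: still works, but newer Python versions emit a
    # DeprecationWarning for Logger.warn.
    logger.warn("History has a better point than the current best point.")

    # Preferred spelling, used throughout this patch.
    logger.warning("History has a better point than the current best point.")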
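
The new behaviour in pypesto/profile/profile.py can be sketched in isolation. The loop
below mirrors the patched skip-and-warn logic; x_fixed_indices and profile_index are
hypothetical stand-ins for the corresponding attribute of a pypesto Problem and the
requested profile indices:

    import logging

    logging.basicConfig(level=logging.WARNING)
    logger = logging.getLogger("pypesto.profile")

    # Hypothetical stand-ins for problem.x_fixed_indices and profile_index;
    # only the skip-and-warn logic of the patch is mirrored here.
    x_fixed_indices = [1, 3]
    profile_index = [0, 1, 2, 3]

    for i_par in profile_index:
        # Only free parameters are profiled; with this patch the user is
        # warned instead of the fixed parameter being skipped silently.
        if i_par in x_fixed_indices:
            logger.warning(
                f"Parameter {i_par} is fixed and will not be profiled."
            )
            continue
        # ... the profile for the free parameter i_par would be computed here ...

Running the sketch emits one warning per fixed parameter (here indices 1 and 3),
matching the message added by the patch; previously such parameters were skipped
silently.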