From cbb08ad2f47b403ed2c4fc1f3a06423dcefcc32d Mon Sep 17 00:00:00 2001
From: Daniel Weindl
Date: Wed, 25 Oct 2023 19:48:27 +0200
Subject: [PATCH] Doc: Fix/update/add/prettify some pypesto.profile documentation (#1149)

---
 doc/api.rst                           |  1 +
 pypesto/profile/options.py            |  2 +-
 pypesto/profile/profile.py            | 13 ++++----
 pypesto/profile/profile_next_guess.py | 43 +++++++++++++++++----------
 4 files changed, 37 insertions(+), 22 deletions(-)

diff --git a/doc/api.rst b/doc/api.rst
index 685637971..92a36ffe8 100644
--- a/doc/api.rst
+++ b/doc/api.rst
@@ -20,6 +20,7 @@ API reference
    pypesto.predict
    pypesto.problem
    pypesto.profile
+   pypesto.profile.profile_next_guess
    pypesto.result
    pypesto.sample
    pypesto.select
diff --git a/pypesto/profile/options.py b/pypesto/profile/options.py
index 9867c4fa5..21cd0b858 100644
--- a/pypesto/profile/options.py
+++ b/pypesto/profile/options.py
@@ -5,7 +5,7 @@ class ProfileOptions(dict):
     """
     Options for optimization based profiling.
 
-    Parameters
+    Attributes
     ----------
     default_step_size:
         Default step size of the profiling routine along the profile path
diff --git a/pypesto/profile/profile.py b/pypesto/profile/profile.py
index 30d1b303e..bdbaf7ab7 100644
--- a/pypesto/profile/profile.py
+++ b/pypesto/profile/profile.py
@@ -56,8 +56,9 @@ def parameter_profile(
         Index from which optimization result profiling should be started
         (default: global optimum, i.e., index = 0).
     next_guess_method:
-        Function handle to a method that creates the next starting point for
-        optimization in profiling.
+        Method that creates the next starting point for optimization in profiling.
+        One of the ``update_type`` options supported by
+        :func:`pypesto.profile.profile_next_guess.next_guess`.
     profile_options:
         Various options applied to the profile optimization.
     progress_bar:
@@ -66,8 +67,8 @@ def parameter_profile(
         Name of the hdf5 file, where the result will be saved. Default is
         None, which deactivates automatic saving. If set to "Auto" it will
         automatically generate a file named
-        `year_month_day_profiling_result.hdf5`.
-        Optionally a method, see docs for `pypesto.store.auto.autosave`.
+        ``year_month_day_profiling_result.hdf5``.
+        Optionally a method, see docs for :func:`pypesto.store.auto.autosave`.
     overwrite:
         Whether to overwrite `result/profiling` in the autosave file
         if it already exists.
@@ -113,12 +114,12 @@ def create_next_guess(
             )
 
     elif callable(next_guess_method):
-        raise Exception(
+        raise NotImplementedError(
            'Passing function handles for computation of next '
            'profiling point is not yet supported.'
        )
     else:
-        raise Exception('Unsupported input for next_guess_method.')
+        raise ValueError('Unsupported input for next_guess_method.')
 
     # create the profile result object (retrieve global optimum) or append to
     # existing list of profiles
diff --git a/pypesto/profile/profile_next_guess.py b/pypesto/profile/profile_next_guess.py
index dc792903c..32da44b79 100644
--- a/pypesto/profile/profile_next_guess.py
+++ b/pypesto/profile/profile_next_guess.py
@@ -1,5 +1,5 @@
 import copy
-from typing import Callable, List, Tuple, Union
+from typing import Callable, List, Literal, Tuple, Union
 
 import numpy as np
 
@@ -7,13 +7,20 @@
 from ..result import ProfilerResult
 from .options import ProfileOptions
 
+__all__ = ['next_guess', 'fixed_step', 'adaptive_step']
+
 
 def next_guess(
     x: np.ndarray,
     par_index: int,
-    par_direction: int,
+    par_direction: Literal[1, -1],
     profile_options: ProfileOptions,
-    update_type: str,
+    update_type: Literal[
+        'fixed_step',
+        'adaptive_step_order_0',
+        'adaptive_step_order_1',
+        'adaptive_step_regression',
+    ],
     current_profile: ProfilerResult,
     problem: Problem,
     global_opt: float,
@@ -31,11 +38,14 @@
     par_index:
         The index of the parameter of the current profile.
     par_direction:
-        The direction, in which the profiling is done (1 or -1).
+        The direction, in which the profiling is done (``1`` or ``-1``).
     profile_options:
         Various options applied to the profile optimization.
     update_type:
-        Type of update for next profile point.
+        Type of update for next profile point:
+        ``fixed_step`` (see :func:`fixed_step`),
+        ``adaptive_step_order_0``, ``adaptive_step_order_1``, or ``adaptive_step_regression``
+        (see :func:`adaptive_step`).
     current_profile:
         The profile which should be computed.
     problem:
@@ -60,7 +70,7 @@
         order = np.nan
     else:
         raise Exception(
-            'Unsupported update_type for ' 'create_next_startpoint.'
+            f'Unsupported `update_type` {update_type} for `next_guess`.'
         )
 
     return adaptive_step(
@@ -78,12 +88,15 @@ def fixed_step(
     x: np.ndarray,
     par_index: int,
-    par_direction: int,
+    par_direction: Literal[1, -1],
     options: ProfileOptions,
     problem: Problem,
 ) -> np.ndarray:
     """Most simple method to create the next guess.
 
+    Computes the next point based on the fixed step size given by
+    ``default_step_size`` in :class:`ProfileOptions`.
+
     Parameters
     ----------
     x:
         The current position of the profiler, size `dim_full`.
@@ -90,7 +103,7 @@ def fixed_step(
     par_index:
         The index of the parameter of the current profile
     par_direction:
-        The direction, in which the profiling is done (1 or -1)
+        The direction, in which the profiling is done (``1`` or ``-1``)
     options:
         Various options applied to the profile optimization.
     problem:
@@ -119,7 +132,7 @@ def fixed_step(
 def adaptive_step(
     x: np.ndarray,
     par_index: int,
-    par_direction: int,
+    par_direction: Literal[1, -1],
     options: ProfileOptions,
     current_profile: ProfilerResult,
     problem: Problem,
@@ -148,9 +161,9 @@
     global_opt:
         log-posterior value of the global optimum
     order:
-        Specifies the precise algorithm for extrapolation: can be 0 (
-        just one parameter is updated), 1 (last two points used to
-        extrapolate all parameters), and np.nan (indicates that a more
+        Specifies the precise algorithm for extrapolation: can be ``0`` (
+        just one parameter is updated), ``1`` (last two points used to
+        extrapolate all parameters), and ``np.nan`` (indicates that a more
         complex regression should be used)
 
     Returns
@@ -252,7 +265,7 @@ def par_extrapol(step_length):
     # iterate until good step size is found
     if next_obj_target < next_obj:
         # The step is rather too long
-        return do_line_seach(
+        return do_line_search(
             next_x,
             step_size_guess,
             'decrease',
@@ -268,7 +281,7 @@ def par_extrapol(step_length):
 
     else:
         # The step is rather too short
-        return do_line_seach(
+        return do_line_search(
             next_x,
             step_size_guess,
             'increase',
@@ -387,7 +400,7 @@ def get_reg_polynomial(
     return reg_par
 
 
-def do_line_seach(
+def do_line_search(
     next_x: np.ndarray,
     step_size_guess: float,
     direction: str,
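
Usage sketch (not part of the patch): the snippet below shows how the documented ``next_guess_method`` values and ``ProfileOptions.default_step_size`` are typically passed to ``parameter_profile``. The surrounding setup (``pypesto.Objective``, ``pypesto.Problem``, ``pypesto.optimize.minimize``, ``pypesto.optimize.ScipyOptimizer``) is assumed standard pypesto API and is not taken from this patch.

import numpy as np

import pypesto
import pypesto.optimize as optimize
import pypesto.profile as profile

# Simple quadratic objective with two parameters (assumed example setup,
# not part of the patch).
objective = pypesto.Objective(fun=lambda x: np.sum(x**2), grad=lambda x: 2 * x)
problem = pypesto.Problem(objective, lb=[-5, -5], ub=[5, 5])

# Multi-start optimization; per the docstring above, profiling starts by
# default from the best result (result_index=0, the global optimum).
optimizer = optimize.ScipyOptimizer()
result = optimize.minimize(problem=problem, optimizer=optimizer, n_starts=10)

# `next_guess_method` must be one of the `update_type` values documented for
# pypesto.profile.profile_next_guess.next_guess; `default_step_size` is the
# ProfileOptions attribute documented in options.py.
result = profile.parameter_profile(
    problem=problem,
    result=result,
    optimizer=optimizer,
    next_guess_method='adaptive_step_order_1',
    profile_options=profile.ProfileOptions(default_step_size=0.01),
)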