Commit

cq: update warnings with automatic stack_level
JoepVanlier committed Mar 11, 2024
1 parent bc39954 commit ea89a78
Showing 20 changed files with 118 additions and 52 deletions.
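
This commit replaces hard-coded (or missing) stacklevel arguments in pylake's warnings.warn calls with a value computed at run time by a find_stack_level() helper in lumicks.pylake.detail.utilities, so warnings are attributed to the user's call site rather than to pylake internals. As a quick illustration of why the stack level matters, here is a minimal, self-contained sketch with hypothetical function names (not part of this commit):

import warnings

def _library_internal():
    # stacklevel=1 (the default) would attribute the warning to this line,
    # stacklevel=2 to the caller of _library_internal(), and
    # stacklevel=3 to the caller's caller -- in this toy example, user_script().
    warnings.warn("something needs attention", RuntimeWarning, stacklevel=3)

def _library_public():
    _library_internal()

def user_script():
    _library_public()  # with stacklevel=3 the warning is reported at this line

user_script()

A fixed value such as the stacklevel=4 previously used in widefield.py breaks as soon as an intermediate call is added or removed; computing the level dynamically avoids that.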
5 changes: 3 additions & 2 deletions lumicks/pylake/detail/confocal.py
@@ -9,7 +9,7 @@
from .image import reconstruct_image, reconstruct_image_sum
from .mixin import PhotonCounts, ExcitationLaserPower
from .plotting import parse_color_channel
from .utilities import method_cache, could_sum_overflow
from .utilities import method_cache, find_stack_level, could_sum_overflow
from ..adjustments import no_adjustment
from .imaging_mixins import TiffExport

@@ -350,7 +350,8 @@ def _tiff_image_metadata(self) -> dict:
except NotImplementedError:
warnings.warn(
f"Pixel times are not defined for this {self.__class__.__name__}. "
"The corresponding metadata in the output file is set to `None`."
"The corresponding metadata in the output file is set to `None`.",
stacklevel=find_stack_level(),
)
pixel_time_seconds = None

6 changes: 5 additions & 1 deletion lumicks/pylake/detail/image.py
@@ -4,6 +4,8 @@

import numpy as np

from .utilities import find_stack_level


class InfowaveCode(enum.IntEnum):
discard = 0 # this data sample does not contain useful information
@@ -197,7 +199,9 @@ def histogram_rows(image, pixels_per_bin, pixel_width):
remainder = n_rows % pixels_per_bin
if remainder != 0:
warnings.warn(
f"{n_rows} pixels is not divisible by {pixels_per_bin}, final bin only contains {remainder} pixels"
f"{n_rows} pixels is not divisible by {pixels_per_bin}, final bin only contains "
f"{remainder} pixels",
stacklevel=find_stack_level(),
)
pad = np.zeros((pixels_per_bin - remainder, image.shape[1]))
image = np.vstack((image, pad))
6 changes: 4 additions & 2 deletions lumicks/pylake/detail/plotting.py
@@ -1,6 +1,7 @@
import warnings

from ..adjustments import no_adjustment
from ..detail.utilities import find_stack_level


def get_axes(axes=None, image_handle=None):
@@ -36,8 +37,9 @@ def parse_color_channel(channel):
warnings.warn(
DeprecationWarning(
"In future versions, the `channel` argument will be restricted to lowercase "
f"letters only. Use '{channel}' instead of '{input_channel}'."
)
f"letters only. Use '{channel}' instead of '{input_channel}'.",
),
stacklevel=find_stack_level(),
)

# check rgb order
4 changes: 2 additions & 2 deletions lumicks/pylake/detail/utilities.py
@@ -1,7 +1,7 @@
import math
import contextlib
import pathlib
import inspect
import pathlib
import contextlib

import numpy as np
import cachetools
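
The body of find_stack_level itself is collapsed in this view. As a rough sketch of what such a helper typically looks like (modelled on the analogous helper in pandas; the names and details below are assumptions, not the committed code), it walks up the call stack and counts frames until it leaves the pylake package:

import os
import inspect


def find_stack_level() -> int:
    """Return the stacklevel of the first frame outside the pylake package (sketch)."""
    import lumicks.pylake as pylake

    pkg_dir = os.path.dirname(pylake.__file__)
    frame = inspect.currentframe()
    try:
        level = 0
        while frame:
            if inspect.getfile(frame).startswith(pkg_dir):
                # Still inside pylake: keep walking up the stack.
                frame = frame.f_back
                level += 1
            else:
                break
    finally:
        # Break the reference cycle created by holding on to frame objects.
        del frame
    return level

Every call site changed in this diff then simply passes stacklevel=find_stack_level() instead of guessing a fixed depth.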
3 changes: 2 additions & 1 deletion lumicks/pylake/detail/widefield.py
@@ -9,6 +9,7 @@
import tifffile

from .plotting import parse_color_channel
from .utilities import find_stack_level
from ..adjustments import no_adjustment


@@ -205,7 +206,7 @@ def __init__(self, tiff_files, align_requested, roi=None, tether=None):
# warn on file open if alignment is requested, but not possible
# stacklevel=4 corresponds to ImageStack.__init__()
if self._description._alignment.has_problem:
warnings.warn(self._description._alignment.status.value, stacklevel=4)
warnings.warn(self._description._alignment.status.value, stacklevel=find_stack_level())

if roi is None:
self._roi = Roi(0, self._description.width, 0, self._description.height)
3 changes: 2 additions & 1 deletion lumicks/pylake/file.py
@@ -14,6 +14,7 @@
from .calibration import ForceCalibration
from .detail.mixin import Force, PhotonCounts, DownsampledFD, PhotonTimeTags, BaselineCorrectedForce
from .detail.h5_helper import write_h5
from .detail.utilities import find_stack_level

__all__ = ["File"]

@@ -276,7 +277,7 @@ def try_from_dataset(*args):
try:
return cls.from_dataset(*args)
except Exception as e:
warnings.warn(e.args[0])
warnings.warn(e.args[0], stacklevel=find_stack_level())
return None

if field not in self.h5:
5 changes: 4 additions & 1 deletion lumicks/pylake/fitting/detail/derivative_manipulation.py
@@ -3,6 +3,8 @@
import numpy as np
import scipy

from ...detail.utilities import find_stack_level


def numerical_diff(fn, x, dx=1e-6):
return (fn(x + dx) - fn(x - dx)) / (2.0 * dx)
@@ -126,7 +128,8 @@ def invert_function_interpolation(
except Exception as e:
warnings.warn(
f"Interpolation failed. Cause: {e}. Falling back to brute force evaluation. "
f"Results should be fine, but slower."
f"Results should be fine, but slower.",
stacklevel=find_stack_level(),
)
result[interpolated_idx] = manual_inversion(d[interpolated_idx], initial)
else:
7 changes: 5 additions & 2 deletions lumicks/pylake/fitting/detail/model_implementation.py
@@ -4,6 +4,7 @@

from .utilities import latex_frac, latex_sqrt, solve_formatter, solve_formatter_tex
from ..parameters import Parameter
from ...detail.utilities import find_stack_level
from .derivative_manipulation import invert_function, invert_jacobian, invert_function_interpolation


@@ -103,8 +104,10 @@ def wlc_marko_siggia_force(d, Lp, Lc, kT):

if np.any(d > Lc):
warnings.warn(
"Marko Siggia model is only defined properly up to the contour length (d = Lc)",
RuntimeWarning,
RuntimeWarning(
"Marko Siggia model is only defined properly up to the contour length (d = Lc)"
),
stacklevel=find_stack_level(),
)

d_div_Lc = d / Lc
7 changes: 5 additions & 2 deletions lumicks/pylake/force_calibration/power_spectrum.py
@@ -4,7 +4,7 @@

import numpy as np

from lumicks.pylake.detail.utilities import downsample
from lumicks.pylake.detail.utilities import downsample, find_stack_level


class PowerSpectrum:
@@ -52,7 +52,10 @@ def squared_fft(d):
int(np.round(window_seconds * sample_rate)) if window_seconds else len(data)
)
if num_points_per_window > len(data):
warnings.warn(RuntimeWarning("Longer window than data duration: not using windowing."))
warnings.warn(
RuntimeWarning("Longer window than data duration: not using windowing."),
stacklevel=find_stack_level(),
)
num_points_per_window = len(data)

squared_fft_chunks = [
11 changes: 7 additions & 4 deletions lumicks/pylake/force_calibration/touchdown.py
@@ -4,7 +4,7 @@
import numpy as np
import scipy

from lumicks.pylake.detail.utilities import downsample
from lumicks.pylake.detail.utilities import downsample, find_stack_level


def mack_model(
@@ -82,7 +82,8 @@ def f_test(sse_restricted, sse_unrestricted, num_data, num_pars_difference, num_
RuntimeWarning(
"Denominator in F-Test is zero. "
"This may be caused by using noise-free data or fewer than 4 data points."
)
),
stacklevel=find_stack_level(),
)
return 0.0
else:
@@ -310,15 +311,17 @@ def touchdown(
RuntimeWarning(
"Insufficient data available to reliably fit touchdown curve. We need at least two "
"oscillations to reliably fit the interference pattern."
)
),
stacklevel=find_stack_level(),
)
focal_shift = None

if p_value > maximum_p_value:
warnings.warn(
RuntimeWarning(
"Surface detection failed (piecewise linear fit not better than linear fit)"
)
),
stacklevel=find_stack_level(),
)
surface_position = None

9 changes: 6 additions & 3 deletions lumicks/pylake/group.py
@@ -1,6 +1,7 @@
import warnings

from .channel import channel_class
from .detail.utilities import find_stack_level


class Group:
@@ -34,9 +35,11 @@ def __getitem__(self, item):
redirect_location, redirect_class = self._lk_file.redirect_list.get(item_type, (None, None))
if redirect_location and not redirect_class:
warnings.warn(
f"Direct access to this field is deprecated. Use file.{redirect_location} "
"instead. In case raw access is needed, go through the fn.h5 directly.",
FutureWarning,
FutureWarning(
f"Direct access to this field is deprecated. Use file.{redirect_location} "
"instead. In case raw access is needed, go through the fn.h5 directly.",
),
stacklevel=find_stack_level(),
)

if type(thing) is h5py.Group:
4 changes: 3 additions & 1 deletion lumicks/pylake/image_stack.py
@@ -10,6 +10,7 @@
from .adjustments import no_adjustment
from .detail.image import make_image_title
from .detail.plotting import get_axes, show_image
from .detail.utilities import find_stack_level
from .detail.widefield import TiffStack, _frame_timestamps_from_exposure_timestamps
from .detail.imaging_mixins import FrameIndex, TiffExport, VideoExport

@@ -658,7 +659,8 @@ def frame_timestamp_ranges(self, *, include_dead_time=False):
"lag behind. This means that when you average data over the frame, some frames "
"after the switch may take an incorrect exposure time into account in the "
"averaging."
)
),
stacklevel=find_stack_level(),
)

return frame_timestamps
13 changes: 9 additions & 4 deletions lumicks/pylake/kymo.py
@@ -15,7 +15,7 @@
from .detail.confocal import ScanAxis, ScanMetaData, ConfocalImage
from .detail.plotting import get_axes, show_image
from .detail.timeindex import to_timestamp
from .detail.utilities import method_cache
from .detail.utilities import method_cache, find_stack_level
from .detail.bead_cropping import find_beads_template, find_beads_brightness


@@ -279,8 +279,10 @@ def _fix_incorrect_start(self):
self.start = seek_timestamp_next_line(self.infowave[self.start :])
self._cache = {}
warnings.warn(
"Start of the kymograph was truncated. Omitting the truncated first line.",
RuntimeWarning,
RuntimeWarning(
"Start of the kymograph was truncated. Omitting the truncated first line."
),
stacklevel=find_stack_level(),
)

def _to_spatial(self, data):
@@ -475,7 +477,10 @@ def set_aspect_ratio(axis, ar):
)

warnings.warn(
RuntimeWarning("Using downsampled force since high frequency force is unavailable.")
RuntimeWarning(
"Using downsampled force since high frequency force is unavailable."
),
stacklevel=find_stack_level(),
)

time_ranges = self.line_timestamp_ranges(include_dead_time=False)
21 changes: 15 additions & 6 deletions lumicks/pylake/kymotracker/detail/msd_estimation.py
@@ -5,6 +5,8 @@
import numpy as np
import numpy.typing as npt

from ...detail.utilities import find_stack_level


@dataclass(frozen=True)
class DiffusionEstimate:
@@ -472,7 +474,10 @@ def _diffusion_gls(lag_idx, mean_squared_displacements, num_points, tolerance=1e

def fallback(warning_message):
"""Fallback method if the GLS fails"""
warnings.warn(RuntimeWarning(f"{warning_message} Reverting to two-point OLS."))
warnings.warn(
RuntimeWarning(f"{warning_message} Reverting to two-point OLS."),
stacklevel=find_stack_level(),
)
return _diffusion_ols(lag_idx[:2], mean_squared_displacements[:2], num_points)

# Since the covariance matrix depends on the parameters for the intercept and slope, we obtain
Expand Down Expand Up @@ -600,7 +605,7 @@ def estimate_diffusion_constant_simple(
"`help(lk.refine_tracks_centroid)` or `help(lk.refine_tracks_gaussian)` for "
"more information."
),
stacklevel=2,
stacklevel=find_stack_level(),
)

frame_lags, msd = calculate_msd(frame_idx, coordinate, max_lag)
@@ -718,7 +723,8 @@ def determine_optimal_points(frame_idx, coordinate, max_iterations=100):
RuntimeWarning(
"Your tracks have missing frames. Note that this can lead to a suboptimal "
"estimate of the optimal number of lags when using OLS."
)
),
stacklevel=find_stack_level(),
)

num_slope = max(2, len(coordinate) // 10) # Need at least two points for a linear regression!
@@ -750,7 +756,8 @@ def determine_optimal_points(frame_idx, coordinate, max_iterations=100):
return num_slope, num_intercept

warnings.warn(
RuntimeWarning("Warning, maximum number of iterations exceeded. Returning best solution.")
RuntimeWarning("Warning, maximum number of iterations exceeded. Returning best solution."),
stacklevel=find_stack_level(),
)
return num_slope, num_intercept

@@ -1129,7 +1136,8 @@ def _determine_optimal_points_ensemble(frame_lags, msds, n_coord, max_iterations
return num_slope

warnings.warn(
RuntimeWarning("Warning, maximum number of iterations exceeded. Returning best solution.")
RuntimeWarning("Warning, maximum number of iterations exceeded. Returning best solution."),
stacklevel=find_stack_level(),
)

return num_slope
@@ -1143,7 +1151,8 @@ def ensemble_ols(kymotracks, max_lag):
warnings.warn(
RuntimeWarning(
"Your tracks have missing frames. Note that this can lead to a suboptimal estimates"
)
),
stacklevel=find_stack_level(),
)

optimal_lags = (