Merge remote-tracking branch 'origin/master' into xarray/main
mats-knmi committed Jan 14, 2025
2 parents 3226042 + e332585 commit 4e2d4b7
Showing 5 changed files with 191 additions and 62 deletions.
2 changes: 1 addition & 1 deletion PKG-INFO
@@ -1,6 +1,6 @@
Metadata-Version: 1.2
Name: pysteps
Version: 1.12.0
Version: 1.13.0
Summary: Python framework for short-term ensemble prediction systems
Home-page: http://pypi.python.org/pypi/pysteps/
License: LICENSE
13 changes: 8 additions & 5 deletions pysteps/io/importers.py
@@ -1400,10 +1400,13 @@ def import_odim_hdf5(filename, qty="RATE", **kwargs):
# check if the "what" group is in the "dataset" group
if "what" in list(dsg[1].keys()):
if "quantity" in dsg[1]["what"].attrs.keys():
qty_, gain, offset, nodata, undetect = _read_opera_hdf5_what_group(
dsg[1]["what"]
)
what_grp_found = True
try:
qty_, gain, offset, nodata, undetect = (
_read_opera_hdf5_what_group(dsg[1]["what"])
)
what_grp_found = True
except KeyError:
pass

for dg in dsg[1].items():
if dg[0][0:4] == "data":
@@ -1576,7 +1579,7 @@ def import_opera_hdf5(filename, qty="RATE", **kwargs):


def _read_opera_hdf5_what_group(whatgrp):
qty = whatgrp.attrs["quantity"]
qty = whatgrp.attrs["quantity"] if "quantity" in whatgrp.attrs.keys() else b"QIND"
gain = whatgrp.attrs["gain"] if "gain" in whatgrp.attrs.keys() else 1.0
offset = whatgrp.attrs["offset"] if "offset" in whatgrp.attrs.keys() else 0.0
nodata = whatgrp.attrs["nodata"] if "nodata" in whatgrp.attrs.keys() else np.nan
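Usage sketch (not part of this commit): the try/except in import_odim_hdf5 and the b"QIND" default in _read_opera_hdf5_what_group let the readers tolerate "what" groups that lack a "quantity" attribute instead of raising a KeyError. The call below is a minimal, hypothetical example for the OPERA importer touched here; the filename is a placeholder and must point to a locally available ODIM HDF5 composite:

    import pysteps

    # Placeholder path; any OPERA ODIM HDF5 composite available locally works here.
    filename = "T_PAAH21_C_EUOC_20180824180000.hdf"

    # qty selects the quantity to decode; "RATE" (rain rate) is used below,
    # and the new tests also read "DBZH" and "ACRR" products.
    precip, _, metadata = pysteps.io.import_opera_hdf5(filename, qty="RATE")
    print(precip.shape, metadata["unit"])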
88 changes: 41 additions & 47 deletions pysteps/nowcasts/steps.py
@@ -266,48 +266,14 @@ class StepsNowcasterState:
default_factory=list
)
velocity_perturbations: list[Callable] | None = field(default_factory=list)
fft_objs: list[Any] | None = field(default_factory=list)
fft_objects: list[Any] | None = field(default_factory=list)


class StepsNowcaster:
def __init__(
self, dataset: xr.Dataset, time_steps, steps_config: StepsNowcasterConfig
self, precip, velocity, time_steps, steps_config: StepsNowcasterConfig
):
"""
Initialize a nowcast class to be called with `compute_forecast`.
Parameters
----------
dataset: xarray.Dataset
Input dataset as described in the documentation of
:py:mod:`pysteps.io.importers`. It has to contain the ``velocity_x`` and
``velocity_y`` data variables, as well as any precipitation data variable.
The time dimension of the dataset has to be size
``ar_order + 1`` and the precipitation variable has to have this dimension. All
velocity values are required to be finite.
timesteps: int or list of floats
Number of time steps to forecast or a list of time steps for which the
forecasts are computed (relative to the input time step). The elements
of the list are required to be in ascending order.
config: StepsNowcasterConfig
Provides a set of configuration parameters for the nowcast ensemble generation.
See also
--------
pysteps.extrapolation.interface, pysteps.cascade.interface,
pysteps.noise.interface, pysteps.noise.utils.compute_noise_stddev_adjs
References
----------
:cite:`Seed2003`, :cite:`BPS2006`, :cite:`SPN2013`, :cite:`PCH2019b`
"""
precip_var = dataset.attrs["precip_var"]
precip = dataset[precip_var].values
velocity = np.stack(
[dataset["velocity_x"].values, dataset["velocity_y"].values]
)
# Store inputs and optional parameters
self.__dataset = dataset
self.__precip = precip
self.__velocity = velocity
self.__time_steps = time_steps
@@ -329,15 +295,32 @@ def compute_forecast(self):
Generate a nowcast ensemble by using the Short-Term Ensemble Prediction
System (STEPS) method.
Parameters
----------
precip: array-like
Array of shape (ar_order+1,m,n) containing the input precipitation fields
ordered by timestamp from oldest to newest. The time steps between the
inputs are assumed to be regular.
velocity: array-like
Array of shape (2,m,n) containing the x- and y-components of the advection
field. The velocities are assumed to represent one time step between the
inputs. All values are required to be finite.
timesteps: int or list of floats
Number of time steps to forecast or a list of time steps for which the
forecasts are computed (relative to the input time step). The elements
of the list are required to be in ascending order.
config: StepsNowcasterConfig
Provides a set of configuration parameters for the nowcast ensemble generation.
Returns
-------
out: xarray.Dataset
If return_output is True, a dataset as described in the documentation of
:py:mod:`pysteps.io.importers` is returned containing a time series of forecast
out: ndarray
If return_output is True, a four-dimensional array of shape
(n_ens_members,num_timesteps,m,n) containing a time series of forecast
precipitation fields for each ensemble member. Otherwise, a None value
is returned. The time series starts from t0+timestep, where timestep is
taken from the metadata of the time coordinate. If measure_time is True, the
return value is a three-element tuple containing the nowcast dataset, the
taken from the input precipitation fields. If measure_time is True, the
return value is a three-element tuple containing the nowcast array, the
initialization time of the nowcast generator and the time used in the
main loop (seconds).
@@ -379,13 +362,22 @@ def compute_forecast(self):

# Stack and return the forecast output
if self.__config.return_output:
output_dataset = convert_output_to_xarray_dataset(
self.__dataset, self.__time_steps, self.__state.precip_forecast
self.__state.precip_forecast = np.stack(
[
np.stack(self.__state.precip_forecast[j])
for j in range(self.__config.n_ens_members)
]
)
if self.__config.measure_time:
return (output_dataset, self.__init_time, self.__mainloop_time)
return output_dataset
return None
return (
self.__state.precip_forecast,
self.__init_time,
self.__mainloop_time,
)
else:
return self.__state.precip_forecast
else:
return None

def __nowcast_main(self):
"""
@@ -1476,7 +1468,9 @@ def forecast(
)

# Create an instance of the new class with all the provided arguments
nowcaster = StepsNowcaster(dataset, timesteps, steps_config=nowcaster_config)
nowcaster = StepsNowcaster(
precip, velocity, timesteps, steps_config=nowcaster_config
)
forecast_steps_nowcast = nowcaster.compute_forecast()
nowcaster.reset_states_and_params()
# Call the appropriate methods within the class
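Usage sketch (not part of this commit): after this change the nowcaster is constructed from plain NumPy arrays again rather than an xarray.Dataset. The sketch follows only what is visible in this diff, namely the input shapes from the compute_forecast docstring and the StepsNowcasterConfig fields referenced in the class (n_ens_members, return_output, measure_time); whether the config dataclass provides defaults for its remaining options is an assumption, and the random input is a toy value for illustration only:

    import numpy as np
    from pysteps.nowcasts.steps import StepsNowcaster, StepsNowcasterConfig

    # ar_order + 1 = 3 input precipitation fields of shape (m, n); the advection
    # field has shape (2, m, n) and must contain only finite values.
    precip = np.random.rand(3, 200, 200)
    velocity = np.zeros((2, 200, 200))
    timesteps = 6  # or an ascending list of floats

    # Assumed keyword arguments; only these fields appear in the diff above.
    config = StepsNowcasterConfig(
        n_ens_members=4, return_output=True, measure_time=False
    )
    nowcaster = StepsNowcaster(precip, velocity, timesteps, steps_config=config)

    # With return_output=True this is an ndarray of shape
    # (n_ens_members, num_timesteps, m, n); otherwise None.
    forecast = nowcaster.compute_forecast()
    nowcaster.reset_states_and_params()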
148 changes: 140 additions & 8 deletions pysteps/tests/test_io_opera_hdf5.py
@@ -9,19 +9,57 @@

pytest.importorskip("h5py")

# tests for three OPERA products:
# Odyssey rain rate composite (production discontinued on October 30th 2024)
# CIRRUS max. reflectivity composites
# NIMBUS rain rate composites

root_path = pysteps.rcparams.data_sources["opera"]["root_path"]

filename = os.path.join(root_path, "20180824", "T_PAAH21_C_EUOC_20180824180000.hdf")
precip, _, metadata = pysteps.io.import_opera_hdf5(filename)
precip_odyssey, _, metadata_odyssey = pysteps.io.import_opera_hdf5(filename, qty="RATE")

filename = os.path.join(
root_path, "20241126", "CIRRUS", "T_PABV21_C_EUOC_20241126010000.hdf"
)
precip_cirrus, _, metadata_cirrus = pysteps.io.import_opera_hdf5(filename, qty="DBZH")

filename = os.path.join(
root_path, "20241126", "NIMBUS", "T_PAAH22_C_EUOC_20241126010000.hdf"
)
precip_nimbus_rain_rate, _, metadata_nimbus_rain_rate = pysteps.io.import_opera_hdf5(
filename, qty="RATE"
)

filename = os.path.join(
root_path, "20241126", "NIMBUS", "T_PASH22_C_EUOC_20241126010000.hdf"
)
precip_nimbus_rain_accum, _, metadata_nimbus_rain_accum = pysteps.io.import_opera_hdf5(
filename, qty="ACRR"
)


def test_io_import_opera_hdf5_shape():
def test_io_import_opera_hdf5_odyssey_shape():
"""Test the importer OPERA HDF5."""
assert precip.shape == (2200, 1900)
assert precip_odyssey.shape == (2200, 1900)


# test_metadata: list of (variable,expected, tolerance) tuples
def test_io_import_opera_hdf5_cirrus_shape():
"""Test the importer OPERA HDF5."""
assert precip_cirrus.shape == (4400, 3800)


def test_io_import_opera_hdf5_nimbus_rain_rate_shape():
"""Test the importer OPERA HDF5."""
assert precip_nimbus_rain_rate.shape == (2200, 1900)


def test_io_import_opera_hdf5_nimbus_rain_accum_shape():
"""Test the importer OPERA HDF5."""
assert precip_nimbus_rain_accum.shape == (2200, 1900)


# test_metadata: list of (variable,expected, tolerance) tuples
expected_proj = (
"+proj=laea +lat_0=55.0 +lon_0=10.0 "
"+x_0=1950000.0 "
@@ -30,7 +68,7 @@ def test_io_import_opera_hdf5_shape():
)

# list of (variable,expected,tolerance) tuples
test_attrs = [
test_odyssey_attrs = [
("projection", expected_proj, None),
("ll_lon", -10.434576838640398, 1e-10),
("ll_lat", 31.746215319325056, 1e-10),
@@ -53,7 +91,101 @@ def test_io_import_opera_hdf5_shape():
]


@pytest.mark.parametrize("variable, expected, tolerance", test_attrs)
def test_io_import_mch_gif_dataset_attrs(variable, expected, tolerance):
@pytest.mark.parametrize("variable, expected, tolerance", test_odyssey_attrs)
def test_io_import_opera_hdf5_odyssey_dataset_attrs(variable, expected, tolerance):
"""Test the importer OPERA HDF5."""
smart_assert(metadata[variable], expected, tolerance)
smart_assert(metadata_odyssey[variable], expected, tolerance)


# list of (variable,expected,tolerance) tuples
test_cirrus_attrs = [
("projection", expected_proj, None),
("ll_lon", -10.4345768386404, 1e-10),
("ll_lat", 31.7462153182675, 1e-10),
("ur_lon", 57.8119647501499, 1e-10),
("ur_lat", 67.6210371071631, 1e-10),
("x1", -0.00027143326587975025, 1e-6),
("y1", -4400000.00116988, 1e-10),
("x2", 3800000.0000817003, 1e-10),
("y2", -8.761277422308922e-05, 1e-6),
("xpixelsize", 1000.0, 1e-10),
("ypixelsize", 1000.0, 1e-10),
("cartesian_unit", "m", None),
("accutime", 15.0, 1e-10),
("yorigin", "upper", None),
("unit", "dBZ", None),
("institution", "Odyssey datacentre", None),
("transform", "dB", None),
("zerovalue", -32.0, 1e-10),
("threshold", -31.5, 1e-10),
]


@pytest.mark.parametrize("variable, expected, tolerance", test_cirrus_attrs)
def test_io_import_opera_hdf5_cirrus_dataset_attrs(variable, expected, tolerance):
"""Test OPERA HDF5 importer: max. reflectivity composites from CIRRUS."""
smart_assert(metadata_cirrus[variable], expected, tolerance)


# list of (variable,expected,tolerance) tuples
test_nimbus_rain_rate_attrs = [
("projection", expected_proj, None),
("ll_lon", -10.434599999137568, 1e-10),
("ll_lat", 31.74619995126678, 1e-10),
("ur_lon", 57.8119032106317, 1e-10),
("ur_lat", 67.62104536996274, 1e-10),
("x1", -2.5302714337594807, 1e-6),
("y1", -4400001.031169886, 1e-10),
("x2", 3799997.4700817037, 1e-10),
("y2", -1.0300876162946224, 1e-6),
("xpixelsize", 2000.0, 1e-10),
("ypixelsize", 2000.0, 1e-10),
("cartesian_unit", "m", None),
("accutime", 15.0, 1e-10),
("yorigin", "upper", None),
("unit", "mm/h", None),
("institution", "Odyssey datacentre", None),
("transform", None, None),
("zerovalue", 0.0, 1e-10),
("threshold", 0.01, 1e-10),
]


@pytest.mark.parametrize("variable, expected, tolerance", test_nimbus_rain_rate_attrs)
def test_io_import_opera_hdf5_nimbus_rain_rate_dataset_attrs(
variable, expected, tolerance
):
"""Test OPERA HDF5 importer: rain rate composites from NIMBUS."""
smart_assert(metadata_nimbus_rain_rate[variable], expected, tolerance)


# list of (variable,expected,tolerance) tuples
test_nimbus_rain_accum_attrs = [
("projection", expected_proj, None),
("ll_lon", -10.434599999137568, 1e-10),
("ll_lat", 31.74619995126678, 1e-10),
("ur_lon", 57.8119032106317, 1e-10),
("ur_lat", 67.62104536996274, 1e-10),
("x1", -2.5302714337594807, 1e-6),
("y1", -4400001.031169886, 1e-10),
("x2", 3799997.4700817037, 1e-10),
("y2", -1.0300876162946224, 1e-6),
("xpixelsize", 2000.0, 1e-10),
("ypixelsize", 2000.0, 1e-10),
("cartesian_unit", "m", None),
("accutime", 15.0, 1e-10),
("yorigin", "upper", None),
("unit", "mm", None),
("institution", "Odyssey datacentre", None),
("transform", None, None),
("zerovalue", 0.0, 1e-10),
("threshold", 0.01, 1e-10),
]


@pytest.mark.parametrize("variable, expected, tolerance", test_nimbus_rain_accum_attrs)
def test_io_import_opera_hdf5_nimbus_rain_accum_dataset_attrs(
variable, expected, tolerance
):
"""Test OPERA HDF5 importer: rain accumulation composites from NIMBUS."""
smart_assert(metadata_nimbus_rain_accum[variable], expected, tolerance)
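
Note on running these tests (not part of this commit): the module-level importer calls above read real OPERA files, so pytest.importorskip("h5py") must succeed and pysteps.rcparams.data_sources["opera"]["root_path"] must point at the sample data containing the 20180824 and 20241126 composites. A minimal way to run only this module from a checkout:

    import pytest

    # -v lists every parametrized odyssey/cirrus/nimbus case individually.
    pytest.main(["-v", "pysteps/tests/test_io_opera_hdf5.py"])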
2 changes: 1 addition & 1 deletion setup.py
@@ -70,7 +70,7 @@

setup(
name="pysteps",
version="1.12.0",
version="1.13.0",
author="PySteps developers",
packages=find_packages(),
license="LICENSE",
