Skip to content

Commit

Permalink
Merge branch 'dev' into prt_support
Browse files Browse the repository at this point in the history
  • Loading branch information
dbrakenhoff committed Dec 26, 2024
2 parents 73c7c4e + 35ba6b2 commit 9f4f3eb
Show file tree
Hide file tree
Showing 5 changed files with 26 additions and 9 deletions.
2 changes: 1 addition & 1 deletion .readthedocs.yml
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ version: 2

# Set the version of Python and other tools you might need
build:
os: ubuntu-22.04
os: ubuntu-lts-latest
tools:
python: "3.11"

Expand Down
9 changes: 7 additions & 2 deletions nlmod/cache.py
Original file line number Diff line number Diff line change
Expand Up @@ -447,13 +447,13 @@ def _same_function_arguments(func_args_dic, func_args_dic_cache):
for key, item in func_args_dic.items():
# check if cache and function call have same argument names
if key not in func_args_dic_cache:
msg = f"cache was created using different function argument {key}, do not use cached data"
msg = f"cache was created using different function argument '{key}' not in cached arguments, do not use cached data"
logger.info(msg)
return False

# check if cache and function call have same argument types
if not isinstance(item, type(func_args_dic_cache[key])):
msg = f"cache was created using different function argument type: {key}: {type(func_args_dic_cache[key])}, do not use cached data"
msg = f"cache was created using different function argument types for {key}: current '{type(item)}' cache: '{type(func_args_dic_cache[key])}', do not use cached data"
logger.info(msg)
return False

Expand All @@ -469,11 +469,16 @@ def _same_function_arguments(func_args_dic, func_args_dic_cache):
else:
msg = f"cache was created using different function argument: {key}, do not use cached data"
logger.info(msg)
logger.debug(f"{key}: {item} != {func_args_dic_cache[key]}")
return False
elif isinstance(item, np.ndarray):
if not np.allclose(item, func_args_dic_cache[key]):
msg = f"cache was created using different numpy array for: {key}, do not use cached data"
logger.info(msg)
logger.debug(
f"array '{key}' max difference with stored copy is "
f"{np.max(np.abs(item - func_args_dic_cache[key]))}"
)
return False
elif isinstance(item, (pd.DataFrame, pd.Series, xr.DataArray)):
if not item.equals(func_args_dic_cache[key]):
Expand Down
2 changes: 2 additions & 0 deletions nlmod/gwf/surface_water.py
Original file line number Diff line number Diff line change
Expand Up @@ -869,6 +869,8 @@ def add_min_ahn_to_gdf(
The default is 'ahn_min'.
statistic : string, optional
The statistic to calculate at each surface water feature. The default is 'min'.
progressbar : bool, optional
Show a progressbar when True. The default is False.
Returns
-------
Expand Down
2 changes: 1 addition & 1 deletion nlmod/sim/sim.py
Original file line number Diff line number Diff line change
Expand Up @@ -170,7 +170,7 @@ def sim(ds, exe_name=None, version_tag=None, **kwargs):
sim_name=ds.model_name,
exe_name=exe_name,
version=ds.mfversion,
sim_ws=ds.model_ws,
sim_ws=kwargs.pop("sim_ws", ds.model_ws),
**kwargs,
)

Expand Down
20 changes: 15 additions & 5 deletions nlmod/util.py
Original file line number Diff line number Diff line change
Expand Up @@ -878,7 +878,7 @@ def format(self, record) -> str:
return super().format(record)


def get_color_logger(level="INFO"):
def get_color_logger(level="INFO", logger_name=None):
"""Get a logger with colored output.
Parameters
Expand Down Expand Up @@ -911,7 +911,7 @@ def get_color_logger(level="INFO"):
handler = logging.StreamHandler(sys.stdout)
handler.setFormatter(formatter)

logger = logging.getLogger()
logger = logging.getLogger(logger_name)
logger.handlers[:] = []
logger.addHandler(handler)
logger.setLevel(getattr(logging, level))
Expand Down Expand Up @@ -1137,7 +1137,13 @@ def gdf_intersection_join(
if np.any(measure.sum() > min_total_overlap * measure_org):
# take the largest
ind = measure.idxmax()
gdf_to.loc[index, columns] = gdf_from.loc[ind, columns]
idf_from = gdf_from.loc[ind, columns]
if idf_from.index.size > 1:
logger.warning(
f"Warning, multiple entries found for '{ind}'. Using the first one."
)
idf_from = idf_from.iloc[0]
gdf_to.loc[index, columns] = idf_from
if add_index_from_column:
gdf_to.loc[index, add_index_from_column] = ind
return gdf_to
Expand All @@ -1152,8 +1158,9 @@ def zonal_statistics(
all_touched=True,
statistics="mean",
add_to_gdf=True,
progressbar=False,
):
"""Calculate raster statistics in the features of a GeoDataFrame
"""Calculate raster statistics in the features of a GeoDataFrame.
Parameters
----------
Expand Down Expand Up @@ -1186,7 +1193,8 @@ def zonal_statistics(
add_to_gdf : bool, optional
Add the result to the original GeoDataFrame if True. Otherwise return a
GeoDataFrame with only the statistics. The default is True.
progressbar : bool, optional
Show a progressbar when using rasterstats. The default is False.
Returns
-------
Expand Down Expand Up @@ -1259,6 +1267,7 @@ def zonal_statistics(
all_touched=all_touched,
affine=da.rio.transform(),
nodata=da.rio.nodata,
progress=progressbar,
)
else:
# we assume da is a filename
Expand All @@ -1267,6 +1276,7 @@ def zonal_statistics(
da,
stats=stat,
all_touched=all_touched,
progress=progressbar,
)
for stat, column in zip(statistics, columns):
geometry[column] = [x[stat] for x in stats]
Expand Down

0 comments on commit 9f4f3eb

Please sign in to comment.