Clean up print statements to align consistently
justin-richling committed Jan 31, 2025
1 parent 9ee0a67 commit 6da97a2
Showing 10 changed files with 47 additions and 47 deletions.
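The change is mechanical: every user-facing message gains a leading tab and, where useful, a severity tag (WARNING:, INFO:, or a "- " continuation bullet). A minimal Python sketch of the convention as it reads after this commit; the variable values below are placeholders for illustration, not ADF data:

import warnings

var = "PS"                     # placeholder variable name
sfil = "/path/to/missing.nc"   # placeholder file path
case_name = "example_case"     # placeholder case name
found_syear = 1979             # placeholder start year

# Warnings carry an explicit "WARNING:" tag after the leading tab.
warnings.warn(f"\t WARNING: Expecting to find file: {sfil}")

# Informational prints use "INFO:" instead.
print(f"\t INFO: Adding PS to file for '{var}'")

# Multi-part status messages continue with "\t - " bullets.
msg = f"\t - No given start year for {case_name}, "
msg += f"using first found year: {found_syear}"
print(msg)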
14 changes: 7 additions & 7 deletions lib/adf_dataset.py
@@ -106,7 +106,7 @@ def get_timeseries_file(self, case, field):
def get_ref_timeseries_file(self, field):
"""Return list of reference time series files"""
if self.adf.compare_obs:
warnings.warn("ADF does not currently expect observational time series files.")
warnings.warn("\t WARNING: ADF does not currently expect observational time series files.")
return None
else:
ts_loc = Path(self.adf.get_baseline_info("cam_ts_loc", required=True))
@@ -125,11 +125,11 @@ def load_timeseries_dataset(self, fils):
else:
sfil = str(fils[0])
if not Path(sfil).is_file():
warnings.warn(f"Expecting to find file: {sfil}")
warnings.warn(f"\t WARNING: Expecting to find file: {sfil}")
return None
ds = xr.open_dataset(sfil, decode_times=False)
if ds is None:
warnings.warn(f"invalid data on load_dataset")
warnings.warn(f"\t WARNING: invalid data on load_dataset")
# assign time to midpoint of interval (even if it is already)
if 'time_bnds' in ds:
t = ds['time_bnds'].mean(dim='nbnd')
@@ -140,7 +140,7 @@
t.attrs = ds['time'].attrs
ds = ds.assign_coords({'time':t})
else:
warnings.warn("Timeseries file does not have time bounds info.")
warnings.warn("\t INFO: Timeseries file does not have time bounds info.")
return xr.decode_cf(ds)

def load_timeseries_da(self, case, variablename):
@@ -301,18 +301,18 @@ def load_reference_regrid_da(self, case, field):
def load_dataset(self, fils):
"""Return xarray DataSet from file(s)"""
if (len(fils) == 0):
warnings.warn("\t Input file list is empty.")
warnings.warn("\t WARNING: Input file list is empty.")
return None
elif (len(fils) > 1):
ds = xr.open_mfdataset(fils, combine='by_coords')
else:
sfil = str(fils[0])
if not Path(sfil).is_file():
warnings.warn(f"Expecting to find file: {sfil}")
warnings.warn(f"\t WARNING: Expecting to find file: {sfil}")
return None
ds = xr.open_dataset(sfil)
if ds is None:
warnings.warn(f"invalid data on load_dataset")
warnings.warn(f"\t WARNING: invalid data on load_dataset")
return ds

# Load DataArray
2 changes: 1 addition & 1 deletion lib/adf_diag.py
@@ -667,7 +667,7 @@ def call_ncrcat(cmd):

if "PS" in hist_file_var_list:
ncrcat_var_list = ncrcat_var_list + ",PS"
print("\t Adding PS to file")
print(f"\t INFO: Adding PS to file for '{var}'")
else:
wmsg = "WARNING: PS not found in history file."
wmsg += " It might be needed at some point."
8 changes: 4 additions & 4 deletions lib/adf_info.py
@@ -418,23 +418,23 @@ def __init__(self, config_file, debug=False):
#History file path isn't needed if user is running ADF directly on time series.
#So make sure start and end year are specified:
if syear is None:
msg = f"No given start year for {case_name}, "
msg = f"\t - No given start year for {case_name}, "
msg += f"using first found year: {found_syear}"
print(msg)
syear = found_syear
if syear not in found_yr_range:
msg = f"Given start year '{syear}' is not in current dataset "
msg = f"\t - Given start year '{syear}' is not in current dataset "
msg += f"{case_name}, using first found year: {found_syear}\n"
print(msg)
syear = found_syear
#End if
if eyear is None:
msg = f"No given end year for {case_name}, "
msg = f"\t - No given end year for {case_name}, "
msg += f"using last found year: {found_eyear}"
print(msg)
eyear = found_eyear
if eyear not in found_yr_range:
msg = f"Given end year '{eyear}' is not in current dataset "
msg = f"\t - Given end year '{eyear}' is not in current dataset "
msg += f"{case_name}, using last found year: {found_eyear}\n"
print(msg)
eyear = found_eyear
2 changes: 1 addition & 1 deletion lib/plotting_functions.py
@@ -377,7 +377,7 @@ def spatial_average(indata, weights=None, spatial_dims=None):
warnings.warn("area variable being used to generated normalized weights.")
weights = indata['area'] / indata['area'].sum()
else:
warnings.warn("We need a way to get area variable. Using equal weights.")
warnings.warn("\t We need a way to get area variable. Using equal weights.")
weights = xr.DataArray(1.)
weights.name = "weights"
else:
26 changes: 13 additions & 13 deletions scripts/plotting/global_latlon_map.py
@@ -186,7 +186,7 @@ def global_latlon_map(adfobj):

o_has_dims = pf.validate_dims(odata, ["lat", "lon", "lev"]) # T iff dims are (lat,lon) -- can't plot unless we have both
if (not o_has_dims['has_lat']) or (not o_has_dims['has_lon']):
print(f"\t = skipping global map for {var} as REFERENCE does not have both lat and lon")
print(f"\t WARNING: skipping global map for {var} as REFERENCE does not have both lat and lon")
continue

#Loop over model cases:
@@ -215,11 +215,11 @@
#Determine dimensions of variable:
has_dims = pf.validate_dims(mdata, ["lat", "lon", "lev"])
if (not has_dims['has_lat']) or (not has_dims['has_lon']):
print(f"\t = skipping global map for {var} for case {case_name} as it does not have both lat and lon")
print(f"\t WARNING: skipping global map for {var} for case {case_name} as it does not have both lat and lon")
continue
else: # i.e., has lat&lon
if (has_dims['has_lev']) and (not pres_levs):
print(f"\t - skipping global map for {var} as it has more than lev dimension, but no pressure levels were provided")
print(f"\t WARNING: skipping global map for {var} as it has more than lev dimension, but no pressure levels were provided")
continue

# Check output file. If file does not exist, proceed.
@@ -238,7 +238,7 @@
plot_name = plot_loc / f"{var}_{pres}hpa_{s}_LatLon_Mean.{plot_type}"
doplot[plot_name] = plot_file_op(adfobj, plot_name, f"{var}_{pres}hpa", case_name, s, web_category, redo_plot, "LatLon")
if all(value is None for value in doplot.values()):
print(f"All plots exist for {var}. Redo is {redo_plot}. Existing plots added to website data. Continue.")
print(f"\t INFO: All plots exist for {var}. Redo is {redo_plot}. Existing plots added to website data. Continue.")
continue

#Create new dictionaries:
@@ -289,7 +289,7 @@
#have been interpolated to the standard reference
#pressure levels:
if (not (pres in mdata['lev'])) or (not (pres in odata['lev'])):
print(f"plot_press_levels value '{pres}' not present in {var} [test: {(pres in mdata['lev'])}, ref: {pres in odata['lev']}], so skipping.")
print(f"\t WARNING: plot_press_levels value '{pres}' not present in {var} [test: {(pres in mdata['lev'])}, ref: {pres in odata['lev']}], so skipping.")
continue

#Loop over seasons:
@@ -444,7 +444,7 @@ def aod_latlon(adfobj):
file_mod08_m3 = os.path.join(obs_dir, 'MOD08_M3_192x288_AOD_2001-2020_climo.nc')

if (not Path(file_merra2).is_file()) or (not Path(file_mod08_m3).is_file()):
print("\t ** AOD Panel plots not made, missing MERRA2 and/or MODIS file")
print("\t WARNING: AOD Panel plots not made, missing MERRA2 and/or MODIS file")
return

ds_merra2 = xr.open_dataset(file_merra2)
@@ -480,7 +480,7 @@

#Skip this variable/case if the climo file doesn't exist:
if ds_case is None:
dmsg = f"No test climo file for {case} for variable `{var}`, global lat/lon plots skipped."
dmsg = f"\t WARNING: No test climo file for {case} for variable `{var}`, global lat/lon plots skipped."
adfobj.debug_log(dmsg)
continue
else:
@@ -498,7 +498,7 @@
case_lat = True
else:
err_msg = "AOD 4-panel plot:\n"
err_msg += f"\t The lat values don't match between obs and '{case}'\n"
err_msg += f"\t WARNING: The lat values don't match between obs and '{case}'\n"
err_msg += f"\t - {case} lat shape: {case_lat_shape} and "
err_msg += f"obs lat shape: {obs_lat_shape}"
adfobj.debug_log(err_msg)
@@ -510,7 +510,7 @@
case_lon = True
else:
err_msg = "AOD 4-panel plot:\n"
err_msg += f"\t The lon values don't match between obs and '{case}'\n"
err_msg += f"\t WARNING: The lon values don't match between obs and '{case}'\n"
err_msg += f"\t - {case} lon shape: {case_lon_shape} and "
err_msg += f"obs lon shape: {obs_lon_shape}"
adfobj.debug_log(err_msg)
@@ -547,7 +547,7 @@
# Gather reference variable data
ds_base = adfobj.data.load_reference_climo_da(base_name, var)
if ds_base is None:
dmsg = f"No baseline climo file for {base_name} for variable `{var}`, global lat/lon plots skipped."
dmsg = f"\t WARNING: No baseline climo file for {base_name} for variable `{var}`, global lat/lon plots skipped."
adfobj.debug_log(dmsg)
else:
# Round lat/lons so they match obs
@@ -564,7 +564,7 @@
base_lat = True
else:
err_msg = "AOD 4-panel plot:\n"
err_msg += f"\t The lat values don't match between obs and '{base_name}'\n"
err_msg += f"\t WARNING: The lat values don't match between obs and '{base_name}'\n"
err_msg += f"\t - {base_name} lat shape: {base_lat_shape} and "
err_msg += f"obs lat shape: {obs_lat_shape}"
adfobj.debug_log(err_msg)
@@ -576,7 +576,7 @@
base_lon = True
else:
err_msg = "AOD 4-panel plot:\n"
err_msg += f"\t The lon values don't match between obs and '{base_name}'\n"
err_msg += f"\t WARNING: The lon values don't match between obs and '{base_name}'\n"
err_msg += f"\t - {base_name} lon shape: {base_lon_shape} and "
err_msg += f"obs lon shape: {obs_lon_shape}"
adfobj.debug_log(err_msg)
@@ -792,7 +792,7 @@ def aod_panel_latlon(adfobj, plot_titles, plot_params, data, season, obs_name, c
if field.ndim > 2:
print(f"Required 2d lat/lon coordinates, got {field.ndim}d")
emg = "AOD panel plot:\n"
emg += f"\t Too many dimensions for {case_name}. Needs 2 (lat/lon) but got {field.ndim}"
emg += f"\t WARNING: Too many dimensions for {case_name}. Needs 2 (lat/lon) but got {field.ndim}"
adfobj.debug_log(emg)
print(f"{emg} ")
return
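The aod_latlon changes above repeat the same lat/lon comparison and warning text four times (test case vs. obs and baseline vs. obs, for both lat and lon). As a hedged sketch only, the check could be factored into one helper while keeping the message format introduced in this commit; _grids_match is hypothetical and not part of ADF, while adfobj.debug_log is assumed to accept a string exactly as it does in the hunks above:

import numpy as np
import xarray as xr

def _grids_match(adfobj, da: xr.DataArray, obs: xr.DataArray, name: str) -> bool:
    # Hypothetical helper (not in this commit): return True when da shares the
    # obs lat/lon grid; otherwise log a consistently formatted warning.
    ok = True
    for dim in ("lat", "lon"):
        if np.array_equal(da[dim].values, obs[dim].values):
            continue
        msg = "AOD 4-panel plot:\n"
        msg += f"\t WARNING: The {dim} values don't match between obs and '{name}'\n"
        msg += f"\t - {name} {dim} shape: {da[dim].shape} and "
        msg += f"obs {dim} shape: {obs[dim].shape}"
        adfobj.debug_log(msg)
        ok = False
    return ok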
6 changes: 3 additions & 3 deletions scripts/plotting/global_latlon_vect_map.py
@@ -273,7 +273,7 @@ def global_latlon_vect_map(adfobj):
# check if there is a lat dimension:
if not has_lat_ref:
print(
f"Variable named {var} is missing a lat dimension for '{base_name}', cannot continue to plot."
f"\t - WARNING: Variable '{var}' is missing a lat dimension for '{base_name}', cannot continue to plot."
)
continue
# End if
@@ -289,7 +289,7 @@

#Check if plot output directory exists, and if not, then create it:
if not plot_loc.is_dir():
print("\t {} not found, making new directory".format(plot_loc))
print(f"\t {plot_loc} not found, making new directory")
plot_loc.mkdir(parents=True)
#End if

@@ -336,7 +336,7 @@
# check if there is a lat dimension:
if not has_lat:
print(
f"Variable named {var} is missing a lat dimension for '{case_name}', cannot continue to plot."
f"\t - {var} is missing a lat dimension for '{case_name}', cannot continue to plot."
)
continue
# End if
14 changes: 7 additions & 7 deletions scripts/plotting/global_mean_timeseries.py
@@ -71,7 +71,7 @@ def global_mean_timeseries(adfobj):
# Check to see if this field is available
if ref_ts_da is None:
print(
f"\t Variable named {field} for case '{base_name}' provides Nonetype. Skipping this variable"
f"\t WARNING: Variable {field} for case '{base_name}' provides Nonetype. Skipping this variable"
)
else:
# check data dimensions:
@@ -85,15 +85,15 @@
# check if this is a "2-d" varaible:
if has_lev_ref:
print(
f"\t Variable named {field} has a lev dimension for '{base_name}', which does not work with this script."
f"\t WARNING: Variable {field} has a lev dimension for '{base_name}', which does not work with this script."
)
continue
# End if

# check if there is a lat dimension:
if not has_lat_ref:
print(
f"\t Variable named {field} is missing a lat dimension for '{base_name}', cannot continue to plot."
f"\t WARNING: Variable {field} is missing a lat dimension for '{base_name}', cannot continue to plot."
)
continue
# End if
@@ -129,15 +129,15 @@
field
) # Provides access to LENS2 dataset when available (class defined below)
else:
print(f"\t ** Some model years for '{field}' are outside LENS years, will skip plotting LENS data for clarity")
print(f"\t ** Some model years for {field} are outside LENS years, will skip plotting LENS data for clarity")
lens2_data = None
# End if - LENS

c_ts_da = adfobj.data.load_timeseries_da(case_name, field)

if c_ts_da is None:
print(
f"\t Variable named {field} for case '{case_name}' provides Nonetype. Skipping this variable"
f"\t WARNING: Variable {field} for case '{case_name}' provides Nonetype. Skipping this variable"
)
skip_var = True
continue
@@ -155,7 +155,7 @@
# If 3-d variable, notify user, flag and move to next test case
if has_lev_case:
print(
f"\t Variable named {field} has a lev dimension for '{case_name}', which does not work with this script."
f"\t WARNING: Variable {field} has a lev dimension for '{case_name}', which does not work with this script."
)

skip_var = True
@@ -165,7 +165,7 @@
# check if there is a lat dimension:
if not has_lat_case:
print(
f"\t Variable named {field} is missing a lat dimension for '{case_name}', cannot continue to plot."
f"\t WARNING: Variable {field} is missing a lat dimension for '{case_name}', cannot continue to plot."
)
continue
# End if
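The timeseries script applies the same two guards to the reference case and to each test case: skip fields with a lev dimension and fields without a lat dimension. A short illustrative sketch of that guard as one reusable check, using pf.validate_dims as it is called in global_latlon_map.py above; the function name can_plot_field and the import line are assumptions for this example, not ADF code:

import plotting_functions as pf  # assumed import path for the ADF helper module

def can_plot_field(da, field: str, case_name: str) -> bool:
    # Apply the lev/lat guards used by global_mean_timeseries with the
    # message prefixes introduced in this commit.
    if da is None:
        print(f"\t WARNING: Variable {field} for case '{case_name}' provides Nonetype. Skipping this variable")
        return False
    dims = pf.validate_dims(da, ["lat", "lon", "lev"])
    if dims["has_lev"]:
        print(f"\t WARNING: Variable {field} has a lev dimension for '{case_name}', which does not work with this script.")
        return False
    if not dims["has_lat"]:
        print(f"\t WARNING: Variable {field} is missing a lat dimension for '{case_name}', cannot continue to plot.")
        return False
    return True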
4 changes: 2 additions & 2 deletions scripts/plotting/polar_map.py
@@ -304,7 +304,7 @@ def polar_map(adfobj):
# check if there is a lat dimension:
if not has_lat:
print(
f"Variable named {var} is missing a lat dimension for '{case_name}', cannot continue to plot."
f"\t - WARNING: Variable {var} is missing a lat dimension for '{case_name}', cannot continue to plot."
)
continue
# End if
@@ -319,7 +319,7 @@
# check if there is a lat dimension:
if not has_lat_ref:
print(
f"Variable named {var} is missing a lat dimension for '{data_name}', cannot continue to plot."
f"\t - WARNING: Variable {var} is missing a lat dimension for '{data_name}', cannot continue to plot."
)
continue
