Commit

Merge pull request #1026 from ocefpaf/pre-commit_fixes
Enable bugbear...
benjwadams authored May 17, 2023
2 parents 9858f48 + ea352b8 commit 07a7874
Showing 13 changed files with 59 additions and 39 deletions.
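
Many of the hunks below add an explicit stacklevel argument to warnings.warn calls; flake8-bugbear's B028 check flags warnings issued without one, and stacklevel=2 attributes the warning to the caller rather than to the library line that emitted it. A minimal sketch of the pattern, using a hypothetical helper rather than the checker's own code:

import warnings

def parse_option(opt_str):
    # stacklevel=2 points the warning at the code that called parse_option()
    # instead of at this line inside the helper.
    if ":" not in opt_str:
        warnings.warn(f"Could not split option {opt_str}, ignoring", stacklevel=2)
        return None
    return tuple(opt_str.split(":", 1))

parse_option("no-colon-here")  # UserWarning reported against this call site
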
2 changes: 1 addition & 1 deletion cchecker.py
@@ -34,7 +34,7 @@ def parse_options(opts):
try:
checker_type, checker_opt = opt_str.split(":", 1)
except ValueError:
warnings.warn(f"Could not split option {opt_str}, ignoring")
warnings.warn(f"Could not split option {opt_str}, ignoring", stacklevel=2)
else:
options_dict[checker_type].add(checker_opt)
return options_dict
10 changes: 5 additions & 5 deletions compliance_checker/acdd.py
@@ -188,7 +188,7 @@ def check_var_units(self, ds):
msgs = []
# Check units and dims for variable
unit_check = hasattr(ds.variables[variable], "units")
no_dim_check = getattr(ds.variables[variable], "dimensions") == ()
no_dim_check = ds.variables[variable].dimensions == ()
# Check if we have no dimensions. If no dims, skip test
if no_dim_check:
continue
@@ -256,7 +256,7 @@ def check_lat_extents(self, ds):

# identify lat var(s) as per CF 4.1
lat_vars = {} # var -> number of criteria passed
for name, var in ds.variables.items():
for _name, var in ds.variables.items():
# must have units
if not hasattr(var, "units"):
continue
@@ -354,7 +354,7 @@ def check_lon_extents(self, ds):

# identify lon var(s) as per CF 4.2
lon_vars = {} # var -> number of criteria passed
for name, var in ds.variables.items():
for _name, var in ds.variables.items():
# must have units
if not hasattr(var, "units"):
continue
@@ -779,7 +779,7 @@ def check_metadata_link(self, ds):
if not hasattr(ds, "metadata_link"):
return
msgs = []
meta_link = getattr(ds, "metadata_link")
meta_link = ds.metadata_link
if "http" not in meta_link:
msgs.append("Metadata URL should include http:// or https://")
valid_link = len(msgs) == 0
@@ -793,7 +793,7 @@ def check_id_has_no_blanks(self, ds):
"""
if not hasattr(ds, "id"):
return
if " " in getattr(ds, "id"):
if " " in ds.id:
return Result(
BaseCheck.MEDIUM,
False,
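
Two patterns recur in acdd.py: getattr() with a literal attribute name becomes plain attribute access (bugbear B009), and loop variables that are never used in the loop body gain a leading underscore (B007). A self-contained sketch of both, with a SimpleNamespace stand-in rather than a real netCDF Dataset:

from types import SimpleNamespace

ds = SimpleNamespace(
    metadata_link="https://example.com/metadata",
    variables={"lat": SimpleNamespace(units="degrees_north", dimensions=("lat",))},
)

# B009: getattr(ds, "metadata_link") with a constant name is just attribute access.
meta_link = ds.metadata_link

# B007: the dict key is never used inside the loop body, so it becomes _name.
for _name, var in ds.variables.items():
    if not hasattr(var, "units"):
        continue
    print(meta_link, var.units)
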
7 changes: 5 additions & 2 deletions compliance_checker/base.py
@@ -549,7 +549,7 @@ def attr_check(kvp, ds, priority, ret_val, gname=None, var_name=None):
# starting with "check". Avoid naming check functions
# starting with check if you want to pass them in with
# a tuple to avoid them being checked more than once
elif hasattr(other, "__call__"):
elif callable(other):
# check that the attribute is actually present.
# This reduces boilerplate in functions by not needing
# to check whether the attribute is present every time
@@ -629,7 +629,10 @@ def score_group(group_name=None):
Please do not use scoring groups and update your plugins
if necessary
"""
warnings.warn("Score_group is deprecated as of Compliance Checker v3.2.")
warnings.warn(
"Score_group is deprecated as of Compliance Checker v3.2.",
stacklevel=2,
)

def _inner(func):
def _dec(s, ds):
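
The attr_check hunk swaps hasattr(other, "__call__") for the callable() builtin, which is bugbear B004's recommendation (hasattr on __call__ is the unidiomatic and less reliable test). A short sketch with a hypothetical check function:

def my_check(ds):
    return ds is not None

other = my_check

# B004: callable(x) is the idiomatic test for "can this be called?".
if callable(other):
    print(other("dataset-placeholder"))  # True
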
6 changes: 3 additions & 3 deletions compliance_checker/cf/cf_1_6.py
@@ -126,7 +126,7 @@ def check_child_attr_data_types(self, ds):
"_FillValue",
}

for var_name, var in ds.variables.items():
for _var_name, var in ds.variables.items():
for att_name in special_attrs.intersection(var.ncattrs()):
self._parent_var_attr_type_check(att_name, var, ctx)
return ctx.to_result()
@@ -416,7 +416,7 @@ def check_fill_value_equal_missing_value(self, ds):
fails = []
total = 0

for name, variable in ds.variables.items():
for _name, variable in ds.variables.items():
# If the variable has a defined _FillValue and a defined missing_value, check it.

if hasattr(variable, "_FillValue") and hasattr(variable, "missing_value"):
@@ -447,7 +447,7 @@ def check_valid_range_or_valid_min_max_present(self, ds):
fails = []
total = 0

for name, variable in ds.variables.items():
for _name, variable in ds.variables.items():
if hasattr(variable, "valid_max") and (
hasattr(variable, "valid_min") or hasattr(variable, "valid_range")
):
8 changes: 6 additions & 2 deletions compliance_checker/cf/cf_1_7.py
@@ -794,7 +794,8 @@ def _evaluate_towgs84(self, val):
return (True, msg)

def check_grid_mapping(self, ds):
super().check_grid_mapping.__doc__
# FIXME: Looks like this is not needed.
# super().check_grid_mapping.__doc__
prev_return = super().check_grid_mapping(ds)
grid_mapping_variables = cfutil.get_grid_mapping_variables(ds)
for var_name in sorted(grid_mapping_variables):
@@ -876,6 +877,7 @@ def check_grid_mapping(self, ds):
warn(
"Error occurred while trying to query "
"Proj4 SQLite database at {}: {}".format(proj_db_path, str(e)),
stacklevel=2,
)
prev_return[var.name] = test_ctx.to_result()

@@ -891,6 +893,7 @@ def check_standard_name_deprecated_modifiers(self, ds):
if deprecated_var_names:
warn(
f"Deprecated standard_name modifiers found on variables {deprecated_var_names}",
stacklevel=2,
)

def _process_v_datum_str(self, v_datum_str, conn):
@@ -926,7 +929,8 @@ def _check_dimensionless_vertical_coordinate_1_7(
formula_terms = getattr(variable, "formula_terms", None)
# Skip the variable if it's dimensional
correct_computed_std_name_ctx = TestCtx(
BaseCheck.MEDIUM, self.section_titles["4.3"]
BaseCheck.MEDIUM,
self.section_titles["4.3"],
)
# IMPLEMENTATION CONFORMANCE 4.3.3 REQUIRED
correct_computed_std_name_ctx.assert_true(
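
The statement commented out with a FIXME here (and again in cf_1_9.py below) was a bare super().check_grid_mapping.__doc__ expression: it evaluates the parent docstring and throws the result away, so it has no effect. If the intent was to reuse the parent docstring, the usual pattern is an explicit assignment; a sketch with hypothetical class names:

class Base:
    def check_grid_mapping(self, ds):
        """Check grid_mapping attributes against the CF conventions."""
        return {}

class CF17(Base):
    def check_grid_mapping(self, ds):
        # A bare `super().check_grid_mapping.__doc__` here would be a no-op.
        return super().check_grid_mapping(ds)

    # Reuse the parent docstring explicitly instead.
    check_grid_mapping.__doc__ = Base.check_grid_mapping.__doc__

print(CF17.check_grid_mapping.__doc__)
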
7 changes: 4 additions & 3 deletions compliance_checker/cf/cf_1_8.py
@@ -134,7 +134,7 @@ def check_geometry(self, ds: Dataset):
else:
geometry_var = ds.variables[geometry_var_name]

geometry_type = getattr(geometry_var, "geometry_type")
geometry_type = geometry_var.geometry_type
try:
node_coord_var_names = geometry_var.node_coordinates
except AttributeError:
@@ -471,6 +471,7 @@ def handle_lsid(self, taxon_lsid_variable, taxon_name_variable):
"'urn:lsid:marinespecies.org:taxname:<AphiaID>' or "
"'urn:lsid:itis.gov:itis_tsn:<TSN>'. Assuming "
"pass condition",
stacklevel=1,
)

return messages
@@ -597,8 +598,8 @@ def check_polygon_orientation(self, transposed_coords, interior=False):

try:
polygon = Polygon(transposed_coords.tolist())
except ValueError:
raise ValueError(
except ValueError as err:
raise ValueError(
"Polygon contains too few points to perform orientation test",
) from err

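
The check_polygon_orientation hunk switches to explicit exception chaining, bugbear B904: a re-raise inside an except block should use `raise ... from err` (or `from None`) so the original traceback is preserved. A self-contained sketch of the pattern, without the Shapely dependency:

def orient(coords):
    try:
        if len(coords) < 3:
            raise ValueError("need at least three points")
        return coords
    except ValueError as err:
        # B904: chain the new exception to the one that triggered it.
        raise ValueError(
            "Polygon contains too few points to perform orientation test",
        ) from err

try:
    orient([(0, 0), (1, 1)])
except ValueError as exc:
    print(exc.__cause__)  # the original ValueError is kept as the cause
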
3 changes: 2 additions & 1 deletion compliance_checker/cf/cf_1_9.py
@@ -76,7 +76,8 @@ def check_time_coordinate_variable_has_calendar(self, ds):
return ret_val

def check_time_coordinate(self, ds):
super().check_calendar.__doc__
# FIXME: Looks like this is not needed.
# super().check_calendar.__doc__
prev_return = super().check_time_coordinate(ds)
seconds_regex = regex.compile(
r"\w+ since \d{1,4}-\d{1,2}-\d{1,2}[ T]"
21 changes: 12 additions & 9 deletions compliance_checker/cf/cf_base.py
@@ -400,20 +400,20 @@ def _check_formula_terms(self, ds, coord, dimless_coords_dict):
# the regex grouping always has component names in even positions and
# the corresponding variable name in odd positions.
poorly_formed_formula_terms = ("Attribute formula_terms is not well-formed",)
matches = [
match
for match in regex.finditer(
r"(\w+):\s+(\w+)(?:\s+(?!$)|$)", variable.formula_terms
)
]
matches = list(
regex.finditer(
r"(\w+):\s+(\w+)(?:\s+(?!$)|$)",
variable.formula_terms,
),
)
if not matches:
valid_formula_terms.add_failure(poorly_formed_formula_terms)
return valid_formula_terms.to_result()

terms = set(m.group(1) for m in matches)
terms = {m.group(1) for m in matches}
# get the variables named in the formula terms and check if any
# are not present in the dataset
missing_vars = sorted(set(m.group(2) for m in matches) - set(ds.variables))
missing_vars = sorted({m.group(2) for m in matches} - set(ds.variables))
missing_fmt = "The following variable(s) referenced in {}:formula_terms are not present in the dataset: {}"
valid_formula_terms.assert_true(
len(missing_vars) == 0,
@@ -567,7 +567,7 @@ def _find_ancillary_vars(self, ds, refresh=False):
# Invalidate the cache at all costs
self._ancillary_vars[ds] = []

for name, var in ds.variables.items():
for _name, var in ds.variables.items():
if hasattr(var, "ancillary_variables"):
for anc_name in var.ancillary_variables.split(" "):
if anc_name in ds.variables:
@@ -638,6 +638,7 @@ def _find_cf_standard_name_table(self, ds):
warn(
"Cannot extract CF standard name version number "
"from standard_name_vocabulary string",
stacklevel=2,
)
return False
else:
@@ -650,6 +651,7 @@
"Cannot convert standard name table to lowercase. This can "
"occur if a non-string standard_name_vocabulary global "
"attribute is supplied",
stacklevel=2,
)
return False

@@ -694,6 +696,7 @@ def _find_cf_standard_name_table(self, ds):
warn(
f"Problem fetching standard name table:\n{e}\n"
f"Using packaged v{self._std_names._version}",
stacklevel=2,
)
return False

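
The _check_formula_terms rewrite replaces a list comprehension over regex.finditer with a direct list() call and set(...) generator calls with set comprehensions (flake8-comprehensions territory rather than bugbear). The same transformation on toy data, using the stdlib re module in place of the regex package:

import re

formula_terms = "sigma: sigma eta: eta depth: depth"
matches = list(re.finditer(r"(\w+):\s+(\w+)(?:\s+(?!$)|$)", formula_terms))

# Set comprehensions instead of set(generator expression).
terms = {m.group(1) for m in matches}
dataset_vars = {"sigma", "depth"}  # illustrative stand-in for ds.variables
missing_vars = sorted({m.group(2) for m in matches} - dataset_vars)

print(sorted(terms))  # ['depth', 'eta', 'sigma']
print(missing_vars)   # ['eta']
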
18 changes: 9 additions & 9 deletions compliance_checker/cfutil.py
@@ -67,8 +67,8 @@ def attr_membership(attr_val, value_set, attr_type=str, modifier_fn=lambda x: x)

if not isinstance(attr_val, attr_type):
warnings.warn(
"Attribute is of type {}, {} expected. "
"Attempting to cast to expected type.".format(type(attr_val), attr_type),
f"Attribute is of type {type(attr_val)!r}, {attr_type!r} expected. Attempting to cast to expected type.",
stacklevel=2,
)
try:
# if the expected type is str, try casting to unicode type
Expand All @@ -79,7 +79,7 @@ def attr_membership(attr_val, value_set, attr_type=str, modifier_fn=lambda x: x)
new_attr_val = attr_type(attr_val)
# catch casting errors
except (ValueError, UnicodeEncodeError):
warnings.warn(f"Could not cast to type {attr_type}")
warnings.warn(f"Could not cast to type {attr_type}", stacklevel=2)
return False
else:
new_attr_val = attr_val
@@ -88,8 +88,8 @@ def attr_membership(attr_val, value_set, attr_type=str, modifier_fn=lambda x: x)
is_in_set = modifier_fn(new_attr_val) in value_set
except Exception as e:
warnings.warn(
"Could not apply modifier function {} to value: "
" {}".format(modifier_fn, e.msg),
f"Could not apply modifier function {modifier_fn} to value: {e.msg}",
stacklevel=2,
)
return False

@@ -172,15 +172,15 @@ def is_geophysical(ds, variable):

if not isinstance(standard_name_test, str):
warnings.warn(
"Variable {} has non string standard name, "
"Attempting cast to string".format(variable),
f"Variable {variable} has non string standard name, Attempting cast to string",
stacklevel=2,
)
try:
standard_name = str(standard_name_test)
except ValueError:
warnings.warn(
"Unable to cast standard name to string, excluding "
"from geophysical variables",
"Unable to cast standard name to string, excluding from geophysical variables",
stacklevel=2,
)
else:
standard_name = standard_name_test
1 change: 1 addition & 0 deletions compliance_checker/protocols/netcdf.py
@@ -84,6 +84,7 @@ def is_remote_netcdf(ds_str):
except requests.exceptions.RequestException as e:
warnings.warn(
f"Received exception when making HEAD request to {ds_str}: {e}",
stacklevel=2,
)
content_type = None
else:
4 changes: 2 additions & 2 deletions compliance_checker/runner.py
@@ -40,7 +40,7 @@ def run_checker(
skip_checks=None,
include_checks=None,
output_filename="-",
output_format=["text"],
output_format="text",
options=None,
):
"""
@@ -184,7 +184,7 @@ def html_output(cls, cs, score_dict, output_filename, ds_loc, limit):
"""
checkers_html = []
for ds, score_groups in score_dict.items():
for checker, (groups, errors) in score_groups.items():
for checker, (groups, _errors) in score_groups.items():
checkers_html.append(cs.checker_html_output(checker, groups, ds, limit))

html = cs.html_output(checkers_html)
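
The run_checker signature change replaces the mutable default output_format=["text"] with the plain string "text", presumably to satisfy bugbear B006 (mutable argument defaults are created once and shared across calls). A sketch of the hazard and the usual fix, not of the runner's actual normalisation logic:

def bad(fmt, formats=[]):  # B006: the same list object is reused on every call
    formats.append(fmt)
    return formats

print(bad("text"))  # ['text']
print(bad("html"))  # ['text', 'html'] -- state leaked from the first call

def run_checker(output_format="text"):
    # Normalise to a list inside the body instead of in the default.
    return [output_format] if isinstance(output_format, str) else list(output_format)

print(run_checker(), run_checker(["html", "json"]))  # ['text'] ['html', 'json']
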
7 changes: 6 additions & 1 deletion compliance_checker/suite.py
@@ -172,6 +172,7 @@ def _load_checkers(cls, checkers):
'attributes. "name" attribute is deprecated. '
"Assuming checker is latest version.",
DeprecationWarning,
stacklevel=2,
)
# append "unknown" to version string since no versioning
# info was provided
@@ -350,6 +351,7 @@ def _process_skip_checks(cls, skip_checks):
split_check_spec[1],
check_name,
),
stacklevel=2,
)
check_max_level = BaseCheck.HIGH

@@ -358,7 +360,10 @@
return check_dict

def run(self, ds, skip_checks, *checker_names):
warnings.warn("suite.run is deprecated, use suite.run_all in calls " "instead")
warnings.warn(
"suite.run is deprecated, use suite.run_all in calls instead",
stacklevel=2,
)
return self.run_all(ds, checker_names, skip_checks=skip_checks)

def run_all(self, ds, checker_names, include_checks=None, skip_checks=None):
4 changes: 3 additions & 1 deletion pyproject.toml
@@ -15,7 +15,7 @@ markers = [
[tool.ruff]
select = [
"A", # flake8-builtins
# "B", # flake8-bugbear
"B", # flake8-bugbear
"C4", # flake8-comprehensions
"E", # pycodecstyle
"F", # flakes
@@ -39,3 +39,5 @@ ignore = [
"E402",
"A001",
]
"compliance_checker/__init__.py" = ["B019"]
"compliance_checker/cfutil.py" = ["B028"]
