diff --git a/cchecker.py b/cchecker.py
index a234df0f..90329cdf 100755
--- a/cchecker.py
+++ b/cchecker.py
@@ -34,7 +34,7 @@ def parse_options(opts):
         try:
             checker_type, checker_opt = opt_str.split(":", 1)
         except ValueError:
-            warnings.warn(f"Could not split option {opt_str}, ignoring")
+            warnings.warn(f"Could not split option {opt_str}, ignoring", stacklevel=2)
         else:
             options_dict[checker_type].add(checker_opt)
     return options_dict
diff --git a/compliance_checker/acdd.py b/compliance_checker/acdd.py
index 4bea622a..603b4156 100644
--- a/compliance_checker/acdd.py
+++ b/compliance_checker/acdd.py
@@ -188,7 +188,7 @@ def check_var_units(self, ds):
             msgs = []
             # Check units and dims for variable
             unit_check = hasattr(ds.variables[variable], "units")
-            no_dim_check = getattr(ds.variables[variable], "dimensions") == ()
+            no_dim_check = ds.variables[variable].dimensions == ()
             # Check if we have no dimensions. If no dims, skip test
             if no_dim_check:
                 continue
@@ -256,7 +256,7 @@ def check_lat_extents(self, ds):
 
         # identify lat var(s) as per CF 4.1
         lat_vars = {}  # var -> number of criteria passed
-        for name, var in ds.variables.items():
+        for _name, var in ds.variables.items():
             # must have units
             if not hasattr(var, "units"):
                 continue
@@ -354,7 +354,7 @@ def check_lon_extents(self, ds):
 
         # identify lon var(s) as per CF 4.2
         lon_vars = {}  # var -> number of criteria passed
-        for name, var in ds.variables.items():
+        for _name, var in ds.variables.items():
             # must have units
             if not hasattr(var, "units"):
                 continue
@@ -779,7 +779,7 @@ def check_metadata_link(self, ds):
         if not hasattr(ds, "metadata_link"):
             return
         msgs = []
-        meta_link = getattr(ds, "metadata_link")
+        meta_link = ds.metadata_link
         if "http" not in meta_link:
             msgs.append("Metadata URL should include http:// or https://")
         valid_link = len(msgs) == 0
@@ -793,7 +793,7 @@ def check_id_has_no_blanks(self, ds):
         """
         if not hasattr(ds, "id"):
             return
-        if " " in getattr(ds, "id"):
+        if " " in ds.id:
             return Result(
                 BaseCheck.MEDIUM,
                 False,
diff --git a/compliance_checker/base.py b/compliance_checker/base.py
index e4194398..34981221 100644
--- a/compliance_checker/base.py
+++ b/compliance_checker/base.py
@@ -549,7 +549,7 @@ def attr_check(kvp, ds, priority, ret_val, gname=None, var_name=None):
         # starting with "check". Avoid naming check functions
         # starting with check if you want to pass them in with
         # a tuple to avoid them being checked more than once
-        elif hasattr(other, "__call__"):
+        elif callable(other):
             # check that the attribute is actually present.
             # This reduces boilerplate in functions by not needing
             # to check whether the attribute is present every time
@@ -629,7 +629,10 @@ def score_group(group_name=None):
     Please do not using scoring groups and update your plugins
     if necessary
     """
-    warnings.warn("Score_group is deprecated as of Compliance Checker v3.2.")
+    warnings.warn(
+        "Score_group is deprecated as of Compliance Checker v3.2.",
+        stacklevel=2,
+    )
 
     def _inner(func):
         def _dec(s, ds):
diff --git a/compliance_checker/cf/cf_1_6.py b/compliance_checker/cf/cf_1_6.py
index 55a95b1e..4519f2fb 100644
--- a/compliance_checker/cf/cf_1_6.py
+++ b/compliance_checker/cf/cf_1_6.py
@@ -126,7 +126,7 @@ def check_child_attr_data_types(self, ds):
             "_FillValue",
         }
 
-        for var_name, var in ds.variables.items():
+        for _var_name, var in ds.variables.items():
             for att_name in special_attrs.intersection(var.ncattrs()):
                 self._parent_var_attr_type_check(att_name, var, ctx)
         return ctx.to_result()
@@ -416,7 +416,7 @@ def check_fill_value_equal_missing_value(self, ds):
         fails = []
         total = 0
 
-        for name, variable in ds.variables.items():
+        for _name, variable in ds.variables.items():
             # If the variable have a defined _FillValue a defined missing_value check it.
 
             if hasattr(variable, "_FillValue") and hasattr(variable, "missing_value"):
@@ -447,7 +447,7 @@ def check_valid_range_or_valid_min_max_present(self, ds):
         fails = []
         total = 0
 
-        for name, variable in ds.variables.items():
+        for _name, variable in ds.variables.items():
             if hasattr(variable, "valid_max") and (
                 hasattr(variable, "valid_min") or hasattr(variable, "valid_range")
             ):
diff --git a/compliance_checker/cf/cf_1_7.py b/compliance_checker/cf/cf_1_7.py
index f893a47b..fbfac740 100644
--- a/compliance_checker/cf/cf_1_7.py
+++ b/compliance_checker/cf/cf_1_7.py
@@ -794,7 +794,8 @@ def _evaluate_towgs84(self, val):
         return (True, msg)
 
     def check_grid_mapping(self, ds):
-        super().check_grid_mapping.__doc__
+        # FIXME: Looks like this is not needed.
+        # super().check_grid_mapping.__doc__
         prev_return = super().check_grid_mapping(ds)
         grid_mapping_variables = cfutil.get_grid_mapping_variables(ds)
         for var_name in sorted(grid_mapping_variables):
@@ -876,6 +877,7 @@ def check_grid_mapping(self, ds):
                     warn(
                         "Error occurred while trying to query "
                         "Proj4 SQLite database at {}: {}".format(proj_db_path, str(e)),
+                        stacklevel=2,
                     )
             prev_return[var.name] = test_ctx.to_result()
 
@@ -891,6 +893,7 @@ def check_standard_name_deprecated_modifiers(self, ds):
         if deprecated_var_names:
             warn(
                 f"Deprecated standard_name modifiers found on variables {deprecated_var_names}",
+                stacklevel=2,
             )
 
     def _process_v_datum_str(self, v_datum_str, conn):
@@ -926,7 +929,8 @@ def _check_dimensionless_vertical_coordinate_1_7(
         formula_terms = getattr(variable, "formula_terms", None)
         # Skip the variable if it's dimensional
         correct_computed_std_name_ctx = TestCtx(
-            BaseCheck.MEDIUM, self.section_titles["4.3"]
+            BaseCheck.MEDIUM,
+            self.section_titles["4.3"],
         )
         # IMPLEMENTATION CONFORMANCE 4.3.3 REQUIRED
         correct_computed_std_name_ctx.assert_true(
diff --git a/compliance_checker/cf/cf_1_8.py b/compliance_checker/cf/cf_1_8.py
index 85ccfaf7..3c1f5d79 100644
--- a/compliance_checker/cf/cf_1_8.py
+++ b/compliance_checker/cf/cf_1_8.py
@@ -134,7 +134,7 @@ def check_geometry(self, ds: Dataset):
         else:
             geometry_var = ds.variables[geometry_var_name]
 
-            geometry_type = getattr(geometry_var, "geometry_type")
+            geometry_type = geometry_var.geometry_type
             try:
                 node_coord_var_names = geometry_var.node_coordinates
             except AttributeError:
@@ -471,6 +471,7 @@ def handle_lsid(self, taxon_lsid_variable, taxon_name_variable):
                     "'urn:lsid:marinespecies.org:taxname:' or "
                     "'urn:lsid:itis.gov:itis_tsn:'. Assuming "
                     "pass condition",
+                    stacklevel=1,
                 )
                 return messages
 
@@ -597,8 +598,8 @@ def check_polygon_orientation(self, transposed_coords, interior=False):
 
         try:
             polygon = Polygon(transposed_coords.tolist())
-        except ValueError:
-            raise ValueError(
+        except ValueError as err:
+            raise ValueError(
                 "Polygon contains too few points to perform orientation test",
-            )
+            ) from err
 
diff --git a/compliance_checker/cf/cf_1_9.py b/compliance_checker/cf/cf_1_9.py
index e26fb22a..4f05fef9 100644
--- a/compliance_checker/cf/cf_1_9.py
+++ b/compliance_checker/cf/cf_1_9.py
@@ -76,7 +76,8 @@ def check_time_coordinate_variable_has_calendar(self, ds):
         return ret_val
 
     def check_time_coordinate(self, ds):
-        super().check_calendar.__doc__
+        # FIXME: Looks like this is not needed.
+        # super().check_calendar.__doc__
         prev_return = super().check_time_coordinate(ds)
         seconds_regex = regex.compile(
             r"\w+ since \d{1,4}-\d{1,2}-\d{1,2}[ T]"
diff --git a/compliance_checker/cf/cf_base.py b/compliance_checker/cf/cf_base.py
index 2ad5ee9b..a6f6ed5d 100644
--- a/compliance_checker/cf/cf_base.py
+++ b/compliance_checker/cf/cf_base.py
@@ -400,20 +400,20 @@ def _check_formula_terms(self, ds, coord, dimless_coords_dict):
         # the regex grouping always has component names in even positions and
         # the corresponding variable name in odd positions.
         poorly_formed_formula_terms = ("Attribute formula_terms is not well-formed",)
-        matches = [
-            match
-            for match in regex.finditer(
-                r"(\w+):\s+(\w+)(?:\s+(?!$)|$)", variable.formula_terms
-            )
-        ]
+        matches = list(
+            regex.finditer(
+                r"(\w+):\s+(\w+)(?:\s+(?!$)|$)",
+                variable.formula_terms,
+            ),
+        )
         if not matches:
             valid_formula_terms.add_failure(poorly_formed_formula_terms)
             return valid_formula_terms.to_result()
 
-        terms = set(m.group(1) for m in matches)
+        terms = {m.group(1) for m in matches}
         # get the variables named in the formula terms and check if any
         # are not present in the dataset
-        missing_vars = sorted(set(m.group(2) for m in matches) - set(ds.variables))
+        missing_vars = sorted({m.group(2) for m in matches} - set(ds.variables))
         missing_fmt = "The following variable(s) referenced in {}:formula_terms are not present in the dataset: {}"
         valid_formula_terms.assert_true(
             len(missing_vars) == 0,
@@ -567,7 +567,7 @@ def _find_ancillary_vars(self, ds, refresh=False):
         # Invalidate the cache at all costs
         self._ancillary_vars[ds] = []
 
-        for name, var in ds.variables.items():
+        for _name, var in ds.variables.items():
             if hasattr(var, "ancillary_variables"):
                 for anc_name in var.ancillary_variables.split(" "):
                     if anc_name in ds.variables:
@@ -638,6 +638,7 @@ def _find_cf_standard_name_table(self, ds):
                 warn(
                     "Cannot extract CF standard name version number "
                     "from standard_name_vocabulary string",
+                    stacklevel=2,
                 )
                 return False
             else:
@@ -650,6 +651,7 @@ def _find_cf_standard_name_table(self, ds):
                     "Cannot convert standard name table to lowercase. This can "
                     "occur if a non-string standard_name_vocabulary global "
                     "attribute is supplied",
+                    stacklevel=2,
                 )
                 return False
 
@@ -694,6 +696,7 @@ def _find_cf_standard_name_table(self, ds):
             warn(
                 f"Problem fetching standard name table:\n{e}\n"
                 f"Using packaged v{self._std_names._version}",
+                stacklevel=2,
             )
             return False
 
diff --git a/compliance_checker/cfutil.py b/compliance_checker/cfutil.py
index b43aa730..e583570f 100644
--- a/compliance_checker/cfutil.py
+++ b/compliance_checker/cfutil.py
@@ -67,8 +67,8 @@ def attr_membership(attr_val, value_set, attr_type=str, modifier_fn=lambda x: x)
 
     if not isinstance(attr_val, attr_type):
         warnings.warn(
-            "Attribute is of type {}, {} expected. "
-            "Attempting to cast to expected type.".format(type(attr_val), attr_type),
+            f"Attribute is of type {type(attr_val)!r}, {attr_type!r} expected. Attempting to cast to expected type.",
+            stacklevel=2,
         )
         try:
             # if the expected type is str, try casting to unicode type
@@ -79,7 +79,7 @@ def attr_membership(attr_val, value_set, attr_type=str, modifier_fn=lambda x: x)
                 new_attr_val = attr_type(attr_val)
         # catch casting errors
         except (ValueError, UnicodeEncodeError):
-            warnings.warn(f"Could not cast to type {attr_type}")
+            warnings.warn(f"Could not cast to type {attr_type}", stacklevel=2)
             return False
     else:
         new_attr_val = attr_val
@@ -88,8 +88,8 @@ def attr_membership(attr_val, value_set, attr_type=str, modifier_fn=lambda x: x)
         is_in_set = modifier_fn(new_attr_val) in value_set
     except Exception as e:
         warnings.warn(
-            "Could not apply modifier function {} to value: "
-            " {}".format(modifier_fn, e.msg),
+            f"Could not apply modifier function {modifier_fn} to value: {e.msg}",
+            stacklevel=2,
         )
         return False
 
@@ -172,15 +172,15 @@ def is_geophysical(ds, variable):
 
     if not isinstance(standard_name_test, str):
         warnings.warn(
-            "Variable {} has non string standard name, "
-            "Attempting cast to string".format(variable),
+            f"Variable {variable} has non string standard name, Attempting cast to string",
+            stacklevel=2,
         )
         try:
             standard_name = str(standard_name_test)
         except ValueError:
             warnings.warn(
-                "Unable to cast standard name to string, excluding "
-                "from geophysical variables",
+                "Unable to cast standard name to string, excluding from geophysical variables",
+                stacklevel=2,
             )
         else:
             standard_name = standard_name_test
diff --git a/compliance_checker/protocols/netcdf.py b/compliance_checker/protocols/netcdf.py
index db1230b4..415a94ec 100644
--- a/compliance_checker/protocols/netcdf.py
+++ b/compliance_checker/protocols/netcdf.py
@@ -84,6 +84,7 @@ def is_remote_netcdf(ds_str):
     except requests.exceptions.RequestException as e:
         warnings.warn(
             f"Received exception when making HEAD request to {ds_str}: {e}",
+            stacklevel=2,
         )
         content_type = None
     else:
diff --git a/compliance_checker/runner.py b/compliance_checker/runner.py
index aa323746..114bcac1 100644
--- a/compliance_checker/runner.py
+++ b/compliance_checker/runner.py
@@ -40,7 +40,7 @@ def run_checker(
         skip_checks=None,
         include_checks=None,
         output_filename="-",
-        output_format=["text"],
+        output_format="text",
         options=None,
     ):
         """
@@ -184,7 +184,7 @@ def html_output(cls, cs, score_dict, output_filename, ds_loc, limit):
         """
         checkers_html = []
         for ds, score_groups in score_dict.items():
-            for checker, (groups, errors) in score_groups.items():
+            for checker, (groups, _errors) in score_groups.items():
                 checkers_html.append(cs.checker_html_output(checker, groups, ds, limit))
 
         html = cs.html_output(checkers_html)
diff --git a/compliance_checker/suite.py b/compliance_checker/suite.py
index 08dc4610..3eb6ecad 100644
--- a/compliance_checker/suite.py
+++ b/compliance_checker/suite.py
@@ -172,6 +172,7 @@ def _load_checkers(cls, checkers):
                     'attributes. "name" attribute is deprecated. '
' "Assuming checker is latest version.", DeprecationWarning, + stacklevel=2, ) # append "unknown" to version string since no versioning # info was provided @@ -350,6 +351,7 @@ def _process_skip_checks(cls, skip_checks): split_check_spec[1], check_name, ), + stacklevel=2, ) check_max_level = BaseCheck.HIGH @@ -358,7 +360,10 @@ def _process_skip_checks(cls, skip_checks): return check_dict def run(self, ds, skip_checks, *checker_names): - warnings.warn("suite.run is deprecated, use suite.run_all in calls " "instead") + warnings.warn( + "suite.run is deprecated, use suite.run_all in calls instead", + stacklevel=2, + ) return self.run_all(ds, checker_names, skip_checks=skip_checks) def run_all(self, ds, checker_names, include_checks=None, skip_checks=None): diff --git a/pyproject.toml b/pyproject.toml index 26542353..63198262 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -15,7 +15,7 @@ markers = [ [tool.ruff] select = [ "A", # flake8-builtins - # "B", # flake8-bugbear + "B", # flake8-bugbear "C4", # flake8-comprehensions "E", # pycodecstyle "F", # flakes @@ -39,3 +39,5 @@ ignore = [ "E402", "A001", ] +"compliance_checker/__init__.py" = ["B019"] +"compliance_checker/cfutil.py" = ["B028"]