
Commit

run ruff fix
ocefpaf committed Sep 5, 2023
1 parent d3c7976 · commit ac2f83b
Showing 8 changed files with 134 additions and 194 deletions.
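
Nearly every hunk below is the same mechanical rewrite: ruff's autofix replaces str.format() calls with f-strings (rule UP032, from the pyupgrade-derived rule set). The commit does not record the exact invocation or the project's ruff configuration; assuming the UP rules are enabled, an invocation along the lines of "ruff check --fix ." produces rewrites of this shape. A minimal sketch of the pattern, using a made-up checker_name value rather than code from the repository:

    # Illustrative only: the rewrite ruff's UP032 autofix applies throughout this commit.
    # Adjacent string literals are concatenated at compile time, so .format() formats the
    # whole message; the fix moves the placeholders into an f-string.
    checker_name = "cf:1.6"  # hypothetical value for the sketch

    before = (
        "Cannot find checker '{}' with which to "
        "describe checks".format(checker_name)
    )
    after = (
        f"Cannot find checker '{checker_name}' with which to "
        "describe checks"
    )

    assert before == after  # the rendered message is unchanged
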
cchecker.py (4 changes: 2 additions & 2 deletions)
@@ -253,8 +253,8 @@ def main():
     for checker_name in sorted(checker_names):
         if checker_name not in check_suite.checkers:
             print(
-                "Cannot find checker '{}' with which to "
-                "describe checks".format(checker_name),
+                f"Cannot find checker '{checker_name}' with which to "
+                "describe checks",
                 file=sys.stderr,
             )
             error_stat = 1
compliance_checker/acdd.py (20 changes: 7 additions & 13 deletions)
@@ -249,8 +249,8 @@ def check_lat_extents(self, ds):
                 False,
                 "geospatial_lat_extents_match",
                 [
-                    "Could not convert one of geospatial_lat_min ({}) or max ({}) to float see CF-1.6 spec chapter 4.1"
-                    "".format(ds.geospatial_lat_min, ds.geospatial_lat_max),
+                    f"Could not convert one of geospatial_lat_min ({ds.geospatial_lat_min}) or max ({ds.geospatial_lat_max}) to float see CF-1.6 spec chapter 4.1"
+                    "",
                 ],
             )

@@ -347,8 +347,8 @@ def check_lon_extents(self, ds):
                 False,
                 "geospatial_lon_extents_match",
                 [
-                    "Could not convert one of geospatial_lon_min ({}) or max ({}) to float see CF-1.6 spec chapter 4.1"
-                    "".format(ds.geospatial_lon_min, ds.geospatial_lon_max),
+                    f"Could not convert one of geospatial_lon_min ({ds.geospatial_lon_min}) or max ({ds.geospatial_lon_max}) to float see CF-1.6 spec chapter 4.1"
+                    "",
                 ],
             )

@@ -440,7 +440,7 @@ def verify_geospatial_bounds(self, ds):
                 [
                     (
                         "Could not parse WKT from geospatial_bounds,"
-                        ' possible bad value: "{}"'.format(ds.geospatial_bounds)
+                        f' possible bad value: "{ds.geospatial_bounds}"'
                     ),
                 ],
                 variable_name="geospatial_bounds",

@@ -637,19 +637,13 @@ def check_time_extents(self, ds):
         if start_dt > timedelta(hours=1):
             msgs.append(
                 "Date time mismatch between time_coverage_start and actual "
-                "time values {} (time_coverage_start) != {} (time[0])".format(
-                    t_min.isoformat(),
-                    time0.isoformat(),
-                ),
+                f"time values {t_min.isoformat()} (time_coverage_start) != {time0.isoformat()} (time[0])",
             )
             score -= 1
         if end_dt > timedelta(hours=1):
             msgs.append(
                 "Date time mismatch between time_coverage_end and actual "
-                "time values {} (time_coverage_end) != {} (time[N])".format(
-                    t_max.isoformat(),
-                    time1.isoformat(),
-                ),
+                f"time values {t_max.isoformat()} (time_coverage_end) != {time1.isoformat()} (time[N])",
             )
             score -= 1

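One quirk of the autofix is visible in several of the rewritten messages above and in later files: where the original ended with a bare "".format(...) continuation line, the fix leaves an empty "" literal behind. Implicit concatenation with "" is a no-op, so the emitted text is unchanged and the leftover literal is purely cosmetic. A small sketch with abbreviated, made-up values (not code from the repository):

    # Sketch of the leftover empty-string pattern; dummy values, shortened message text.
    lat_min, lat_max = "n/a", 90.0  # hypothetical attribute values

    old_msg = (
        "Could not convert one of geospatial_lat_min ({}) or max ({}) to float"
        "".format(lat_min, lat_max)
    )
    new_msg = (
        f"Could not convert one of geospatial_lat_min ({lat_min}) or max ({lat_max}) to float"
        ""
    )

    assert old_msg == new_msg  # concatenating "" changes nothing at runtime
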
compliance_checker/cf/cf_1_6.py (188 changes: 84 additions & 104 deletions)

Large diffs are not rendered by default.

compliance_checker/cf/cf_1_7.py (51 changes: 15 additions & 36 deletions)
@@ -263,21 +263,15 @@ def check_cell_boundaries(self, ds):
             if boundary_variable.ndim != variable.ndim + 1:
                 valid = False
                 reasoning.append(
-                    "The number of dimensions of the variable {} is {}, but the "
-                    "number of dimensions of the boundary variable {} is {}. The boundary variable "
-                    "should have {} dimensions".format(
-                        variable.name,
-                        variable.ndim,
-                        boundary_variable.name,
-                        boundary_variable.ndim,
-                        variable.ndim + 1,
-                    ),
+                    f"The number of dimensions of the variable {variable.name} is {variable.ndim}, but the "
+                    f"number of dimensions of the boundary variable {boundary_variable.name} is {boundary_variable.ndim}. The boundary variable "
+                    f"should have {variable.ndim + 1} dimensions",
                 )
             if variable.dimensions[:] != boundary_variable.dimensions[: variable.ndim]:
                 valid = False
                 reasoning.append(
-                    "Boundary variable coordinates (for {}) are in improper order: {}. Bounds-specific dimensions should be last"
-                    "".format(variable.name, boundary_variable.dimensions),
+                    f"Boundary variable coordinates (for {variable.name}) are in improper order: {boundary_variable.dimensions}. Bounds-specific dimensions should be last"
+                    "",
                 )

             # 7.1 Required 2/5: continue

@@ -315,15 +309,9 @@ def check_cell_boundaries(self, ds):
                 if getattr(variable, item) != getattr(boundary_variable, item):
                     valid = False
                     reasoning.append(
-                        "'{}' has attr '{}' with value '{}' that does not agree "
-                        "with its associated variable ('{}')'s attr value '{}'"
-                        "".format(
-                            boundary_variable_name,
-                            item,
-                            getattr(boundary_variable, item),
-                            variable.name,
-                            getattr(variable, item),
-                        ),
+                        f"'{boundary_variable_name}' has attr '{item}' with value '{getattr(boundary_variable, item)}' that does not agree "
+                        f"with its associated variable ('{variable.name}')'s attr value '{getattr(variable, item)}'"
+                        "",
                     )

             # 7.1 Required 5/5:

@@ -396,14 +384,9 @@ def check_cell_boundaries_interval(self, ds):
             ):
                 valid = False
                 reasoning.append(
-                    "The points specified by the coordinate variable {} ({})"
+                    f"The points specified by the coordinate variable {variable_name} ({variable[ii]})"
                     " lie outside the boundary of the cell specified by the "
-                    "associated boundary variable {} ({})".format(
-                        variable_name,
-                        variable[ii],
-                        boundary_variable_name,
-                        boundary_variable[ii],
-                    ),
+                    f"associated boundary variable {boundary_variable_name} ({boundary_variable[ii]})",
                 )

     result = Result(

@@ -819,7 +802,7 @@ def check_grid_mapping(self, ds):
                     test_ctx.messages.append(
                         "Cannot have both 'geoid_name' and "
                         "'geopotential_datum_name' attributes in "
-                        "grid mapping variable '{}'".format(var.name),
+                        f"grid mapping variable '{var.name}'",
                     )
                 elif len_vdatum_name_attrs == 1:
                     # should be one or zero attrs

@@ -834,20 +817,16 @@
                         )

                         invalid_msg = (
-                            "Vertical datum value '{}' for "
-                            "attribute '{}' in grid mapping "
-                            "variable '{}' is not valid".format(
-                                v_datum_value,
-                                v_datum_attr,
-                                var.name,
-                            )
+                            f"Vertical datum value '{v_datum_value}' for "
+                            f"attribute '{v_datum_attr}' in grid mapping "
+                            f"variable '{var.name}' is not valid"
                         )
                         test_ctx.assert_true(v_datum_str_valid, invalid_msg)
                 except sqlite3.Error as e:
                     # if we hit an error, skip the check
                     warn(
                         "Error occurred while trying to query "
-                        "Proj4 SQLite database at {}: {}".format(proj_db_path, str(e)),
+                        f"Proj4 SQLite database at {proj_db_path}: {str(e)}",
                         stacklevel=2,
                     )
             prev_return[var.name] = test_ctx.to_result()
compliance_checker/cf/cf_base.py (40 changes: 15 additions & 25 deletions)
@@ -185,8 +185,8 @@ def check_grid_mapping(self, ds):
             )
             defines_grid_mapping.assert_true(
                 (isinstance(grid_mapping, str) and grid_mapping),
-                "{}'s grid_mapping attribute must be a "
-                "space-separated non-empty string".format(variable.name),
+                f"{variable.name}'s grid_mapping attribute must be a "
+                "space-separated non-empty string",
             )
             if isinstance(grid_mapping, str):
                 # TODO (badams): refactor functionality to split functionality

@@ -313,7 +313,7 @@ def check_conventions_version(self, ds):
                 else:
                     reasoning = [
                         "§2.6.1 Conventions global attribute does not contain "
-                        '"{}"'.format(correct_version_string),
+                        f'"{correct_version_string}"',
                     ]
         else:
             valid = False

@@ -375,8 +375,8 @@ def _check_formula_terms(self, ds, coord, dimless_coords_dict):

         valid_formula_terms.assert_true(
             isinstance(formula_terms, str) and formula_terms,
-            "§4.3.2: {}'s formula_terms is a required attribute and must be a non-empty string"
-            "".format(coord),
+            f"§4.3.2: {coord}'s formula_terms is a required attribute and must be a non-empty string"
+            "",
         )
         # We can't check any more
         if not formula_terms:

@@ -417,16 +417,16 @@ def _check_formula_terms(self, ds, coord, dimless_coords_dict):

         valid_formula_terms.assert_true(
             standard_name in dimless_coords_dict,
-            "unknown standard_name '{}' for dimensionless vertical coordinate {}"
-            "".format(standard_name, coord),
+            f"unknown standard_name '{standard_name}' for dimensionless vertical coordinate {coord}"
+            "",
         )
         if standard_name not in dimless_coords_dict:
             return valid_formula_terms.to_result()

         valid_formula_terms.assert_true(
             no_missing_terms(standard_name, terms, dimless_coords_dict),
-            "{}'s formula_terms are invalid for {}, please see appendix D of CF 1.6"
-            "".format(coord, standard_name),
+            f"{coord}'s formula_terms are invalid for {standard_name}, please see appendix D of CF 1.6"
+            "",
         )

         return valid_formula_terms.to_result()

@@ -476,13 +476,8 @@ def _parent_var_attr_type_check(self, attr_name, var, ctx):

         ctx.assert_true(
             type_match,
-            "Attribute '{}' (type: {}) and parent variable '{}' (type: {}) "
-            "must have equivalent datatypes".format(
-                attr_name,
-                val_type,
-                var.name,
-                var.dtype.type,
-            ),
+            f"Attribute '{attr_name}' (type: {val_type}) and parent variable '{var.name}' (type: {var.dtype.type}) "
+            "must have equivalent datatypes",
         )

     def _find_aux_coord_vars(self, ds, refresh=False):

@@ -1070,8 +1065,8 @@ def _att_loc_msg(att_loc):
                 test_ctx.out_of += 1
                 if "G" not in att_loc:
                     test_ctx.messages.append(
-                        '[Appendix A] Attribute "{}" should not be present in global (G) '
-                        "attributes. {}".format(global_att_name, valid_loc_warn),
+                        f'[Appendix A] Attribute "{global_att_name}" should not be present in global (G) '
+                        f"attributes. {valid_loc_warn}",
                     )
             else:
                 result = self._handle_dtype_check(global_att, global_att_name, att_dict)

@@ -1109,13 +1104,8 @@ def _att_loc_msg(att_loc):
                     test_ctx.out_of += 1
                     if coord_letter not in att_loc:
                         test_ctx.messages.append(
-                            '[Appendix A] Attribute "{}" should not be present in {} '
-                            'variable "{}". {}'.format(
-                                att_name,
-                                att_loc_print_helper(coord_letter),
-                                var_name,
-                                valid_loc_warn,
-                            ),
+                            f'[Appendix A] Attribute "{att_name}" should not be present in {att_loc_print_helper(coord_letter)} '
+                            f'variable "{var_name}". {valid_loc_warn}',
                         )
                     else:
                         result = self._handle_dtype_check(att, att_name, att_dict, var)
compliance_checker/ioos.py (16 changes: 8 additions & 8 deletions)
@@ -1232,8 +1232,8 @@ def check_creator_and_publisher_type(self, ds):
             else:
                 pass_stat = False
                 messages.append(
-                    "If specified, {} must be in value list "
-                    "({})".format(global_att_name, sorted(expected_types)),
+                    f"If specified, {global_att_name} must be in value list "
+                    f"({sorted(expected_types)})",
                 )

             result_list.append(

@@ -1390,14 +1390,14 @@ def check_vertical_coordinates(self, ds):

             valid_vertical_coord = TestCtx(BaseCheck.HIGH, "Vertical coordinates")
             units_set_msg = (
-                "{}'s units attribute {} is not equivalent to one "
-                "of {}".format(name, units_str, expected_unit_strs)
+                f"{name}'s units attribute {units_str} is not equivalent to one "
+                f"of {expected_unit_strs}"
             )
             valid_vertical_coord.assert_true(pass_stat, units_set_msg)

             pos_msg = (
-                "{}: vertical coordinates must include a positive "
-                "attribute that is either 'up' or 'down'".format(name)
+                f"{name}: vertical coordinates must include a positive "
+                "attribute that is either 'up' or 'down'"
             )
             valid_vertical_coord.assert_true(positive in ("up", "down"), pos_msg)

@@ -1685,9 +1685,9 @@ def check_qartod_variables_references(self, ds):
             attval = getattr(v, "references", None)
             if attval is None:
                 msg = (
-                    '"references" attribute not present for variable {}.'
+                    f'"references" attribute not present for variable {v.name}.'
                     "If present, it should be a valid URL."
-                ).format(v.name)
+                )
                 val = False
             else:
                 msg = f'"references" attribute for variable "{v.name}" must be a valid URL'
compliance_checker/suite.py (7 changes: 2 additions & 5 deletions)
@@ -346,11 +346,8 @@ def _process_skip_checks(cls, skip_checks):
                     check_max_level = check_lookup[split_check_spec[1]]
                 except KeyError:
                     warnings.warn(
-                        "Skip specifier '{}' on check '{}' not found,"
-                        " defaulting to skip entire check".format(
-                            split_check_spec[1],
-                            check_name,
-                        ),
+                        f"Skip specifier '{split_check_spec[1]}' on check '{check_name}' not found,"
+                        " defaulting to skip entire check",
                         stacklevel=2,
                     )
                     check_max_level = BaseCheck.HIGH
compliance_checker/tests/test_acdd.py (2 changes: 1 addition & 1 deletion)
@@ -439,7 +439,7 @@ def test_geospatial_bounds(self):
             if result.variable_name == "geospatial_bounds":
                 assert (
                     "Could not parse WKT from geospatial_bounds,"
-                    ' possible bad value: "{}"'.format(empty_ds.geospatial_bounds)
+                    f' possible bad value: "{empty_ds.geospatial_bounds}"'
                     in result.msgs
                 )

