diff --git a/cchecker.py b/cchecker.py index 72d022df..fd3ef88b 100755 --- a/cchecker.py +++ b/cchecker.py @@ -253,8 +253,8 @@ def main(): for checker_name in sorted(checker_names): if checker_name not in check_suite.checkers: print( - "Cannot find checker '{}' with which to " - "describe checks".format(checker_name), + f"Cannot find checker '{checker_name}' with which to " + "describe checks", file=sys.stderr, ) error_stat = 1 diff --git a/compliance_checker/acdd.py b/compliance_checker/acdd.py index 6ea610ac..9b0abdc7 100644 --- a/compliance_checker/acdd.py +++ b/compliance_checker/acdd.py @@ -249,8 +249,8 @@ def check_lat_extents(self, ds): False, "geospatial_lat_extents_match", [ - "Could not convert one of geospatial_lat_min ({}) or max ({}) to float see CF-1.6 spec chapter 4.1" - "".format(ds.geospatial_lat_min, ds.geospatial_lat_max), + f"Could not convert one of geospatial_lat_min ({ds.geospatial_lat_min}) or max ({ds.geospatial_lat_max}) to float see CF-1.6 spec chapter 4.1" + "", ], ) @@ -347,8 +347,8 @@ def check_lon_extents(self, ds): False, "geospatial_lon_extents_match", [ - "Could not convert one of geospatial_lon_min ({}) or max ({}) to float see CF-1.6 spec chapter 4.1" - "".format(ds.geospatial_lon_min, ds.geospatial_lon_max), + f"Could not convert one of geospatial_lon_min ({ds.geospatial_lon_min}) or max ({ds.geospatial_lon_max}) to float see CF-1.6 spec chapter 4.1" + "", ], ) @@ -440,7 +440,7 @@ def verify_geospatial_bounds(self, ds): [ ( "Could not parse WKT from geospatial_bounds," - ' possible bad value: "{}"'.format(ds.geospatial_bounds) + f' possible bad value: "{ds.geospatial_bounds}"' ), ], variable_name="geospatial_bounds", @@ -637,19 +637,13 @@ def check_time_extents(self, ds): if start_dt > timedelta(hours=1): msgs.append( "Date time mismatch between time_coverage_start and actual " - "time values {} (time_coverage_start) != {} (time[0])".format( - t_min.isoformat(), - time0.isoformat(), - ), + f"time values {t_min.isoformat()} 
(time_coverage_start) != {time0.isoformat()} (time[0])", ) score -= 1 if end_dt > timedelta(hours=1): msgs.append( "Date time mismatch between time_coverage_end and actual " - "time values {} (time_coverage_end) != {} (time[N])".format( - t_max.isoformat(), - time1.isoformat(), - ), + f"time values {t_max.isoformat()} (time_coverage_end) != {time1.isoformat()} (time[N])", ) score -= 1 diff --git a/compliance_checker/cf/cf_1_6.py b/compliance_checker/cf/cf_1_6.py index 2cc5a74d..63b00337 100644 --- a/compliance_checker/cf/cf_1_6.py +++ b/compliance_checker/cf/cf_1_6.py @@ -238,8 +238,8 @@ def check_naming_conventions(self, ds): for name, variable in ds.variables.items(): variable_naming.assert_true( rname.match(name) is not None, - "variable {} should begin with a letter and be composed of " - "letters, digits, and underscores".format(name), + f"variable {name} should begin with a letter and be composed of " + "letters, digits, and underscores", ) # Keep track of all the attributes, we'll need to check them @@ -254,8 +254,8 @@ def check_naming_conventions(self, ds): continue attribute_naming.assert_true( rname.match(attr) is not None, - "attribute {}:{} should begin with a letter and be composed of " - "letters, digits, and underscores".format(name, attr), + f"attribute {name}:{attr} should begin with a letter and be composed of " + "letters, digits, and underscores", ) ret_val.append(variable_naming.to_result()) @@ -263,8 +263,8 @@ def check_naming_conventions(self, ds): for dimension in ds.dimensions: dimension_naming.assert_true( rname.match(dimension) is not None, - "dimension {} should begin with a latter and be composed of " - "letters, digits, and underscores".format(dimension), + f"dimension {dimension} should begin with a letter and be composed of " + "letters, digits, and underscores", ) ret_val.append(dimension_naming.to_result()) @@ -276,8 +276,8 @@ def check_naming_conventions(self, ds): continue attribute_naming.assert_true( rname.match(global_attr) is 
not None, - "global attribute {} should begin with a letter and be composed of " - "letters, digits, and underscores".format(global_attr), + f"global attribute {global_attr} should begin with a letter and be composed of " + "letters, digits, and underscores", ) ret_val.append(attribute_naming.to_result()) @@ -449,10 +449,8 @@ def check_valid_range_or_valid_min_max_present(self, ds): total = total + 1 fails.append( - "For the variable {} the valid_range attribute must not be present " - "if the valid_min and/or valid_max attributes are present".format( - variable.name, - ), + f"For the variable {variable.name} the valid_range attribute must not be present " + "if the valid_min and/or valid_max attributes are present", ) return Result( @@ -526,8 +524,8 @@ def check_fill_value_outside_valid_range(self, ds): valid_fill_range.assert_true( valid, - "{}:_FillValue ({}) should be outside the range specified by {} ({}, {})" - "".format(name, fill_value, spec_by, rmin, rmax), + f"{name}:_FillValue ({fill_value}) should be outside the range specified by {spec_by} ({rmin}, {rmax})" + "", ) return valid_fill_range.to_result() @@ -552,8 +550,8 @@ def check_convention_globals(self, ds): is_string = isinstance(dataset_attr, str) valid_globals.assert_true( is_string and len(dataset_attr), - "§2.6.2 global attribute {} should exist and be a non-empty string" # subsection message - "".format(attr), + f"§2.6.2 global attribute {attr} should exist and be a non-empty string" # subsection message + "", ) return valid_globals.to_result() @@ -606,8 +604,7 @@ def check_convention_possibly_var_attrs(self, ds): is_string = isinstance(varattr, str) valid_attributes.assert_true( is_string and len(varattr) > 0, - "§2.6.2 {}:{} should be a non-empty string" - "".format(name, attribute), + f"§2.6.2 {name}:{attribute} should be a non-empty string" "", ) attr_bin.add(attribute) @@ -618,8 +615,8 @@ def check_convention_possibly_var_attrs(self, ds): is_string = isinstance(dsattr, str) 
valid_attributes.assert_true( is_string and len(dsattr) > 0, - "§2.6.2 {} global attribute should be a non-empty string" - "".format(attribute), + f"§2.6.2 {attribute} global attribute should be a non-empty string" + "", ) attr_bin.add(attribute) return valid_attributes.to_result() @@ -926,7 +923,7 @@ def _check_valid_standard_units(self, ds, variable_name): valid_standard_units.assert_true( util.units_convertible(units, "seconds since 1970-01-01"), "time must be in a valid units format since " - "not {}".format(units), + f"not {units}", ) # UDunits can't tell the difference between east and north facing coordinates @@ -935,9 +932,9 @@ def _check_valid_standard_units(self, ds, variable_name): allowed_units = cfutil.VALID_LAT_UNITS | {"degrees"} valid_standard_units.assert_true( (units.lower() if units is not None else None) in allowed_units, - 'variables defining latitude ("{}") must use degrees_north ' + f'variables defining latitude ("{variable_name}") must use degrees_north ' "or degrees if defining a transformed grid. Currently " - "{}".format(variable_name, units), + f"{units}", ) # UDunits can't tell the difference between east and north facing coordinates elif standard_name == "longitude": @@ -945,9 +942,9 @@ def _check_valid_standard_units(self, ds, variable_name): allowed_units = cfutil.VALID_LON_UNITS | {"degrees"} valid_standard_units.assert_true( (units.lower() if units is not None else None) in allowed_units, - 'variables defining longitude ("{}") must use degrees_east ' + f'variables defining longitude ("{variable_name}") must use degrees_east ' "or degrees if defining a transformed grid. 
Currently " - "{}".format(variable_name, units), + f"{units}", ) return valid_standard_units.to_result() @@ -1048,11 +1045,8 @@ def check_standard_name(self, ds): valid_modifier = TestCtx(BaseCheck.HIGH, self.section_titles["3.3"]) valid_modifier.assert_true( standard_name_modifier in valid_modifiers, - 'Standard name modifier "{}" for variable {} is not a valid modifier ' - "according to CF Appendix C".format( - standard_name_modifier, - name, - ), + f'Standard name modifier "{standard_name_modifier}" for variable {name} is not a valid modifier ' + "according to CF Appendix C", ) ret_val.append(valid_modifier.to_result()) @@ -1095,8 +1089,7 @@ def check_ancillary_variables(self, ds): valid_ancillary.assert_true( isinstance(ancillary_variables, str), - "ancillary_variables attribute defined by {} " - "should be string".format(name), + f"ancillary_variables attribute defined by {name} " "should be string", ) # Can't perform the second check if it's not a string @@ -1233,8 +1226,8 @@ def _check_flag_values(self, ds, name): # the data type for flag_values should be the same as the variable valid_values.assert_true( variable.dtype.type == flag_values.dtype.type, - "flag_values ({}) must be the same data type as {} ({})" - "".format(flag_values.dtype.type, name, variable.dtype.type), + f"flag_values ({flag_values.dtype.type}) must be the same data type as {name} ({variable.dtype.type})" + "", ) # IMPLEMENTATION CONFORMANCE 3.5 REQUIRED 4/8 @@ -1270,8 +1263,8 @@ def _check_flag_masks(self, ds, name): valid_masks.assert_true( variable.dtype.type == flag_masks.dtype.type, - "flag_masks ({}) must be the same data type as {} ({})" - "".format(flag_masks.dtype.type, name, variable.dtype.type), + f"flag_masks ({flag_masks.dtype.type}) must be the same data type as {name} ({variable.dtype.type})" + "", ) type_ok = ( @@ -1497,14 +1490,14 @@ def check_latitude(self, ds): # but are convertible to angular units allowed_units.assert_true( units not in e_n_units and Unit(units) == 
Unit("degree"), - "Grid latitude variable '{}' should use degree equivalent units without east or north components. " - "Current units are {}".format(latitude, units), + f"Grid latitude variable '{latitude}' should use degree equivalent units without east or north components. " + f"Current units are {units}", ) else: allowed_units.assert_true( units_is_string and units.lower() in allowed_lat_units, - "latitude variable '{}' should define valid units for latitude" - "".format(latitude), + f"latitude variable '{latitude}' should define valid units for latitude" + "", ) ret_val.append(allowed_units.to_result()) @@ -1513,8 +1506,8 @@ def check_latitude(self, ds): # This is only a recommendation and we won't penalize but we # will include a recommended action. msg = ( - "CF recommends latitude variable '{}' to use units degrees_north" - "".format(latitude) + f"CF recommends latitude variable '{latitude}' to use units degrees_north" + "" ) recommended_units = Result( BaseCheck.LOW, @@ -1529,8 +1522,8 @@ def check_latitude(self, ds): definition = TestCtx(BaseCheck.MEDIUM, self.section_titles["4.1"]) definition.assert_true( standard_name == "latitude" or axis == "Y" or y_variables != [], - "latitude variable '{}' should define standard_name='latitude' or axis='Y'" - "".format(latitude), + f"latitude variable '{latitude}' should define standard_name='latitude' or axis='Y'" + "", ) ret_val.append(definition.to_result()) @@ -1606,14 +1599,14 @@ def check_longitude(self, ds): # but are convertible to angular units allowed_units.assert_true( units not in e_n_units and Unit(units) == Unit("degree"), - "Grid longitude variable '{}' should use degree equivalent units without east or north components. " - "Current units are {}".format(longitude, units), + f"Grid longitude variable '{longitude}' should use degree equivalent units without east or north components. 
" + f"Current units are {units}", ) else: allowed_units.assert_true( units_is_string and units.lower() in allowed_lon_units, - "longitude variable '{}' should define valid units for longitude" - "".format(longitude), + f"longitude variable '{longitude}' should define valid units for longitude" + "", ) ret_val.append(allowed_units.to_result()) @@ -1622,8 +1615,8 @@ def check_longitude(self, ds): # This is only a recommendation and we won't penalize but we # will include a recommended action. msg = ( - "CF recommends longitude variable '{}' to use units degrees_east" - "".format(longitude) + f"CF recommends longitude variable '{longitude}' to use units degrees_east" + "" ) recommended_units = Result( BaseCheck.LOW, @@ -1638,8 +1631,8 @@ def check_longitude(self, ds): definition = TestCtx(BaseCheck.MEDIUM, self.section_titles["4.2"]) definition.assert_true( standard_name == "longitude" or axis == "X" or x_variables != [], - "longitude variable '{}' should define standard_name='longitude' or axis='X'" - "".format(longitude), + f"longitude variable '{longitude}' should define standard_name='longitude' or axis='X'" + "", ) ret_val.append(definition.to_result()) @@ -1693,15 +1686,15 @@ def check_dimensional_vertical_coordinate( valid_vertical_coord = TestCtx(BaseCheck.HIGH, self.section_titles["4.3"]) valid_vertical_coord.assert_true( isinstance(units, str) and units, - "§4.3.1 {}'s units must be defined for vertical coordinates, " - "there is no default".format(name), + f"§4.3.1 {name}'s units must be defined for vertical coordinates, " + "there is no default", ) if not util.units_convertible("bar", units): valid_vertical_coord.assert_true( positive in ("up", "down"), - "{}: vertical coordinates not defining pressure must include " - "a positive attribute that is either 'up' or 'down'".format(name), + f"{name}: vertical coordinates not defining pressure must include " + "a positive attribute that is either 'up' or 'down'", ) # _check_valid_standard_units, part of the 
Chapter 3 checks, @@ -1740,8 +1733,7 @@ def _check_dimensionless_vertical_coordinate_1_6( is_not_deprecated.assert_true( units not in deprecated_units, - "§4.3.2: units are deprecated by CF in variable {}: {}" - "".format(vname, units), + f"§4.3.2: units are deprecated by CF in variable {vname}: {units}" "", ) # check the vertical coordinates @@ -2144,9 +2136,9 @@ def check_aux_coordinates(self, ds): for aux_coord in coordinates.split(): valid_aux_coords.assert_true( aux_coord in ds.variables, - "{}'s auxiliary coordinate specified by the coordinates attribute, {}, " + f"{name}'s auxiliary coordinate specified by the coordinates attribute, {aux_coord}, " "is not a variable in this dataset" - "".format(name, aux_coord), + "", ) if aux_coord not in ds.variables: continue @@ -2246,7 +2238,7 @@ def check_multi_dimensional_coords(self, ds): not_matching.assert_true( coord not in variable.dimensions, - "{} shares the same name as one of its dimensions" "".format(coord), + f"{coord} shares the same name as one of its dimensions" "", ) ret_val.append(not_matching.to_result()) @@ -2376,8 +2368,7 @@ def check_reduced_horizontal_grid(self, ds): # Make sure reduced grid features define coordinates valid_rgrid.assert_true( isinstance(coords, str) and coords, - "reduced grid feature {} must define coordinates attribute" - "".format(name), + f"reduced grid feature {name} must define coordinates attribute" "", ) # We can't check anything else if there are no defined coordinates if not isinstance(coords, str) and coords: @@ -2405,16 +2396,16 @@ def check_reduced_horizontal_grid(self, ds): compress = getattr(coord, "compress", None) valid_rgrid.assert_true( isinstance(compress, str) and compress, - "compress attribute for compression coordinate {} must be a non-empty string" - "".format(compressed_coord), + f"compress attribute for compression coordinate {compressed_coord} must be a non-empty string" + "", ) if not isinstance(compress, str): continue for dim in compress.split(): 
valid_rgrid.assert_true( dim in ds.dimensions, - "dimension {} referenced by {}:compress must exist" - "".format(dim, compressed_coord), + f"dimension {dim} referenced by {compressed_coord}:compress must exist" + "", ) ret_val.append(valid_rgrid.to_result()) @@ -2754,21 +2745,15 @@ def check_cell_boundaries(self, ds): if boundary_variable.ndim != variable.ndim + 1: valid = False reasoning.append( - "The number of dimensions of the variable {} is {}, but the " - "number of dimensions of the boundary variable {} is {}. The boundary variable " - "should have {} dimensions".format( - variable.name, - variable.ndim, - boundary_variable.name, - boundary_variable.ndim, - variable.ndim + 1, - ), + f"The number of dimensions of the variable {variable.name} is {variable.ndim}, but the " + f"number of dimensions of the boundary variable {boundary_variable.name} is {boundary_variable.ndim}. The boundary variable " + f"should have {variable.ndim + 1} dimensions", ) if variable.dimensions[:] != boundary_variable.dimensions[: variable.ndim]: valid = False reasoning.append( - "Boundary variable coordinates (for {}) are in improper order: {}. Bounds-specific dimensions should be last" - "".format(variable.name, boundary_variable.dimensions), + f"Boundary variable coordinates (for {variable.name}) are in improper order: {boundary_variable.dimensions}. Bounds-specific dimensions should be last" + "", ) # ensure p vertices form a valid simplex given previous a...n @@ -2805,11 +2790,11 @@ def _cell_measures_core(self, ds, var, external_set, variable_template): if not search_res: valid = False reasoning.append( - "The cell_measures attribute for variable {} " + f"The cell_measures attribute for variable {var.name} " "is formatted incorrectly. 
It should take the " "form of either 'area: cell_var' or " "'volume: cell_var' where cell_var is an existing name of " - "a variable describing the cell measures.".format(var.name), + "a variable describing the cell measures.", ) else: valid = True @@ -2842,8 +2827,8 @@ def _cell_measures_core(self, ds, var, external_set, variable_template): if not hasattr(cell_measure_var, "units"): valid = False reasoning.append( - "Cell measure variable {} is required " - "to have units attribute defined".format(cell_measure_var_name), + f"Cell measure variable {cell_measure_var_name} is required " + "to have units attribute defined", ) else: # IMPLEMENTATION CONFORMANCE REQUIRED 2/2 @@ -2870,12 +2855,9 @@ def _cell_measures_core(self, ds, var, external_set, variable_template): if not set(cell_measure_var.dimensions).issubset(var.dimensions): valid = False reasoning.append( - "Cell measure variable {} must have " + f"Cell measure variable {cell_measure_var_name} must have " "dimensions which are a subset of " - "those defined in variable {}.".format( - cell_measure_var_name, - var.name, - ), + f"those defined in variable {var.name}.", ) return Result(BaseCheck.MEDIUM, valid, (self.section_titles["7.2"]), reasoning) @@ -2954,8 +2936,8 @@ def check_cell_methods(self, ds): ) # changed from 7.1 to 7.3 valid_attribute.assert_true( regex.match(psep, method) is not None, - '"{}" is not a valid format for cell_methods attribute of "{}"' - "".format(method, var.name), + f'"{method}" is not a valid format for cell_methods attribute of "{var.name}"' + "", ) ret_val.append(valid_attribute.to_result()) @@ -2979,8 +2961,8 @@ def check_cell_methods(self, ds): valid_cell_names.assert_true( valid, - "{}'s cell_methods name component {} does not match a dimension, " - "area or auxiliary coordinate".format(var.name, var_str), + f"{var.name}'s cell_methods name component {var_str} does not match a dimension, " + "area or auxiliary coordinate", ) ret_val.append(valid_cell_names.to_result()) @@ 
-3284,9 +3266,9 @@ def check_climatological_statistics(self, ds): "|".join(methods), ) # "or" comparison for the methods re_string = ( - r"^time: {0} within (years|days)" # regex string to test - r" time: {0} over \1(?<=days)(?: time: {0} over years)?" - r"(?: \([^)]+\))?$".format(meth_regex) + rf"^time: {meth_regex} within (years|days)" # regex string to test + rf" time: {meth_regex} over \1(?<=days)(?: time: {meth_regex} over years)?" + r"(?: \([^)]+\))?$" ) # find any variables with a valid climatological cell_methods @@ -3526,10 +3508,8 @@ def check_compression_gathering(self, ds): if coord_size not in range(0, upper_limit_size): valid = False reasoning.append( - "The dimenssion size {} referenced by the compress attribute is not " - "in the range (0, The product of the compressed dimension sizes minus 1)".format( - coord_size, - ), + f"The dimension size {coord_size} referenced by the compress attribute is not " + "in the range (0, The product of the compressed dimension sizes minus 1)", ) result = Result( BaseCheck.MEDIUM, @@ -3645,8 +3625,8 @@ def check_variable_features(self, ds): matching_feature = TestCtx(BaseCheck.MEDIUM, self.section_titles["9.1"]) matching_feature.assert_true( variable_feature.lower() == _feature, - "{} is not a {}, it is detected as a {}" - "".format(name, _feature, variable_feature), + f"{name} is not a {_feature}, it is detected as a {variable_feature}" + "", ) ret_val.append(matching_feature.to_result()) @@ -3661,8 +3641,8 @@ def check_variable_features(self, ds): all_same_features = TestCtx(BaseCheck.HIGH, self.section_titles["9.1"]) all_same_features.assert_true( len(feature_types_found) < 2, - "Different feature types discovered in this dataset: {}" - "".format(feature_description), + f"Different feature types discovered in this dataset: {feature_description}" + "", ) ret_val.append(all_same_features.to_result()) @@ -3696,8 +3676,8 @@ def _check_hint_bounds(self, ds): for name in ds.variables: if name.endswith("_bounds") and 
name not in boundary_variables: msg = ( - "{} might be a cell boundary variable but there are no variables that define it " - "as a boundary using the `bounds` attribute.".format(name) + f"{name} might be a cell boundary variable but there are no variables that define it " + "as a boundary using the `bounds` attribute." ) result = Result(BaseCheck.LOW, True, self.section_titles["7.1"], [msg]) ret_val.append(result) diff --git a/compliance_checker/cf/cf_1_7.py b/compliance_checker/cf/cf_1_7.py index 14d6430c..e47ca240 100644 --- a/compliance_checker/cf/cf_1_7.py +++ b/compliance_checker/cf/cf_1_7.py @@ -263,21 +263,15 @@ def check_cell_boundaries(self, ds): if boundary_variable.ndim != variable.ndim + 1: valid = False reasoning.append( - "The number of dimensions of the variable {} is {}, but the " - "number of dimensions of the boundary variable {} is {}. The boundary variable " - "should have {} dimensions".format( - variable.name, - variable.ndim, - boundary_variable.name, - boundary_variable.ndim, - variable.ndim + 1, - ), + f"The number of dimensions of the variable {variable.name} is {variable.ndim}, but the " + f"number of dimensions of the boundary variable {boundary_variable.name} is {boundary_variable.ndim}. The boundary variable " + f"should have {variable.ndim + 1} dimensions", ) if variable.dimensions[:] != boundary_variable.dimensions[: variable.ndim]: valid = False reasoning.append( - "Boundary variable coordinates (for {}) are in improper order: {}. Bounds-specific dimensions should be last" - "".format(variable.name, boundary_variable.dimensions), + f"Boundary variable coordinates (for {variable.name}) are in improper order: {boundary_variable.dimensions}. 
Bounds-specific dimensions should be last" + "", ) # 7.1 Required 2/5: continue @@ -315,15 +309,9 @@ def check_cell_boundaries(self, ds): if getattr(variable, item) != getattr(boundary_variable, item): valid = False reasoning.append( - "'{}' has attr '{}' with value '{}' that does not agree " - "with its associated variable ('{}')'s attr value '{}'" - "".format( - boundary_variable_name, - item, - getattr(boundary_variable, item), - variable.name, - getattr(variable, item), - ), + f"'{boundary_variable_name}' has attr '{item}' with value '{getattr(boundary_variable, item)}' that does not agree " + f"with its associated variable ('{variable.name}')'s attr value '{getattr(variable, item)}'" + "", ) # 7.1 Required 5/5: @@ -396,14 +384,9 @@ def check_cell_boundaries_interval(self, ds): ): valid = False reasoning.append( - "The points specified by the coordinate variable {} ({})" + f"The points specified by the coordinate variable {variable_name} ({variable[ii]})" " lie outside the boundary of the cell specified by the " - "associated boundary variable {} ({})".format( - variable_name, - variable[ii], - boundary_variable_name, - boundary_variable[ii], - ), + f"associated boundary variable {boundary_variable_name} ({boundary_variable[ii]})", ) result = Result( @@ -819,7 +802,7 @@ def check_grid_mapping(self, ds): test_ctx.messages.append( "Cannot have both 'geoid_name' and " "'geopotential_datum_name' attributes in " - "grid mapping variable '{}'".format(var.name), + f"grid mapping variable '{var.name}'", ) elif len_vdatum_name_attrs == 1: # should be one or zero attrs @@ -834,20 +817,16 @@ def check_grid_mapping(self, ds): ) invalid_msg = ( - "Vertical datum value '{}' for " - "attribute '{}' in grid mapping " - "variable '{}' is not valid".format( - v_datum_value, - v_datum_attr, - var.name, - ) + f"Vertical datum value '{v_datum_value}' for " + f"attribute '{v_datum_attr}' in grid mapping " + f"variable '{var.name}' is not valid" ) 
test_ctx.assert_true(v_datum_str_valid, invalid_msg) except sqlite3.Error as e: # if we hit an error, skip the check warn( "Error occurred while trying to query " - "Proj4 SQLite database at {}: {}".format(proj_db_path, str(e)), + f"Proj4 SQLite database at {proj_db_path}: {str(e)}", stacklevel=2, ) prev_return[var.name] = test_ctx.to_result() diff --git a/compliance_checker/cf/cf_base.py b/compliance_checker/cf/cf_base.py index 422815cf..7fd1d978 100644 --- a/compliance_checker/cf/cf_base.py +++ b/compliance_checker/cf/cf_base.py @@ -185,8 +185,8 @@ def check_grid_mapping(self, ds): ) defines_grid_mapping.assert_true( (isinstance(grid_mapping, str) and grid_mapping), - "{}'s grid_mapping attribute must be a " - "space-separated non-empty string".format(variable.name), + f"{variable.name}'s grid_mapping attribute must be a " + "space-separated non-empty string", ) if isinstance(grid_mapping, str): # TODO (badams): refactor functionality to split functionality @@ -313,7 +313,7 @@ def check_conventions_version(self, ds): else: reasoning = [ "§2.6.1 Conventions global attribute does not contain " - '"{}"'.format(correct_version_string), + f'"{correct_version_string}"', ] else: valid = False @@ -375,8 +375,8 @@ def _check_formula_terms(self, ds, coord, dimless_coords_dict): valid_formula_terms.assert_true( isinstance(formula_terms, str) and formula_terms, - "§4.3.2: {}'s formula_terms is a required attribute and must be a non-empty string" - "".format(coord), + f"§4.3.2: {coord}'s formula_terms is a required attribute and must be a non-empty string" + "", ) # We can't check any more if not formula_terms: @@ -417,16 +417,16 @@ def _check_formula_terms(self, ds, coord, dimless_coords_dict): valid_formula_terms.assert_true( standard_name in dimless_coords_dict, - "unknown standard_name '{}' for dimensionless vertical coordinate {}" - "".format(standard_name, coord), + f"unknown standard_name '{standard_name}' for dimensionless vertical coordinate {coord}" + "", ) if 
standard_name not in dimless_coords_dict: return valid_formula_terms.to_result() valid_formula_terms.assert_true( no_missing_terms(standard_name, terms, dimless_coords_dict), - "{}'s formula_terms are invalid for {}, please see appendix D of CF 1.6" - "".format(coord, standard_name), + f"{coord}'s formula_terms are invalid for {standard_name}, please see appendix D of CF 1.6" + "", ) return valid_formula_terms.to_result() @@ -476,13 +476,8 @@ def _parent_var_attr_type_check(self, attr_name, var, ctx): ctx.assert_true( type_match, - "Attribute '{}' (type: {}) and parent variable '{}' (type: {}) " - "must have equivalent datatypes".format( - attr_name, - val_type, - var.name, - var.dtype.type, - ), + f"Attribute '{attr_name}' (type: {val_type}) and parent variable '{var.name}' (type: {var.dtype.type}) " + "must have equivalent datatypes", ) def _find_aux_coord_vars(self, ds, refresh=False): @@ -1070,8 +1065,8 @@ def _att_loc_msg(att_loc): test_ctx.out_of += 1 if "G" not in att_loc: test_ctx.messages.append( - '[Appendix A] Attribute "{}" should not be present in global (G) ' - "attributes. {}".format(global_att_name, valid_loc_warn), + f'[Appendix A] Attribute "{global_att_name}" should not be present in global (G) ' + f"attributes. {valid_loc_warn}", ) else: result = self._handle_dtype_check(global_att, global_att_name, att_dict) @@ -1109,13 +1104,8 @@ def _att_loc_msg(att_loc): test_ctx.out_of += 1 if coord_letter not in att_loc: test_ctx.messages.append( - '[Appendix A] Attribute "{}" should not be present in {} ' - 'variable "{}". {}'.format( - att_name, - att_loc_print_helper(coord_letter), - var_name, - valid_loc_warn, - ), + f'[Appendix A] Attribute "{att_name}" should not be present in {att_loc_print_helper(coord_letter)} ' + f'variable "{var_name}". 
{valid_loc_warn}', ) else: result = self._handle_dtype_check(att, att_name, att_dict, var) diff --git a/compliance_checker/ioos.py b/compliance_checker/ioos.py index 3264843e..733b9e91 100644 --- a/compliance_checker/ioos.py +++ b/compliance_checker/ioos.py @@ -1232,8 +1232,8 @@ def check_creator_and_publisher_type(self, ds): else: pass_stat = False messages.append( - "If specified, {} must be in value list " - "({})".format(global_att_name, sorted(expected_types)), + f"If specified, {global_att_name} must be in value list " + f"({sorted(expected_types)})", ) result_list.append( @@ -1390,14 +1390,14 @@ def check_vertical_coordinates(self, ds): valid_vertical_coord = TestCtx(BaseCheck.HIGH, "Vertical coordinates") units_set_msg = ( - "{}'s units attribute {} is not equivalent to one " - "of {}".format(name, units_str, expected_unit_strs) + f"{name}'s units attribute {units_str} is not equivalent to one " + f"of {expected_unit_strs}" ) valid_vertical_coord.assert_true(pass_stat, units_set_msg) pos_msg = ( - "{}: vertical coordinates must include a positive " - "attribute that is either 'up' or 'down'".format(name) + f"{name}: vertical coordinates must include a positive " + "attribute that is either 'up' or 'down'" ) valid_vertical_coord.assert_true(positive in ("up", "down"), pos_msg) @@ -1685,9 +1685,9 @@ def check_qartod_variables_references(self, ds): attval = getattr(v, "references", None) if attval is None: msg = ( - '"references" attribute not present for variable {}.' + f'"references" attribute not present for variable {v.name}.' "If present, it should be a valid URL." 
- ).format(v.name) + ) val = False else: msg = f'"references" attribute for variable "{v.name}" must be a valid URL' diff --git a/compliance_checker/suite.py b/compliance_checker/suite.py index 5db72b67..d0e84769 100644 --- a/compliance_checker/suite.py +++ b/compliance_checker/suite.py @@ -346,11 +346,8 @@ def _process_skip_checks(cls, skip_checks): check_max_level = check_lookup[split_check_spec[1]] except KeyError: warnings.warn( - "Skip specifier '{}' on check '{}' not found," - " defaulting to skip entire check".format( - split_check_spec[1], - check_name, - ), + f"Skip specifier '{split_check_spec[1]}' on check '{check_name}' not found," + " defaulting to skip entire check", stacklevel=2, ) check_max_level = BaseCheck.HIGH diff --git a/compliance_checker/tests/test_acdd.py b/compliance_checker/tests/test_acdd.py index 13b84bb0..9661be73 100644 --- a/compliance_checker/tests/test_acdd.py +++ b/compliance_checker/tests/test_acdd.py @@ -439,7 +439,7 @@ def test_geospatial_bounds(self): if result.variable_name == "geospatial_bounds": assert ( "Could not parse WKT from geospatial_bounds," - ' possible bad value: "{}"'.format(empty_ds.geospatial_bounds) + f' possible bad value: "{empty_ds.geospatial_bounds}"' in result.msgs )