Skip to content

Commit

Permalink
MAINT: address Ruff issues
Browse files Browse the repository at this point in the history
  • Loading branch information
redeboer committed Jul 6, 2023
1 parent 035ccc3 commit 66d6dcd
Show file tree
Hide file tree
Showing 7 changed files with 21 additions and 13 deletions.
4 changes: 2 additions & 2 deletions benchmarks/ampform.py
Original file line number Diff line number Diff line change
Expand Up @@ -195,8 +195,8 @@ def test_fit(self, backend, benchmark, model, size):

def print_data_sample(data: DataSample, sample_size: int) -> None:
"""Print a `.DataSample`, so it can be pasted into the expected sample."""
print()
pprint(
print() # noqa: T201
pprint( # noqa: T203
{
i: np.round(four_momenta[:sample_size], decimals=11).tolist()
for i, four_momenta in data.items()
Expand Down
7 changes: 7 additions & 0 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -219,6 +219,7 @@ ignore = [
"D407",
"D416",
"E501",
"RUF012",
"S307",
"SIM108",
]
Expand All @@ -231,6 +232,12 @@ target-version = "py37"
task-tags = ["cspell"]

[tool.ruff.per-file-ignores]
"benchmarks/*" = [
"D",
"PLR0913",
"PLR2004",
"S101",
]
"docs/*" = [
"E402",
"INP001",
Expand Down
5 changes: 3 additions & 2 deletions src/tensorwaves/estimator.py
Original file line number Diff line number Diff line change
Expand Up @@ -43,6 +43,7 @@ def create_cached_function(
backend: The computational backend in which to express the
input :code:`expression`.
free_parameters: Symbols in the expression that change and should not be cached.
use_cse: See :func:`.create_parametrized_function`.
Returns:
Expand Down Expand Up @@ -116,7 +117,7 @@ class ChiSquared(Estimator):
.. seealso:: :doc:`/usage/chi-squared`
"""

def __init__(
def __init__( # noqa: PLR0913
self,
function: ParametrizedFunction,
domain: DataSample,
Expand Down Expand Up @@ -183,7 +184,7 @@ class UnbinnedNLL(Estimator):
.. seealso:: :doc:`/usage/unbinned-fit`
"""

def __init__(
def __init__( # noqa: PLR0913
self,
function: ParametrizedFunction,
data: DataSample,
Expand Down
4 changes: 2 additions & 2 deletions src/tensorwaves/function/sympy/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -170,7 +170,7 @@ def _lambdify_normal_or_fast(
)


def lambdify(
def lambdify( # noqa: C901, PLR0911
expression: sp.Expr,
symbols: Sequence[sp.Symbol],
backend: str,
Expand Down Expand Up @@ -278,7 +278,7 @@ def _sympy_lambdify(
)


def fast_lambdify(
def fast_lambdify( # noqa: PLR0913
expression: sp.Expr,
symbols: Sequence[sp.Symbol],
backend: str,
Expand Down
8 changes: 4 additions & 4 deletions src/tensorwaves/optimizer/callbacks.py
Original file line number Diff line number Diff line change
Expand Up @@ -132,7 +132,7 @@ def on_optimize_start(self, logs: dict[str, Any] | None = None) -> None:
if self.__iteration_step_size > 0:
self.__latest_iteration = 0
_close_stream(self.__stream)
self.__stream = open(self.__filename, "w", newline="")
self.__stream = open(self.__filename, "w", newline="") # noqa: SIM115
self.__writer = csv.DictWriter(
self.__stream,
fieldnames=list(self.__log_to_rowdict(logs)),
Expand Down Expand Up @@ -208,7 +208,7 @@ def cast_non_numeric(value: str) -> complex | float | int | str:
if float_value.is_integer():
return int(float_value)
return float_value
return complex_value
return complex_value # noqa: TRY300
except ValueError:
return value

Expand Down Expand Up @@ -296,7 +296,7 @@ def __del__(self) -> None:

def on_optimize_start(self, logs: dict[str, Any] | None = None) -> None:
_close_stream(self.__stream)
self.__stream = open(self.__filename, "w")
self.__stream = open(self.__filename, "w") # noqa: SIM115

def on_optimize_end(self, logs: dict[str, Any] | None = None) -> None:
if logs is None:
Expand Down Expand Up @@ -336,7 +336,7 @@ def __dump_to_yaml(self, logs: dict[str, Any]) -> None:
@staticmethod
def load_latest_parameters(filename: Path | str) -> dict:
with open(filename) as stream:
fit_stats = yaml.load(stream, Loader=yaml.Loader)
fit_stats = yaml.load(stream, Loader=yaml.Loader) # noqa: S506
return fit_stats["parameters"]


Expand Down
4 changes: 2 additions & 2 deletions src/tensorwaves/optimizer/minuit.py
Original file line number Diff line number Diff line change
Expand Up @@ -114,7 +114,7 @@ def wrapped_gradient(pars: list) -> Iterable[float]:
name=tuple(flattened_parameters),
)
minuit.errors = tuple(
0.1 * abs(x) if abs(x) != 0.0 else 0.1
0.1 * abs(x) if abs(x) != 0.0 else 0.1 # noqa: PLR2004
for x in flattened_parameters.values()
)
minuit.errordef = (
Expand All @@ -135,7 +135,7 @@ def wrapped_gradient(pars: list) -> Iterable[float]:
parameter_values[name] = par_state.value
parameter_errors[name] = par_state.error

assert minuit.fmin is not None
assert minuit.fmin is not None # noqa: S101
fit_result = FitResult(
minimum_valid=minuit.valid,
execution_time=end_time - start_time,
Expand Down
2 changes: 1 addition & 1 deletion src/tensorwaves/optimizer/scipy.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,7 @@ def __init__(
self.__method = method
self.__minimize_options = scipy_options

def optimize(
def optimize( # noqa: C901
self,
estimator: Estimator,
initial_parameters: Mapping[str, ParameterValue],
Expand Down

0 comments on commit 66d6dcd

Please sign in to comment.