diff --git a/.github/workflows/lint-python.yml b/.github/workflows/lint-python.yml
index 6568d52681b4..f5a6479f18b9 100644
--- a/.github/workflows/lint-python.yml
+++ b/.github/workflows/lint-python.yml
@@ -30,9 +30,9 @@ jobs:
 
       - name: Lint Python
         run: |
-          ruff --exit-non-zero-on-fix .
-          black --check .
-          blackdoc --check .
+          ruff check --diff .
+          ruff format --diff .
+          blackdoc --diff .
 
   mypy:
     runs-on: ubuntu-latest
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 280e5e03e581..e59ddcc8057f 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -100,8 +100,8 @@ make pre-commit
 Note that we do not actually use the [pre-commit](https://pre-commit.com/) tool.
 We use the Makefile to conveniently run the following formatting and linting tools:
 
-- [black](https://black.readthedocs.io/) and [blackdoc](https://github.com/keewis/blackdoc)
 - [ruff](https://github.com/charliermarsh/ruff)
+- [blackdoc](https://github.com/keewis/blackdoc)
 - [mypy](http://mypy-lang.org/)
 - [rustfmt](https://github.com/rust-lang/rustfmt)
 - [clippy](https://doc.rust-lang.org/nightly/clippy/index.html)
diff --git a/docs/development/contributing.md b/docs/development/contributing.md
index 471e3d7180f3..51fbeddb8bb1 100644
--- a/docs/development/contributing.md
+++ b/docs/development/contributing.md
@@ -95,8 +95,8 @@ make pre-commit
 Note that we do not actually use the [pre-commit](https://pre-commit.com/) tool.
 We use the Makefile to conveniently run the following formatting and linting tools:
 
-- [black](https://black.readthedocs.io/) and [blackdoc](https://github.com/keewis/blackdoc)
 - [ruff](https://github.com/charliermarsh/ruff)
+- [blackdoc](https://github.com/keewis/blackdoc)
 - [mypy](http://mypy-lang.org/)
 - [rustfmt](https://github.com/rust-lang/rustfmt)
 - [clippy](https://doc.rust-lang.org/nightly/clippy/index.html)
diff --git a/py-polars/Makefile b/py-polars/Makefile
index f1e89e97643f..16d1a4156f56 100644
--- a/py-polars/Makefile
+++ b/py-polars/Makefile
@@ -56,8 +56,8 @@ build-release-native: .venv  ## Same as build-release, except with native CPU op
 
 .PHONY: fmt
 fmt: .venv  ## Run autoformatting and linting
-	$(VENV_BIN)/ruff .
-	$(VENV_BIN)/black .
+	$(VENV_BIN)/ruff check .
+	$(VENV_BIN)/ruff format .
 	$(VENV_BIN)/blackdoc .
 	$(VENV_BIN)/typos ..
 	cargo fmt --all
diff --git a/py-polars/polars/config.py b/py-polars/polars/config.py
index 3a5a1f1b07cb..bae829dedf2c 100644
--- a/py-polars/polars/config.py
+++ b/py-polars/polars/config.py
@@ -333,7 +333,9 @@ def save_to_file(cls, file: Path | str) -> None:
     @classmethod
     @deprecate_nonkeyword_arguments(version="0.19.3")
     def state(
-        cls, if_set: bool = False, env_only: bool = False  # noqa: FBT001
+        cls,
+        if_set: bool = False,  # noqa: FBT001
+        env_only: bool = False,  # noqa: FBT001
     ) -> dict[str, str | None]:
         """
         Show the current state of all Config variables as a dict.
diff --git a/py-polars/polars/dataframe/frame.py b/py-polars/polars/dataframe/frame.py
index a35aabe41524..f5d3d9fc808c 100644
--- a/py-polars/polars/dataframe/frame.py
+++ b/py-polars/polars/dataframe/frame.py
@@ -23,6 +23,7 @@
     NoReturn,
     Sequence,
    TypeVar,
+    Union,
     cast,
     overload,
 )
@@ -174,10 +175,10 @@
     # MultiColSelector indexes into the horizontal axis
     # NOTE: wrapping these as strings is necessary for Python <3.10
 
-    MultiRowSelector: TypeAlias = "slice | range | list[int] | Series"
-    MultiColSelector: TypeAlias = (
-        "slice | range | list[int] | list[str] | list[bool] | Series"
-    )
+    MultiRowSelector: TypeAlias = Union[slice, range, "list[int]", "Series"]
+    MultiColSelector: TypeAlias = Union[
+        slice, range, "list[int]", "list[str]", "list[bool]", "Series"
+    ]
 
     T = TypeVar("T")
     P = ParamSpec("P")
@@ -1276,7 +1277,9 @@ def __array__(self, dtype: Any = None) -> np.ndarray[Any, Any]:
         return self.to_numpy().__array__()
 
     def __dataframe__(
-        self, nan_as_null: bool = False, allow_copy: bool = True  # noqa: FBT001
+        self,
+        nan_as_null: bool = False,  # noqa: FBT001
+        allow_copy: bool = True,  # noqa: FBT001
     ) -> PolarsDataFrame:
         """
         Convert to a dataframe object implementing the dataframe interchange protocol.
@@ -1911,13 +1914,15 @@ def to_dict(self, as_series: Literal[False]) -> dict[str, list[Any]]:
 
     @overload
     def to_dict(
-        self, as_series: bool  # noqa: FBT001
+        self,
+        as_series: bool,  # noqa: FBT001
     ) -> dict[str, Series] | dict[str, list[Any]]:
         ...
 
     # TODO: Make `as_series` keyword-only
     def to_dict(
-        self, as_series: bool = True  # noqa: FBT001
+        self,
+        as_series: bool = True,  # noqa: FBT001
     ) -> dict[str, Series] | dict[str, list[Any]]:
         """
         Convert DataFrame to a dictionary mapping column name to values.
@@ -2029,7 +2034,10 @@ def to_dicts(self) -> list[dict[str, Any]]:
 
     @deprecate_nonkeyword_arguments(version="0.19.3")
     def to_numpy(
-        self, structured: bool = False, *, order: IndexOrder = "fortran"  # noqa: FBT001
+        self,
+        structured: bool = False,  # noqa: FBT001
+        *,
+        order: IndexOrder = "fortran",
     ) -> np.ndarray[Any, Any]:
         """
         Convert DataFrame to a 2D NumPy array.
@@ -3094,7 +3102,11 @@ def write_excel(
                     options = {"hidden": True}
                 if column in column_widths:  # type: ignore[operator]
                     ws.set_column_pixels(
-                        col_idx, col_idx, column_widths[column], None, options  # type: ignore[index]
+                        col_idx,
+                        col_idx,
+                        column_widths[column],  # type: ignore[index]
+                        None,
+                        options,
                     )
                 elif options:
                     ws.set_column(col_idx, col_idx, None, None, options)
diff --git a/py-polars/polars/datatypes/convert.py b/py-polars/polars/datatypes/convert.py
index 496ed957d427..80f332fd6cce 100644
--- a/py-polars/polars/datatypes/convert.py
+++ b/py-polars/polars/datatypes/convert.py
@@ -143,9 +143,7 @@ def _map_py_type_to_dtype(
         if len(nested) == 1:
            nested = nested[0]
         return (
-            dtype
-            if nested is None
-            else dtype(_map_py_type_to_dtype(nested))  # type: ignore[operator]
+            dtype if nested is None else dtype(_map_py_type_to_dtype(nested))  # type: ignore[operator]
         )
 
     raise TypeError("invalid type")
diff --git a/py-polars/polars/interchange/dataframe.py b/py-polars/polars/interchange/dataframe.py
index 417e586c0c34..aa7ac1dac2ab 100644
--- a/py-polars/polars/interchange/dataframe.py
+++ b/py-polars/polars/interchange/dataframe.py
@@ -36,7 +36,9 @@ def __init__(self, df: DataFrame, *, allow_copy: bool = True):
         self._allow_copy = allow_copy
 
     def __dataframe__(
-        self, nan_as_null: bool = False, allow_copy: bool = True  # noqa: FBT001
+        self,
+        nan_as_null: bool = False,  # noqa: FBT001
+        allow_copy: bool = True,  # noqa: FBT001
     ) -> PolarsDataFrame:
         """
         Construct a new dataframe object, potentially changing the parameters.
diff --git a/py-polars/polars/interchange/protocol.py b/py-polars/polars/interchange/protocol.py
index 98a30770b913..c75e410a0125 100644
--- a/py-polars/polars/interchange/protocol.py
+++ b/py-polars/polars/interchange/protocol.py
@@ -193,7 +193,9 @@ def version(self) -> int:
         """Version of the protocol."""
 
     def __dataframe__(
-        self, nan_as_null: bool = False, allow_copy: bool = True  # noqa: FBT001
+        self,
+        nan_as_null: bool = False,  # noqa: FBT001
+        allow_copy: bool = True,  # noqa: FBT001
     ) -> DataFrame:
         """Convert to a dataframe object implementing the dataframe interchange protocol."""  # noqa: W505
@@ -236,7 +238,9 @@ class SupportsInterchange(Protocol):
     """Dataframe that supports conversion into an interchange dataframe object."""
 
     def __dataframe__(
-        self, nan_as_null: bool = False, allow_copy: bool = True  # noqa: FBT001
+        self,
+        nan_as_null: bool = False,  # noqa: FBT001
+        allow_copy: bool = True,  # noqa: FBT001
     ) -> SupportsInterchange:
         """Convert to a dataframe object implementing the dataframe interchange protocol."""  # noqa: W505
diff --git a/py-polars/polars/testing/parametric/primitives.py b/py-polars/polars/testing/parametric/primitives.py
index 27194b56c5e5..a9aba32f5119 100644
--- a/py-polars/polars/testing/parametric/primitives.py
+++ b/py-polars/polars/testing/parametric/primitives.py
@@ -718,9 +718,7 @@ def draw_frames(draw: DrawFn) -> DataFrame | LazyFrame:
                     schema={repr(schema).replace("', ","', pl.")},
                     orient={orient!r},
                 )
-                """.replace(
-                    "datetime.", ""
-                )
+                """.replace("datetime.", "")
             )
             # note: this avoids printing the repro twice
             if failed_frame_init not in _failed_frame_init_msgs_:
diff --git a/py-polars/polars/utils/_async.py b/py-polars/polars/utils/_async.py
index 3294bca9428f..ea945d1ec7f9 100644
--- a/py-polars/polars/utils/_async.py
+++ b/py-polars/polars/utils/_async.py
@@ -34,7 +34,9 @@ def __init__(self) -> None:
         self._watcher.start(self._watcher_callback)
 
     def get(
-        self, block: bool = True, timeout: float | int | None = None  # noqa: FBT001
+        self,
+        block: bool = True,  # noqa: FBT001
+        timeout: float | int | None = None,
     ) -> T:
         return self.result.get(block=block, timeout=timeout)
diff --git a/py-polars/polars/utils/_construction.py b/py-polars/polars/utils/_construction.py
index d4952be71ea2..7c7ff0fdbc8f 100644
--- a/py-polars/polars/utils/_construction.py
+++ b/py-polars/polars/utils/_construction.py
@@ -352,8 +352,7 @@ def _construct_series_with_fallbacks(
         if "'float'" in str_exc and (
             # we do not accept float values as int/temporal, as it causes silent
             # information loss; the caller should explicitly cast in this case.
-            target_dtype
-            not in (INTEGER_DTYPES | TEMPORAL_DTYPES)
+            target_dtype not in (INTEGER_DTYPES | TEMPORAL_DTYPES)
         ):
             constructor = py_type_to_constructor(float)
@@ -741,9 +740,7 @@ def _unpack_schema(
         else None
     )
     column_dtypes: dict[str, PolarsDataType] = {
-        lookup.get((name := col[0]), name)
-        if lookup
-        else col[0]: dtype  # type: ignore[misc]
+        lookup.get((name := col[0]), name) if lookup else col[0]: dtype  # type: ignore[misc]
         if is_polars_dtype(dtype, include_unknown=True)
         else py_type_to_dtype(dtype)
         for col in schema
diff --git a/py-polars/pyproject.toml b/py-polars/pyproject.toml
index 1693a8b986b6..adc0ea65c40c 100644
--- a/py-polars/pyproject.toml
+++ b/py-polars/pyproject.toml
@@ -130,7 +130,6 @@ select = [
   "SIM", # flake8-simplify
   "TCH", # flake8-type-checking
   "TID", # flake8-tidy-imports
-  "Q", # flake8-quotes
   "UP", # pyupgrade
   "PT", # flake8-pytest-style
   "RUF", # Ruff-specific rules
@@ -172,14 +171,14 @@ ignore = [
   "TD003", # Missing issue link on the line following this TODO
   # tryceratops
   "TRY003", # Avoid specifying long messages outside the exception class
+  # Lints below are turned off because of conflicts with the ruff formatter
+  "D206",
+  "W191",
 ]
 
 [tool.ruff.pycodestyle]
 max-doc-length = 88
 
-[tool.ruff.isort]
-split-on-trailing-comma = false
-
 [tool.ruff.flake8-tidy-imports]
 ban-relative-imports = "all"
diff --git a/py-polars/requirements-lint.txt b/py-polars/requirements-lint.txt
index 40588f518050..fbb7a5395e1d 100644
--- a/py-polars/requirements-lint.txt
+++ b/py-polars/requirements-lint.txt
@@ -1,5 +1,4 @@
-black==23.10.0
 blackdoc==0.3.8
 mypy==1.6.0
-ruff==0.1.0
+ruff==0.1.2
 typos==1.16.20
diff --git a/py-polars/tests/unit/dataframe/test_df.py b/py-polars/tests/unit/dataframe/test_df.py
index 36c0545f88cc..7afea1b9efa8 100644
--- a/py-polars/tests/unit/dataframe/test_df.py
+++ b/py-polars/tests/unit/dataframe/test_df.py
@@ -2641,9 +2641,7 @@ def test_fill_null_limits() -> None:
            pl.all().fill_null(strategy="forward", limit=2),
            pl.all().fill_null(strategy="backward", limit=2).name.suffix("_backward"),
        ]
-    ).to_dict(
-        False
-    ) == {
+    ).to_dict(False) == {
        "a": [1, 1, 1, None, 5, 6, 6, 6, None, 10],
        "b": ["a", "a", "a", None, "b", "c", "c", "c", None, "d"],
        "c": [True, True, True, None, False, True, True, True, None, False],
@@ -2774,15 +2772,17 @@ def test_selection_regex_and_multicol() -> None:
     ]
 
 
-def test_unique_on_sorted() -> None:
+@pytest.mark.parametrize("subset", ["a", cs.starts_with("x", "a")])
+def test_unique_on_sorted(subset: Any) -> None:
     df = pl.DataFrame(data={"a": [1, 1, 3], "b": [1, 2, 3]})
-    for subset in ("a", cs.starts_with("x", "a")):
-        assert df.with_columns([pl.col("a").set_sorted()]).unique(
-            subset=subset, keep="last"  # type: ignore[arg-type]
-        ).to_dict(False) == {
-            "a": [1, 3],
-            "b": [2, 3],
-        }
+
+    result = df.with_columns([pl.col("a").set_sorted()]).unique(
+        subset=subset,
+        keep="last",
+    )
+
+    expected = pl.DataFrame({"a": [1, 3], "b": [2, 3]})
+    assert_frame_equal(result, expected)
 
 
 def test_len_compute(df: pl.DataFrame) -> None:
diff --git a/py-polars/tests/unit/datatypes/test_categorical.py b/py-polars/tests/unit/datatypes/test_categorical.py
index 4ab83bc6b160..d7c92e71a9c7 100644
--- a/py-polars/tests/unit/datatypes/test_categorical.py
+++ b/py-polars/tests/unit/datatypes/test_categorical.py
@@ -299,15 +299,9 @@ def test_nested_categorical_aggregation_7848() -> None:
         }
     ).with_columns([pl.col("letter").cast(pl.Categorical)]).group_by(
         maintain_order=True, by=["group"]
-    ).all().with_columns(
-        [pl.col("letter").list.len().alias("c_group")]
-    ).group_by(
+    ).all().with_columns(pl.col("letter").list.len().alias("c_group")).group_by(
         by=["c_group"], maintain_order=True
-    ).agg(
-        pl.col("letter")
-    ).to_dict(
-        False
-    ) == {
+    ).agg(pl.col("letter")).to_dict(False) == {
         "c_group": [2, 3],
         "letter": [[["a", "b"], ["f", "g"]], [["c", "d", "e"]]],
     }
diff --git a/py-polars/tests/unit/datatypes/test_decimal.py b/py-polars/tests/unit/datatypes/test_decimal.py
index 63f9829e8b22..9c0f134c518c 100644
--- a/py-polars/tests/unit/datatypes/test_decimal.py
+++ b/py-polars/tests/unit/datatypes/test_decimal.py
@@ -227,6 +227,8 @@ def test_decimal_aggregations() -> None:
         sum=pl.sum("a"),
         min=pl.min("a"),
         max=pl.max("a"),
-    ).to_dict(
-        False
-    ) == {"sum": [D("9110.33")], "min": [D("0.10")], "max": [D("9000.12")]}
+    ).to_dict(False) == {
+        "sum": [D("9110.33")],
+        "min": [D("0.10")],
+        "max": [D("9000.12")],
+    }
diff --git a/py-polars/tests/unit/datatypes/test_list.py b/py-polars/tests/unit/datatypes/test_list.py
index b4ff7cd3a393..a9eedb4213dd 100644
--- a/py-polars/tests/unit/datatypes/test_list.py
+++ b/py-polars/tests/unit/datatypes/test_list.py
@@ -286,9 +286,7 @@ def test_list_count_matches_deprecated() -> None:
         {"listcol": [[], [1], [1, 2, 3, 2], [1, 2, 1], [4, 4]]}
     ).select(pl.col("listcol").list.count_match(2).alias("number_of_twos")).to_dict(
         False
-    ) == {
-        "number_of_twos": [0, 0, 2, 1, 0]
-    }
+    ) == {"number_of_twos": [0, 0, 2, 1, 0]}
 
 
 def test_list_count_matches() -> None:
diff --git a/py-polars/tests/unit/datatypes/test_struct.py b/py-polars/tests/unit/datatypes/test_struct.py
index 542c6f3b1b4d..ac8d3deadced 100644
--- a/py-polars/tests/unit/datatypes/test_struct.py
+++ b/py-polars/tests/unit/datatypes/test_struct.py
@@ -317,9 +317,7 @@ def test_struct_list_head_tail() -> None:
             pl.col("list_of_struct").list.head(1).alias("head"),
             pl.col("list_of_struct").list.tail(1).alias("tail"),
         ]
-    ).to_dict(
-        False
-    ) == {
+    ).to_dict(False) == {
         "list_of_struct": [
             [{"a": 1, "b": 4}, {"a": 3, "b": 6}],
             [{"a": 10, "b": 40}, {"a": 20, "b": 50}, {"a": 30, "b": 60}],
@@ -401,9 +399,7 @@ def test_struct_concat_list() -> None:
         }
     ).with_columns(
         [pl.col("list_struct1").list.concat("list_struct2").alias("result")]
-    )[
-        "result"
-    ].to_list() == [
+    )["result"].to_list() == [
         [{"a": 1, "b": 2}, {"a": 3, "b": 4}, {"a": 6, "b": 7}, {"a": 8, "b": 9}],
         [{"a": 1, "b": 2}, {"a": 6, "b": 7}],
     ]
diff --git a/py-polars/tests/unit/datatypes/test_temporal.py b/py-polars/tests/unit/datatypes/test_temporal.py
index 2e5677fa102f..7ec08e8b634e 100644
--- a/py-polars/tests/unit/datatypes/test_temporal.py
+++ b/py-polars/tests/unit/datatypes/test_temporal.py
@@ -1263,11 +1263,7 @@ def test_unique_counts_on_dates() -> None:
             pl.col("dt_ns").dt.cast_time_unit("ms").alias("dt_ms"),
             pl.col("dt_ns").cast(pl.Date).alias("date"),
         ]
-    ).select(
-        pl.all().unique_counts().sum()
-    ).to_dict(
-        False
-    ) == {
+    ).select(pl.all().unique_counts().sum()).to_dict(False) == {
         "dt_ns": [3],
         "dt_us": [3],
         "dt_ms": [3],
@@ -1304,9 +1300,7 @@ def test_rolling_by_ordering() -> None:
         [
             pl.col("val").sum().alias("sum val"),
         ]
-    ).to_dict(
-        False
-    ) == {
+    ).to_dict(False) == {
         "key": ["A", "A", "A", "A", "B", "B", "B"],
         "dt": [
             datetime(2022, 1, 1, 0, 1),
@@ -1402,9 +1396,7 @@ def test_sum_duration() -> None:
         ]
     ).select(
         [pl.col("duration").sum(), pl.col("duration").dt.seconds().alias("sec").sum()]
-    ).to_dict(
-        False
-    ) == {
+    ).to_dict(False) == {
         "duration": [timedelta(seconds=150)],
         "sec": [150],
     }
diff --git a/py-polars/tests/unit/functions/as_datatype/test_as_datatype.py b/py-polars/tests/unit/functions/as_datatype/test_as_datatype.py
index 6e0ac9925116..b54f4715da05 100644
--- a/py-polars/tests/unit/functions/as_datatype/test_as_datatype.py
+++ b/py-polars/tests/unit/functions/as_datatype/test_as_datatype.py
@@ -451,7 +451,9 @@ def test_struct_lit_cast() -> None:
     df = pl.DataFrame({"a": [1, 2, 3]})
     schema = {"a": pl.Int64, "b": pl.List(pl.Int64)}
 
-    out = df.select(pl.struct([pl.col("a"), pl.lit(None).alias("b")], schema=schema))["a"]  # type: ignore[arg-type]
+    out = df.select(
+        pl.struct(pl.col("a"), pl.lit(None).alias("b"), schema=schema)  # type: ignore[arg-type]
+    ).get_column("a")
 
     expected = pl.Series(
         "a",
@@ -464,7 +466,9 @@ def test_struct_lit_cast() -> None:
     )
     assert_series_equal(out, expected)
 
-    out = df.select(pl.struct([pl.col("a"), pl.lit(pl.Series([[]])).alias("b")], schema=schema))["a"]  # type: ignore[arg-type]
+    out = df.select(
+        pl.struct([pl.col("a"), pl.lit(pl.Series([[]])).alias("b")], schema=schema)  # type: ignore[arg-type]
+    ).get_column("a")
 
     expected = pl.Series(
         "a",
diff --git a/py-polars/tests/unit/io/test_hive.py b/py-polars/tests/unit/io/test_hive.py
index fb3a55ceb1d8..4ba047b1f634 100644
--- a/py-polars/tests/unit/io/test_hive.py
+++ b/py-polars/tests/unit/io/test_hive.py
@@ -115,7 +115,9 @@ def test_hive_partitioned_projection_pushdown(
     # the projection contains only hive partition columns (11796)
     for parallel in ("row_groups", "columns"):
         q = pl.scan_parquet(
-            root / "**/*.parquet", hive_partitioning=True, parallel=parallel  # type: ignore[arg-type]
+            root / "**/*.parquet",
+            hive_partitioning=True,
+            parallel=parallel,  # type: ignore[arg-type]
         )
 
         expect = q.collect().select("category")
diff --git a/py-polars/tests/unit/namespaces/test_binary.py b/py-polars/tests/unit/namespaces/test_binary.py
index 1519ca10bcba..ca4cfb1727c1 100644
--- a/py-polars/tests/unit/namespaces/test_binary.py
+++ b/py-polars/tests/unit/namespaces/test_binary.py
@@ -85,9 +85,7 @@ def test_starts_ends_with() -> None:
             pl.col("a").bin.ends_with(pl.lit(None)).alias("start_none"),
            pl.col("a").bin.starts_with(pl.col("start")).alias("start_expr"),
         ]
-    ).to_dict(
-        False
-    ) == {
+    ).to_dict(False) == {
         "end_lit": [False, False, True, None],
         "end_none": [None, None, None, None],
         "end_expr": [True, False, None, None],
diff --git a/py-polars/tests/unit/namespaces/test_list.py b/py-polars/tests/unit/namespaces/test_list.py
index eab6db73c837..7caf4572a9e0 100644
--- a/py-polars/tests/unit/namespaces/test_list.py
+++ b/py-polars/tests/unit/namespaces/test_list.py
@@ -39,9 +39,7 @@ def test_list_arr_get() -> None:
         {"a": [[1], [2], [3], [4, 5, 6], [7, 8, 9], [None, 11]]}
     ).with_columns(
         [pl.col("a").list.get(i).alias(f"get_{i}") for i in range(4)]
-    ).to_dict(
-        False
-    ) == {
+    ).to_dict(False) == {
         "a": [[1], [2], [3], [4, 5, 6], [7, 8, 9], [None, 11]],
         "get_0": [1, 2, 3, 4, 7, None],
         "get_1": [None, None, None, 5, 8, 11],
diff --git a/py-polars/tests/unit/operations/map/test_map_elements.py b/py-polars/tests/unit/operations/map/test_map_elements.py
index a60bf6ad09af..1137c5db1ff8 100644
--- a/py-polars/tests/unit/operations/map/test_map_elements.py
+++ b/py-polars/tests/unit/operations/map/test_map_elements.py
@@ -188,9 +188,7 @@ def test_map_elements_object_dtypes() -> None:
             .map_elements(lambda x: isinstance(x, (int, float)))
             .alias("is_numeric_infer"),
         ]
-    ).to_dict(
-        False
-    ) == {
+    ).to_dict(False) == {
         "a": [2, 4, "aa", 8, 10],
         "is_numeric1": [True, True, False, True, True],
         "is_numeric_infer": [True, True, False, True, True],
diff --git a/py-polars/tests/unit/operations/test_group_by_dynamic.py b/py-polars/tests/unit/operations/test_group_by_dynamic.py
index 4702b1c9e224..6f39c7ba63b7 100644
--- a/py-polars/tests/unit/operations/test_group_by_dynamic.py
+++ b/py-polars/tests/unit/operations/test_group_by_dynamic.py
@@ -336,9 +336,7 @@ def test_rolling_kernels_group_by_dynamic_7548() -> None:
         pl.col("value").min().alias("min_value"),
         pl.col("value").max().alias("max_value"),
         pl.col("value").sum().alias("sum_value"),
-    ).to_dict(
-        False
-    ) == {
+    ).to_dict(False) == {
         "time": [-1, 0, 1, 2, 3],
         "value": [[0, 1], [0, 1, 2], [1, 2, 3], [2, 3], [3]],
         "min_value": [0, 0, 1, 2, 3],
@@ -413,9 +411,7 @@ def test_group_by_dynamic_elementwise_following_mean_agg_6904(
     df = (
         pl.DataFrame(
             {
-                "a": [
-                    datetime(2021, 1, 1) + timedelta(seconds=2**i) for i in range(5)
-                ],
+                "a": [datetime(2021, 1, 1) + timedelta(seconds=2**i) for i in range(5)],
                 "b": [float(i) for i in range(5)],
             }
         )
@@ -623,9 +619,7 @@ def test_groupy_by_dynamic_median_10695() -> None:
         index_column="timestamp",
         every="60s",
         period="3m",
-    ).agg(
-        pl.col("foo").median()
-    ).to_dict(False) == {
+    ).agg(pl.col("foo").median()).to_dict(False) == {
         "timestamp": [
             datetime(2023, 8, 22, 15, 43),
             datetime(2023, 8, 22, 15, 44),
diff --git a/py-polars/tests/unit/operations/test_group_by_rolling.py b/py-polars/tests/unit/operations/test_group_by_rolling.py
index 435b7c34bdec..29ae54a1f0a2 100644
--- a/py-polars/tests/unit/operations/test_group_by_rolling.py
+++ b/py-polars/tests/unit/operations/test_group_by_rolling.py
@@ -38,9 +38,7 @@ def test_rolling_group_by_overlapping_groups() -> None:
             )
             .agg(
                 # trigger the apply on the expression engine
-                pl.col("a")
-                .map_elements(lambda x: x)
-                .sum()
+                pl.col("a").map_elements(lambda x: x).sum()
             )
         )["a"],
         df["a"].rolling_sum(window_size=5, min_periods=1),
diff --git a/py-polars/tests/unit/series/test_series.py b/py-polars/tests/unit/series/test_series.py
index 5efb1600cab4..c14a55fff0a5 100644
--- a/py-polars/tests/unit/series/test_series.py
+++ b/py-polars/tests/unit/series/test_series.py
@@ -431,7 +431,7 @@ def test_power() -> None:
     assert_series_equal(a**a, pl.Series([1.0, 4.0], dtype=Float64))
     assert_series_equal(b**b, pl.Series([None, 4.0], dtype=Float64))
     assert_series_equal(a**b, pl.Series([None, 4.0], dtype=Float64))
-    assert_series_equal(a**None, pl.Series([None] * len(a), dtype=Float64))
+    assert_series_equal(a ** None, pl.Series([None] * len(a), dtype=Float64))
     with pytest.raises(TypeError):
         c**2
     with pytest.raises(pl.ColumnNotFoundError):
diff --git a/py-polars/tests/unit/test_lazy.py b/py-polars/tests/unit/test_lazy.py
index 9bcc7aa7845f..1805a2221e8b 100644
--- a/py-polars/tests/unit/test_lazy.py
+++ b/py-polars/tests/unit/test_lazy.py
@@ -82,7 +82,9 @@ def test_apply() -> None:
     for strategy in ["thread_local", "threading"]:
         ldf = pl.LazyFrame({"a": [1, 2, 3] * 20, "b": [1.0, 2.0, 3.0] * 20})
         new = ldf.with_columns(
-            pl.col("a").map_elements(lambda s: s * 2, strategy=strategy).alias("foo")  # type: ignore[arg-type]
+            pl.col("a")
+            .map_elements(lambda s: s * 2, strategy=strategy)  # type: ignore[arg-type]
+            .alias("foo")
         )
         expected = ldf.clone().with_columns((pl.col("a") * 2).alias("foo"))
         assert_frame_equal(new.collect(), expected.collect())