diff --git a/py-polars/polars/api.py b/py-polars/polars/api.py
index c12abdb05c92..44ba02084778 100644
--- a/py-polars/polars/api.py
+++ b/py-polars/polars/api.py
@@ -2,7 +2,7 @@
 
 from functools import reduce
 from operator import or_
-from typing import TYPE_CHECKING, Callable, TypeVar
+from typing import TYPE_CHECKING, Callable, Generic, TypeVar
 from warnings import warn
 
 import polars._reexport as pl
@@ -11,8 +11,6 @@
 if TYPE_CHECKING:
     from polars import DataFrame, Expr, LazyFrame, Series
 
-    NS = TypeVar("NS")
-
 
 __all__ = [
     "register_expr_namespace",
@@ -24,14 +22,14 @@
 # do not allow override of polars' own namespaces (as registered by '_accessors')
 _reserved_namespaces: set[str] = reduce(
     or_,
-    (
-        cls._accessors  # type: ignore[attr-defined]
-        for cls in (pl.DataFrame, pl.Expr, pl.LazyFrame, pl.Series)
-    ),
+    (cls._accessors for cls in (pl.DataFrame, pl.Expr, pl.LazyFrame, pl.Series)),
 )
 
 
-class NameSpace:
+NS = TypeVar("NS")
+
+
+class NameSpace(Generic[NS]):
     """Establish property-like namespace object for user-defined functionality."""
 
     def __init__(self, name: str, namespace: type[NS]) -> None:
diff --git a/py-polars/polars/convert/general.py b/py-polars/polars/convert/general.py
index cc059ccba55e..011054169d04 100644
--- a/py-polars/polars/convert/general.py
+++ b/py-polars/polars/convert/general.py
@@ -708,7 +708,7 @@ def _from_dataframe_repr(m: re.Match[str]) -> DataFrame:
             if el in headers:
                 idx = headers.index(el)
                 for table_elem in (headers, dtypes):
-                    table_elem.pop(idx)  # type: ignore[attr-defined]
+                    table_elem.pop(idx)
                 if coldata:
                     coldata.pop(idx)
diff --git a/py-polars/polars/dataframe/frame.py b/py-polars/polars/dataframe/frame.py
index 9fb2140f5cfb..90f78be3fa7e 100644
--- a/py-polars/polars/dataframe/frame.py
+++ b/py-polars/polars/dataframe/frame.py
@@ -1123,7 +1123,7 @@ def __add__(
         other = _prepare_other_arg(other)
         return self._from_pydf(self._df.add(other._s))
 
-    def __radd__(  # type: ignore[misc]
+    def __radd__(
         self, other: DataFrame | Series | int | float | bool | str
     ) -> DataFrame:
         if isinstance(other, str):
diff --git a/py-polars/polars/selectors.py b/py-polars/polars/selectors.py
index 968500824799..74a8690d9d73 100644
--- a/py-polars/polars/selectors.py
+++ b/py-polars/polars/selectors.py
@@ -87,10 +87,7 @@
 
 
 @overload
-def is_selector(obj: _selector_proxy_) -> Literal[True]:  # type: ignore[overload-overlap]
-    ...
-
-
+def is_selector(obj: _selector_proxy_) -> Literal[True]: ...
 @overload
 def is_selector(obj: Any) -> Literal[False]: ...
 
diff --git a/py-polars/polars/series/series.py b/py-polars/polars/series/series.py
index 07af04997841..7e847b7b1a28 100644
--- a/py-polars/polars/series/series.py
+++ b/py-polars/polars/series/series.py
@@ -761,7 +761,7 @@ def _comp(self, other: Any, op: ComparisonOperator) -> Series:
         return self._from_pyseries(f(other))
 
     @overload  # type: ignore[override]
-    def __eq__(self, other: Expr) -> Expr: ...  # type: ignore[overload-overlap]
+    def __eq__(self, other: Expr) -> Expr: ...
     @overload
     def __eq__(self, other: Any) -> Series: ...
 
@@ -773,8 +773,7 @@ def __eq__(self, other: Any) -> Series | Expr:
         return self._comp(other, "eq")
 
     @overload  # type: ignore[override]
-    def __ne__(self, other: Expr) -> Expr:  # type: ignore[overload-overlap]
-        ...
+    def __ne__(self, other: Expr) -> Expr: ...
     @overload
     def __ne__(self, other: Any) -> Series: ...
 
@@ -786,8 +785,7 @@ def __ne__(self, other: Any) -> Series | Expr:
         return self._comp(other, "neq")
 
     @overload
-    def __gt__(self, other: Expr) -> Expr:  # type: ignore[overload-overlap]
-        ...
+    def __gt__(self, other: Expr) -> Expr: ...
     @overload
     def __gt__(self, other: Any) -> Series: ...
 
@@ -799,8 +797,7 @@ def __gt__(self, other: Any) -> Series | Expr:
         return self._comp(other, "gt")
 
     @overload
-    def __lt__(self, other: Expr) -> Expr:  # type: ignore[overload-overlap]
-        ...
+    def __lt__(self, other: Expr) -> Expr: ...
     @overload
     def __lt__(self, other: Any) -> Series: ...
 
@@ -812,8 +809,7 @@ def __lt__(self, other: Any) -> Series | Expr:
         return self._comp(other, "lt")
 
     @overload
-    def __ge__(self, other: Expr) -> Expr:  # type: ignore[overload-overlap]
-        ...
+    def __ge__(self, other: Expr) -> Expr: ...
     @overload
     def __ge__(self, other: Any) -> Series: ...
 
@@ -825,8 +821,7 @@ def __ge__(self, other: Any) -> Series | Expr:
         return self._comp(other, "gt_eq")
 
     @overload
-    def __le__(self, other: Expr) -> Expr:  # type: ignore[overload-overlap]
-        ...
+    def __le__(self, other: Expr) -> Expr: ...
     @overload
     def __le__(self, other: Any) -> Series: ...
 
@@ -838,8 +833,7 @@ def __le__(self, other: Any) -> Series | Expr:
         return self._comp(other, "lt_eq")
 
     @overload
-    def le(self, other: Expr) -> Expr:  # type: ignore[overload-overlap]
-        ...
+    def le(self, other: Expr) -> Expr: ...
     @overload
     def le(self, other: Any) -> Series: ...
 
@@ -849,8 +843,7 @@ def le(self, other: Any) -> Series | Expr:
         return self.__le__(other)
 
     @overload
-    def lt(self, other: Expr) -> Expr:  # type: ignore[overload-overlap]
-        ...
+    def lt(self, other: Expr) -> Expr: ...
    @overload
     def lt(self, other: Any) -> Series: ...
 
@@ -860,8 +853,7 @@ def lt(self, other: Any) -> Series | Expr:
         return self.__lt__(other)
 
     @overload
-    def eq(self, other: Expr) -> Expr:  # type: ignore[overload-overlap]
-        ...
+    def eq(self, other: Expr) -> Expr: ...
     @overload
     def eq(self, other: Any) -> Series: ...
 
@@ -871,8 +863,7 @@ def eq(self, other: Any) -> Series | Expr:
         return self.__eq__(other)
 
     @overload
-    def eq_missing(self, other: Expr) -> Expr:  # type: ignore[overload-overlap]
-        ...
+    def eq_missing(self, other: Expr) -> Expr: ...
     @overload
     def eq_missing(self, other: Any) -> Series: ...
 
@@ -919,8 +910,7 @@ def eq_missing(self, other: Any) -> Series | Expr:
         return self.to_frame().select(F.col(self.name).eq_missing(other)).to_series()
 
     @overload
-    def ne(self, other: Expr) -> Expr:  # type: ignore[overload-overlap]
-        ...
+    def ne(self, other: Expr) -> Expr: ...
     @overload
     def ne(self, other: Any) -> Series: ...
 
@@ -930,8 +920,7 @@ def ne(self, other: Any) -> Series | Expr:
         return self.__ne__(other)
 
     @overload
-    def ne_missing(self, other: Expr) -> Expr:  # type: ignore[overload-overlap]
-        ...
+    def ne_missing(self, other: Expr) -> Expr: ...
     @overload
     def ne_missing(self, other: Any) -> Series: ...
 
@@ -978,8 +967,7 @@ def ne_missing(self, other: Any) -> Series | Expr:
         return self.to_frame().select(F.col(self.name).ne_missing(other)).to_series()
 
     @overload
-    def ge(self, other: Expr) -> Expr:  # type: ignore[overload-overlap]
-        ...
+    def ge(self, other: Expr) -> Expr: ...
     @overload
     def ge(self, other: Any) -> Series: ...
 
@@ -989,8 +977,7 @@ def ge(self, other: Any) -> Series | Expr:
         return self.__ge__(other)
 
     @overload
-    def gt(self, other: Expr) -> Expr:  # type: ignore[overload-overlap]
-        ...
+    def gt(self, other: Expr) -> Expr: ...
     @overload
     def gt(self, other: Any) -> Series: ...
 
@@ -1043,12 +1030,10 @@ def _arithmetic(self, other: Any, op_s: str, op_ffi: str) -> Self:
         return self._from_pyseries(f(other))
 
     @overload
-    def __add__(self, other: DataFrame) -> DataFrame:  # type: ignore[overload-overlap]
-        ...
+    def __add__(self, other: DataFrame) -> DataFrame: ...
 
     @overload
-    def __add__(self, other: Expr) -> Expr:  # type: ignore[overload-overlap]
-        ...
+    def __add__(self, other: Expr) -> Expr: ...
     @overload
     def __add__(self, other: Any) -> Self: ...
 
@@ -1063,8 +1048,7 @@ def __add__(self, other: Any) -> Self | DataFrame | Expr:
         return self._arithmetic(other, "add", "add_<>")
 
     @overload
-    def __sub__(self, other: Expr) -> Expr:  # type: ignore[overload-overlap]
-        ...
+    def __sub__(self, other: Expr) -> Expr: ...
     @overload
     def __sub__(self, other: Any) -> Self: ...
 
@@ -1075,8 +1059,7 @@ def __sub__(self, other: Any) -> Self | Expr:
         return self._arithmetic(other, "sub", "sub_<>")
 
     @overload
-    def __truediv__(self, other: Expr) -> Expr:  # type: ignore[overload-overlap]
-        ...
+    def __truediv__(self, other: Expr) -> Expr: ...
     @overload
     def __truediv__(self, other: Any) -> Series: ...
 
@@ -1095,8 +1078,7 @@ def __truediv__(self, other: Any) -> Series | Expr:
         return self.cast(Float64) / other
 
     @overload
-    def __floordiv__(self, other: Expr) -> Expr:  # type: ignore[overload-overlap]
-        ...
+    def __floordiv__(self, other: Expr) -> Expr: ...
     @overload
     def __floordiv__(self, other: Any) -> Series: ...
 
@@ -1116,12 +1098,10 @@ def __invert__(self) -> Series:
         return self.not_()
 
     @overload
-    def __mul__(self, other: Expr) -> Expr:  # type: ignore[overload-overlap]
-        ...
+    def __mul__(self, other: Expr) -> Expr: ...
 
     @overload
-    def __mul__(self, other: DataFrame) -> DataFrame:  # type: ignore[overload-overlap]
-        ...
+    def __mul__(self, other: DataFrame) -> DataFrame: ...
     @overload
     def __mul__(self, other: Any) -> Series: ...
 
@@ -1138,8 +1118,7 @@ def __mul__(self, other: Any) -> Series | DataFrame | Expr:
         return self._arithmetic(other, "mul", "mul_<>")
 
     @overload
-    def __mod__(self, other: Expr) -> Expr:  # type: ignore[overload-overlap]
-        ...
+    def __mod__(self, other: Expr) -> Expr: ...
     @overload
     def __mod__(self, other: Any) -> Series: ...
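The `api.py` hunk above makes `NameSpace` generic over the registered class. A minimal sketch of how a user-defined namespace flows through that descriptor — the `greetings` namespace name and `Greetings` class are illustrative only, not part of this patch:

```python
import polars as pl


@pl.api.register_expr_namespace("greetings")  # illustrative namespace name
class Greetings:
    def __init__(self, expr: pl.Expr) -> None:
        self._expr = expr

    def hello(self) -> pl.Expr:
        # prepend a greeting to every string value in the column
        return (pl.lit("Hello ") + self._expr).alias("hello")


df = pl.DataFrame({"name": ["world", "polars"]})
print(df.select(pl.col("name").greetings.hello()))
```

The registration decorator stores the class inside the `NameSpace` descriptor shown above; with `NameSpace(Generic[NS])`, the descriptor is parameterized by the concrete namespace class rather than being an untyped wrapper. Note that the `.greetings` attribute is resolved dynamically at runtime, so static type checkers will not know about it.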
diff --git a/py-polars/requirements-lint.txt b/py-polars/requirements-lint.txt
index 122374998a22..4119d9a23fb3 100644
--- a/py-polars/requirements-lint.txt
+++ b/py-polars/requirements-lint.txt
@@ -1,3 +1,3 @@
-mypy==1.10.1
+mypy==1.11.1
 ruff==0.5.0
 typos==1.23.5
diff --git a/py-polars/tests/unit/constructors/test_constructors.py b/py-polars/tests/unit/constructors/test_constructors.py
index aeecca03752e..ffda370aa538 100644
--- a/py-polars/tests/unit/constructors/test_constructors.py
+++ b/py-polars/tests/unit/constructors/test_constructors.py
@@ -216,10 +216,10 @@ class TradeNT(NamedTuple):
     columns = ["timestamp", "ticker", "price", "size"]
 
     for TradeClass in (TradeDC, TradeNT, TradePD):
-        trades = [TradeClass(**dict(zip(columns, values))) for values in raw_data]
+        trades = [TradeClass(**dict(zip(columns, values))) for values in raw_data]  # type: ignore[arg-type]
 
         for DF in (pl.DataFrame, pl.from_records):
-            df = DF(data=trades)  # type: ignore[operator]
+            df = DF(data=trades)
             assert df.schema == {
                 "timestamp": pl.Datetime("us"),
                 "ticker": pl.String,
@@ -229,7 +229,7 @@ class TradeNT(NamedTuple):
             assert df.rows() == raw_data
 
             # partial dtypes override
-            df = DF(  # type: ignore[operator]
+            df = DF(
                 data=trades,
                 schema_overrides={"timestamp": pl.Datetime("ms"), "size": pl.Int32},
             )
@@ -1041,13 +1041,13 @@ def test_init_records_schema_order() -> None:
         shuffle(data)
         shuffle(cols)
 
-        df = constructor(data, schema=cols)  # type: ignore[operator]
+        df = constructor(data, schema=cols)
         for col in df.columns:
             assert all(value in (None, lookup[col]) for value in df[col].to_list())
 
         # have schema override inferred types, omit some columns, add a new one
         schema = {"a": pl.Int8, "c": pl.Int16, "e": pl.Int32}
-        df = constructor(data, schema=schema)  # type: ignore[operator]
+        df = constructor(data, schema=schema)
 
         assert df.schema == schema
         for col in df.columns:
diff --git a/py-polars/tests/unit/dataframe/test_df.py b/py-polars/tests/unit/dataframe/test_df.py
index 315c7e1a6cbe..fe0b0fb04f82 100644
--- a/py-polars/tests/unit/dataframe/test_df.py
+++ b/py-polars/tests/unit/dataframe/test_df.py
@@ -51,14 +51,15 @@ def test_null_count() -> None:
     assert df.null_count().row(np.int64(0)) == (0, 1)  # type: ignore[call-overload]
 
 
-def test_init_empty() -> None:
+@pytest.mark.parametrize("input", [None, (), [], {}, pa.Table.from_arrays([])])
+def test_init_empty(input: Any) -> None:
     # test various flavours of empty init
-    for empty in (None, (), [], {}, pa.Table.from_arrays([])):
-        df = pl.DataFrame(empty)
-        assert df.shape == (0, 0)
-        assert df.is_empty()
+    df = pl.DataFrame(input)
+    assert df.shape == (0, 0)
+    assert df.is_empty()
+
 
-    # note: cannot use df (empty or otherwise) in boolean context
+def test_df_bool_ambiguous() -> None:
     empty_df = pl.DataFrame()
     with pytest.raises(TypeError, match="ambiguous"):
         not empty_df
@@ -1287,7 +1288,7 @@ def test_from_rows_of_dicts() -> None:
         {"id": 2, "value": 101, "_meta": "b"},
     ]
     df_init: Callable[..., Any]
-    for df_init in (pl.from_dicts, pl.DataFrame):  # type:ignore[assignment]
+    for df_init in (pl.from_dicts, pl.DataFrame):
         df1 = df_init(records)
         assert df1.rows() == [(1, 100, "a"), (2, 101, "b")]
 
@@ -2270,12 +2271,12 @@ def test_selection_misc() -> None:
 
     # literal values (as scalar/list)
     for zero in (0, [0]):
-        assert df.select(zero)["literal"].to_list() == [0]  # type: ignore[arg-type]
+        assert df.select(zero)["literal"].to_list() == [0]
         assert df.select(literal=0)["literal"].to_list() == [0]
 
     # expect string values to be interpreted as cols
     for x in ("x", ["x"], pl.col("x")):
-        assert df.select(x).rows() == [("abc",)]  # type: ignore[arg-type]
+        assert df.select(x).rows() == [("abc",)]
 
     # string col + lit
     assert df.with_columns(["x", 0]).to_dicts() == [{"x": "abc", "literal": 0}]
@@ -2578,7 +2579,7 @@ def test_init_datetimes_with_timezone() -> None:
             }
         },
     ):
-        result = pl.DataFrame(  # type: ignore[arg-type]
+        result = pl.DataFrame(
             data={
                 "d1": [dtm.replace(tzinfo=ZoneInfo(tz_us))],
                 "d2": [dtm.replace(tzinfo=ZoneInfo(tz_europe))],
@@ -2841,7 +2842,7 @@ def test_unstack() -> None:
         assert df.unstack(
             step=3,
             how="horizontal",
-            columns=column_subset,  # type: ignore[arg-type]
+            columns=column_subset,
         ).to_dict(as_series=False) == {
             "col2_0": [0, 3, 6],
             "col2_1": [1, 4, 7],
diff --git a/py-polars/tests/unit/datatypes/test_decimal.py b/py-polars/tests/unit/datatypes/test_decimal.py
index 13acb7d66741..64b0d5fe5068 100644
--- a/py-polars/tests/unit/datatypes/test_decimal.py
+++ b/py-polars/tests/unit/datatypes/test_decimal.py
@@ -60,7 +60,7 @@ class Y:
     row_data = [(d,) for d in data]
     for cls in (X, Y):
        for ctor in (pl.DataFrame, pl.from_records):
-            df = ctor(data=list(map(cls, data)))  # type: ignore[operator]
+            df = ctor(data=list(map(cls, data)))
             assert df.schema == {
                 "a": pl.Decimal(scale=7),
             }
diff --git a/py-polars/tests/unit/datatypes/test_struct.py b/py-polars/tests/unit/datatypes/test_struct.py
index 0e5c559656b1..265cc71d07c4 100644
--- a/py-polars/tests/unit/datatypes/test_struct.py
+++ b/py-polars/tests/unit/datatypes/test_struct.py
@@ -119,10 +119,10 @@ def test_struct_unnesting() -> None:
         }
     )
     for cols in ("foo", cs.ends_with("oo")):
-        out_eager = df.unnest(cols)  # type: ignore[arg-type]
+        out_eager = df.unnest(cols)
         assert_frame_equal(out_eager, expected)
 
-        out_lazy = df.lazy().unnest(cols)  # type: ignore[arg-type]
+        out_lazy = df.lazy().unnest(cols)
         assert_frame_equal(out_lazy, expected.lazy())
 
     out = (
@@ -653,7 +653,16 @@ def test_empty_series_nested_dtype(dtype: PolarsDataType) -> None:
     assert s.to_list() == []
 
 
-def test_empty_with_schema_struct() -> None:
+@pytest.mark.parametrize(
+    "data",
+    [
+        [{}, {}],
+        [{}, None],
+        [None, {}],
+        [None, None],
+    ],
+)
+def test_empty_with_schema_struct(data: list[dict[str, object] | None]) -> None:
     # Empty structs, with schema
     struct_schema = {"a": pl.Date, "b": pl.Boolean, "c": pl.Float64}
     frame_schema = {"x": pl.Int8, "y": pl.Struct(struct_schema)}
@@ -661,42 +670,31 @@ def test_empty_with_schema_struct() -> None:
     @dataclass
     class TestData:
         x: int
-        y: dict  # type: ignore[type-arg]
+        y: dict[str, object] | None
 
-    # validate empty struct, null, and a mix of both
-    for empty_structs in (
-        [{}, {}],
-        [{}, None],
-        [None, {}],
-        [None, None],
-    ):
-        # test init from rows, dicts, and dataclasses
-        dict_data = {"x": [10, 20], "y": empty_structs}
-        dataclass_data = [
-            TestData(10, empty_structs[0]),  # type: ignore[index]
-            TestData(20, empty_structs[1]),  # type: ignore[index]
+    # test init from rows, dicts, and dataclasses
+    dict_data = {"x": [10, 20], "y": data}
+    dataclass_data = [
+        TestData(10, data[0]),
+        TestData(20, data[1]),
+    ]
+    for frame_data in (dict_data, dataclass_data):
+        df = pl.DataFrame(
+            data=frame_data,
+            schema=frame_schema,  # type: ignore[arg-type]
+        )
+        assert df.schema == frame_schema
+        assert df.unnest("y").columns == ["x", "a", "b", "c"]
+        assert df.rows() == [
+            (
+                10,
+                {"a": None, "b": None, "c": None} if data[0] is not None else None,
+            ),
+            (
+                20,
+                {"a": None, "b": None, "c": None} if data[1] is not None else None,
+            ),
         ]
-        for frame_data in (dict_data, dataclass_data):
-            df = pl.DataFrame(
-                data=frame_data,
-                schema=frame_schema,  # type: ignore[arg-type]
-            )
-            assert df.schema == frame_schema
-            assert df.unnest("y").columns == ["x", "a", "b", "c"]
-            assert df.rows() == [
-                (
-                    10,
-                    {"a": None, "b": None, "c": None}
-                    if empty_structs[0] is not None  # type: ignore[index]
-                    else None,
-                ),
-                (
-                    20,
-                    {"a": None, "b": None, "c": None}
-                    if empty_structs[1] is not None  # type: ignore[index]
-                    else None,
-                ),
-            ]
 
 
 def test_struct_null_cast() -> None:
diff --git a/py-polars/tests/unit/datatypes/test_temporal.py b/py-polars/tests/unit/datatypes/test_temporal.py
index 02a80701630c..d2df4f51b69c 100644
--- a/py-polars/tests/unit/datatypes/test_temporal.py
+++ b/py-polars/tests/unit/datatypes/test_temporal.py
@@ -39,8 +39,8 @@ def test_fill_null() -> None:
     dtm = datetime.strptime("2021-01-01", "%Y-%m-%d")
 
     s = pl.Series("A", [dtm, None])
 
-    for fill_val in (dtm, pl.lit(dtm)):
-        out = s.fill_null(fill_val)
+    for fill_val_datetime in (dtm, pl.lit(dtm)):
+        out = s.fill_null(fill_val_datetime)
         assert out.null_count() == 0
         assert out.dt[0] == dtm
 
@@ -53,8 +53,8 @@ def test_fill_null() -> None:
     s = pl.Series("a", [dt1, dt2, dt3, None])
     dt_2 = date(2001, 1, 4)
 
-    for fill_val in (dt_2, pl.lit(dt_2)):
-        out = s.fill_null(fill_val)
+    for fill_val_date in (dt_2, pl.lit(dt_2)):
+        out = s.fill_null(fill_val_date)
         assert out.null_count() == 0
         assert out.dt[0] == dt1
 
@@ -597,7 +597,7 @@ def test_rolling_mean_3020() -> None:
     ).with_columns(pl.col("Date").str.strptime(pl.Date).set_sorted())
 
     period: str | timedelta
-    for period in ("1w", timedelta(days=7)):  # type: ignore[assignment]
+    for period in ("1w", timedelta(days=7)):
         result = df.rolling(index_column="Date", period=period).agg(
             pl.col("val").mean().alias("val_mean")
         )
diff --git a/py-polars/tests/unit/functions/test_functions.py b/py-polars/tests/unit/functions/test_functions.py
index 5599688c324c..a5ee29ae530f 100644
--- a/py-polars/tests/unit/functions/test_functions.py
+++ b/py-polars/tests/unit/functions/test_functions.py
@@ -511,7 +511,7 @@ def test_count() -> None:
         pl.count("b", "a"),
         [pl.count("b"), pl.count("a")],
     ):
-        out = df.select(count_expr)  # type: ignore[arg-type]
+        out = df.select(count_expr)
         assert out.rows() == [(2, 3)]
 
diff --git a/py-polars/tests/unit/io/test_hive.py b/py-polars/tests/unit/io/test_hive.py
index 618cef391b51..c677b24e6e07 100644
--- a/py-polars/tests/unit/io/test_hive.py
+++ b/py-polars/tests/unit/io/test_hive.py
@@ -305,7 +305,7 @@ def test_read_parquet_hive_schema_with_pyarrow() -> None:
 )
 def test_hive_partition_directory_scan(
     tmp_path: Path,
-    scan_func: Callable[[Any], pl.LazyFrame],
+    scan_func: Callable[..., pl.LazyFrame],
     write_func: Callable[[pl.DataFrame, Path], None],
     glob: bool,
 ) -> None:
diff --git a/py-polars/tests/unit/io/test_scan.py b/py-polars/tests/unit/io/test_scan.py
index b5d389a850d8..a1094ec778f0 100644
--- a/py-polars/tests/unit/io/test_scan.py
+++ b/py-polars/tests/unit/io/test_scan.py
@@ -490,7 +490,7 @@ def test_scan_limit_0_does_not_panic(
 )
 def test_scan_directory(
     tmp_path: Path,
-    scan_func: Callable[[Any], pl.LazyFrame],
+    scan_func: Callable[..., pl.LazyFrame],
     write_func: Callable[[pl.DataFrame, Path], None],
     glob: bool,
 ) -> None:
@@ -635,7 +635,7 @@ def test_scan_nonexistent_path(format: str) -> None:
 )
 def test_scan_include_file_name(
     tmp_path: Path,
-    scan_func: Callable[[Any], pl.LazyFrame],
+    scan_func: Callable[..., pl.LazyFrame],
     write_func: Callable[[pl.DataFrame, Path], None],
     streaming: bool,
 ) -> None:
@@ -658,13 +658,13 @@ def test_scan_include_file_name(
         pl.exceptions.DuplicateError,
         match=r'column name for file paths "x" conflicts with column name from file',
     ):
-        scan_func(tmp_path, include_file_paths="x").collect(streaming=streaming)  # type: ignore[call-arg]
+        scan_func(tmp_path, include_file_paths="x").collect(streaming=streaming)
 
     f = scan_func
     if scan_func in [pl.scan_csv, pl.scan_ndjson]:
         f = partial(f, schema=df.drop("path").schema)
 
-    lf: pl.LazyFrame = f(tmp_path, include_file_paths="path")  # type: ignore[call-arg]
+    lf: pl.LazyFrame = f(tmp_path, include_file_paths="path")
     assert_frame_equal(lf.collect(streaming=streaming), df)
 
     # TODO: Support this with CSV
diff --git a/py-polars/tests/unit/io/test_spreadsheet.py b/py-polars/tests/unit/io/test_spreadsheet.py
index d1c3aae42186..10280a7f23be 100644
--- a/py-polars/tests/unit/io/test_spreadsheet.py
+++ b/py-polars/tests/unit/io/test_spreadsheet.py
@@ -104,7 +104,7 @@ def test_read_spreadsheet(
 ) -> None:
     sheet_params: dict[str, Any]
 
-    for sheet_params in (  # type: ignore[assignment]
+    for sheet_params in (
         {"sheet_name": None, "sheet_id": None},
         {"sheet_name": "test1"},
         {"sheet_id": 1},
@@ -643,7 +643,7 @@ def test_excel_round_trip(write_params: dict[str, Any]) -> None:
     )
 
     engine: ExcelSpreadsheetEngine
-    for engine in ("calamine", "xlsx2csv"):  # type: ignore[assignment]
+    for engine in ("calamine", "xlsx2csv"):
         read_options = (
             {}
             if write_params.get("include_header", True)
@@ -930,7 +930,7 @@ def test_excel_type_inference_with_nulls(engine: ExcelSpreadsheetEngine) -> None
     reversed_cols = list(reversed(df.columns))
     read_cols: Sequence[str] | Sequence[int]
 
-    for read_cols in (  # type: ignore[assignment]
+    for read_cols in (
         reversed_cols,
         [5, 4, 3, 2, 1, 0],
     ):
diff --git a/py-polars/tests/unit/operations/test_explode.py b/py-polars/tests/unit/operations/test_explode.py
index c6a5049319a3..24e65ac1dc6d 100644
--- a/py-polars/tests/unit/operations/test_explode.py
+++ b/py-polars/tests/unit/operations/test_explode.py
@@ -230,7 +230,7 @@ def test_explode_array() -> None:
     )
     expected = pl.DataFrame({"a": [1, 2, 2, 3], "b": [1, 1, 2, 2]})
     for ex in ("a", ~cs.integer()):
-        out = df.explode(ex).collect()  # type: ignore[arg-type]
+        out = df.explode(ex).collect()
         assert_frame_equal(out, expected)
 
 
diff --git a/py-polars/tests/unit/operations/test_rolling.py b/py-polars/tests/unit/operations/test_rolling.py
index b67cb441bc56..b5f21dbaa01e 100644
--- a/py-polars/tests/unit/operations/test_rolling.py
+++ b/py-polars/tests/unit/operations/test_rolling.py
@@ -30,7 +30,7 @@ def test_rolling() -> None:
     )
 
     period: str | timedelta
-    for period in ("2d", timedelta(days=2)):  # type: ignore[assignment]
+    for period in ("2d", timedelta(days=2)):
         out = df.rolling(index_column="dt", period=period).agg(
             [
                 pl.sum("a").alias("sum_a"),
diff --git a/py-polars/tests/unit/series/test_scatter.py b/py-polars/tests/unit/series/test_scatter.py
index a80597543081..c3c0b38d6805 100644
--- a/py-polars/tests/unit/series/test_scatter.py
+++ b/py-polars/tests/unit/series/test_scatter.py
@@ -1,4 +1,5 @@
 from datetime import date, datetime
+from typing import Any
 
 import numpy as np
 import pytest
@@ -8,19 +9,24 @@
 from polars.testing import assert_series_equal
 
 
-def test_scatter() -> None:
-    s = pl.Series("s", [1, 2, 3])
-
-    # no-op (empty sequences)
-    for x in (
+@pytest.mark.parametrize(
+    "input",
+    [
         (),
         [],
         pl.Series(),
         pl.Series(dtype=pl.Int8),
         np.array([]),
-    ):
-        s.scatter(x, 8)  # type: ignore[arg-type]
-        assert s.to_list() == [1, 2, 3]
+    ],
+)
+def test_scatter_noop(input: Any) -> None:
+    s = pl.Series("s", [1, 2, 3])
+    s.scatter(input, 8)
+    assert s.to_list() == [1, 2, 3]
+
+
+def test_scatter() -> None:
+    s = pl.Series("s", [1, 2, 3])
 
     # set new values, one index at a time
     s.scatter(0, 8)
diff --git a/py-polars/tests/unit/test_string_cache.py b/py-polars/tests/unit/test_string_cache.py
index f08441d8bfc2..b54b08d48a86 100644
--- a/py-polars/tests/unit/test_string_cache.py
+++ b/py-polars/tests/unit/test_string_cache.py
@@ -176,7 +176,7 @@ def test_string_cache_eager_lazy() -> None:
             "schema_overrides": {"region_ids": pl.Categorical},
         },
     ):
-        df3 = pl.DataFrame(  # type: ignore[arg-type]
+        df3 = pl.DataFrame(
             data=[["reg1"], ["reg2"], ["reg3"], ["reg4"], ["reg5"]],
             orient="row",
             **params,
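Several of the test changes above (`test_init_empty`, `test_empty_with_schema_struct`, `test_scatter_noop`) replace in-function loops with `@pytest.mark.parametrize`, which sidesteps the loop-variable typing issues that previously required `# type: ignore` comments and reports each input as its own test case. A minimal sketch of the pattern, assuming pytest and polars are installed (the test name and values here are illustrative, not taken from the diff):

```python
from typing import Any

import polars as pl
import pytest


@pytest.mark.parametrize("value", [None, (), [], {}])
def test_empty_init_example(value: Any) -> None:
    # each parametrized value runs as a separate test case,
    # so a failure pinpoints the exact offending input
    df = pl.DataFrame(value)
    assert df.shape == (0, 0)
    assert df.is_empty()
```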