re-adding 84 too-many-arguments
svaningelgem committed Oct 9, 2023
1 parent 79fff14 commit 5863a1c
Showing 60 changed files with 84 additions and 1 deletion.
1 change: 0 additions & 1 deletion crates/clippy.toml
@@ -1,2 +1 @@
 disallowed-types = ["std::collections::HashMap", "std::collections::HashSet"]
-too-many-arguments-threshold = 99
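The threshold override above is dropped in favour of explicit per-function opt-outs, which is what every remaining hunk in this commit adds. As a minimal sketch of that pattern (the function `read_example` and its parameters are invented for illustration, not taken from the Polars codebase):

```rust
// Clippy's too_many_arguments lint fires once a function takes more than
// seven parameters (the lint's default threshold). Instead of raising the
// threshold globally in clippy.toml, each offending function now silences
// the lint for itself only.

/// Hypothetical reader with more than seven parameters.
#[allow(clippy::too_many_arguments)]
fn read_example(
    path: &str,
    n_rows: usize,
    skip_rows: usize,
    has_header: bool,
    separator: u8,
    quote_char: Option<u8>,
    eol_char: u8,
    low_memory: bool,
) -> std::io::Result<()> {
    // Body elided; only the attribute placement matters here.
    let _ = (path, n_rows, skip_rows, has_header, separator, quote_char, eol_char, low_memory);
    Ok(())
}

fn main() {
    read_example("data.csv", 100, 0, true, b',', Some(b'"'), b'\n', false).unwrap();
}
```

This keeps the lint active for new code while whitelisting only the 84 existing signatures that already exceed the default.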
1 change: 1 addition & 0 deletions crates/nano-arrow/src/io/ipc/read/array/binary.rs
@@ -9,6 +9,7 @@ use crate::datatypes::DataType;
 use crate::error::{Error, Result};
 use crate::offset::Offset;

+#[allow(clippy::too_many_arguments)]
 pub fn read_binary<O: Offset, R: Read + Seek>(
     field_nodes: &mut VecDeque<Node>,
     data_type: DataType,
1 change: 1 addition & 0 deletions crates/nano-arrow/src/io/ipc/read/array/boolean.rs
@@ -7,6 +7,7 @@ use crate::array::BooleanArray;
 use crate::datatypes::DataType;
 use crate::error::{Error, Result};

+#[allow(clippy::too_many_arguments)]
 pub fn read_boolean<R: Read + Seek>(
     field_nodes: &mut VecDeque<Node>,
     data_type: DataType,
1 change: 1 addition & 0 deletions crates/nano-arrow/src/io/ipc/read/array/dictionary.rs
@@ -10,6 +10,7 @@ use crate::array::{DictionaryArray, DictionaryKey};
 use crate::datatypes::DataType;
 use crate::error::{Error, Result};

+#[allow(clippy::too_many_arguments)]
 pub fn read_dictionary<T: DictionaryKey, R: Read + Seek>(
     field_nodes: &mut VecDeque<Node>,
     data_type: DataType,
@@ -7,6 +7,7 @@ use crate::array::FixedSizeBinaryArray;
 use crate::datatypes::DataType;
 use crate::error::{Error, Result};

+#[allow(clippy::too_many_arguments)]
 pub fn read_fixed_size_binary<R: Read + Seek>(
     field_nodes: &mut VecDeque<Node>,
     data_type: DataType,
1 change: 1 addition & 0 deletions crates/nano-arrow/src/io/ipc/read/array/fixed_size_list.rs
@@ -9,6 +9,7 @@ use crate::array::FixedSizeListArray;
 use crate::datatypes::DataType;
 use crate::error::{Error, Result};

+#[allow(clippy::too_many_arguments)]
 pub fn read_fixed_size_list<R: Read + Seek>(
     field_nodes: &mut VecDeque<Node>,
     data_type: DataType,
1 change: 1 addition & 0 deletions crates/nano-arrow/src/io/ipc/read/array/list.rs
@@ -12,6 +12,7 @@ use crate::datatypes::DataType;
 use crate::error::{Error, Result};
 use crate::offset::Offset;

+#[allow(clippy::too_many_arguments)]
 pub fn read_list<O: Offset, R: Read + Seek>(
     field_nodes: &mut VecDeque<Node>,
     data_type: DataType,
1 change: 1 addition & 0 deletions crates/nano-arrow/src/io/ipc/read/array/map.rs
@@ -10,6 +10,7 @@ use crate::buffer::Buffer;
 use crate::datatypes::DataType;
 use crate::error::{Error, Result};

+#[allow(clippy::too_many_arguments)]
 pub fn read_map<R: Read + Seek>(
     field_nodes: &mut VecDeque<Node>,
     data_type: DataType,
1 change: 1 addition & 0 deletions crates/nano-arrow/src/io/ipc/read/array/primitive.rs
@@ -9,6 +9,7 @@ use crate::datatypes::DataType;
 use crate::error::{Error, Result};
 use crate::types::NativeType;

+#[allow(clippy::too_many_arguments)]
 pub fn read_primitive<T: NativeType, R: Read + Seek>(
     field_nodes: &mut VecDeque<Node>,
     data_type: DataType,
1 change: 1 addition & 0 deletions crates/nano-arrow/src/io/ipc/read/array/struct_.rs
@@ -9,6 +9,7 @@ use crate::array::StructArray;
 use crate::datatypes::DataType;
 use crate::error::{Error, Result};

+#[allow(clippy::too_many_arguments)]
 pub fn read_struct<R: Read + Seek>(
     field_nodes: &mut VecDeque<Node>,
     data_type: DataType,
1 change: 1 addition & 0 deletions crates/nano-arrow/src/io/ipc/read/array/union.rs
@@ -10,6 +10,7 @@ use crate::datatypes::DataType;
 use crate::datatypes::UnionMode::Dense;
 use crate::error::{Error, Result};

+#[allow(clippy::too_many_arguments)]
 pub fn read_union<R: Read + Seek>(
     field_nodes: &mut VecDeque<Node>,
     data_type: DataType,
1 change: 1 addition & 0 deletions crates/nano-arrow/src/io/ipc/read/array/utf8.rs
@@ -9,6 +9,7 @@ use crate::datatypes::DataType;
 use crate::error::{Error, Result};
 use crate::offset::Offset;

+#[allow(clippy::too_many_arguments)]
 pub fn read_utf8<O: Offset, R: Read + Seek>(
     field_nodes: &mut VecDeque<Node>,
     data_type: DataType,
2 changes: 2 additions & 0 deletions crates/nano-arrow/src/io/ipc/read/common.rs
@@ -74,6 +74,7 @@ impl<'a, A, I: Iterator<Item = A>> Iterator for ProjectionIter<'a, A, I> {
 /// Returns a [`Chunk`] from a reader.
 /// # Panic
 /// Panics iff the projection is not in increasing order (e.g. `[1, 0]` nor `[0, 1, 1]` are valid)
+#[allow(clippy::too_many_arguments)]
 pub fn read_record_batch<R: Read + Seek>(
     batch: arrow_format::ipc::RecordBatchRef,
     fields: &[Field],
@@ -226,6 +227,7 @@ pub(crate) fn first_dict_field<'a>(

 /// Reads a dictionary from the reader,
 /// updating `dictionaries` with the resulting dictionary
+#[allow(clippy::too_many_arguments)]
 pub fn read_dictionary<R: Read + Seek>(
     batch: arrow_format::ipc::DictionaryBatchRef,
     fields: &[Field],
1 change: 1 addition & 0 deletions crates/nano-arrow/src/io/ipc/read/deserialize.rs
@@ -10,6 +10,7 @@ use crate::datatypes::{DataType, Field, PhysicalType};
 use crate::error::Result;
 use crate::io::ipc::IpcField;

+#[allow(clippy::too_many_arguments)]
 pub fn read<R: Read + Seek>(
     field_nodes: &mut VecDeque<Node>,
     field: &Field,
1 change: 1 addition & 0 deletions crates/nano-arrow/src/io/ipc/read/file.rs
@@ -256,6 +256,7 @@ pub(crate) fn get_record_batch(
 /// certain parts of the file.
 /// # Panics
 /// This function panics iff `index >= metadata.blocks.len()`
+#[allow(clippy::too_many_arguments)]
 pub fn read_batch<R: Read + Seek>(
     reader: &mut R,
     dictionaries: &Dictionaries,
1 change: 1 addition & 0 deletions crates/nano-arrow/src/io/ipc/read/file_async.rs
@@ -159,6 +159,7 @@ where
     deserialize_footer(&footer, u64::MAX)
 }

+#[allow(clippy::too_many_arguments)]
 async fn read_batch<R>(
     mut reader: R,
     dictionaries: &mut Dictionaries,
1 change: 1 addition & 0 deletions crates/nano-arrow/src/io/ipc/read/read_basic.rs
@@ -255,6 +255,7 @@ pub fn read_bitmap<R: Read + Seek>(
     Bitmap::try_new(buffer, length)
 }

+#[allow(clippy::too_many_arguments)]
 pub fn read_validity<R: Read + Seek>(
     buffers: &mut VecDeque<IpcBuffer>,
     field_node: Node,
2 changes: 2 additions & 0 deletions crates/nano-arrow/src/io/ipc/write/serialize.rs
@@ -64,6 +64,7 @@ fn write_boolean(
     );
 }

+#[allow(clippy::too_many_arguments)]
 fn write_generic_binary<O: Offset>(
     validity: Option<&Bitmap>,
     offsets: &OffsetsBuffer<O>,
@@ -388,6 +389,7 @@ fn write_fixed_size_list(
 }

 // use `write_keys` to either write keys or values
+#[allow(clippy::too_many_arguments)]
 pub(super) fn write_dictionary<K: DictionaryKey>(
     array: &DictionaryArray<K>,
     buffers: &mut Vec<ipc::Buffer>,
@@ -138,6 +138,7 @@ impl<'a, K: DictionaryKey> NestedDecoder<'a> for DictionaryDecoder<K> {
     fn deserialize_dict(&self, _: &DictPage) -> Self::Dictionary {}
 }

+#[allow(clippy::too_many_arguments)]
 pub fn next_dict<K: DictionaryKey, I: Pages, F: Fn(&DictPage) -> Box<dyn Array>>(
     iter: &mut I,
     items: &mut VecDeque<(NestedState, (Vec<K>, MutableBitmap))>,
1 change: 1 addition & 0 deletions crates/nano-arrow/src/io/parquet/write/utils.rs
@@ -54,6 +54,7 @@ pub fn write_def_levels(
     }
 }

+#[allow(clippy::too_many_arguments)]
 pub fn build_plain_page(
     buffer: Vec<u8>,
     num_values: usize,
4 changes: 4 additions & 0 deletions crates/nano-arrow/src/mmap/array.rs
@@ -257,6 +257,7 @@ fn mmap_primitive<P: NativeType, T: AsRef<[u8]>>(
     })
 }

+#[allow(clippy::too_many_arguments)]
 fn mmap_list<O: Offset, T: AsRef<[u8]>>(
     data: Arc<T>,
     node: &Node,
@@ -309,6 +310,7 @@ fn mmap_list<O: Offset, T: AsRef<[u8]>>(
     })
 }

+#[allow(clippy::too_many_arguments)]
 fn mmap_fixed_size_list<T: AsRef<[u8]>>(
     data: Arc<T>,
     node: &Node,
@@ -360,6 +362,7 @@ fn mmap_fixed_size_list<T: AsRef<[u8]>>(
     })
 }

+#[allow(clippy::too_many_arguments)]
 fn mmap_struct<T: AsRef<[u8]>>(
     data: Arc<T>,
     node: &Node,
@@ -416,6 +419,7 @@ fn mmap_struct<T: AsRef<[u8]>>(
     })
 }

+#[allow(clippy::too_many_arguments)]
 fn mmap_dict<K: DictionaryKey, T: AsRef<[u8]>>(
     data: Arc<T>,
     node: &Node,
1 change: 1 addition & 0 deletions crates/polars-arrow/src/kernels/ewm/variance.rs
@@ -7,6 +7,7 @@ use num_traits::Float;
 use crate::trusted_len::TrustedLen;
 use crate::utils::CustomIterTools;

+#[allow(clippy::too_many_arguments)]
 fn ewm_cov_internal<I, T>(
     xs: I,
     ys: I,
@@ -195,6 +195,7 @@ where
     }
 }

+#[allow(clippy::too_many_arguments)]
 fn rolling_apply_weighted_quantile<T, Fo>(
     values: &[T],
     p: f64,
1 change: 1 addition & 0 deletions crates/polars-io/src/csv/parser.rs
@@ -346,6 +346,7 @@ fn skip_this_line(bytes: &[u8], quote: Option<u8>, eol_char: u8) -> &[u8] {
 /// * `projection` - Indices of the columns to project.
 /// * `buffers` - Parsed output will be written to these buffers. Except for UTF8 data. The offsets of the
 ///   fields are written to the buffers. The UTF8 data will be parsed later.
+#[allow(clippy::too_many_arguments)]
 pub(super) fn parse_lines<'a>(
     mut bytes: &'a [u8],
     offset: usize,
1 change: 1 addition & 0 deletions crates/polars-io/src/csv/read_impl/batched_mmap.rs
@@ -5,6 +5,7 @@ use crate::csv::CsvReader;
 use crate::mmap::MmapBytesReader;
 use crate::prelude::update_row_counts2;

+#[allow(clippy::too_many_arguments)]
 pub(crate) fn get_file_chunks_iterator(
     offsets: &mut VecDeque<(usize, usize)>,
     last_pos: &mut usize,
1 change: 1 addition & 0 deletions crates/polars-io/src/csv/read_impl/batched_read.rs
@@ -7,6 +7,7 @@ use crate::csv::CsvReader;
 use crate::mmap::MmapBytesReader;
 use crate::prelude::update_row_counts2;

+#[allow(clippy::too_many_arguments)]
 pub(crate) fn get_offsets(
     offsets: &mut VecDeque<(usize, usize)>,
     n_chunks: usize,
2 changes: 2 additions & 0 deletions crates/polars-io/src/csv/read_impl/mod.rs
@@ -184,6 +184,7 @@ impl RunningSize {
 }

 impl<'a> CoreReader<'a> {
+    #[allow(clippy::too_many_arguments)]
     pub(crate) fn new(
         reader_bytes: ReaderBytes<'a>,
         n_rows: Option<usize>,
@@ -790,6 +791,7 @@ fn update_string_stats(
     Ok(())
 }

+#[allow(clippy::too_many_arguments)]
 fn read_chunk(
     bytes: &[u8],
     separator: u8,
2 changes: 2 additions & 0 deletions crates/polars-io/src/csv/utils.rs
@@ -131,6 +131,7 @@ pub(crate) fn parse_bytes_with_encoding(
     })
 }

+#[allow(clippy::too_many_arguments)]
 pub fn infer_file_schema_inner(
     reader_bytes: &ReaderBytes,
     separator: u8,
@@ -461,6 +462,7 @@ pub fn infer_file_schema_inner(
 /// - inferred schema
 /// - number of rows used for inference.
 /// - bytes read
+#[allow(clippy::too_many_arguments)]
 pub fn infer_file_schema(
     reader_bytes: &ReaderBytes,
     separator: u8,
1 change: 1 addition & 0 deletions crates/polars-io/src/ndjson/core.rs
@@ -149,6 +149,7 @@ pub(crate) struct CoreJsonReader<'a> {
     ignore_errors: bool,
 }
 impl<'a> CoreJsonReader<'a> {
+    #[allow(clippy::too_many_arguments)]
     pub(crate) fn new(
         reader_bytes: ReaderBytes<'a>,
         n_rows: Option<usize>,
5 changes: 5 additions & 0 deletions crates/polars-io/src/parquet/read_impl.rs
@@ -108,6 +108,7 @@ fn materialize_hive_partitions(
     }
 }

+#[allow(clippy::too_many_arguments)]
 fn rg_to_dfs(
     store: &mmap::ColumnStore,
     previous_row_count: &mut IdxSize,
@@ -157,6 +158,7 @@ fn rg_to_dfs(
     }
 }

+#[allow(clippy::too_many_arguments)]
 // might parallelize over columns
 fn rg_to_dfs_optionally_par_over_columns(
     store: &mmap::ColumnStore,
@@ -235,6 +237,7 @@ fn rg_to_dfs_optionally_par_over_columns(
     Ok(dfs)
 }

+#[allow(clippy::too_many_arguments)]
 // parallelizes over row groups
 fn rg_to_dfs_par_over_rg(
     store: &mmap::ColumnStore,
@@ -306,6 +309,7 @@ fn rg_to_dfs_par_over_rg(
     Ok(dfs.into_iter().flatten().collect())
 }

+#[allow(clippy::too_many_arguments)]
 pub fn read_parquet<R: MmapBytesReader>(
     mut reader: R,
     mut limit: usize,
@@ -465,6 +469,7 @@ pub struct BatchedParquetReader {
 }

 impl BatchedParquetReader {
+    #[allow(clippy::too_many_arguments)]
     pub fn new(
         row_group_fetcher: RowGroupFetcher,
         metadata: Arc<FileMetaData>,
2 changes: 2 additions & 0 deletions crates/polars-lazy/src/physical_plan/executors/group_by.rs
@@ -31,6 +31,7 @@ pub struct GroupByExec {
 }

 impl GroupByExec {
+    #[allow(clippy::too_many_arguments)]
     pub(crate) fn new(
         input: Box<dyn Executor>,
         keys: Vec<Arc<dyn PhysicalExpr>>,
@@ -52,6 +53,7 @@ impl GroupByExec {
     }
 }

+#[allow(clippy::too_many_arguments)]
 pub(super) fn group_by_helper(
     mut df: DataFrame,
     keys: Vec<Series>,
@@ -23,6 +23,7 @@ pub struct PartitionGroupByExec {
 }

 impl PartitionGroupByExec {
+    #[allow(clippy::too_many_arguments)]
     pub(crate) fn new(
         input: Box<dyn Executor>,
         phys_keys: Vec<Arc<dyn PhysicalExpr>>,
1 change: 1 addition & 0 deletions crates/polars-lazy/src/physical_plan/executors/join.rs
@@ -10,6 +10,7 @@ pub struct JoinExec {
 }

 impl JoinExec {
+    #[allow(clippy::too_many_arguments)]
     pub(crate) fn new(
         input_left: Box<dyn Executor>,
         input_right: Box<dyn Executor>,
1 change: 1 addition & 0 deletions crates/polars-lazy/src/physical_plan/expressions/window.rs
@@ -123,6 +123,7 @@ impl WindowExpr {
         unsafe { Ok(flattened.take_unchecked(&idx)) }
     }

+    #[allow(clippy::too_many_arguments)]
     fn map_by_arg_sort(
         &self,
         df: &DataFrame,
4 changes: 4 additions & 0 deletions crates/polars-ops/src/frame/join/asof/groups.rs
@@ -241,6 +241,7 @@ pub(super) unsafe fn join_asof_nearest_with_indirection<
 // we don't process a group at once but per `index_left` we find the `right_index` and keep track
 // of the offsets we have already processed in a separate hashmap. Then on a next iteration we can
 // continue from that offsets location.
+#[allow(clippy::too_many_arguments)]
 #[allow(clippy::type_complexity)]
 fn process_group<K, T>(
     k: K,
@@ -689,6 +690,7 @@ where
     })
 }

+#[allow(clippy::too_many_arguments)]
 fn dispatch_join<T: PolarsNumericType>(
     left_asof: &ChunkedArray<T>,
     right_asof: &ChunkedArray<T>,
@@ -754,6 +756,7 @@ fn dispatch_join<T: PolarsNumericType>(
 }

 pub trait AsofJoinBy: IntoDf {
+    #[allow(clippy::too_many_arguments)]
     #[doc(hidden)]
     fn _join_asof_by(
         &self,
@@ -850,6 +853,7 @@ pub trait AsofJoinBy: IntoDf {
     /// This is similar to a left-join except that we match on nearest key rather than equal keys.
     /// The keys must be sorted to perform an asof join. This is a special implementation of an asof join
     /// that searches for the nearest keys within a subgroup set by `by`.
+    #[allow(clippy::too_many_arguments)]
     fn join_asof_by<I, S>(
         &self,
         other: &DataFrame,