Skip to content

Commit

Permalink
a few clippy fixes for the new version (#33)
Browse files Browse the repository at this point in the history
  • Loading branch information
maximedion2 authored Nov 30, 2024
1 parent 743c452 commit 4260412
Show file tree
Hide file tree
Showing 4 changed files with 29 additions and 12 deletions.
4 changes: 2 additions & 2 deletions src/async_reader/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -544,7 +544,7 @@ pub struct ZarrRecordBatchStreamNonBlocking<'a, T: ZarrStream> {
_mask: Option<BooleanArray>,
}

impl<'a, T: ZarrStream> ZarrRecordBatchStreamNonBlocking<'a, T> {
impl<T: ZarrStream> ZarrRecordBatchStreamNonBlocking<'_, T> {
fn new(meta: ZarrStoreMetadata, filter: Option<ZarrChunkFilter>, store: T) -> Self {
Self {
meta,
Expand All @@ -557,7 +557,7 @@ impl<'a, T: ZarrStream> ZarrRecordBatchStreamNonBlocking<'a, T> {
}
}

impl<'a, T> Stream for ZarrRecordBatchStreamNonBlocking<'a, T>
impl<T> Stream for ZarrRecordBatchStreamNonBlocking<'_, T>
where
T: ZarrStream + Unpin + Send + 'static,
{
Expand Down
2 changes: 1 addition & 1 deletion src/datafusion/helpers.rs
Original file line number Diff line number Diff line change
Expand Up @@ -158,7 +158,7 @@ impl<'a> ZarrFilterCandidateBuilder<'a> {
}
}

impl<'a> TreeNodeRewriter for ZarrFilterCandidateBuilder<'a> {
impl TreeNodeRewriter for ZarrFilterCandidateBuilder<'_> {
type N = Arc<dyn PhysicalExpr>;

fn pre_visit(&mut self, node: &Arc<dyn PhysicalExpr>) -> DataFusionResult<RewriteRecursion> {
Expand Down
31 changes: 24 additions & 7 deletions src/reader/codecs.rs
Original file line number Diff line number Diff line change
Expand Up @@ -602,7 +602,7 @@ fn broadcast_array<T: Clone>(
// a time, and the "flat" data from the inner shard is read in small
// chunks and written to the correct position in the array for the entire
// outer chunk.
fn fill_data_from_shard<T: Copy>(
fn fill_data_from_shard<T: Clone>(
data: &mut [T],
shard_data: &[T],
chunk_real_dims: &[usize],
Expand All @@ -622,7 +622,7 @@ fn fill_data_from_shard<T: Copy>(
// from the start of the 1D outer chunk.
1 => {
let stride = inner_dims[0];
data[pos * stride..(pos + 1) * stride].copy_from_slice(shard_data);
data[pos * stride..(pos + 1) * stride].clone_from_slice(shard_data);
}
// The 2D case is trickier, we need to keep track of the inner shard
// position within the outer chunk, as well as where we're reading
Expand All @@ -649,7 +649,7 @@ fn fill_data_from_shard<T: Copy>(
// within the chunk, times the number of columns per shard.
let chunk_offset = (shard_pos[0] * inner_dims[0] + row_idx) * chunk_real_dims[1]
+ shard_pos[1] * inner_dims[1];
data[chunk_offset..chunk_offset + stride].copy_from_slice(shard_row);
data[chunk_offset..chunk_offset + stride].clone_from_slice(shard_row);
}
}
// similar to the 2D case, but a bit more complicated, for 3D arrays.
Expand All @@ -672,7 +672,7 @@ fn fill_data_from_shard<T: Copy>(
+ (shard_pos[1] * inner_dims[1] + row_idx) * chunk_real_dims[2]
+ shard_pos[2] * inner_dims[2];

data[chunk_offset..chunk_offset + stride].copy_from_slice(shard_row);
data[chunk_offset..chunk_offset + stride].clone_from_slice(shard_row);
}
}
}
Expand Down Expand Up @@ -802,7 +802,7 @@ fn decode_string_chunk(
bytes = bytes.iter().step_by(PY_UNICODE_SIZE).copied().collect();
}

let mut data = Vec::new();
let mut data;
if let Some(sharding_params) = sharding_params.as_ref() {
let mut index_size: usize = 2 * 8 * sharding_params.n_chunks.iter().product::<usize>();
index_size += sharding_params
Expand All @@ -815,18 +815,35 @@ fn decode_string_chunk(
};
let (offsets, nbytes) = extract_sharding_index(&sharding_params.index_codecs, index_bytes)?;

data = vec![String::default(); bytes.len()];
let mut total_length = 0;
for (pos, (o, n)) in offsets.iter().zip(nbytes.iter()).enumerate() {
// the below condition indicates an empty shard
if o == &NULL_OFFSET && n == &NULL_NBYTES {
continue;
}
let inner_real_dims = get_inner_chunk_real_dims(sharding_params, real_dims, pos);
let inner_data = decode_string_chunk(
bytes[*o..o + n].to_vec(),
str_len,
&sharding_params.chunk_shape,
&get_inner_chunk_real_dims(sharding_params, real_dims, pos), // TODO: fix this to real dims
&inner_real_dims,
&sharding_params.codecs,
None,
pyunicode,
)?;
data.extend(inner_data);
total_length += inner_data.len();
fill_data_from_shard(
&mut data,
&inner_data,
real_dims,
chunk_dims,
&inner_real_dims,
&sharding_params.chunk_shape,
pos,
)?;
}
data = data[0..total_length].to_vec();
} else if let Some(ZarrCodec::Bytes(_)) = array_to_bytes_codec {
data = bytes
.chunks(str_len)
Expand Down
4 changes: 2 additions & 2 deletions src/reader/zarr_read.rs
Original file line number Diff line number Diff line change
Expand Up @@ -156,11 +156,11 @@ impl ZarrProjection {
ProjectionType::Null => Ok(all_cols.to_owned()),
ProjectionType::Skip => {
let col_names = self.col_names.as_ref().unwrap();
return Ok(all_cols
Ok(all_cols
.iter()
.filter(|x| !col_names.contains(x))
.map(|x| x.to_string())
.collect());
.collect())
}
ProjectionType::Select => {
let col_names = self.col_names.as_ref().unwrap();
Expand Down

0 comments on commit 4260412

Please sign in to comment.