chore: add lints to papyrus_storage
Lior banned `as` repo-wide, unless absolutely necessary.

Notable changes (the rest are either `From` conversions or usize->u64 casts):
1. storage_benchmark: the median values are u128, not usize, so the
   `Entry.value` field switches to u128 and the casts are dropped.
2. db_stats: two usages genuinely need `as f64`, so exceptions were added;
   the last usage now uses `f64::powi`, so no integer cast is needed.
3. serializers: the macro serves multiple types, so an exception was added.
4. utils.rs: the metrics macros need `as f64`.
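
Taken together, the diffs below reduce to three conversion idioms. A minimal, self-contained sketch of all three, with illustrative values rather than code from this repo:

```rust
fn main() {
    // 1. Lossless widening (u32 -> u64): `From`/`Into` exists, so no cast.
    let block: u32 = 42;
    let wide: u64 = block.into();

    // 2. usize -> u64: std omits `From` because usize is platform-sized,
    //    so the conversion is fallible and the assumption is spelled out.
    let len: usize = 1024;
    let len_u64 = u64::try_from(len).expect("usize should fit in u64");

    // 3. Integer -> float is lossy above 2^53 and has no `From`; where the
    //    cast is intentional, the lint is silenced at the narrowest scope.
    #[allow(clippy::as_conversions)]
    let ratio = len_u64 as f64 / f64::from(block);
    println!("{wide} {ratio}");
}
```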
Gilad Chase committed Nov 7, 2024
1 parent 3739dda commit b230c2d
Showing 9 changed files with 41 additions and 32 deletions.
6 changes: 2 additions & 4 deletions crates/papyrus_storage/Cargo.toml
@@ -74,7 +74,5 @@ test-case.workspace = true
 test-log.workspace = true
 tokio = { workspace = true, features = ["full", "sync"] }
 
-[lints.rust]
-# See [here](https://github.com/taiki-e/cargo-llvm-cov/issues/370) for a discussion on why this is
-# needed (from rust 1.80).
-unexpected_cfgs = { level = "warn", check-cfg = ['cfg(coverage_nightly)'] }
+[lints]
+workspace = true
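
The new `[lints]` table delegates lint levels to the workspace root, so per-crate exceptions like `unexpected_cfgs` move there instead of being repeated. Assuming the usual layout (the root table is not part of this diff, so the clippy level shown is a guess):

```toml
# Hypothetical workspace-root Cargo.toml excerpt.
[workspace.lints.rust]
unexpected_cfgs = { level = "warn", check-cfg = ['cfg(coverage_nightly)'] }

[workspace.lints.clippy]
as_conversions = "warn"
```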
8 changes: 4 additions & 4 deletions crates/papyrus_storage/src/bin/storage_benchmark.rs
@@ -103,7 +103,7 @@ impl Times {
         results.push(Entry {
             name: "get_class_hash_at".to_string(),
             unit: "Microseconds".to_string(),
-            value: get_class_hash_at_median as usize,
+            value: get_class_hash_at_median,
         });
 
         let get_nonce_at_median = if self.get_nonce_at.is_empty() {
@@ -114,7 +114,7 @@ impl Times {
         results.push(Entry {
             name: "get_nonce_at".to_string(),
             unit: "Microseconds".to_string(),
-            value: get_nonce_at_median as usize,
+            value: get_nonce_at_median,
         });
 
         let get_storage_at_median = if self.get_storage_at.is_empty() {
@@ -126,7 +126,7 @@ impl Times {
         results.push(Entry {
             name: "get_storage_at".to_string(),
             unit: "Microseconds".to_string(),
-            value: get_storage_at_median as usize,
+            value: get_storage_at_median,
         });
 
         results
@@ -155,7 +155,7 @@
 struct Entry {
     name: String,
     unit: String,
-    value: usize,
+    value: u128,
 }
 
 struct CliParams {
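
The field flips to `u128` presumably because the medians come from `Duration::as_micros`, which returns `u128`; with the field at `u128` the values flow through uncast. A sketch under that assumption (`median_micros` is a hypothetical stand-in, and it skips the empty-input guard the real code has):

```rust
use std::time::Duration;

// Hypothetical stand-in for the benchmark's median computation.
fn median_micros(samples: &[Duration]) -> u128 {
    let mut micros: Vec<u128> = samples.iter().map(|d| d.as_micros()).collect();
    micros.sort_unstable();
    micros[micros.len() / 2]
}

fn main() {
    let samples =
        vec![Duration::from_micros(10), Duration::from_micros(30), Duration::from_micros(20)];
    let value: u128 = median_micros(&samples); // u128 end to end, no `as usize`
    println!("median = {value} us");
}
```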
6 changes: 4 additions & 2 deletions crates/papyrus_storage/src/db/db_stats.rs
@@ -59,6 +59,7 @@ impl DbReader {
             leaf_pages: stat.leaf_pages(),
             overflow_pages: stat.overflow_pages(),
             total_size: stat.total_size(),
+            #[allow(clippy::as_conversions)]
             db_portion: stat.total_size() as f64 / self.env.stat()?.total_size() as f64,
         })
     }
@@ -97,6 +98,7 @@ fn readable_bytes<S>(bytes_num: &u64, s: S) -> Result<S::Ok, S::Error>
 where
     S: serde::Serializer,
 {
+    #[allow(clippy::as_conversions)]
     s.serialize_str(&human_bytes(*bytes_num as f64))
 }
 
@@ -106,7 +108,7 @@ fn float_precision<S>(float: &f64, s: S) -> Result<S::Ok, S::Error>
 where
     S: serde::Serializer,
 {
-    const PRECISION: u32 = 4;
-    let power = u32::pow(10, PRECISION) as f64;
+    const PRECISION: i32 = 4;
+    let power = 10_f64.powi(PRECISION);
     s.serialize_f64((*float * power).round() / power)
 }
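
`f64::powi` takes an `i32` exponent, which is why `PRECISION` changes type; the result is the same power of ten with no integer-to-float cast. A quick equivalence check:

```rust
fn main() {
    const PRECISION: i32 = 4;
    let power = 10_f64.powi(PRECISION);
    assert_eq!(power, 10_000.0); // same value `u32::pow(10, 4) as f64` produced
    let float = 0.123456789_f64;
    assert_eq!((float * power).round() / power, 0.1235);
}
```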
26 changes: 13 additions & 13 deletions crates/papyrus_storage/src/mmap_file/mmap_file_test.rs
@@ -151,32 +151,32 @@ fn grow_file() {
         open_file::<NoVersionValueWrapper<Vec<u8>>>(config.clone(), file_path.clone(), offset)
             .unwrap();
     // file_size = 4 (growth_step), offset = 0
-    let mut file_size = file.metadata().unwrap().len();
-    assert_eq!(file_size, config.growth_step as u64);
+    let mut file_size = usize::try_from(file.metadata().unwrap().len()).unwrap();
+    assert_eq!(file_size, config.growth_step);
     assert_eq!(offset, 0);
 
     offset += writer.append(&data).len;
     // file_size = 8 (2 * growth_step), offset = 3 (serialization_size)
-    file_size = file.metadata().unwrap().len();
-    assert_eq!(file_size, 2 * config.growth_step as u64);
+    file_size = file.metadata().unwrap().len().try_into().unwrap();
+    assert_eq!(file_size, 2 * config.growth_step);
     assert_eq!(offset, serialization_size);
 
     offset += writer.append(&data).len;
     // file_size = 12 (3 * growth_step), offset = 6 (2 * serialization_size)
-    file_size = file.metadata().unwrap().len();
-    assert_eq!(file_size, 3 * config.growth_step as u64);
+    file_size = file.metadata().unwrap().len().try_into().unwrap();
+    assert_eq!(file_size, 3 * config.growth_step);
     assert_eq!(offset, 2 * serialization_size);
 
     offset += writer.append(&data).len;
     // file_size = 12 (3 * growth_step), offset = 9 (3 * serialization_size)
-    file_size = file.metadata().unwrap().len();
-    assert_eq!(file_size, 3 * config.growth_step as u64);
+    file_size = file.metadata().unwrap().len().try_into().unwrap();
+    assert_eq!(file_size, 3 * config.growth_step);
     assert_eq!(offset, 3 * serialization_size);
 
     offset += writer.append(&data).len;
     // file_size = 16 (4 * growth_step), offset = 12 (4 * serialization_size)
-    file_size = file.metadata().unwrap().len();
-    assert_eq!(file_size, 4 * config.growth_step as u64);
+    file_size = file.metadata().unwrap().len().try_into().unwrap();
+    assert_eq!(file_size, 4 * config.growth_step);
     assert_eq!(offset, 4 * serialization_size);
 }
 
@@ -187,9 +187,9 @@ fn grow_file() {
         .truncate(false)
         .open(file_path.clone())
         .unwrap();
-    assert_eq!(file.metadata().unwrap().len(), 4 * config.growth_step as u64);
+    assert_eq!(usize::try_from(file.metadata().unwrap().len()).unwrap(), 4 * config.growth_step);
     let _ = open_file::<NoVersionValueWrapper<Vec<u8>>>(config.clone(), file_path, offset).unwrap();
-    assert_eq!(file.metadata().unwrap().len(), 4 * config.growth_step as u64);
+    assert_eq!(usize::try_from(file.metadata().unwrap().len()).unwrap(), 4 * config.growth_step);
 
     dir.close().unwrap();
 }
@@ -240,7 +240,7 @@ async fn write_read_different_locations() {
             writer.append(&data);
             writer.flush();
             {
-                *lock.write().await = round as usize;
+                *lock.write().await = round.into();
             }
             barrier.wait().await;
             data = data.into_iter().map(|x| x + 2).collect();
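
`metadata().len()` is a `u64` while `config.growth_step` is a `usize`, and `u64 -> usize` is the direction that can actually truncate on 32-bit targets; `try_from(...).unwrap()` fails loudly where `as` would wrap silently. The same guard in isolation (`file_size` is an illustrative helper, not crate code):

```rust
use std::fs::File;
use std::io;

// Convert a file length to usize, erroring instead of silently
// truncating the way `len as usize` would on a 32-bit target.
fn file_size(file: &File) -> io::Result<usize> {
    let len: u64 = file.metadata()?.len();
    usize::try_from(len)
        .map_err(|_| io::Error::new(io::ErrorKind::Other, "file length exceeds usize"))
}

fn main() -> io::Result<()> {
    let file = File::open("Cargo.toml")?;
    println!("{} bytes", file_size(&file)?);
    Ok(())
}
```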
3 changes: 2 additions & 1 deletion crates/papyrus_storage/src/mmap_file/mod.rs
@@ -150,8 +150,9 @@ impl<V: ValueSerde> MMapFile<V> {
     fn grow(&mut self) {
         self.flush();
         let new_size = self.size + self.config.growth_step;
+        let new_size_u64 = u64::try_from(new_size).expect("usize should fit in u64");
         debug!("Growing file to size: {}", new_size);
-        self.file.set_len(new_size as u64).expect("Failed to set the file size");
+        self.file.set_len(new_size_u64).expect("Failed to set the file size");
         self.size = new_size;
     }
 
5 changes: 3 additions & 2 deletions crates/papyrus_storage/src/serialization/serializers.rs
@@ -593,6 +593,7 @@ macro_rules! auto_storage_serde {
     ($(pub)? enum $name:ident { $($variant:ident $( ($ty:ty) )? = $num:expr ,)* } $($rest:tt)*) => {
         impl StorageSerde for $name {
             fn serialize_into(&self, res: &mut impl std::io::Write) -> Result<(), StorageSerdeError> {
+                #[allow(clippy::as_conversions)]
                 match self {
                     $(
                         variant!( value, $variant $( ($ty) )?) => {
@@ -764,7 +765,7 @@ impl StorageSerde for u8 {
 // TODO(dan): get rid of usize.
 impl StorageSerde for usize {
     fn serialize_into(&self, res: &mut impl std::io::Write) -> Result<(), StorageSerdeError> {
-        (*self as u64).serialize_into(res)
+        (u64::try_from(*self).expect("usize should fit in u64")).serialize_into(res)
     }
 
     fn deserialize_from(bytes: &mut impl std::io::Read) -> Option<Self> {
@@ -929,7 +930,7 @@ impl StorageSerde for BlockNumber {
     }
 
     fn deserialize_from(bytes: &mut impl std::io::Read) -> Option<Self> {
-        Some(BlockNumber(u32::deserialize_from(bytes)? as u64))
+        Some(BlockNumber(u32::deserialize_from(bytes)?.into()))
     }
 }
 
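
The allow inside the macro is scoped to the generated `match`: the discriminant expression's type differs per invocation, so no single `From`/`TryFrom` call covers every enum the macro expands for. A hedged illustration of that constraint, not the actual `auto_storage_serde` internals:

```rust
// Hypothetical macro: `$num`'s type varies per call site, so a cast is the
// only uniform narrowing, and the allow is scoped to exactly that spot.
macro_rules! discriminant_byte {
    ($num:expr) => {{
        #[allow(clippy::as_conversions)]
        let byte = $num as u8;
        byte
    }};
}

fn main() {
    assert_eq!(discriminant_byte!(3_u32), 3_u8);
    assert_eq!(discriminant_byte!(7_i64), 7_u8);
}
```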
4 changes: 2 additions & 2 deletions crates/papyrus_storage/src/state/state_test.rs
@@ -563,7 +563,7 @@ fn get_nonce_key_serialization() {
         deprecated_declared_classes: Vec::new(),
         nonces: IndexMap::from([(
             contract_address,
-            Nonce(StarkHash::from(block_number as u128 + 1)),
+            Nonce(StarkHash::from(u128::from(block_number) + 1)),
         )]),
         replaced_classes: IndexMap::new(),
     };
@@ -598,7 +598,7 @@ fn get_nonce_key_serialization() {
         println!("{nonce:?}");
         let nonce = nonce.unwrap();
 
-        assert_eq!(nonce, Nonce(StarkHash::from(block_number as u128)));
+        assert_eq!(nonce, Nonce(StarkHash::from(u128::from(block_number))));
     }
 }
 
11 changes: 9 additions & 2 deletions crates/papyrus_storage/src/utils.rs
@@ -93,11 +93,18 @@ fn dump_declared_classes_table_by_block_range_internal(
 // TODO(dvir): relocate all the storage metrics in one module and export them (also in other
 // crates).
 /// Updates storage metrics about the state of the storage.
+#[allow(clippy::as_conversions)]
 pub fn update_storage_metrics(reader: &StorageReader) -> StorageResult<()> {
     debug!("updating storage metrics");
     gauge!("storage_free_pages_number", reader.db_reader.get_free_pages()? as f64);
     let info = reader.db_reader.get_db_info()?;
-    absolute_counter!("storage_last_page_number", info.last_pgno() as u64);
-    absolute_counter!("storage_last_transaction_index", info.last_txnid() as u64);
+    absolute_counter!(
+        "storage_last_page_number",
+        u64::try_from(info.last_pgno()).expect("usize should fit in u64")
+    );
+    absolute_counter!(
+        "storage_last_transaction_index",
+        u64::try_from(info.last_txnid()).expect("usize should fit in u64")
+    );
     Ok(())
 }
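
The function-level allow stays because `f64` implements `From` for `u32` but not for `u64` or `usize` (values above 2^53 lose precision), and `gauge!` wants an `f64`. A sketch of the resulting split, assuming the `metrics`-crate macros used in the diff:

```rust
use metrics::{absolute_counter, gauge};

#[allow(clippy::as_conversions)]
fn report(free_pages: usize, last_pgno: usize) {
    // No From<usize> for f64, so the intentional lossy cast stays.
    gauge!("storage_free_pages_number", free_pages as f64);
    // usize -> u64 has TryFrom, making the "always fits" assumption explicit.
    absolute_counter!(
        "storage_last_page_number",
        u64::try_from(last_pgno).expect("usize should fit in u64")
    );
}
```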
4 changes: 2 additions & 2 deletions crates/papyrus_storage/src/utils_test.rs
@@ -27,7 +27,7 @@ fn test_dump_declared_classes() {
     let mut state_diffs = vec![];
     let ((reader, mut writer), _temp_dir) = get_test_storage();
     for i in 0..5 {
-        let i_felt = Felt::from(i as u128);
+        let i_felt = Felt::from(u128::try_from(i).expect("usize should fit in u128"));
         declared_classes.push((
             ClassHash(i_felt),
             ContractClass {
@@ -46,7 +46,7 @@ fn test_dump_declared_classes() {
             nonces: indexmap!(),
             replaced_classes: indexmap!(),
         });
-        let block_number = BlockNumber(i as u64);
+        let block_number = BlockNumber(u64::try_from(i).expect("usize should fit in u64"));
         let txn = writer.begin_rw_txn().unwrap();
         txn.append_state_diff(block_number, state_diffs[i].clone())
             .unwrap()
