From 755dc0b03c89bd924d287cf453c8f1631ad8df45 Mon Sep 17 00:00:00 2001 From: Arpad Borsos Date: Tue, 30 Jul 2024 10:55:29 +0200 Subject: [PATCH] Fix various `clippy` suggestions Some of the suggestions were applied automatically, though some resulted in broken code. Other suggestions were applied manually, including fixing some tests that were not actually asserting anything. --- examples/parse_pyreport.rs | 2 +- examples/sql_to_pyreport.rs | 2 +- src/parsers/pyreport/chunks.rs | 23 ++++++--- src/parsers/pyreport/utils.rs | 14 ++--- src/report/mod.rs | 1 + src/report/models.rs | 2 +- src/report/pyreport/chunks.rs | 11 ++-- src/report/sqlite/models.rs | 1 - src/report/sqlite/report.rs | 54 ++++++++++---------- src/report/sqlite/report_builder.rs | 79 ++++++++--------------------- tests/test_pyreport_shim.rs | 6 +-- 11 files changed, 82 insertions(+), 113 deletions(-) diff --git a/examples/parse_pyreport.rs b/examples/parse_pyreport.rs index 1ef5637..10b5d19 100644 --- a/examples/parse_pyreport.rs +++ b/examples/parse_pyreport.rs @@ -5,7 +5,7 @@ use codecov_rs::{error::Result, parsers::pyreport::parse_pyreport}; fn usage_error() -> ! { println!("Usage:"); println!(" cargo run --example parse_pyreport -- [REPORT_JSON_PATH] [CHUNKS_PATH] [OUT_PATH]"); - println!(""); + println!(); println!("Example:"); println!(" cargo run --example parse_pyreport -- tests/common/sample_data/codecov-rs-reports-json-d2a9ba1.txt tests/common/sample_data/codecov-rs-chunks-d2a9ba1.txt d2a9ba1.sqlite"); diff --git a/examples/sql_to_pyreport.rs b/examples/sql_to_pyreport.rs index 641c437..4f373e3 100644 --- a/examples/sql_to_pyreport.rs +++ b/examples/sql_to_pyreport.rs @@ -10,7 +10,7 @@ fn usage_error() -> ! { println!( " cargo run --example parse_pyreport -- [SQLITE_PATH] [REPORT_JSON_PATH] [CHUNKS_PATH]" ); - println!(""); + println!(); println!("Example:"); println!( " cargo run --example parse_pyreport -- d2a9ba1.sqlite ~/report_json.json ~/chunks.txt" diff --git a/src/parsers/pyreport/chunks.rs b/src/parsers/pyreport/chunks.rs index cee03ed..c32c46d 100644 --- a/src/parsers/pyreport/chunks.rs +++ b/src/parsers/pyreport/chunks.rs @@ -177,10 +177,11 @@ pub fn complexity>( /// [There may yet be more ways this shows /// up](https://github.com/codecov/worker/blob/07405e0ae925f00aa7bb3e2d828537010901154b/services/report/languages/cobertura.py#L112-L114). /// We'll try our best, and that'll have to do. -pub fn missing_branches<'a, S: StrStream, R: Report, B: ReportBuilder>( +pub fn missing_branches<'a, S, R: Report, B: ReportBuilder>( buf: &mut ReportOutputStream, ) -> PResult> where + S: StrStream, S: Stream, { let block_and_branch = separated_pair(parse_u32, ':', parse_u32); @@ -254,10 +255,11 @@ pub fn partial_spans>( /// that session. /// /// Trailing null fields may be omitted. -pub fn line_session<'a, S: StrStream, R: Report, B: ReportBuilder>( +pub fn line_session<'a, S, R: Report, B: ReportBuilder>( buf: &mut ReportOutputStream, ) -> PResult where + S: StrStream, S: Stream, { seq! {LineSession { @@ -278,10 +280,11 @@ where /// No idea what this field contains. Guessing it's JSON so if we ever encounter /// it we can at least consume it off the stream and continue parsing. 
-pub fn messages<'a, S: StrStream, R: Report, B: ReportBuilder>( +pub fn messages<'a, S, R: Report, B: ReportBuilder>( buf: &mut ReportOutputStream, ) -> PResult where + S: StrStream, S: Stream, { json_value.parse_next(buf) @@ -360,10 +363,11 @@ pub fn coverage_datapoint>( /// This parser performs all the writes it can to the output /// stream and only returns a `ReportLine` for tests. The `report_line_or_empty` /// parser which wraps this and supports empty lines returns `Ok(())`. -pub fn report_line<'a, S: StrStream, R: Report, B: ReportBuilder>( +pub fn report_line<'a, S, R: Report, B: ReportBuilder>( buf: &mut ReportOutputStream, ) -> PResult where + S: StrStream, S: Stream, { let line_no = buf.state.chunk.current_line; @@ -407,10 +411,11 @@ where /// /// The `report_line` parser writes all the data it can to the output /// stream so we don't actually need to return anything to our caller. -pub fn report_line_or_empty<'a, S: StrStream, R: Report, B: ReportBuilder>( +pub fn report_line_or_empty<'a, S, R: Report, B: ReportBuilder>( buf: &mut ReportOutputStream, ) -> PResult> where + S: StrStream, S: Stream, { buf.state.chunk.current_line += 1; @@ -440,10 +445,11 @@ pub fn chunk_header>( /// Each new chunk will reset `buf.state.chunk.current_line` to 0 when it starts /// and increment `buf.state.chunk.index` when it ends so that the next chunk /// can associate its data with the correct file. -pub fn chunk<'a, S: StrStream, R: Report, B: ReportBuilder>( +pub fn chunk<'a, S, R: Report, B: ReportBuilder>( buf: &mut ReportOutputStream, ) -> PResult<()> where + S: StrStream, S: Stream, { // New chunk, start back at line 0. @@ -501,10 +507,11 @@ pub fn chunks_file_header>( /// Parses a chunks file. A chunks file contains an optional header and a series /// of 1 or more "chunks" separated by an `CHUNKS_FILE_END_OF_CHUNK` terminator. -pub fn parse_chunks_file<'a, S: StrStream, R: Report, B: ReportBuilder>( +pub fn parse_chunks_file<'a, S, R: Report, B: ReportBuilder>( buf: &mut ReportOutputStream, ) -> PResult<()> where + S: StrStream, S: Stream, { let _: Vec<_> = preceded( @@ -1692,7 +1699,7 @@ mod tests { ]; for test_case in test_cases { - buf.input = &test_case.0; + buf.input = test_case.0; assert_eq!(chunks_file_header.parse_next(&mut buf), test_case.1); } assert_eq!(buf.state.labels_index.len(), 2); diff --git a/src/parsers/pyreport/utils.rs b/src/parsers/pyreport/utils.rs index cda4615..e4ceacc 100644 --- a/src/parsers/pyreport/utils.rs +++ b/src/parsers/pyreport/utils.rs @@ -275,6 +275,8 @@ pub fn save_report_lines>( models .iter_mut() .filter_map(|LineSessionModels { sample, method, .. 
}| { + // See https://github.com/rust-lang/rust-clippy/issues/13185 + #[allow(clippy::manual_inspect)] method.as_mut().map(|method| { method.local_sample_id = sample.local_sample_id; method @@ -928,7 +930,7 @@ mod tests { source_file_id: 123, line_no: 1, hits: Some(10), - coverage_type: coverage_type, + coverage_type, ..Default::default() }, ..Default::default() @@ -939,7 +941,7 @@ mod tests { source_file_id: 123, line_no: 1, hits: Some(10), - coverage_type: coverage_type, + coverage_type, ..Default::default() }, ..Default::default() @@ -950,7 +952,7 @@ mod tests { source_file_id: 123, line_no: 1, hits: Some(10), - coverage_type: coverage_type, + coverage_type, ..Default::default() }, ..Default::default() @@ -1025,7 +1027,7 @@ mod tests { source_file_id: 123, line_no: 1, hits: Some(10), - coverage_type: coverage_type, + coverage_type, ..Default::default() }, assocs: vec![ @@ -1048,7 +1050,7 @@ mod tests { source_file_id: 123, line_no: 1, hits: Some(10), - coverage_type: coverage_type, + coverage_type, ..Default::default() }, ..Default::default() @@ -1059,7 +1061,7 @@ mod tests { source_file_id: 123, line_no: 1, hits: Some(10), - coverage_type: coverage_type, + coverage_type, ..Default::default() }, assocs: vec![models::ContextAssoc { diff --git a/src/report/mod.rs b/src/report/mod.rs index f09cdd6..16e6fc8 100644 --- a/src/report/mod.rs +++ b/src/report/mod.rs @@ -48,6 +48,7 @@ pub trait Report { /// An interface for creating a new coverage report. #[cfg_attr(test, automock)] +#[allow(clippy::needless_lifetimes)] // `automock` requires these pub trait ReportBuilder { /// Create a [`models::SourceFile`] record and return it. fn insert_file(&mut self, path: String) -> Result; diff --git a/src/report/models.rs b/src/report/models.rs index dffbe8a..f6eba5d 100644 --- a/src/report/models.rs +++ b/src/report/models.rs @@ -133,7 +133,7 @@ pub enum ContextType { /// Each source file represented in the coverage data should have a /// [`SourceFile`] record with its path relative to the project's root. -#[derive(PartialEq, Debug, Default)] +#[derive(PartialEq, Debug, Default, Clone)] pub struct SourceFile { /// Should be a hash of the path. 
pub id: i64, diff --git a/src/report/pyreport/chunks.rs b/src/report/pyreport/chunks.rs index ab3172b..4528c91 100644 --- a/src/report/pyreport/chunks.rs +++ b/src/report/pyreport/chunks.rs @@ -755,7 +755,7 @@ mod tests { let db_path = ctx.temp_dir.path().join("test.db"); let report = SqliteReport::new(db_path).unwrap(); - let test_cases: &[(&[&dyn rusqlite::ToSql], (i64, JsonVal))] = &[ + let test_cases = &[ ( rusqlite::params![ 1, @@ -827,22 +827,19 @@ mod tests { // Malformed assert!( format_coverage(&None, &Some(2), &None).is_err_and(|e| match e { - CodecovError::PyreportConversionError(s) => - s == "incomplete coverage data".to_string(), + CodecovError::PyreportConversionError(s) => s == "incomplete coverage data", _ => false, }) ); assert!( format_coverage(&None, &None, &Some(4)).is_err_and(|e| match e { - CodecovError::PyreportConversionError(s) => - s == "incomplete coverage data".to_string(), + CodecovError::PyreportConversionError(s) => s == "incomplete coverage data", _ => false, }) ); assert!( format_coverage(&None, &None, &None).is_err_and(|e| match e { - CodecovError::PyreportConversionError(s) => - s == "incomplete coverage data".to_string(), + CodecovError::PyreportConversionError(s) => s == "incomplete coverage data", _ => false, }) ); diff --git a/src/report/sqlite/models.rs b/src/report/sqlite/models.rs index bff17d2..40e481b 100644 --- a/src/report/sqlite/models.rs +++ b/src/report/sqlite/models.rs @@ -668,7 +668,6 @@ mod tests { raw_upload_id: raw_upload.id, local_sample_id: Some(rand::random()), local_span_id: None, - ..Default::default() }; >::insert(&model, &report.conn).unwrap(); diff --git a/src/report/sqlite/report.rs b/src/report/sqlite/report.rs index ca44ca4..d86a398 100644 --- a/src/report/sqlite/report.rs +++ b/src/report/sqlite/report.rs @@ -1,4 +1,4 @@ -use std::path::PathBuf; +use std::{fmt, path::PathBuf}; use rusqlite::{Connection, OptionalExtension}; @@ -13,6 +13,12 @@ pub struct SqliteReport { pub conn: Connection, } +impl fmt::Debug for SqliteReport { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("SqliteReport").finish_non_exhaustive() + } +} + impl SqliteReport { pub fn new(filename: PathBuf) -> Result { let conn = open_database(&filename)?; @@ -309,7 +315,7 @@ mod tests { let line_6 = right_report_builder .insert_coverage_sample(models::CoverageSample { raw_upload_id: upload_2.id, - source_file_id: file_2.id, + source_file_id: file_3.id, line_no: 2, coverage_type: models::CoverageType::Method, hits: Some(2), @@ -336,38 +342,32 @@ mod tests { let mut left = left_report_builder.build().unwrap(); let right = right_report_builder.build().unwrap(); left.merge(&right).unwrap(); + + // NOTE: the assertions here are sensitive to the sort order: assert_eq!( - left.list_files().unwrap().sort_by_key(|f| f.id), - [&file_1, &file_2, &file_3].sort_by_key(|f| f.id), - ); - assert_eq!( - left.list_contexts().unwrap().sort_by_key(|c| c.id), - [&test_case_1, &test_case_2].sort_by_key(|c| c.id), - ); - assert_eq!( - left.list_coverage_samples() - .unwrap() - .sort_by_key(|s| s.local_sample_id), - [&line_1, &line_2, &line_3, &line_4, &line_5, &line_6] - .sort_by_key(|s| s.local_sample_id), + left.list_files().unwrap(), + &[file_1.clone(), file_2.clone(), file_3.clone()] ); + assert_eq!(left.list_contexts().unwrap(), &[test_case_2, test_case_1]); assert_eq!( - left.list_samples_for_file(&file_1) - .unwrap() - .sort_by_key(|s| s.local_sample_id), - [&line_1].sort_by_key(|s| s.local_sample_id), + left.list_coverage_samples().unwrap(), + &[ 
+ line_1.clone(), + line_4.clone(), + line_2.clone(), + line_5.clone(), + line_3.clone(), + line_6.clone() + ] ); + assert_eq!(left.list_samples_for_file(&file_1).unwrap(), &[line_1]); assert_eq!( - left.list_samples_for_file(&file_2) - .unwrap() - .sort_by_key(|s| s.local_sample_id), - [&line_2, &line_3, &line_4].sort_by_key(|s| s.local_sample_id), + left.list_samples_for_file(&file_2).unwrap(), + &[line_2, line_3, line_4] ); assert_eq!( - left.list_samples_for_file(&file_3) - .unwrap() - .sort_by_key(|s| s.local_sample_id), - [&line_5, &line_6].sort_by_key(|s| s.local_sample_id), + left.list_samples_for_file(&file_3).unwrap(), + &[line_5, line_6] ); } diff --git a/src/report/sqlite/report_builder.rs b/src/report/sqlite/report_builder.rs index 3889cdd..3dced1c 100644 --- a/src/report/sqlite/report_builder.rs +++ b/src/report/sqlite/report_builder.rs @@ -437,20 +437,10 @@ mod tests { assert_eq!(actual_file, expected_file); let duplicate_result = report_builder.insert_file(expected_file.path.clone()); - match duplicate_result { - Err(CodecovError::SqliteError(rusqlite::Error::SqliteFailure( - rusqlite::ffi::Error { - code: rusqlite::ffi::ErrorCode::ConstraintViolation, - extended_code: 1555, - }, - Some(s), - ))) => { - assert_eq!(s, String::from("UNIQUE constraint failed: source_file.id")); - } - _ => { - assert!(false); - } - } + assert_eq!( + duplicate_result.unwrap_err().to_string(), + "sqlite failure: 'UNIQUE constraint failed: source_file.id'" + ); } #[test] @@ -471,20 +461,10 @@ mod tests { let duplicate_result = report_builder.insert_context(expected_context.context_type, &expected_context.name); - match duplicate_result { - Err(CodecovError::SqliteError(rusqlite::Error::SqliteFailure( - rusqlite::ffi::Error { - code: rusqlite::ffi::ErrorCode::ConstraintViolation, - extended_code: 1555, - }, - Some(s), - ))) => { - assert_eq!(s, String::from("UNIQUE constraint failed: context.id")); - } - _ => { - assert!(false); - } - } + assert_eq!( + duplicate_result.unwrap_err().to_string(), + "sqlite failure: 'UNIQUE constraint failed: context.id'" + ); } #[test] @@ -518,7 +498,7 @@ mod tests { actual_sample.local_sample_id ); assert_eq!(actual_sample.local_sample_id, 0); - expected_sample.local_sample_id = actual_sample.local_sample_id.clone(); + expected_sample.local_sample_id = actual_sample.local_sample_id; assert_eq!(actual_sample, expected_sample); let second_sample = report_builder @@ -530,7 +510,7 @@ mod tests { ); assert_ne!(actual_sample.local_sample_id, second_sample.local_sample_id); assert_eq!(second_sample.local_sample_id, 1); - expected_sample.local_sample_id = second_sample.local_sample_id.clone(); + expected_sample.local_sample_id = second_sample.local_sample_id; assert_eq!(second_sample, expected_sample); } @@ -631,7 +611,6 @@ mod tests { hits: 0, branch_format: models::BranchFormat::Condition, branch: "0:jump".to_string(), - ..Default::default() }; let actual_branch = report_builder .insert_branches_data(expected_branch.clone()) @@ -1038,7 +1017,7 @@ mod tests { .unwrap(); let context = report_builder - .insert_context(models::ContextType::TestCase, &"test_case".to_string()) + .insert_context(models::ContextType::TestCase, "test_case") .unwrap(); let expected_assoc = models::ContextAssoc { @@ -1058,25 +1037,10 @@ mod tests { assert_eq!(actual_assoc, expected_assoc); let duplicate_result = report_builder.associate_context(expected_assoc.clone()); - match duplicate_result { - Err(CodecovError::SqliteError(rusqlite::Error::SqliteFailure( - rusqlite::ffi::Error { - code: 
rusqlite::ffi::ErrorCode::ConstraintViolation, - extended_code: 1555, - }, - Some(s), - ))) => { - assert_eq!( - s, - String::from( - "UNIQUE constraint failed: context_assoc.context_id, context_assoc.raw_upload_id, context_assoc.local_sample_id, context_assoc.local_span_id" - ) - ); - } - _ => { - assert!(false); - } - } + assert_eq!( + duplicate_result.unwrap_err().to_string(), + "sqlite failure: 'UNIQUE constraint failed: context_assoc.context_id, context_assoc.raw_upload_id, context_assoc.local_sample_id, context_assoc.local_span_id'" + ); } #[test] @@ -1124,7 +1088,7 @@ mod tests { }) .collect(); - let _ = report_builder + report_builder .multi_associate_context(assocs.iter_mut().collect()) .unwrap(); @@ -1178,12 +1142,11 @@ mod tests { let mut report_builder = SqliteReportBuilder::new(db_file).unwrap(); let tx = report_builder.transaction().unwrap(); - match tx.build() { - Err(CodecovError::ReportBuilderError(s)) => { - assert_eq!(s, "called `build()` on a transaction".to_string()) - } - _ => assert!(false), - } + + assert_eq!( + tx.build().unwrap_err().to_string(), + "report builder error: 'called `build()` on a transaction'" + ); } #[test] diff --git a/tests/test_pyreport_shim.rs b/tests/test_pyreport_shim.rs index d761df0..d3d6fea 100644 --- a/tests/test_pyreport_shim.rs +++ b/tests/test_pyreport_shim.rs @@ -262,7 +262,7 @@ fn test_parse_pyreport() { session_extras: Some(json!({})), }; - let expected_files = vec![ + let expected_files = [ models::SourceFile { id: hash_id("src/report.rs"), path: "src/report.rs".to_string(), @@ -383,8 +383,8 @@ fn test_sql_to_pyreport_to_sql_totals_match() { let original_totals = report.totals().unwrap(); - let _ = report_json_output_file.rewind().unwrap(); - let _ = chunks_output_file.rewind().unwrap(); + report_json_output_file.rewind().unwrap(); + chunks_output_file.rewind().unwrap(); let roundtrip_db_path = test_ctx.temp_dir.path().join("roundtrip.sqlite"); let report = pyreport::parse_pyreport(
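
A note on the "tests that were not actually asserting anything" mentioned in the
commit message: the old assertions in `src/report/sqlite/report.rs` compared the
return values of `sort_by_key`, which sorts in place and returns `()`, so both
sides of every `assert_eq!` were `()` and the checks could never fail. The sketch
below is illustrative only (not part of the patch, with made-up values) and shows
the pattern:

    // Hypothetical example: `sort_by_key` returns `()`, so this `assert_eq!`
    // compares `()` with `()` and always passes even though the vectors differ.
    fn main() {
        let mut actual = vec![3, 1, 2];
        let mut expected = vec![9, 9, 9];
        assert_eq!(actual.sort_by_key(|n| *n), expected.sort_by_key(|n| *n));

        // The fixed tests compare the collections themselves instead, relying
        // on the deterministic order in which the report lists its records.
        actual.sort();
        assert_eq!(actual, vec![1, 2, 3]);
    }

Comparing the collections directly is also why `SourceFile` gains a `Clone` derive
and why the rewritten assertions carry the note that they are sensitive to the
sort order.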