Skip to content

Commit

Permalink
Fix various clippy suggestions (#9)
Browse files Browse the repository at this point in the history
Some of the suggestions were applied automatically, though some resulted in broken code.
Other suggestions were applied manually, including fixing some tests that were not actually asserting anything.
  • Loading branch information
Swatinem authored Jul 31, 2024
1 parent c0a4b65 commit 329b3e0
Show file tree
Hide file tree
Showing 11 changed files with 82 additions and 113 deletions.
2 changes: 1 addition & 1 deletion examples/parse_pyreport.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ use codecov_rs::{error::Result, parsers::pyreport::parse_pyreport};
fn usage_error() -> ! {
println!("Usage:");
println!(" cargo run --example parse_pyreport -- [REPORT_JSON_PATH] [CHUNKS_PATH] [OUT_PATH]");
println!("");
println!();
println!("Example:");
println!(" cargo run --example parse_pyreport -- tests/common/sample_data/codecov-rs-reports-json-d2a9ba1.txt tests/common/sample_data/codecov-rs-chunks-d2a9ba1.txt d2a9ba1.sqlite");

Expand Down
2 changes: 1 addition & 1 deletion examples/sql_to_pyreport.rs
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@ fn usage_error() -> ! {
println!(
" cargo run --example parse_pyreport -- [SQLITE_PATH] [REPORT_JSON_PATH] [CHUNKS_PATH]"
);
println!("");
println!();
println!("Example:");
println!(
" cargo run --example parse_pyreport -- d2a9ba1.sqlite ~/report_json.json ~/chunks.txt"
Expand Down
23 changes: 15 additions & 8 deletions src/parsers/pyreport/chunks.rs
Original file line number Diff line number Diff line change
Expand Up @@ -177,10 +177,11 @@ pub fn complexity<S: StrStream, R: Report, B: ReportBuilder<R>>(
/// [There may yet be more ways this shows
/// up](https://github.com/codecov/worker/blob/07405e0ae925f00aa7bb3e2d828537010901154b/services/report/languages/cobertura.py#L112-L114).
/// We'll try our best, and that'll have to do.
pub fn missing_branches<'a, S: StrStream, R: Report, B: ReportBuilder<R>>(
pub fn missing_branches<'a, S, R: Report, B: ReportBuilder<R>>(
buf: &mut ReportOutputStream<S, R, B>,
) -> PResult<Vec<MissingBranch>>
where
S: StrStream,
S: Stream<Slice = &'a str>,
{
let block_and_branch = separated_pair(parse_u32, ':', parse_u32);
Expand Down Expand Up @@ -254,10 +255,11 @@ pub fn partial_spans<S: StrStream, R: Report, B: ReportBuilder<R>>(
/// that session.
///
/// Trailing null fields may be omitted.
pub fn line_session<'a, S: StrStream, R: Report, B: ReportBuilder<R>>(
pub fn line_session<'a, S, R: Report, B: ReportBuilder<R>>(
buf: &mut ReportOutputStream<S, R, B>,
) -> PResult<LineSession>
where
S: StrStream,
S: Stream<Slice = &'a str>,
{
seq! {LineSession {
Expand All @@ -278,10 +280,11 @@ where

/// No idea what this field contains. Guessing it's JSON so if we ever encounter
/// it we can at least consume it off the stream and continue parsing.
pub fn messages<'a, S: StrStream, R: Report, B: ReportBuilder<R>>(
pub fn messages<'a, S, R: Report, B: ReportBuilder<R>>(
buf: &mut ReportOutputStream<S, R, B>,
) -> PResult<JsonVal>
where
S: StrStream,
S: Stream<Slice = &'a str>,
{
json_value.parse_next(buf)
Expand Down Expand Up @@ -360,10 +363,11 @@ pub fn coverage_datapoint<S: StrStream, R: Report, B: ReportBuilder<R>>(
/// This parser performs all the writes it can to the output
/// stream and only returns a `ReportLine` for tests. The `report_line_or_empty`
/// parser which wraps this and supports empty lines returns `Ok(())`.
pub fn report_line<'a, S: StrStream, R: Report, B: ReportBuilder<R>>(
pub fn report_line<'a, S, R: Report, B: ReportBuilder<R>>(
buf: &mut ReportOutputStream<S, R, B>,
) -> PResult<ReportLine>
where
S: StrStream,
S: Stream<Slice = &'a str>,
{
let line_no = buf.state.chunk.current_line;
Expand Down Expand Up @@ -407,10 +411,11 @@ where
///
/// The `report_line` parser writes all the data it can to the output
/// stream so we don't actually need to return anything to our caller.
pub fn report_line_or_empty<'a, S: StrStream, R: Report, B: ReportBuilder<R>>(
pub fn report_line_or_empty<'a, S, R: Report, B: ReportBuilder<R>>(
buf: &mut ReportOutputStream<S, R, B>,
) -> PResult<Option<ReportLine>>
where
S: StrStream,
S: Stream<Slice = &'a str>,
{
buf.state.chunk.current_line += 1;
Expand Down Expand Up @@ -440,10 +445,11 @@ pub fn chunk_header<S: StrStream, R: Report, B: ReportBuilder<R>>(
/// Each new chunk will reset `buf.state.chunk.current_line` to 0 when it starts
/// and increment `buf.state.chunk.index` when it ends so that the next chunk
/// can associate its data with the correct file.
pub fn chunk<'a, S: StrStream, R: Report, B: ReportBuilder<R>>(
pub fn chunk<'a, S, R: Report, B: ReportBuilder<R>>(
buf: &mut ReportOutputStream<S, R, B>,
) -> PResult<()>
where
S: StrStream,
S: Stream<Slice = &'a str>,
{
// New chunk, start back at line 0.
Expand Down Expand Up @@ -501,10 +507,11 @@ pub fn chunks_file_header<S: StrStream, R: Report, B: ReportBuilder<R>>(

/// Parses a chunks file. A chunks file contains an optional header and a series
of 1 or more "chunks" separated by a `CHUNKS_FILE_END_OF_CHUNK` terminator.
pub fn parse_chunks_file<'a, S: StrStream, R: Report, B: ReportBuilder<R>>(
pub fn parse_chunks_file<'a, S, R: Report, B: ReportBuilder<R>>(
buf: &mut ReportOutputStream<S, R, B>,
) -> PResult<()>
where
S: StrStream,
S: Stream<Slice = &'a str>,
{
let _: Vec<_> = preceded(
Expand Down Expand Up @@ -1692,7 +1699,7 @@ mod tests {
];

for test_case in test_cases {
buf.input = &test_case.0;
buf.input = test_case.0;
assert_eq!(chunks_file_header.parse_next(&mut buf), test_case.1);
}
assert_eq!(buf.state.labels_index.len(), 2);
Expand Down
14 changes: 8 additions & 6 deletions src/parsers/pyreport/utils.rs
Original file line number Diff line number Diff line change
Expand Up @@ -275,6 +275,8 @@ pub fn save_report_lines<R: Report, B: ReportBuilder<R>>(
models
.iter_mut()
.filter_map(|LineSessionModels { sample, method, .. }| {
// See https://github.com/rust-lang/rust-clippy/issues/13185
#[allow(clippy::manual_inspect)]
method.as_mut().map(|method| {
method.local_sample_id = sample.local_sample_id;
method
Expand Down Expand Up @@ -928,7 +930,7 @@ mod tests {
source_file_id: 123,
line_no: 1,
hits: Some(10),
coverage_type: coverage_type,
coverage_type,
..Default::default()
},
..Default::default()
Expand All @@ -939,7 +941,7 @@ mod tests {
source_file_id: 123,
line_no: 1,
hits: Some(10),
coverage_type: coverage_type,
coverage_type,
..Default::default()
},
..Default::default()
Expand All @@ -950,7 +952,7 @@ mod tests {
source_file_id: 123,
line_no: 1,
hits: Some(10),
coverage_type: coverage_type,
coverage_type,
..Default::default()
},
..Default::default()
Expand Down Expand Up @@ -1025,7 +1027,7 @@ mod tests {
source_file_id: 123,
line_no: 1,
hits: Some(10),
coverage_type: coverage_type,
coverage_type,
..Default::default()
},
assocs: vec![
Expand All @@ -1048,7 +1050,7 @@ mod tests {
source_file_id: 123,
line_no: 1,
hits: Some(10),
coverage_type: coverage_type,
coverage_type,
..Default::default()
},
..Default::default()
Expand All @@ -1059,7 +1061,7 @@ mod tests {
source_file_id: 123,
line_no: 1,
hits: Some(10),
coverage_type: coverage_type,
coverage_type,
..Default::default()
},
assocs: vec![models::ContextAssoc {
Expand Down
1 change: 1 addition & 0 deletions src/report/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -48,6 +48,7 @@ pub trait Report {

/// An interface for creating a new coverage report.
#[cfg_attr(test, automock)]
#[allow(clippy::needless_lifetimes)] // `automock` requires these
pub trait ReportBuilder<R: Report> {
/// Create a [`models::SourceFile`] record and return it.
fn insert_file(&mut self, path: String) -> Result<models::SourceFile>;
Expand Down
2 changes: 1 addition & 1 deletion src/report/models.rs
Original file line number Diff line number Diff line change
Expand Up @@ -133,7 +133,7 @@ pub enum ContextType {

/// Each source file represented in the coverage data should have a
/// [`SourceFile`] record with its path relative to the project's root.
#[derive(PartialEq, Debug, Default)]
#[derive(PartialEq, Debug, Default, Clone)]
pub struct SourceFile {
/// Should be a hash of the path.
pub id: i64,
Expand Down
11 changes: 4 additions & 7 deletions src/report/pyreport/chunks.rs
Original file line number Diff line number Diff line change
Expand Up @@ -755,7 +755,7 @@ mod tests {
let db_path = ctx.temp_dir.path().join("test.db");
let report = SqliteReport::new(db_path).unwrap();

let test_cases: &[(&[&dyn rusqlite::ToSql], (i64, JsonVal))] = &[
let test_cases = &[
(
rusqlite::params![
1,
Expand Down Expand Up @@ -827,22 +827,19 @@ mod tests {
// Malformed
assert!(
format_coverage(&None, &Some(2), &None).is_err_and(|e| match e {
CodecovError::PyreportConversionError(s) =>
s == "incomplete coverage data".to_string(),
CodecovError::PyreportConversionError(s) => s == "incomplete coverage data",
_ => false,
})
);
assert!(
format_coverage(&None, &None, &Some(4)).is_err_and(|e| match e {
CodecovError::PyreportConversionError(s) =>
s == "incomplete coverage data".to_string(),
CodecovError::PyreportConversionError(s) => s == "incomplete coverage data",
_ => false,
})
);
assert!(
format_coverage(&None, &None, &None).is_err_and(|e| match e {
CodecovError::PyreportConversionError(s) =>
s == "incomplete coverage data".to_string(),
CodecovError::PyreportConversionError(s) => s == "incomplete coverage data",
_ => false,
})
);
Expand Down
1 change: 0 additions & 1 deletion src/report/sqlite/models.rs
Original file line number Diff line number Diff line change
Expand Up @@ -668,7 +668,6 @@ mod tests {
raw_upload_id: raw_upload.id,
local_sample_id: Some(rand::random()),
local_span_id: None,
..Default::default()
};

<ContextAssoc as Insertable<4>>::insert(&model, &report.conn).unwrap();
Expand Down
54 changes: 27 additions & 27 deletions src/report/sqlite/report.rs
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
use std::path::PathBuf;
use std::{fmt, path::PathBuf};

use rusqlite::{Connection, OptionalExtension};

Expand All @@ -13,6 +13,12 @@ pub struct SqliteReport {
pub conn: Connection,
}

impl fmt::Debug for SqliteReport {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("SqliteReport").finish_non_exhaustive()
}
}

impl SqliteReport {
pub fn new(filename: PathBuf) -> Result<SqliteReport> {
let conn = open_database(&filename)?;
Expand Down Expand Up @@ -309,7 +315,7 @@ mod tests {
let line_6 = right_report_builder
.insert_coverage_sample(models::CoverageSample {
raw_upload_id: upload_2.id,
source_file_id: file_2.id,
source_file_id: file_3.id,
line_no: 2,
coverage_type: models::CoverageType::Method,
hits: Some(2),
Expand All @@ -336,38 +342,32 @@ mod tests {
let mut left = left_report_builder.build().unwrap();
let right = right_report_builder.build().unwrap();
left.merge(&right).unwrap();

// NOTE: the assertions here are sensitive to the sort order:
assert_eq!(
left.list_files().unwrap().sort_by_key(|f| f.id),
[&file_1, &file_2, &file_3].sort_by_key(|f| f.id),
);
assert_eq!(
left.list_contexts().unwrap().sort_by_key(|c| c.id),
[&test_case_1, &test_case_2].sort_by_key(|c| c.id),
);
assert_eq!(
left.list_coverage_samples()
.unwrap()
.sort_by_key(|s| s.local_sample_id),
[&line_1, &line_2, &line_3, &line_4, &line_5, &line_6]
.sort_by_key(|s| s.local_sample_id),
left.list_files().unwrap(),
&[file_1.clone(), file_2.clone(), file_3.clone()]
);
assert_eq!(left.list_contexts().unwrap(), &[test_case_2, test_case_1]);
assert_eq!(
left.list_samples_for_file(&file_1)
.unwrap()
.sort_by_key(|s| s.local_sample_id),
[&line_1].sort_by_key(|s| s.local_sample_id),
left.list_coverage_samples().unwrap(),
&[
line_1.clone(),
line_4.clone(),
line_2.clone(),
line_5.clone(),
line_3.clone(),
line_6.clone()
]
);
assert_eq!(left.list_samples_for_file(&file_1).unwrap(), &[line_1]);
assert_eq!(
left.list_samples_for_file(&file_2)
.unwrap()
.sort_by_key(|s| s.local_sample_id),
[&line_2, &line_3, &line_4].sort_by_key(|s| s.local_sample_id),
left.list_samples_for_file(&file_2).unwrap(),
&[line_2, line_3, line_4]
);
assert_eq!(
left.list_samples_for_file(&file_3)
.unwrap()
.sort_by_key(|s| s.local_sample_id),
[&line_5, &line_6].sort_by_key(|s| s.local_sample_id),
left.list_samples_for_file(&file_3).unwrap(),
&[line_5, line_6]
);
}

Expand Down
Loading

0 comments on commit 329b3e0

Please sign in to comment.