stop taking an RNG as an argument for parse_pyreport and SqliteReportBuilder
matt-codecov committed Sep 9, 2024
1 parent 5fefe51 commit 8771eda
Showing 9 changed files with 215 additions and 182 deletions.
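
For orientation, here is a sketch of the caller-facing change. The removed `parse_pyreport_with_seed` signature and the surviving `parse_pyreport` signature come from the mod.rs hunk below; the calling function, file handles, output path, and seed value are hypothetical.

```rust
use std::{fs::File, path::PathBuf};

// Hypothetical call site; the real entry points live in core/src/parsers/pyreport.
fn ingest(report_json_file: &File, chunks_file: &File) {
    let out_path = PathBuf::from("report.sqlite"); // placeholder output path

    // Before this commit, callers could pin the builder's RNG with a seed:
    //     parse_pyreport_with_seed(report_json_file, chunks_file, out_path, 5)?;
    //
    // After this commit, the single entry point constructs the builder itself, and
    // callers no longer influence ID generation:
    //     parse_pyreport(report_json_file, chunks_file, out_path)?;
    let _ = (report_json_file, chunks_file, out_path);
}
```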
1 change: 1 addition & 0 deletions Cargo.lock

Some generated files are not rendered by default.

2 changes: 1 addition & 1 deletion core/Cargo.toml
@@ -12,7 +12,7 @@ testing = []
include_dir = "0.7.3"
memmap2 = "0.9.4"
rand = "0.8.5"
rusqlite = { version = "0.31.0", features = ["bundled", "limits"] }
rusqlite = { version = "0.31.0", features = ["bundled", "limits", "serde_json"] }
rusqlite_migration = { version = "1.2.0", features = ["from-directory"] }
seahash = "4.1.0"
serde = { version = "1.0.204", features = ["derive"] }
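
The only manifest change is enabling rusqlite's `serde_json` feature, which makes `serde_json::Value` usable directly as a SQL parameter and column type. Below is a standalone sketch of what that feature provides; the table name and values are made up, and the real schema lives in the repo's migrations. Presumably the direct `RawUpload` inserts later in this commit (with their `flags` and `session_extras` JSON fields) rely on it.

```rust
use rusqlite::{params, Connection};
use serde_json::json;

fn main() -> rusqlite::Result<()> {
    let conn = Connection::open_in_memory()?;
    conn.execute(
        "CREATE TABLE raw_upload (id INTEGER PRIMARY KEY, flags TEXT)",
        [],
    )?;

    // With the "serde_json" feature enabled, serde_json::Value implements ToSql...
    conn.execute(
        "INSERT INTO raw_upload (id, flags) VALUES (?1, ?2)",
        params![5, json!(["flag on upload 1"])],
    )?;

    // ...and FromSql, so JSON columns round-trip without manual (de)serialization.
    let flags: serde_json::Value =
        conn.query_row("SELECT flags FROM raw_upload WHERE id = 5", [], |row| {
            row.get(0)
        })?;
    assert_eq!(flags, json!(["flag on upload 1"]));
    Ok(())
}
```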
27 changes: 1 addition & 26 deletions core/src/parsers/pyreport/mod.rs
@@ -40,32 +40,7 @@ pub fn parse_pyreport(
chunks_file: &File,
out_path: PathBuf,
) -> Result<SqliteReport> {
parse_pyreport_with_builder(
report_json_file,
chunks_file,
SqliteReportBuilder::new(out_path)?,
)
}

/// See [`parse_pyreport`]
pub fn parse_pyreport_with_seed(
report_json_file: &File,
chunks_file: &File,
out_path: PathBuf,
seed: u64,
) -> Result<SqliteReport> {
parse_pyreport_with_builder(
report_json_file,
chunks_file,
SqliteReportBuilder::new_with_seed(out_path, seed)?,
)
}

fn parse_pyreport_with_builder(
report_json_file: &File,
chunks_file: &File,
mut report_builder: SqliteReportBuilder,
) -> Result<SqliteReport> {
let mut report_builder = SqliteReportBuilder::new(out_path)?;
// Encapsulate all of this in a block so that `report_builder_tx` gets torn down
// at the end. Otherwise, it'll hold onto a reference to `report_builder`
// and prevent us from consuming `report_builder` to actually build a
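
This diff does not show `SqliteReportBuilder` itself, but removing `new_with_seed` implies the builder now sources its own randomness. A minimal sketch of that shape, assuming a rand 0.8 `StdRng` seeded from OS entropy; the field names and error type here are placeholders, not the repo's actual definitions.

```rust
use std::path::PathBuf;

use rand::{rngs::StdRng, SeedableRng};

// Placeholder struct; the real builder also owns the SQLite connection, etc.
pub struct SqliteReportBuilder {
    out_path: PathBuf,
    rng: StdRng,
}

impl SqliteReportBuilder {
    // The seedless constructor: randomness is an internal detail now.
    pub fn new(out_path: PathBuf) -> std::io::Result<Self> {
        Ok(Self {
            out_path,
            // Seeded from OS entropy; there is no new_with_seed counterpart anymore.
            rng: StdRng::from_entropy(),
        })
    }
}

fn main() {
    let builder = SqliteReportBuilder::new(PathBuf::from("report.sqlite")).unwrap();
    let _ = (&builder.out_path, &builder.rng);
}
```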
50 changes: 31 additions & 19 deletions core/src/report/pyreport/chunks.rs
@@ -319,7 +319,10 @@ mod tests {
use tempfile::TempDir;

use super::*;
use crate::report::{sqlite::SqliteReportBuilder, ReportBuilder};
use crate::report::{
sqlite::{Insertable, SqliteReportBuilder},
ReportBuilder,
};

struct Ctx {
temp_dir: TempDir,
@@ -332,11 +335,12 @@
}

fn build_sample_report(path: PathBuf) -> Result<SqliteReport> {
let mut builder = SqliteReportBuilder::new_with_seed(path, 5)?;
let mut builder = SqliteReportBuilder::new(path)?;
let file_1 = builder.insert_file("src/report/report.rs")?;
let file_2 = builder.insert_file("src/report/models.rs")?;

let upload_1 = builder.insert_raw_upload(models::RawUpload {
let upload_1 = models::RawUpload {
id: 5,
timestamp: Some(123),
raw_upload_url: Some("upload 1 url".to_string()),
flags: Some(json!(["flag on upload 1"])),
@@ -350,8 +354,13 @@
session_type: Some("type upload 1".to_string()),
session_extras: Some(json!({"k1": "v1"})),
..Default::default()
})?;
let upload_2 = builder.insert_raw_upload(models::RawUpload {
};
// Insert directly, not through report builder, because we don't want a random
// ID
upload_1.insert(&builder.conn)?;

let upload_2 = models::RawUpload {
id: 10,
timestamp: Some(456),
raw_upload_url: Some("upload 2 url".to_string()),
flags: Some(json!(["flag on upload 2"])),
@@ -365,7 +374,10 @@
session_type: Some("type upload 2".to_string()),
session_extras: Some(json!({"k2": "v2"})),
..Default::default()
})?;
};
// Insert directly, not through report builder, because we don't want a random
// ID
upload_2.insert(&builder.conn)?;

let line_1 = builder.insert_coverage_sample(models::CoverageSample {
raw_upload_id: upload_1.id,
@@ -911,47 +923,47 @@

let chunks_header = json!({"labels_index": {"1": "test-case", "2": "test-case 2"}});
// line_1 variable in build_sample_report()
let file_1_header = json!({"present_sessions": [1]});
let file_1_header = json!({"present_sessions": [0]});
let file_1_line_1 = json!([
3,
null,
[[1, 3]],
[[0, 3]],
null,
null,
[[1, 3, null, ["test-case", "test-case 2"]]]
[[0, 3, null, ["test-case", "test-case 2"]]]
]);
// method_sample_1 variable in build_sample_report()
let file_1_line_2 = json!([
2,
"m",
[[1, 2, null, null, [2, 4]]],
[[0, 2, null, null, [2, 4]]],
null,
[2, 4],
[[1, 2, "m", ["test-case 2"]]]
[[0, 2, "m", ["test-case 2"]]]
]);
// branch_sample_1 variable in build_sample_report()
let file_1_line_3 = json!(["2/2", "b", [[1, "2/2"]]]);
let file_1_line_3 = json!(["2/2", "b", [[0, "2/2"]]]);
// line_with_partial_1 variable in build_sample_report()
let file_1_line_8 = json!([3, null, [[1, 3, null, [[3, null, 3]]]]]);
let file_1_line_8 = json!([3, null, [[0, 3, null, [[3, null, 3]]]]]);

let file_2_header = json!({"present_sessions": [0, 1]});
// line_2 variable in build_sample_report()
let file_2_line_1 = json!([
4,
null,
[[1, 4]],
[[0, 4]],
null,
null,
[[1, 4, null, ["test-case", "test-case 2"]]]
[[0, 4, null, ["test-case", "test-case 2"]]]
]);
// method_sample_2 variable in build_sample_report()
let file_2_line_2 = json!([5, "m", [[1, 5]]]);
let file_2_line_2 = json!([5, "m", [[0, 5]]]);
// line_3 variable in build_sample_report()
let file_2_line_3 = json!([0, null, [[0, 0]],]);
let file_2_line_3 = json!([0, null, [[1, 0]],]);
// method_sample_3 variable in build_sample_report()
let file_2_line_5 = json!([0, "m", [[0, 0, null, null, [2, 4]]], null, [2, 4]]);
let file_2_line_5 = json!([0, "m", [[1, 0, null, null, [2, 4]]], null, [2, 4]]);
// branch_sample_2 variable in build_sample_report()
let file_2_line_6 = json!(["2/4", "b", [[1, "2/4", ["2", "3"]]],]);
let file_2_line_6 = json!(["2/4", "b", [[0, "2/4", ["2", "3"]]],]);

let expected = format!(
"{chunks_header}
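
The expected chunks output above changes only in its session indices: upload 1's samples move from session `1` to session `0`, and upload 2's move the other way. With the seeded builder the two uploads got arbitrary (if reproducible) IDs; with fixed IDs 5 and 10 inserted directly, upload 1 presumably sorts first and becomes session 0. A toy sketch of that assumed ID-order-to-session-index mapping:

```rust
fn main() {
    // Fixed IDs from build_sample_report(): upload_1 = 5, upload_2 = 10.
    let mut upload_ids = vec![5_i64, 10];

    // Assumption: sessions in the pyreport output are numbered 0.. in upload-ID order.
    upload_ids.sort_unstable();
    for (session_index, id) in upload_ids.iter().enumerate() {
        println!("raw upload {id} -> session {session_index}"); // 5 -> 0, 10 -> 1
    }
}
```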
112 changes: 62 additions & 50 deletions core/src/report/pyreport/report_json.rs
@@ -243,7 +243,10 @@ mod tests {
use tempfile::TempDir;

use super::*;
use crate::report::{sqlite::SqliteReportBuilder, ReportBuilder};
use crate::report::{
sqlite::{Insertable, SqliteReportBuilder},
ReportBuilder,
};

struct Ctx {
temp_dir: TempDir,
@@ -256,11 +259,12 @@
}

fn build_sample_report(path: PathBuf) -> Result<SqliteReport> {
let mut builder = SqliteReportBuilder::new_with_seed(path, 5)?;
let mut builder = SqliteReportBuilder::new(path)?;
let file_1 = builder.insert_file("src/report/report.rs")?;
let file_2 = builder.insert_file("src/report/models.rs")?;

let upload_1 = builder.insert_raw_upload(models::RawUpload {
let upload_1 = models::RawUpload {
id: 5,
timestamp: Some(123),
raw_upload_url: Some("upload 1 url".to_string()),
flags: Some(json!(["flag on upload 1"])),
@@ -274,8 +278,13 @@
session_type: Some("type upload 1".to_string()),
session_extras: Some(json!({"k1": "v1"})),
..Default::default()
})?;
let upload_2 = builder.insert_raw_upload(models::RawUpload {
};
// Insert directly, not through report builder, because we don't want a random
// ID
upload_1.insert(&builder.conn)?;

let upload_2 = models::RawUpload {
id: 10,
timestamp: Some(456),
raw_upload_url: Some("upload 2 url".to_string()),
flags: Some(json!(["flag on upload 2"])),
@@ -289,7 +298,10 @@
session_type: Some("type upload 2".to_string()),
session_extras: Some(json!({"k2": "v2"})),
..Default::default()
})?;
};
// Insert directly, not through report builder, because we don't want a random
// ID
upload_2.insert(&builder.conn)?;

let line_1 = builder.insert_coverage_sample(models::CoverageSample {
raw_upload_id: upload_1.id,
@@ -572,35 +584,6 @@
let expected = json!({
"sessions": {
"0": {
"t": [
1, // file count
2, // line count
0, // hits
2, // misses
0, // partials
"0", // coverage %
0, // branch count
1, // method count
0, // messages
0, // sessions
2, // hit_complexity_paths
4, // total_complexity
0 // diff
],
"d": 456,
"a": "upload 2 url",
"f": ["flag on upload 2"],
"c": "provider upload 2",
"n": "build upload 2",
"N": "name upload 2",
"j": "job name upload 2",
"u": "ci run url upload 2",
"p": "state upload 2",
"e": "env upload 2",
"st": "type upload 2",
"se": {"k2": "v2"},
},
"1": {
"t": [
2, // file count
7, // line count
@@ -628,6 +611,35 @@
"e": "env upload 1",
"st": "type upload 1",
"se": {"k1": "v1"},
},
"1": {
"t": [
1, // file count
2, // line count
0, // hits
2, // misses
0, // partials
"0", // coverage %
0, // branch count
1, // method count
0, // messages
0, // sessions
2, // hit_complexity_paths
4, // total_complexity
0 // diff
],
"d": 456,
"a": "upload 2 url",
"f": ["flag on upload 2"],
"c": "provider upload 2",
"n": "build upload 2",
"N": "name upload 2",
"j": "job name upload 2",
"u": "ci run url upload 2",
"p": "state upload 2",
"e": "env upload 2",
"st": "type upload 2",
"se": {"k2": "v2"},
}
}
});
@@ -663,21 +675,6 @@
},
"sessions": {
"0": {
"t": [1, 2, 0, 2, 0, "0", 0, 1, 0, 0, 2, 4, 0],
"d": 456,
"a": "upload 2 url",
"f": ["flag on upload 2"],
"c": "provider upload 2",
"n": "build upload 2",
"N": "name upload 2",
"j": "job name upload 2",
"u": "ci run url upload 2",
"p": "state upload 2",
"e": "env upload 2",
"st": "type upload 2",
"se": {"k2": "v2"},
},
"1": {
"t": [2, 7, 6, 0, 1, "85.71429", 2, 2, 0, 0, 2, 4, 0],
"d": 123,
"a": "upload 1 url",
@@ -691,6 +688,21 @@
"e": "env upload 1",
"st": "type upload 1",
"se": {"k1": "v1"},
},
"1": {
"t": [1, 2, 0, 2, 0, "0", 0, 1, 0, 0, 2, 4, 0],
"d": 456,
"a": "upload 2 url",
"f": ["flag on upload 2"],
"c": "provider upload 2",
"n": "build upload 2",
"N": "name upload 2",
"j": "job name upload 2",
"u": "ci run url upload 2",
"p": "state upload 2",
"e": "env upload 2",
"st": "type upload 2",
"se": {"k2": "v2"},
}
}
});
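
The report_json.rs expectations swap the `"0"` and `"1"` session blocks for the same reason as the chunks test: the fixture now states its upload IDs outright instead of replaying a seeded RNG. A standalone toy contrasting the two testing styles; the types are stand-ins rather than repo code, and `5` matches the seed the old fixtures passed to `new_with_seed`.

```rust
use rand::{rngs::StdRng, Rng, SeedableRng};

fn main() {
    // Before: IDs were reproducible only because every test threaded the same seed
    // through SqliteReportBuilder::new_with_seed.
    let mut rng = StdRng::seed_from_u64(5);
    let seeded_ids: Vec<i64> = (0..2).map(|_| rng.gen()).collect();
    println!("seeded ids: {seeded_ids:?}"); // stable, but opaque and seed-coupled

    // After: fixtures pin the IDs outright, so expected output is independent of
    // whatever RNG the builder uses internally.
    let fixed_ids = [5_i64, 10];
    println!("fixed ids:  {fixed_ids:?}");
}
```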

0 comments on commit 8771eda
