Replace elastic by flow record #65

Merged (6 commits, Nov 1, 2024)
1,471 changes: 384 additions & 1,087 deletions Cargo.lock

Large diffs are not rendered by default.

23 changes: 6 additions & 17 deletions Cargo.toml
@@ -9,7 +9,7 @@ license = "GPL-3.0"

# this version is required, because earlier versions handle missing `created`
# timestamps as `Uncategorized`, instead of `Unsupported`
rust-version = "1.78"
rust-version = "1.80"

[package.metadata.deb]
maintainer-scripts = "scripts/maintainer"
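
As context for the `rust-version` bump above: the comment refers to how `std::fs::Metadata::created()` reports a missing birth timestamp. A minimal sketch of the pattern that depends on this, assuming a hypothetical helper (`creation_time` is not a function from this repository):

```rust
use std::fs::Metadata;
use std::io::ErrorKind;
use std::time::SystemTime;

// Hypothetical helper, not taken from this repository. On recent toolchains a
// filesystem without a birth time yields ErrorKind::Unsupported, which can be
// treated as "no timestamp" rather than a hard error; older toolchains
// reported the same condition under the unmatchable `Uncategorized` kind.
fn creation_time(meta: &Metadata) -> std::io::Result<Option<SystemTime>> {
    match meta.created() {
        Ok(ts) => Ok(Some(ts)),
        Err(e) if e.kind() == ErrorKind::Unsupported => Ok(None),
        Err(e) => Err(e), // real I/O errors are still propagated
    }
}
```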
@@ -49,11 +49,6 @@ name = "pol_export"
path = "src/bin/pol_export/main.rs"
required-features = ["pol_export"]

[[bin]]
name = "es4forensics"
path = "src/bin/es4forensics/main.rs"
required-features = ["elastic"]

[[bin]]
name = "regdump"
path = "src/bin/regdump/main.rs"
@@ -98,15 +93,14 @@ required-features = ["zip2bodyfile"]
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[features]
default = ["pol_export", "mactime2", "evtxtools", "regdump", "hivescan", "cleanhive", "ipgrep", "ts2date", "lnk2bodyfile", "pf2bodyfile", "zip2bodyfile"]
mactime2 = ["gzip", "elastic", "chrono-tz", "thiserror", "bitflags", "encoding_rs_io", "color-print"]
mactime2 = ["gzip", "chrono-tz", "thiserror", "bitflags", "encoding_rs_io", "color-print", "strum", "strum_macros", "sha2"]
gzip = ["flate2"]
elastic = ["elasticsearch", "tokio", "futures", "serde_json", "sha2", "base64", "num-traits", "num-derive", "strum", "strum_macros", "tokio-async-drop"]
evtxtools = ["evtxscan", "evtxcat", "evtxls", "evtxanalyze", "evtx2bodyfile"]
pol_export = []
evtxscan = ["evtx"]
evtxcat = ["evtx", "colored_json", "term-table", "termsize"]
evtxls = ["evtx", "colored", "lazy-regex", "regex", "sigpipe", "dfirtk-eventdata"]
evtxanalyze = ["evtx", "dfirtk-sessionevent-derive", "dfirtk-eventdata", "exitcode", "walkdir"]
evtxanalyze = ["evtx", "dfirtk-sessionevent-derive", "dfirtk-eventdata", "exitcode", "walkdir", "serde_json"]
evtx2bodyfile = ["evtx", "getset", "ouroboros", "indicatif"]
ipgrep = []
ts2date = ["regex"]
@@ -126,6 +120,8 @@ clap = {version = "4.5", features = ["derive", "wrap_help", "cargo"] }
clap-verbosity-flag = "2.0.0"
csv = "1.2.2"
encoding_rs = "0.8"
flow-record = "0.4.7"
#flow-record = {path="../flow-record"}

## setting release_max_level_info conflicts with evtx
# log = {version = "0.4", features = [ "release_max_level_info" ]}
@@ -142,7 +138,7 @@ clio = {version="0.3", features=["clap-parse"] }
#clio = {path="../clio", features=["clap-parse"]}

# mactime2
chrono-tz = {version="0.8", optional=true}
chrono-tz = {version="0", optional=true}
serde_json = {version = "1", optional=true}
flate2 = {version="1", optional=true}
thiserror = {version="1", optional=true}
@@ -172,13 +168,6 @@ ouroboros = {version="0.18", optional=true}
# bodyfile, es4forensics
duplicate = "1"

# es4forensics
# requires libssl-dev
elasticsearch = {version="8.4.0-alpha.1", optional=true}
tokio = { version = "1", features = ["full"], optional=true }
tokio-async-drop = {version="0", optional=true}
futures = {version="0.3", optional=true }

sha2 = {version="0.10", optional=true}
base64 = {version="0.21", optional=true}
num-traits = {version="0.2", optional=true}
86 changes: 0 additions & 86 deletions src/bin/es4forensics/cli.rs

This file was deleted.

108 changes: 0 additions & 108 deletions src/bin/es4forensics/main.rs

This file was deleted.

45 changes: 2 additions & 43 deletions src/bin/evtx2bodyfile/bf_data.rs
@@ -1,9 +1,8 @@
use std::collections::HashMap;

use anyhow::{anyhow, bail, Result};
use anyhow::Result;
use chrono::{DateTime, Utc};
use dfir_toolkit::common::bodyfile::{Bodyfile3Line, Modified};
use dfir_toolkit::es4forensics::{objects::WindowsEvent, TimelineObject};
use evtx::SerializedEvtxRecord;
use getset::{Getters, Setters};
use serde::Serialize;
@@ -23,10 +22,6 @@ pub(crate) struct BfData<'a> {
channel_name: &'a Value,
activity_id: Option<&'a Value>,
custom_data: HashMap<&'a String, &'a Value>,

#[serde(skip)]
#[getset(set = "pub (crate)")]
enable_json_output: bool,
}

impl<'a> BfData<'a> {
@@ -36,26 +31,13 @@ impl<'a> BfData<'a> {
.with_owned_name(json!(self).to_string());
Ok(bf_line.to_string())
}

pub(crate) fn try_into_json(&self) -> Result<String> {
let event: WindowsEvent = self.try_into()?;
let values: Vec<Value> = event.into_values().collect();
if values.len() != 1 {
bail!("this event resolved to an invalid number of JSON documents");
}
serde_json::to_string(&values[0]).map_err(|why| anyhow!(why))
}
}

impl<'a> TryFrom<&BfData<'a>> for String {
type Error = anyhow::Error;

fn try_from(value: &BfData<'a>) -> Result<Self, Self::Error> {
if value.enable_json_output {
value.try_into_json()
} else {
value.try_into_mactime()
}
value.try_into_mactime()
}
}

@@ -111,29 +93,6 @@ impl<'a> TryFrom<&'a SerializedEvtxRecord<Value>> for BfData<'a> {
channel_name,
activity_id,
custom_data,
enable_json_output: false,
})
}
}

impl<'a> TryFrom<&BfData<'a>> for WindowsEvent<'a> {
type Error = anyhow::Error;

fn try_from(bfdata: &BfData<'a>) -> Result<Self, Self::Error> {
let event_id = match bfdata.event_id.as_u64() {
Some(id) => id,
_ => bail!("invalid event id: {:?}", bfdata.event_id),
};
Ok(WindowsEvent::new(
bfdata.event_record_id,
bfdata.timestamp,
event_id,
bfdata.level.try_into()?,
bfdata.computer,
bfdata.provider_name,
bfdata.channel_name,
bfdata.activity_id,
bfdata.custom_data.clone(),
))
}
}
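
With the JSON branch gone, a `BfData` record now only ever renders as a bodyfile line. A rough usage sketch inside the `evtx2bodyfile` binary (the helper name `record_to_bodyfile_line` is made up for illustration):

```rust
use anyhow::Result;
use evtx::SerializedEvtxRecord;
use serde_json::Value;

// Illustrative only; assumes BfData from this binary is in scope.
// After this PR, the String conversion always goes through try_into_mactime().
fn record_to_bodyfile_line(record: &SerializedEvtxRecord<Value>) -> Result<String> {
    let bf_data = BfData::try_from(record)?;
    Ok(String::try_from(&bf_data)?)
}
```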
23 changes: 10 additions & 13 deletions src/bin/evtx2bodyfile/evtx_file.rs
@@ -5,7 +5,7 @@ use indicatif::{ProgressBar, ProgressDrawTarget, ProgressStyle};
use ouroboros::self_referencing;
use serde_json::Value;

use crate::output_formatter::OutputFormatter;
use crate::output_writer::OutputWriter;

pub(crate) struct EvtxFile(Input);

@@ -58,24 +58,21 @@ impl From<&Input> for EvtxFile {
}

impl EvtxFile {
pub(crate) fn print_records<F>(self, formatter: F, treat_errors_as_warnings: bool) -> Result<()>
pub(crate) fn print_records<F>(self, treat_errors_as_warnings: bool) -> Result<()>
where
F: OutputFormatter,
F: OutputWriter<std::io::Stdout>,
{
let mut formatter = F::from(std::io::stdout());
let bar = self.create_progress_bar().unwrap();
for value in self.into_iter() {
match formatter.record_to_string(&value) {
Ok(s) => println!("{s}"),
Err(why) => {
if treat_errors_as_warnings {
log::warn!("Error while reading record: {why}");
} else {
bar.finish_and_clear();
return Err(why);
}
if let Err(why) = formatter.output(&value) {
if treat_errors_as_warnings {
log::warn!("Error while reading record: {why}");
} else {
bar.finish_and_clear();
return Err(why);
}
}

bar.inc(1);
}
bar.finish_and_clear();
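
The `OutputWriter` trait itself lives in the binary's `output_writer` module, which is not part of this excerpt. Judging only from the call sites above (`F::from(std::io::stdout())` followed by `formatter.output(&value)`), its shape is presumably close to the following sketch; the exact bounds and the record parameter type are assumptions:

```rust
use std::io::Write;
use anyhow::Result;
use evtx::SerializedEvtxRecord;
use serde_json::Value;

// Assumed shape, inferred from print_records(): a writer is constructed From
// an io::Write sink and handles one parsed EVTX record per output() call.
// Concrete writers (for example a bodyfile writer) would implement this.
pub(crate) trait OutputWriter<W: Write>: From<W> {
    fn output(&mut self, record: &SerializedEvtxRecord<Value>) -> Result<()>;
}
```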