From caf8e5f1c8c11a1bdbb5ed0a7f16fddbe172aea0 Mon Sep 17 00:00:00 2001
From: luna
Date: Sat, 11 Nov 2023 20:10:32 -0800
Subject: [PATCH] Timings fixes, Logging Level Fix, All init params optional

---
 Cargo.toml                      |  2 +-
 src/cli/mod.rs                  | 30 +++++++++-----------
 src/farmer/config.rs            | 17 +++++++----
 src/farmer/mod.rs               | 12 ++++----
 src/gui/mod.rs                  | 50 +++++++++++++--------------
 src/harvesters/druid_garden.rs  |  4 +--
 src/main.rs                     | 24 +++++++++++-----
 src/tasks/pool_state_updater.rs |  2 +-
 8 files changed, 73 insertions(+), 68 deletions(-)

diff --git a/Cargo.toml b/Cargo.toml
index 424054c..dfbfa6d 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -39,5 +39,5 @@ simple_logger = "4.2.0"
 sysinfo = "0.29.2"
 tokio = {version = "1.34.0", features=["rt-multi-thread", "sync", "signal", "macros", "process", "time", "fs", "net"]}
 tokio-tungstenite = {version = "0.20.1", features = ["rustls-tls-webpki-roots", "rustls"] }
-tui-logger = {version = "0.10.0"}
+tui-logger = {version = "0.10.0", default-features = false, features=["examples-ratatui-crossterm"]}
 uuid = {version="1.5.0", features=["v4"]}
diff --git a/src/cli/mod.rs b/src/cli/mod.rs
index 778dfdd..1192769 100644
--- a/src/cli/mod.rs
+++ b/src/cli/mod.rs
@@ -33,9 +33,9 @@ pub enum Action {
         #[arg(short, long)]
         mnemonic: String,
         #[arg(short = 'f', long)]
-        fullnode_host: String,
+        fullnode_ws_host: Option<String>,
         #[arg(short = 'p', long)]
-        fullnode_port: u16,
+        fullnode_ws_port: Option<u16>,
         #[arg(short = 'r', long)]
         fullnode_rpc_host: Option<String>,
         #[arg(short = 'o', long)]
@@ -55,8 +55,8 @@ impl Default for Action {
 pub struct GenerateConfig<'a> {
     pub output_path: Option<PathBuf>,
     pub mnemonic: &'a str,
-    pub fullnode_host: &'a str,
-    pub fullnode_port: u16,
+    pub fullnode_ws_host: Option<String>,
+    pub fullnode_ws_port: Option<u16>,
     pub fullnode_rpc_host: Option<String>,
     pub fullnode_rpc_port: Option<u16>,
     pub fullnode_ssl: Option<String>,
@@ -98,20 +98,18 @@ pub async fn generate_config_from_mnemonic(
         .unwrap_or("mainnet".to_string());
     config.selected_network = network;
     let master_key = key_from_mnemonic(gen_settings.mnemonic)?;
-    config.fullnode_host = gen_settings.fullnode_host.to_string();
-    config.fullnode_port = if gen_settings.fullnode_port == 8555 {
-        8444
-    } else {
-        gen_settings.fullnode_port
-    };
+    config.fullnode_ws_host = gen_settings
+        .fullnode_ws_host
+        .unwrap_or(String::from("localhost"));
+    config.fullnode_rpc_host = gen_settings
+        .fullnode_rpc_host
+        .unwrap_or(String::from("localhost"));
+    config.fullnode_ws_port = gen_settings.fullnode_ws_port.unwrap_or(8444);
+    config.fullnode_rpc_port = gen_settings.fullnode_rpc_port.unwrap_or(8555);
     config.ssl_root_path = gen_settings.fullnode_ssl.clone();
     let client = FullnodeClient::new(
-        &gen_settings
-            .fullnode_rpc_host
-            .unwrap_or(gen_settings.fullnode_host.to_string()),
-        gen_settings
-            .fullnode_rpc_port
-            .unwrap_or(gen_settings.fullnode_port),
+        &config.fullnode_rpc_host,
+        config.fullnode_rpc_port,
         gen_settings.fullnode_ssl,
         &gen_settings.additional_headers,
     );
diff --git a/src/farmer/config.rs b/src/farmer/config.rs
index f7fd374..eab46a2 100644
--- a/src/farmer/config.rs
+++ b/src/farmer/config.rs
@@ -41,8 +41,10 @@ pub struct HarvesterConfig {
 pub struct Config {
     pub selected_network: String,
     pub ssl_root_path: Option<String>,
-    pub fullnode_host: String,
-    pub fullnode_port: u16,
+    pub fullnode_ws_host: String,
+    pub fullnode_ws_port: u16,
+    pub fullnode_rpc_host: String,
+    pub fullnode_rpc_port: u16,
     pub farmer_info: Vec<FarmerInfo>,
     pub pool_info: Vec<PoolWalletConfig>,
     pub payout_address: String,
@@ -60,7 +62,10 @@ impl Config {
     pub fn is_ready(&self) -> bool {
         CONSENSUS_CONSTANTS_MAP.get(&self.selected_network)
             .is_some()
-            && !self.fullnode_host.is_empty()
+            && !self.fullnode_ws_host.is_empty()
+            && !self.fullnode_rpc_host.is_empty()
+            && self.fullnode_ws_port != 0
+            && self.fullnode_rpc_port != 0
             && !self.farmer_info.is_empty()
             && decode_puzzle_hash(&self.payout_address).is_ok()
             && self.pool_info.iter().all(|c| {
@@ -76,8 +81,10 @@ impl Default for Config {
         Config {
             selected_network: "mainnet".to_string(),
             ssl_root_path: None,
-            fullnode_host: "localhost".to_string(),
-            fullnode_port: 8555,
+            fullnode_rpc_host: "localhost".to_string(),
+            fullnode_rpc_port: 8555,
+            fullnode_ws_host: "localhost".to_string(),
+            fullnode_ws_port: 8444,
             farmer_info: vec![],
             pool_info: vec![],
             payout_address: "".to_string(),
diff --git a/src/farmer/mod.rs b/src/farmer/mod.rs
index 338ed23..5ae6b87 100644
--- a/src/farmer/mod.rs
+++ b/src/farmer/mod.rs
@@ -164,7 +164,7 @@ impl Farmer {
         }
         info!(
             "Starting Farmer FullNode Connection to: {}:{}",
-            &s.shared_state.config.fullnode_host, s.shared_state.config.fullnode_port
+            &s.shared_state.config.fullnode_ws_host, s.shared_state.config.fullnode_ws_port
         );
         loop {
             if !s.shared_state.run.load(Ordering::Relaxed) {
@@ -229,8 +229,8 @@ impl Farmer {
                 if let Some(c) = &*s.shared_state.full_node_client.lock().await {
                     info!(
                         "Shutting Down old Farmer Client: {}:{}",
-                        s.shared_state.config.fullnode_host,
-                        s.shared_state.config.fullnode_host
+                        s.shared_state.config.fullnode_ws_host,
+                        s.shared_state.config.fullnode_ws_port
                     );
                     client_run.store(false, Ordering::Relaxed);
                     c.client.lock().await.shutdown().await.unwrap_or_default();
@@ -278,7 +278,7 @@ impl Farmer {
                         info!("Farmer Stopping");
                         break 'retry;
                     }
-                    tokio::time::sleep(Duration::from_millis(250)).await;
+                    tokio::time::sleep(Duration::from_millis(25)).await;
                 }
             }
         }
@@ -292,8 +292,8 @@ impl Farmer {
         let ssl_path = get_ssl_root_path(shared_state);
         create_all_ssl(&ssl_path, false)?;
         FarmerClient::new_ssl(
-            &shared_state.config.fullnode_host,
-            shared_state.config.fullnode_port,
+            &shared_state.config.fullnode_ws_host,
+            shared_state.config.fullnode_ws_port,
             ClientSSLConfig {
                 ssl_crt_path: &ssl_path.join(PUBLIC_CRT).to_string_lossy(),
                 ssl_key_path: &ssl_path.join(PUBLIC_KEY).to_string_lossy(),
diff --git a/src/gui/mod.rs b/src/gui/mod.rs
index bf3fa94..7ab8543 100644
--- a/src/gui/mod.rs
+++ b/src/gui/mod.rs
@@ -109,8 +109,8 @@ pub async fn bootstrap(config: Arc<Config>) -> Result<(), Error> {
     let fullnode_state = gui_state.clone();
     let fullnode_thread = tokio::spawn(async move {
         let full_node_rpc = FullnodeClient::new(
-            &config.fullnode_host,
-            config.fullnode_port,
+            &config.fullnode_rpc_host,
+            config.fullnode_rpc_port,
             config.ssl_root_path.clone(),
             &None,
         );
@@ -133,7 +133,7 @@ pub async fn bootstrap(config: Arc<Config>) -> Result<(), Error> {
             if !fullnode_state.farmer_state.run.load(Ordering::Relaxed) {
                 break;
             }
-            tokio::time::sleep(Duration::from_millis(250)).await;
+            tokio::time::sleep(Duration::from_millis(25)).await;
         }
     });
     let sys_info_gui_state = gui_state.clone();
@@ -144,7 +144,8 @@ pub async fn bootstrap(config: Arc<Config>) -> Result<(), Error> {
         system.refresh_system();
         loop {
             let (sys, sys_info) = match spawn_blocking(move || {
-                system.refresh_system();
+                system.refresh_cpu();
+                system.refresh_memory();
                 let si = SysInfo {
                     cpu_usage: system.global_cpu_info().cpu_usage() as u16,
                     ram_usage: ((system.used_memory() as f32 / system.total_memory() as f32)
@@ -167,7 +168,7 @@ pub async fn bootstrap(config: Arc<Config>) -> Result<(), Error> {
             if !sys_info_gui_state.farmer_state.run.load(Ordering::Relaxed) {
                 break;
             }
-            tokio::time::sleep(Duration::from_millis(250)).await;
+            tokio::time::sleep(Duration::from_millis(25)).await;
         }
     });
     let (fn_res, sys_res, gui_res, farmer_res) = join!(
@@ -209,7 +210,7 @@ async fn run_gui(
         let fullnode_state = gui_state.fullnode_state.lock().await.clone();
         terminal.draw(|f| ui(f, farmer_state, fullnode_state, sys_info))?;
     }
-    if event::poll(Duration::from_millis(50))? {
+    if event::poll(Duration::from_millis(25))? {
         if let Event::Key(event) = event::read()? {
             match event.code {
                 KeyCode::Esc => {
@@ -341,11 +342,11 @@ fn ui(
             .borders(Borders::ALL),
     );
     f.render_widget(fullnode_content, overview_chunks[2]);
-    let cpu_usage_widget = draw_cpu_usage(sys_info.cpu_usage);
+    let cpu_usage_widget = draw_gauge("CPU Usage", sys_info.cpu_usage);
     f.render_widget(cpu_usage_widget, overview_chunks[3]);
-    let ram_usage_widget = draw_ram_usage(sys_info.ram_usage);
+    let ram_usage_widget = draw_gauge("RAM Usage", sys_info.ram_usage);
     f.render_widget(ram_usage_widget, overview_chunks[4]);
-    let swap_usage_widget = draw_swap_usage(sys_info.swap_usage);
+    let swap_usage_widget = draw_gauge("Swap Usage", sys_info.swap_usage);
     f.render_widget(swap_usage_widget, overview_chunks[5]);
 
     let logs_widget = draw_logs();
@@ -368,26 +369,15 @@ fn draw_logs<'a>() -> TuiLoggerWidget<'a> {
         .style(Style::default().fg(Color::White).bg(Color::Black))
 }
 
-fn draw_cpu_usage<'a>(total_cpu_usage: u16) -> Gauge<'a> {
+fn draw_gauge(title: &str, value: u16) -> Gauge {
     let gauge = Gauge::default()
-        .block(Block::default().title("CPU Usage").borders(Borders::ALL))
-        .gauge_style(Style::default().fg(Color::LightGreen))
-        .percent(total_cpu_usage);
-    gauge
-}
-
-fn draw_ram_usage<'a>(total_ram_usage: u16) -> Gauge<'a> {
-    let gauge = Gauge::default()
-        .block(Block::default().title("RAM Usage").borders(Borders::ALL))
-        .gauge_style(Style::default().fg(Color::LightBlue))
-        .percent(total_ram_usage);
-    gauge
-}
-
-fn draw_swap_usage<'a>(total_swap_usage: u16) -> Gauge<'a> {
-    let gauge = Gauge::default()
-        .block(Block::default().title("Swap Usage").borders(Borders::ALL))
-        .gauge_style(Style::default().fg(Color::LightYellow))
-        .percent(total_swap_usage);
-    gauge
+        .block(Block::default().title(title).borders(Borders::ALL))
+        .percent(value);
+    if value > 80 {
+        gauge.gauge_style(Style::default().fg(Color::LightRed))
+    } else if value > 50 {
+        gauge.gauge_style(Style::default().fg(Color::LightYellow))
+    } else {
+        gauge.gauge_style(Style::default().fg(Color::LightGreen))
+    }
 }
diff --git a/src/harvesters/druid_garden.rs b/src/harvesters/druid_garden.rs
index d16b37f..5f2bc88 100644
--- a/src/harvesters/druid_garden.rs
+++ b/src/harvesters/druid_garden.rs
@@ -82,7 +82,7 @@ impl Harvester for DruidGardenHarvester {
             let constants_arc = constants.clone();
             let plot_counts = plot_counts.clone();
             let mut responses = vec![];
-            let plot_handle = timeout(Duration::from_secs(15), tokio::spawn(async move {
+            let plot_handle = timeout(Duration::from_secs(20), tokio::spawn(async move {
                 let (plot_id, k, memo, c_level) = match plot_info.reader.header() {
                     PlotHeader::V1(h) => (h.id, h.k, h.memo, 0),
                     PlotHeader::V2(h) => (h.id, h.k, h.memo, h.compression_level),
@@ -385,7 +385,7 @@ impl DruidGardenHarvester {
                         }
                     }
                 }
-                tokio::time::sleep(Duration::from_secs(1)).await;
+                tokio::time::sleep(Duration::from_millis(25)).await;
             }
         });
         Ok(Self {
diff --git a/src/main.rs b/src/main.rs
index 34fb9a7..148367a 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -8,7 +8,7 @@ use dg_xch_core::consensus::constants::{CONSENSUS_CONSTANTS_MAP, MAINNET};
 use dg_xch_keys::decode_puzzle_hash;
 use hex::encode;
 use home::home_dir;
-use log::info;
+use log::{info, LevelFilter};
 use once_cell::sync::Lazy;
 use reqwest::header::USER_AGENT;
 use simple_logger::SimpleLogger;
@@ -109,7 +109,12 @@ async fn main() -> Result<(), Error> {
         );
         return Ok(());
     }
-    SimpleLogger::new().env().init().unwrap_or_default();
+    SimpleLogger::new()
+        .with_colors(true)
+        .with_level(LevelFilter::Info)
+        .env()
+        .init()
+        .unwrap_or_default();
     let config = Config::try_from(&config_path).unwrap_or_default();
     let config_arc = Arc::new(config);
     let constants = CONSENSUS_CONSTANTS_MAP
@@ -156,19 +161,24 @@ async fn main() -> Result<(), Error> {
         }
         Action::Init {
             mnemonic,
-            fullnode_host,
-            fullnode_port,
+            fullnode_ws_host,
+            fullnode_ws_port,
             fullnode_rpc_host,
             fullnode_rpc_port,
             fullnode_ssl,
             network,
         } => {
-            SimpleLogger::new().env().init().unwrap_or_default();
+            SimpleLogger::new()
+                .with_colors(true)
+                .with_level(LevelFilter::Info)
+                .env()
+                .init()
+                .unwrap_or_default();
             generate_config_from_mnemonic(GenerateConfig {
                 output_path: Some(config_path),
                 mnemonic: &mnemonic,
-                fullnode_host: &fullnode_host,
-                fullnode_port,
+                fullnode_ws_host,
+                fullnode_ws_port,
                 fullnode_rpc_host,
                 fullnode_rpc_port,
                 fullnode_ssl,
diff --git a/src/tasks/pool_state_updater.rs b/src/tasks/pool_state_updater.rs
index a18982a..254a253 100644
--- a/src/tasks/pool_state_updater.rs
+++ b/src/tasks/pool_state_updater.rs
@@ -76,7 +76,7 @@ pub async fn pool_updater(shared_state: Arc) {
                 .force_pool_update
                 .store(false, Ordering::Relaxed);
         }
-        tokio::time::sleep(Duration::from_millis(250)).await;
+        tokio::time::sleep(Duration::from_millis(25)).await;
     }
     info!("Pool Handle Stopped");
 }
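
Note on draw_gauge: the gauge color falls through an if/else chain, so the
thresholds must be tested from highest to lowest, otherwise the hotter arms
are unreachable (value > 50 already matches anything above 80). A minimal
standalone sketch of just that threshold logic, with the hypothetical helper
gauge_color returning color names in place of the ratatui Color variants:

fn gauge_color(value: u16) -> &'static str {
    // Check the highest threshold first; in ascending order the
    // `> 80` arm could never be reached, because `> 50` matches first.
    if value > 80 {
        "LightRed"
    } else if value > 50 {
        "LightYellow"
    } else {
        "LightGreen"
    }
}

fn main() {
    assert_eq!(gauge_color(30), "LightGreen"); // idle
    assert_eq!(gauge_color(65), "LightYellow"); // busy
    assert_eq!(gauge_color(95), "LightRed"); // saturated
}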