From 6da25ac2136f327ad93dbee0b7d9a5811d4b8758 Mon Sep 17 00:00:00 2001
From: Brock Wilcox
Date: Wed, 25 Dec 2024 15:25:27 -0500
Subject: [PATCH] Add log entry; disable lots of debugging

---
 README.md                   |  6 ++++++
 src/llm_engine/anthropic.rs |  4 ++--
 src/llm_engine/openai.rs    |  4 ++--
 src/main.rs                 |  4 ++--
 src/segmenter.rs            | 20 ++++++++++----------
 5 files changed, 22 insertions(+), 16 deletions(-)

diff --git a/README.md b/README.md
index bf5507c..6dd6c78 100644
--- a/README.md
+++ b/README.md
@@ -123,6 +123,12 @@ Draw some stuff on your screen, and then trigger the assistant by *touching/tapp
   * Oh -- I think it's rather important to run each set a few times assuming there is some temperature involved
   * To scale this even further we of course would want to bring in a JUDGE-BOT!
   * Then I could say things like "my new segmentation algorithm improved output quality by 17% per the JUDGE-BOT" etc
+* **2024-12-25** -- CLI simplify and expand
+  * Now you can pass just `-m gpt-4o-mini` and it will guess the engine is `openai`
+  * You can also pass `--engine-api-key` and `--engine-base-url`
+  * So now to use [Groq](https://groq.com/): `./ghostwriter -m llama-3.2-90b-vision-preview --engine-api-key $GROQ_API_KEY --engine openai --engine-base-url https://api.groq.com/openai`
+  * ... but so far Llama 3.2 90b vision is still quite bad with this interface
+  * I turned off a bunch of debugging. Now I'll need to go back and introduce log-level or something

 ## Ideas
 * [DONE] Matt showed me his iOS super calc that just came out, take inspiration from that!
diff --git a/src/llm_engine/anthropic.rs b/src/llm_engine/anthropic.rs
index 97d460b..043b446 100644
--- a/src/llm_engine/anthropic.rs
+++ b/src/llm_engine/anthropic.rs
@@ -98,7 +98,7 @@ impl LLMEngine for Anthropic {
         });

         // print body for debugging
-        println!("Request: {}", body);
+        // println!("Request: {}", body);

         let raw_response = ureq::post(&format!("{}/v1/messages", self.base_url))
             .set("x-api-key", self.api_key.as_str())
@@ -118,7 +118,7 @@ impl LLMEngine for Anthropic {
         };

         let json: json = response.into_json().unwrap();
-        println!("Response: {}", json);
+        // println!("Response: {}", json);
         let tool_calls = &json["content"];
         if let Some(tool_call) = tool_calls.get(0) {
             let function_name = tool_call["name"].as_str().unwrap();
diff --git a/src/llm_engine/openai.rs b/src/llm_engine/openai.rs
index e3e8585..ade92b2 100644
--- a/src/llm_engine/openai.rs
+++ b/src/llm_engine/openai.rs
@@ -97,7 +97,7 @@ impl LLMEngine for OpenAI {
         });

         // print body for debugging
-        println!("Request: {}", body);
+        // println!("Request: {}", body);
         let raw_response = ureq::post(format!("{}/v1/chat/completions", self.base_url).as_str())
             .set("Authorization", &format!("Bearer {}", self.api_key))
             .set("Content-Type", "application/json")
@@ -115,7 +115,7 @@ impl LLMEngine for OpenAI {
         };

         let json: json = response.into_json().unwrap();
-        println!("Response: {}", json);
+        // println!("Response: {}", json);

         let tool_calls = &json["choices"][0]["message"]["tool_calls"];

diff --git a/src/main.rs b/src/main.rs
index 257b2d3..0879a33 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -134,7 +134,7 @@ fn draw_svg(
 }

 fn load_config(filename: &str) -> String {
-    println!("Loading config from {}", filename);
+    // println!("Loading config from {}", filename);

     if std::path::Path::new(filename).exists() {
         std::fs::read_to_string(filename).unwrap()
@@ -271,7 +271,7 @@ fn ghostwriter(args: &Args) -> Result<()> {
     } else {
         String::new()
     };
-    println!("Segmentation description: {}", segmentation_description);
+    // println!("Segmentation description: {}", segmentation_description);

     let prompt_general_raw = load_config(&args.prompt);
     let prompt_general_json =
diff --git a/src/segmenter.rs b/src/segmenter.rs
index ae8fb54..9b3db15 100644
--- a/src/segmenter.rs
+++ b/src/segmenter.rs
@@ -33,12 +33,12 @@ impl ImageAnalyzer {
         &self,
         image_path: &str,
     ) -> Result<SegmentationResult> {
-        println!("Reading image from: {}", image_path);
+        // println!("Reading image from: {}", image_path);

         // Read image and convert to grayscale
         let img = image::open(image_path)?.to_rgb8();
         let (width, height) = img.dimensions();
-        println!("Image loaded: {}x{}", width, height);
+        // println!("Image loaded: {}x{}", width, height);

         // Convert to grayscale
         let gray: GrayImage = image::imageops::grayscale(&img);
@@ -55,7 +55,7 @@ impl ImageAnalyzer {

         // Find contours
         let contours = find_contours(&binary);
-        println!("Found {} contours", contours.len());
+        // println!("Found {} contours", contours.len());

         // Process regions
         let mut regions = Vec::new();
@@ -91,7 +91,7 @@ impl ImageAnalyzer {
         regions.sort_by(|a, b| b.area.partial_cmp(&a.area).unwrap());
         regions.truncate(self.max_regions);

-        println!("Processed {} significant regions", regions.len());
+        // println!("Processed {} significant regions", regions.len());

         Ok(SegmentationResult {
             regions,
@@ -156,12 +156,12 @@ impl ImageAnalyzer {
 }

 pub fn analyze_image(image_path: &str) -> Result<SegmentationResult> {
-    println!("Reading image from: {}", image_path);
+    // println!("Reading image from: {}", image_path);

     // Read image and convert to grayscale
     let img = image::open(image_path)?.to_rgb8();
     let (width, height) = img.dimensions();
-    println!("Image loaded: {}x{}", width, height);
+    // println!("Image loaded: {}x{}", width, height);

     // Convert to grayscale
     let gray: GrayImage = image::imageops::grayscale(&img);
@@ -178,16 +178,16 @@ pub fn analyze_image(image_path: &str) -> Result<SegmentationResult> {
         if area >= min_area {
             let bounds = min_area_rect(&contour.points);
@@ -216,7 +216,7 @@ pub fn analyze_image(image_path: &str) -> Result<SegmentationResult>
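
For reference, the engine-guessing described in the README entry only needs a
prefix match on the model name. A minimal sketch of that idea -- the
`guess_engine` helper and the prefix list here are hypothetical, not
ghostwriter's actual code:

    // Guess which engine backs a model when --engine is not given.
    // Hypothetical helper; prefixes are illustrative, not exhaustive.
    fn guess_engine(model: &str) -> &'static str {
        if model.starts_with("claude-") {
            "anthropic"
        } else {
            // gpt-* models, plus OpenAI-compatible providers like Groq,
            // which just need --engine-base-url pointed at their API
            "openai"
        }
    }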
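
The "introduce log-level or something" note could go through the `log` facade
with `env_logger`, so the `println!`s commented out above become `debug!`
calls that stay quiet unless asked for. A sketch under that assumption -- the
crate choice is a suggestion, not what this patch does:

    use log::debug; // log = "0.4" and env_logger = "0.11" in Cargo.toml

    fn main() {
        // Honors RUST_LOG, e.g. `RUST_LOG=debug ./ghostwriter ...`
        env_logger::init();

        let body = r#"{"model":"gpt-4o-mini"}"#;
        debug!("Request: {}", body); // replaces a commented-out println!
    }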