Skip to content

Commit

Permalink
Merge pull request #113 from firstbatchxyz/erhant/update-workflows-for-error
Browse files Browse the repository at this point in the history

updated workflows with error handling, added new model
  • Loading branch information
erhant authored Sep 2, 2024
2 parents aac2150 + 6238437 commit 7ab68fd
Show file tree
Hide file tree
Showing 6 changed files with 26 additions and 20 deletions.
4 changes: 2 additions & 2 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

4 changes: 2 additions & 2 deletions Cargo.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[package]
name = "dkn-compute"
version = "0.1.6"
version = "0.1.7"
edition = "2021"
license = "Apache-2.0"
readme = "README.md"
Expand Down Expand Up @@ -46,7 +46,7 @@ sha3 = "0.10.8"
fastbloom-rs = "0.5.9"

# workflows
ollama-workflows = { git = "https://github.com/andthattoo/ollama-workflows", rev = "d6b2e1e" }
ollama-workflows = { git = "https://github.com/andthattoo/ollama-workflows", rev = "ba038f7" }

# peer-to-peer
libp2p = { git = "https://github.com/anilaltuner/rust-libp2p.git", rev = "3c55e95", features = [
Expand Down
1 change: 1 addition & 0 deletions docs/NODE_GUIDE.md
Original file line number Diff line number Diff line change
Expand Up @@ -136,6 +136,7 @@ Based on the resources of your machine, you must decide which models that you wi
- `phi3:14b-medium-128k-instruct-q4_1`
- `phi3:3.8b`
- `llama3.1:latest`
- `llama3.1:8b-instruct-q8_0`
- `phi3.5:3.8b`
- `phi3.5:3.8b-mini-instruct-fp16`
Expand Down
15 changes: 9 additions & 6 deletions src/config/ollama.rs
Original file line number Diff line number Diff line change
Expand Up @@ -210,11 +210,14 @@ impl OllamaConfig {
log::warn!("Ignoring model {}: Workflow timed out", model);
},
result = executor.execute(None, workflow, &mut memory) => {
if result.is_empty() {
log::warn!("Ignoring model {}: Workflow returned empty result", model);
} else {
log::info!("Accepting model {}", model);
return true;
match result {
Ok(_) => {
log::info!("Accepting model {}", model);
return true;
}
Err(e) => {
log::warn!("Ignoring model {}: Workflow failed with error {}", model, e);
}
}
}
};
Expand Down Expand Up @@ -292,6 +295,6 @@ mod tests {
let mut memory = ProgramMemory::new();

let result = exe.execute(None, workflow, &mut memory).await;
println!("Result: {}", result);
println!("Result: {}", result.unwrap());
}
}
20 changes: 11 additions & 9 deletions src/handlers/workflow.rs
Original file line number Diff line number Diff line change
Expand Up @@ -76,7 +76,7 @@ impl ComputeHandler for WorkflowHandler {
.get_any_matching_model(task.input.model)?;
log::info!("Using model {} for task {}", model, task.task_id);

// execute workflow with cancellation
// prepare workflow executor
let executor = if model_provider == ModelProvider::Ollama {
Executor::new_at(
model,
Expand All @@ -91,26 +91,28 @@ impl ComputeHandler for WorkflowHandler {
.input
.prompt
.map(|prompt| Entry::try_value_or_str(&prompt));
let result: Option<String>;

// execute workflow with cancellation
let result: String;
tokio::select! {
_ = node.cancellation.cancelled() => {
log::info!("Received cancellation, quitting all tasks.");
return Ok(MessageAcceptance::Accept)
},
exec_result = executor.execute(entry.as_ref(), task.input.workflow, &mut memory) => {
if exec_result.is_empty() {
return Err(format!("Got empty string result for task {}", task.task_id).into());
} else {
result = Some(exec_result);
match exec_result {
Ok(exec_result) => {
result = exec_result;
}
Err(e) => {
return Err(format!("Workflow failed with error {}", e).into());
}
}
}
}
let result = result.ok_or::<String>(format!("No result for task {}", task.task_id))?;

// publish the result
node.send_result(result_topic, &task.public_key, &task.task_id, result)?;

// accept message, someone else may be included in the filter
Ok(MessageAcceptance::Accept)
}
}
2 changes: 1 addition & 1 deletion start.sh
Original file line number Diff line number Diff line change
Expand Up @@ -182,7 +182,7 @@ handle_ollama_env() {

# if there is no ollama model given, do not add any ollama compose profile
ollama_needed=false
ollama_models="nous-hermes2theta-llama3-8b phi3:medium phi3:medium-128k phi3:3.8b phi3.5 llama3.1:latest"
ollama_models="nous-hermes2theta-llama3-8b phi3:medium phi3:medium-128k phi3:3.8b phi3.5 llama3.1:latest llama3.1:8b-instruct-q8_0"
for m in $(echo "$DKN_MODELS" | tr ',' ' '); do
case " $ollama_models " in
*" $m "*) ollama_needed=true; break;;
Expand Down

0 comments on commit 7ab68fd

Please sign in to comment.