flag missing chat model as error
pelikhan committed Aug 15, 2024
1 parent e760c8a commit 2b01499
Showing 2 changed files with 7 additions and 1 deletion.
1 change: 1 addition & 0 deletions packages/core/src/promptrunner.ts
@@ -143,6 +143,7 @@ export async function runTemplate(
     trace.renderErrors()
     return <GenerationResult>{
         status: "cancelled",
+        statusText: "LLM generation skipped",
         messages,
         vars,
         text: "",
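The added statusText gives callers a human-readable reason alongside the "cancelled" status. A minimal sketch of how a consumer could surface it — not code from the repository; the GenerationResultLike shape below is an assumption reduced to the fields visible in this diff:

// Hypothetical, simplified result shape; only status, statusText, and text
// appear in the diff above — everything else is assumed for illustration.
interface GenerationResultLike {
    status: "success" | "error" | "cancelled"
    statusText?: string
    text: string
}

// With statusText populated, a cancelled run can now report why it was
// skipped instead of a bare "cancelled".
function reportResult(res: GenerationResultLike): void {
    if (res.status === "cancelled") {
        console.warn(`generation cancelled: ${res.statusText ?? "(no reason given)"}`)
        return
    }
    console.log(res.text)
}
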
7 changes: 6 additions & 1 deletion packages/vscode/src/lmaccess.ts
@@ -17,6 +17,7 @@ import { ChatCompletionMessageParam } from "../../core/src/chattypes"
 import { LanguageModelChatRequest } from "../../core/src/server/client"
 import { ChatStart } from "../../core/src/server/messages"
 import { serializeError } from "../../core/src/error"
+import { logVerbose } from "../../core/src/util"

 async function generateLanguageModelConfiguration(
     state: ExtensionState,
@@ -205,8 +206,12 @@ export function createChatModelRunner(
         const { model, messages, modelOptions } = req
         const chatModel = await pickChatModel(state, model)
         if (!chatModel) {
+            logVerbose("no language chat model selected, cancelling")
             onChunk({
-                finishReason: "cancel",
+                finishReason: "fail",
+                error: serializeError(
+                    new Error("No language chat model selected")
+                ),
             })
             return
         }
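Switching the finish reason from "cancel" to "fail" and attaching a serialized error lets downstream handlers tell a genuine failure apart from a user-initiated cancellation. A minimal sketch, assuming a chunk shape with just the fields used in this diff (the extension's real chunk type may differ):

// Assumed chunk shape for illustration only; mirrors the fields written
// by onChunk in the diff above (finishReason, error) plus streamed text.
type FinishReason = "stop" | "cancel" | "fail"

interface ChatChunkLike {
    finishReason?: FinishReason
    error?: { message?: string }
    text?: string
}

// A consumer can now escalate "fail" (e.g. "No language chat model selected")
// while still treating "cancel" as a silent, user-initiated stop.
function handleChunk(chunk: ChatChunkLike): void {
    if (chunk.finishReason === "fail")
        throw new Error(chunk.error?.message ?? "chat request failed")
    if (chunk.finishReason === "cancel") return
    if (chunk.text) console.log(chunk.text)
}
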
