Skip to content

Commit

Permalink
move runPrompt to global scope only
Browse files Browse the repository at this point in the history
  • Loading branch information
pelikhan committed Jun 10, 2024
1 parent bad8be7 commit a1d100c
Show file tree
Hide file tree
Showing 16 changed files with 159 additions and 160 deletions.
8 changes: 4 additions & 4 deletions docs/genaisrc/genaiscript.d.ts

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

8 changes: 4 additions & 4 deletions genaisrc/genaiscript.d.ts

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

8 changes: 4 additions & 4 deletions packages/core/src/genaisrc/genaiscript.d.ts

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

108 changes: 102 additions & 6 deletions packages/core/src/promptcontext.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,10 @@
import { ChatCompletionsOptions, LanguageModel } from "./chat"
import {
ChatCompletionMessageParam,
ChatCompletionsOptions,
executeChatSession,
LanguageModel,
mergeGenerationOptions,
} from "./chat"
import { HTMLEscape, arrayify, logVerbose } from "./util"
import { host } from "./host"
import { MarkdownTrace } from "./trace"
Expand All @@ -9,25 +15,27 @@ import { readText } from "./fs"
import {
PromptNode,
appendChild,
createChatParticipant,
createFileMergeNode,
createImageNode,
createOutputProcessor,
createTextNode,
renderPromptNode,
} from "./promptdom"
import { bingSearch } from "./websearch"
import { CancellationToken } from "./cancellation"
import { CancellationToken, checkCancelled } from "./cancellation"
import {
RunPromptContextNode,
createChatGenerationContext,
} from "./runpromptcontext"
import { CSVParse, CSVToMarkdown } from "./csv"
import { INIParse, INIStringify } from "./ini"
import { CancelError } from "./error"
import { CancelError, isCancelError, serializeError } from "./error"
import { createFetch } from "./fetch"
import { resolveFileDataUri } from "./file"
import { XMLParse } from "./xml"
import { GenerationStats } from "./expander"
import { fuzzSearch } from "./fuzzsearch"
import { parseModelIdentifier, resolveModelConnectionInfo } from "./models"
import { renderAICI } from "./aici"
import { MODEL_PROVIDER_AICI } from "./constants"

function stringLikeToFileName(f: string | WorkspaceFile) {
return typeof f === "string" ? f : f?.filename
Expand All @@ -39,6 +47,7 @@ export function createPromptContext(
options: GenerationOptions,
model: string
) {
const { cancellationToken, infoCb } = options || {}
const env = new Proxy(vars, {
get: (target: any, prop, recv) => {
const v = target[prop]
Expand Down Expand Up @@ -211,6 +220,93 @@ export function createPromptContext(
// Abort the current generation: throws CancelError, which callers treat as a
// user-initiated stop (see isCancelError) rather than a failure.
cancel: (reason?: string) => {
throw new CancelError(reason || "user cancelled")
},
/**
 * Runs a nested prompt: expands the generator into a prompt tree, renders it
 * into chat messages, resolves the model connection, and executes the chat
 * session. Errors are captured and returned in the result (never thrown);
 * cancellation is reported with finishReason "cancel".
 * @param generator literal prompt text, or a callback that builds the prompt
 * @param runOptions optional per-run overrides (label, model, …)
 * @returns the chat response, or an error result on failure/cancel
 */
runPrompt: async (generator, runOptions): Promise<RunPromptResult> => {
    try {
        const { label } = runOptions || {}
        trace.startDetails(`🎁 run prompt ${label || ""}`)
        infoCb?.({ text: `run prompt ${label || ""}` })

        const genOptions = mergeGenerationOptions(options, runOptions)
        // Build the prompt tree from a string literal or a user callback.
        const ctx = createChatGenerationContext(genOptions, vars, trace)
        if (typeof generator === "string")
            ctx.node.children.push(createTextNode(generator))
        else await generator(ctx)
        const node = ctx.node

        checkCancelled(cancellationToken)

        // messages is only appended to, never reassigned.
        const messages: ChatCompletionMessageParam[] = []
        let functions: ChatFunctionCallback[] = undefined
        let schemas: Record<string, JSONSchema> = undefined
        let chatParticipants: ChatParticipant[] = undefined
        // Expand the template; AICI providers render to a single message,
        // all others go through the generic prompt-node renderer.
        const { provider } = parseModelIdentifier(genOptions.model)
        if (provider === MODEL_PROVIDER_AICI) {
            const { aici } = await renderAICI("prompt", node)
            // todo: output processor?
            messages.push(aici)
        } else {
            const {
                errors,
                schemas: scs,
                functions: fns,
                messages: msgs,
                chatParticipants: cps,
            } = await renderPromptNode(genOptions.model, node, {
                trace,
            })

            schemas = scs
            functions = fns
            chatParticipants = cps
            messages.push(...msgs)

            if (errors?.length)
                throw new Error("errors while running prompt")
        }

        // Resolve credentials and the model driver before executing.
        const connection = await resolveModelConnectionInfo(
            genOptions,
            { trace, token: true }
        )
        if (!connection.configuration)
            throw new Error("model connection error " + connection.info)
        const { completer } = await host.resolveLanguageModel(
            genOptions,
            connection.configuration
        )
        if (!completer)
            throw new Error(
                "model driver not found for " + connection.info
            )
        const resp = await executeChatSession(
            connection.configuration,
            cancellationToken,
            messages,
            vars,
            functions,
            schemas,
            completer,
            chatParticipants,
            genOptions
        )
        const { json, text } = resp
        // Use the destructured json consistently (was resp.json).
        if (json)
            trace.detailsFenced("📩 json (parsed)", json, "json")
        else if (text)
            trace.detailsFenced(`🔠 output`, text, `markdown`)
        return resp
    } catch (e) {
        trace.error(e)
        // Fold failures into the result so callers don't need try/catch;
        // distinguish user cancellation from genuine errors.
        return {
            text: undefined,
            finishReason: isCancelError(e) ? "cancel" : "fail",
            error: serializeError(e),
        }
    } finally {
        trace.endDetails()
    }
},
fetchText: async (urlOrFile, fetchOptions) => {
if (typeof urlOrFile === "string") {
urlOrFile = {
Expand Down
99 changes: 1 addition & 98 deletions packages/core/src/runpromptcontext.ts
Original file line number Diff line number Diff line change
Expand Up @@ -11,24 +11,13 @@ import {
createSchemaNode,
createStringTemplateNode,
createTextNode,
renderPromptNode,
} from "./promptdom"
import { MarkdownTrace } from "./trace"
import {
ChatCompletionMessageParam,
executeChatSession,
mergeGenerationOptions,
} from "./chat"
import { GenerationOptions } from "./promptcontext"
import { parseModelIdentifier, resolveModelConnectionInfo } from "./models"
import { renderAICI } from "./aici"
import { CancelError, isCancelError, serializeError } from "./error"
import { checkCancelled } from "./cancellation"
import { MODEL_PROVIDER_AICI } from "./constants"
import { CancelError } from "./error"
import { promptParametersSchemaToJSONSchema } from "./parameters"
import { isJSONSchema } from "./schema"
import { consoleLogFormat } from "./logging"
import { host } from "./host"
import { resolveFileDataUri } from "./file"

export function createChatTurnGenerationContext(
Expand Down Expand Up @@ -114,92 +103,6 @@ export function createChatTurnGenerationContext(
ctx.def("", body, options)
return undefined
},
// Runs a nested prompt inside the chat-turn context. NOTE(review): this is the
// copy being removed by this commit — the implementation moved to
// promptcontext.ts (global scope). Errors are captured and returned in the
// result rather than thrown; cancellation maps to finishReason "cancel".
runPrompt: async (generator, runOptions) => {
try {
const { label } = runOptions || {}
trace.startDetails(`🎁 run prompt ${label || ""}`)
infoCb?.({ text: `run prompt ${label || ""}` })

const genOptions = mergeGenerationOptions(options, runOptions)
// Build the prompt tree from a string literal or a user callback.
const ctx = createChatGenerationContext(genOptions, vars, trace)
if (typeof generator === "string")
ctx.node.children.push(createTextNode(generator))
else await generator(ctx)
const node = ctx.node

checkCancelled(cancellationToken)

let messages: ChatCompletionMessageParam[] = []
let functions: ChatFunctionCallback[] = undefined
let schemas: Record<string, JSONSchema> = undefined
let chatParticipants: ChatParticipant[] = undefined
// expand template
// AICI providers render the whole tree to a single message; all other
// providers go through the generic prompt-node renderer.
const { provider } = parseModelIdentifier(genOptions.model)
if (provider === MODEL_PROVIDER_AICI) {
const { aici } = await renderAICI("prompt", node)
// todo: output processor?
messages.push(aici)
} else {
const {
errors,
schemas: scs,
functions: fns,
messages: msgs,
chatParticipants: cps,
} = await renderPromptNode(genOptions.model, node, {
trace,
})

schemas = scs
functions = fns
chatParticipants = cps
messages.push(...msgs)

if (errors?.length)
throw new Error("errors while running prompt")
}

// Resolve credentials and the model driver before executing.
const connection = await resolveModelConnectionInfo(
genOptions,
{ trace, token: true }
)
if (!connection.configuration)
throw new Error("model connection error " + connection.info)
const { completer } = await host.resolveLanguageModel(
genOptions,
connection.configuration
)
if (!completer)
throw new Error(
"model driver not found for " + connection.info
)
const resp = await executeChatSession(
connection.configuration,
cancellationToken,
messages,
vars,
functions,
schemas,
completer,
chatParticipants,
genOptions
)
// Trace parsed JSON when present, otherwise the raw text output.
const { json, text } = resp
if (resp.json)
trace.detailsFenced("📩 json (parsed)", json, "json")
else if (text)
trace.detailsFenced(`🔠 output`, text, `markdown`)
return resp
} catch (e) {
trace.error(e)
// Fold failures into the result; distinguish user cancellation from
// genuine errors. NOTE(review): unlike the promptcontext.ts copy, this
// error result omits `text: undefined`.
return {
finishReason: isCancelError(e) ? "cancel" : "fail",
error: serializeError(e),
}
} finally {
trace.endDetails()
}
},
console,
}

Expand Down
8 changes: 4 additions & 4 deletions packages/core/src/types/prompt_template.d.ts
Original file line number Diff line number Diff line change
Expand Up @@ -1233,10 +1233,6 @@ interface ChatTurnGenerationContext {
data: object[] | object,
options?: DefDataOptions
): string
runPrompt(
generator: string | PromptGenerator,
options?: PromptGeneratorOptions
): Promise<RunPromptResult>
console: PromptGenerationConsole
}

Expand Down Expand Up @@ -1513,6 +1509,10 @@ interface PromptContext extends ChatGenerationContext {
system(options: PromptSystemArgs): void
defFileMerge(fn: FileMergeHandler): void
defOutputProcessor(fn: PromptOutputProcessorHandler): void
runPrompt(
generator: string | PromptGenerator,
options?: PromptGeneratorOptions
): Promise<RunPromptResult>
fetchText(
urlOrFile: string | WorkspaceFile,
options?: FetchTextOptions
Expand Down
8 changes: 4 additions & 4 deletions packages/sample/genaisrc/genaiscript.d.ts

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

8 changes: 4 additions & 4 deletions packages/sample/genaisrc/node/genaiscript.d.ts

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

Loading

0 comments on commit a1d100c

Please sign in to comment.