Skip to content

Commit

Permalink
Console definition in genaiscript.d.ts (#485)
Browse files Browse the repository at this point in the history
* console defined in genaiscript.d.ts

* log message

* better weaving of console.log in trace

* show output in run prompt
  • Loading branch information
pelikhan authored May 29, 2024
1 parent ce6ef7a commit a39e506
Show file tree
Hide file tree
Showing 19 changed files with 355 additions and 48 deletions.
24 changes: 22 additions & 2 deletions docs/genaisrc/genaiscript.d.ts

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

24 changes: 22 additions & 2 deletions genaisrc/genaiscript.d.ts

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

19 changes: 2 additions & 17 deletions packages/core/src/evalprompt.ts
Original file line number Diff line number Diff line change
Expand Up @@ -10,24 +10,9 @@ export async function evalPrompt(
logCb?: (msg: string) => void
}
) {
const { logCb, sourceMaps } = options || {}
const log = (...args: any[]) => {
const line = consoleLogFormat(...args)
logCb?.(line)
host.log(LogLevel.Verbose, line)
}
const ctx = Object.freeze<
PromptContext & { console: Partial<typeof console> }
>({
const { sourceMaps } = options || {}
const ctx = Object.freeze<PromptContext>({
...ctx0,
console: {
log: log,
warn: log,
debug: log,
error: log,
info: log,
trace: log,
},
})
const keys = Object.keys(ctx)
const prefix = "async (" + keys.join(",") + ") => { 'use strict';\n"
Expand Down
24 changes: 22 additions & 2 deletions packages/core/src/genaisrc/genaiscript.d.ts

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

21 changes: 20 additions & 1 deletion packages/core/src/runpromptcontext.ts
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,7 @@ import { checkCancelled } from "./cancellation"
import { MODEL_PROVIDER_AICI } from "./constants"
import { promptParametersSchemaToJSONSchema } from "./parameters"
import { isJSONSchema } from "./schema"
import { consoleLogFormat } from "./logging"

export interface RunPromptContextNode extends RunPromptContext {
node: PromptNode
Expand All @@ -42,6 +43,17 @@ export function createRunPromptContext(
const { cancellationToken } = options || {}
const node: PromptNode = { children: [] }

const log = (...args: any[]) => {
const line = consoleLogFormat(...args)
if (line) trace.log(line)
}
const console = Object.freeze<PromptConsole>({
log,
debug: log,
warn: (args) => trace.warn(consoleLogFormat(...args)),
error: (args) => trace.error(consoleLogFormat(...args)),
})

const defTool: (
name: string,
description: string,
Expand Down Expand Up @@ -132,7 +144,8 @@ export function createRunPromptContext(
},
runPrompt: async (generator, runOptions) => {
try {
trace.startDetails(`🎁 run prompt`)
const { label } = runOptions || {}
trace.startDetails(`🎁 run prompt ${label || ""}`)

const genOptions = mergeGenerationOptions(options, runOptions)
const ctx = createRunPromptContext(genOptions, env, trace)
Expand Down Expand Up @@ -190,6 +203,11 @@ export function createRunPromptContext(
completer,
genOptions
)
const { json, text } = resp
if (resp.json)
trace.detailsFenced("📩 json (parsed)", json, "json")
else if (text)
trace.detailsFenced(`🔠 output`, text, `markdown`)
return resp
} catch (e) {
trace.error(e)
Expand All @@ -201,6 +219,7 @@ export function createRunPromptContext(
trace.endDetails()
}
},
console,
}

return ctx
Expand Down
8 changes: 6 additions & 2 deletions packages/core/src/trace.ts
Original file line number Diff line number Diff line change
Expand Up @@ -114,7 +114,7 @@ ${this.toResultIcon(success, "")}${title}
}

log(message: string) {
this.content += (message ?? "") + "\n"
this.content += "\n> " + (message ?? "") + "\n"
}

startFence(language: string) {
Expand Down Expand Up @@ -201,7 +201,7 @@ ${this.toResultIcon(success, "")}${title}
const emsg = errorMessage(error)
const msg = message || emsg
this.disableChange(() => {
this.warn(msg)
this.caution(msg)
if (options.details && error?.stack) {
this.content += `> \`\`\`\`\`\`\`markdown`
this.content += error.stack
Expand All @@ -213,6 +213,10 @@ ${this.toResultIcon(success, "")}${title}
}

// Appends a warning admonition block to the accumulated trace markdown.
// NOTE(review): GitHub-flavored alert syntax expects the marker on its own
// blockquote line (`> [!WARNING]` then `> msg` on the next line); the
// single-line form used here may not render as a styled alert in every
// markdown viewer — confirm against the target renderer.
warn(msg: string) {
this.content += `\n> [!WARNING] ${msg}\n`
}

// Appends a caution (higher-severity) admonition; the error() path above
// routes its message through this instead of warn().
caution(msg: string) {
this.content += `\n> [!CAUTION] ${msg}\n`
}

Expand Down
17 changes: 16 additions & 1 deletion packages/core/src/types/prompt_template.d.ts
Original file line number Diff line number Diff line change
@@ -1,3 +1,10 @@
/**
 * Minimal console surface exposed to prompt scripts.
 * Calls are redirected into the generation trace (see the console object
 * built in createRunPromptContext) rather than the host's real console.
 * Intentionally narrower than the full Console type: only log/warn/debug/error.
 */
interface PromptConsole {
log(...data: any[]): void
warn(...data: any[]): void
debug(...data: any[]): void
error(...data: any[]): void
}

type DiagnosticSeverity = "error" | "warning" | "info"

interface Diagnostic {
Expand Down Expand Up @@ -1197,6 +1204,13 @@ interface WriteTextOptions extends ContextExpansionOptions {

type RunPromptGenerator = (ctx: RunPromptContext) => Awaitable<void>

/**
 * Options accepted by runPrompt, extending the model configuration
 * with trace-related settings.
 */
interface RunPromptOptions extends ModelOptions {
    /**
     * Optional label appended to the `🎁 run prompt` trace section header,
     * used to tell nested prompt runs apart in the trace output.
     */
    label?: string
}

// keep in sync with prompt_type.d.ts
interface RunPromptContext {
writeText(body: Awaitable<string>, options?: WriteTextOptions): void
Expand All @@ -1215,14 +1229,15 @@ interface RunPromptContext {
): string
runPrompt(
generator: string | RunPromptGenerator,
options?: ModelOptions
options?: RunPromptOptions
): Promise<RunPromptResult>
defTool(
name: string,
description: string,
parameters: PromptParametersSchema | JSONSchema,
fn: ChatFunctionHandler
): void
console: PromptConsole
}

interface GenerationOutput {
Expand Down
7 changes: 6 additions & 1 deletion packages/core/src/types/prompt_type.d.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,11 @@

// keep in sync with PromptContext!

/**
* Console functions
*/
declare var console: PromptConsole

/**
* Setup prompt title and other parameters.
* Exactly one call should be present on top of .genai.js file.
Expand Down Expand Up @@ -174,7 +179,7 @@ declare function cancel(reason?: string): void
*/
declare function runPrompt(
generator: string | RunPromptGenerator,
options?: ModelOptions
options?: RunPromptOptions
): Promise<RunPromptResult>

/**
Expand Down
19 changes: 19 additions & 0 deletions packages/sample/genaisrc/console.genai.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
script({
    model: "openai:gpt-3.5-turbo",
    tests: {},
})

// Exercise the top-level console provided by the genaiscript runtime.
console.log("log")
console.warn("warn")
console.error("error")

// Exercise the per-run console on a nested prompt context; the label
// identifies this inner run in the trace.
await runPrompt(
    (ctx) => {
        ctx.console.log("prompt.log")
        ctx.console.warn("prompt.warn")
        ctx.console.error("prompt.error")
        ctx.$`write a movie title`
    },
    { label: "inner prompt" }
)

console.log("after run prompt")

$`write a poem`
Loading

0 comments on commit a39e506

Please sign in to comment.