chore: refactor out ContextWindow and ChatLogs from the Chat class #244

Merged · 4 commits · Oct 2, 2023
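The refactor extracts two responsibilities from `Chat`: a `ContextWindow` that tracks the token budget, and `ChatLogs` that holds the logged messages and token counts that LLM backends actually consume. The hunks below show only call sites, so for orientation, here is a minimal sketch of the shapes they imply. Anything not literally visible in the diff is an assumption (the `persistent`/`temporary` names come from the old `DebugLlmReq` accessor), not the actual implementation.

```typescript
import { ChatMessage, LlmApi } from "@evo-ninja/agent-utils";

// Sketch only: shapes inferred from this PR's call sites.
// Field names not shown in the hunks are assumptions.

interface ChatLog {
  msgs: ChatMessage[]; // assumed field name
  tokens: number;      // the old DebugLlmReq getter summed `.tokens` per log
}

class ChatLogs {
  // the old getter reached into chat["persistent"] and chat["temporary"]
  constructor(
    private persistent: ChatLog = { msgs: [], tokens: 0 },
    private temporary: ChatLog = { msgs: [], tokens: 0 }
  ) {}

  // consolidated count, now exposed directly (see DebugLlmReq below)
  get tokens(): number {
    return this.persistent.tokens + this.temporary.tokens;
  }

  // immutable snapshot for debug logging (replaces chat.export())
  clone(): ChatLogs {
    return new ChatLogs(
      structuredClone(this.persistent),
      structuredClone(this.temporary)
    );
  }
}

class ContextWindow {
  // assumed: derives its token budget from the model behind `llm`
  constructor(private llm: LlmApi) {}
}
```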
apps/browser/src/pages/Dojo.tsx (3 changes: 2 additions & 1 deletion)

@@ -172,9 +172,10 @@ function Dojo() {
     const userWorkspace = new EvoCore.InMemoryWorkspace();
     setUserWorkspace(userWorkspace);
+    const contextWindow = new EvoCore.ContextWindow(llm);
     const chat = new EvoCore.Chat(
-      llm,
       cl100k_base,
+      contextWindow,
       logger
     );
apps/cli/src/app.ts (4 changes: 3 additions & 1 deletion)

@@ -11,6 +11,7 @@ import {
   Timeout,
   Workspace,
   LlmApi,
+  ContextWindow,
 } from "@evo-ninja/agent-utils";
 import dotenv from "dotenv";
 import readline from "readline";
@@ -90,7 +91,8 @@ export function createApp(config?: AppConfig): App {
     config?.userWorkspace ?? new FileSystemWorkspace(workspacePath);

   // Chat
-  const chat = new Chat(llm, cl100k_base, logger);
+  const contextWindow = new ContextWindow(llm);
+  const chat = new Chat(cl100k_base, contextWindow, logger);

   // Debug Logging
   let debugLog: DebugLog | undefined;
apps/cli/src/diagnostic/DebugLlmApi.ts (8 changes: 4 additions & 4 deletions)

@@ -1,7 +1,7 @@
 import { DebugLog } from "./DebugLog";
 import { Timer } from "./Timer";

-import { Chat, LlmApi, LlmOptions, ChatMessage } from "@evo-ninja/agent-utils";
+import { LlmApi, LlmOptions, ChatLogs, ChatMessage } from "@evo-ninja/agent-utils";

 export class DebugLlmApi implements LlmApi {
   constructor(
@@ -18,7 +18,7 @@ export class DebugLlmApi {
   }

   async getResponse(
-    chat: Chat,
+    chatLogs: ChatLogs,
     functionDefinitions: any[],
     options?: LlmOptions | undefined
   ): Promise<ChatMessage | undefined> {
@@ -28,15 +28,15 @@ export class DebugLlmApi {
     time.start();

     const resp = await this.llm.getResponse(
-      chat,
+      chatLogs,
       functionDefinitions,
       options
     );

     time.end();
     this.debugLog.stepLlmReq(
       time,
-      chat.export(),
+      chatLogs.clone(),
       resp
     );
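The switch from `chat.export()` to `chatLogs.clone()` preserves the intent: snapshot the logs at request time so the agent loop's later mutations don't retroactively change what was recorded. A hypothetical illustration (`add` is an invented mutator, not part of this PR):

```typescript
// Hypothetical: why the debug wrapper snapshots before recording.
const snapshot = chatLogs.clone();         // frozen copy for the debug log
chatLogs.add("user", "follow-up message"); // invented mutator, for illustration
// `snapshot` still reflects the request exactly as it was sent;
// `chatLogs` has moved on.
```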
apps/cli/src/diagnostic/DebugLlmReq.ts (18 changes: 11 additions & 7 deletions)

@@ -1,28 +1,32 @@
 import { Timer } from "./Timer";

-import { ChatMessageLog, ChatMessage } from "@evo-ninja/agent-utils";
+import { ChatLogs, ChatMessage } from "@evo-ninja/agent-utils";

 export class DebugLlmReq {
   constructor(
     public time: Timer,
-    public chat: ChatMessageLog,
+    public chatLogs: ChatLogs,
     public response?: ChatMessage
   ) { }

-  get tokens() {
-    return this.chat["persistent"].tokens +
-      this.chat["temporary"].tokens;
+  get tokens(): number {
+    return this.chatLogs.tokens;
   }

   toString(): string {
     return JSON.stringify(this.toJSON(), null, 2);
   }

-  toJSON() {
+  toJSON(): {
+    time: Timer;
+    tokens: number;
+    chat: ChatLogs;
+    response?: ChatMessage;
+  } {
     return {
       time: this.time,
       tokens: this.tokens,
-      chat: this.chat,
+      chat: this.chatLogs,
       response: this.response
     };
   }
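Beyond the rename, this removes a TypeScript loophole the old getter depended on: members are reachable through string-index access even when they are not public, which is presumably why the old code read `chat["persistent"]` instead of a normal property. Delegating to a `tokens` getter on `ChatLogs` lets the class own its own invariant. A self-contained demonstration of the loophole:

```typescript
class Example {
  private secret = 42;
}

const e = new Example();
// e.secret;               // compile error: 'secret' is private
const leaked = e["secret"]; // compiles: element access skips the visibility check
console.log(leaked);        // 42
```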
apps/cli/src/diagnostic/DebugLog.ts (11 changes: 7 additions & 4 deletions)

@@ -1,7 +1,7 @@
 import { Timer } from "./Timer";
 import { DebugLlmReq } from "./DebugLlmReq";

-import { ChatMessageLog, ChatMessage, Workspace } from "@evo-ninja/agent-utils";
+import { ChatLogs, ChatMessage, Workspace } from "@evo-ninja/agent-utils";

 interface DebugGoal {
   prompt: string;
@@ -77,8 +77,8 @@ export class DebugLog {
     this.save();
   }

-  stepLlmReq(time: Timer, chat: ChatMessageLog, response?: ChatMessage): void {
-    const req = new DebugLlmReq(time, chat, response);
+  stepLlmReq(time: Timer, chatLogs: ChatLogs, response?: ChatMessage): void {
+    const req = new DebugLlmReq(time, chatLogs, response);
     this.goal.llmReqs += 1;
     this.goal.tokens += req.tokens;
     this._latestStep.llmReqs.push(req);
@@ -89,7 +89,10 @@
     return JSON.stringify(this.toJSON(), null, 2);
   }

-  toJSON() {
+  toJSON(): {
+    goal: DebugGoal;
+    steps: DebugStep[];
+  } {
     return {
       goal: this.goal,
       steps: this.steps,
apps/cli/src/diagnostic/Timer.ts (4 changes: 2 additions & 2 deletions)

@@ -26,11 +26,11 @@ export class Timer {
     return `${this._pad(hours)}:${this._pad(minutes)}:${this._pad(seconds)}`;
   }

-  toString() {
+  toString(): string {
     return this.getHHMMSS();
   }

-  toJSON() {
+  toJSON(): string {
     return this.toString();
   }
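The `toJSON(): string` annotation documents an easy-to-miss behavior: `JSON.stringify` invokes `toJSON()` when present, so a `Timer` embedded anywhere in the debug log serializes as its formatted duration rather than as an object. For example (elapsed value illustrative):

```typescript
const timer = new Timer();
timer.start();
// ... timed work ...
timer.end();

// JSON.stringify picks up toJSON(), so the timer appears as "HH:MM:SS":
JSON.stringify({ time: timer }); // e.g. '{"time":"00:00:42"}'
```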
packages/agent-utils/src/agent/basicFunctionCallLoop.ts (2 changes: 1 addition & 1 deletion)

@@ -27,7 +27,7 @@ export async function* basicFunctionCallLoop<TContext extends { llm: LlmApi, cha
     await chat.fitToContextWindow();

     const functionDefinitions = agentFunctions.map(f => f.definition);
-    const response = await llm.getResponse(chat, functionDefinitions);
+    const response = await llm.getResponse(chat.chatLogs, functionDefinitions);

     if (!response) {
       return ResultErr("No response from LLM.");
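With this change, `ChatLogs` rather than `Chat` is the boundary type between the agent loop and any `LlmApi` implementation: the loop first trims via `chat.fitToContextWindow()`, then hands only the logs to the backend. Judging from `DebugLlmApi.getResponse` above, the interface now presumably reads roughly:

```typescript
import { ChatLogs, ChatMessage, LlmOptions } from "@evo-ninja/agent-utils";

// Reconstructed from DebugLlmApi.getResponse in this PR, not copied from
// the source; treat it as an approximation.
interface LlmApi {
  getResponse(
    chatLogs: ChatLogs,
    functionDefinitions: any[],
    options?: LlmOptions
  ): Promise<ChatMessage | undefined>;
}
```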