support chat with images
blacksev committed Dec 25, 2023
1 parent 9135198 commit 20ff4e3
Showing 4 changed files with 74 additions and 10 deletions.
11 changes: 10 additions & 1 deletion app/client/api.ts
@@ -1,6 +1,6 @@
 import { getClientConfig } from "../config/client";
 import { ACCESS_CODE_PREFIX, ServiceProvider } from "../constant";
-import { ChatMessage, ModelType, useAccessStore } from "../store";
+import { AttachFile, ChatMessage, ModelType, useAccessStore } from "../store";
 import { ChatGPTApi } from "./platforms/openai";
 
 export const ROLES = ["system", "user", "assistant"] as const;
@@ -9,9 +9,18 @@ export type MessageRole = (typeof ROLES)[number];
 export const Models = ["gemini-pro", "gpt-4"] as const;
 export type ChatModel = ModelType;
 
+export interface TypedContent {
+  type: "text" | "image_url";
+  text?: string;
+  image_url?: {
+    url: string;
+  };
+}
+
 export interface RequestMessage {
   role: MessageRole;
   content: string;
+  attachFiles?: AttachFile[];
 }
 
 export interface LLMConfig {
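
The new TypedContent shape mirrors the text/image_url content parts accepted by OpenAI's vision-capable chat models, and attachFiles carries the raw attachments on a request message. A minimal sketch of how the two fit together (the filename and data URL below are placeholders; base64 is assumed to hold a full data URL):

import { RequestMessage, TypedContent } from "./api";

// Illustrative only: a user message carrying one attached image.
const message: RequestMessage = {
  role: "user",
  content: "What is shown in this screenshot?",
  attachFiles: [
    {
      filename: "screenshot.png",
      base64: "data:image/png;base64,iVBORw0KGgo...", // placeholder
    },
  ],
};

// The matching image content part, in the shape TypedContent describes.
const imagePart: TypedContent = {
  type: "image_url",
  image_url: { url: message.attachFiles![0].base64 },
};

In this commit the content parts are built inline in openai.ts rather than through TypedContent, but the shape is the same.
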
30 changes: 26 additions & 4 deletions app/client/platforms/openai.ts
@@ -68,10 +68,32 @@ export class ChatGPTApi implements LLMApi {
   }
 
   async chat(options: ChatOptions) {
-    const messages = options.messages.map((v) => ({
-      role: v.role,
-      content: v.content,
-    }));
+    const messages = options.messages.map((v) => {
+      if (v.attachFiles && v.attachFiles.length > 0) {
+        const content = [];
+        for (const file of v.attachFiles) {
+          content.push({
+            type: "image_url",
+            image_url: {
+              url: file.base64,
+            },
+          });
+        }
+        content.push({
+          type: "text",
+          text: v.content,
+        });
+        return {
+          role: v.role,
+          content,
+        };
+      } else {
+        return {
+          role: v.role,
+          content: v.content,
+        };
+      }
+    });
 
     const modelConfig = {
       ...useAppConfig.getState().modelConfig,
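
With this mapping, a message that has attachments is sent as an array of content parts (image parts first, then the text part), the multimodal format used by OpenAI's vision-capable chat models; messages without attachments keep the plain string content. Roughly, one mapped entry looks like this (the data URL is a placeholder for the attachment's base64 field):

// Sketch of a mapped message when attachFiles is non-empty.
const mapped = {
  role: "user",
  content: [
    {
      type: "image_url",
      image_url: { url: "data:image/png;base64,iVBORw0KGgo..." },
    },
    { type: "text", text: "Describe this image" },
  ],
};

Note that only vision-capable models (for example gpt-4-vision-preview at the time of this commit) accept image_url parts; the diff does not switch the model, so choosing a suitable one is left to the user's model settings.
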
15 changes: 13 additions & 2 deletions app/components/chat.tsx
@@ -49,6 +49,7 @@ import {
   useAppConfig,
   DEFAULT_TOPIC,
   ModelType,
+  AttachFile,
 } from "../store";
 
 import {
@@ -668,7 +669,7 @@ function _Chat() {

   const inputRef = useRef<HTMLTextAreaElement>(null);
   const [userInput, setUserInput] = useState("");
-  const [useImages, setUseImages] = useState<any[]>([]);
+  const [useImages, setUseImages] = useState<AttachFile[]>([]);
   const [isLoading, setIsLoading] = useState(false);
   const { submitKey, shouldSubmit } = useSubmitHandler();
   const { scrollRef, setAutoScroll, scrollDomToBottom } = useScrollToBottom();
@@ -756,6 +757,7 @@ function _Chat() {
     localStorage.setItem(LAST_INPUT_KEY, userInput);
     setUserInput("");
     setPromptHints([]);
+    setUseImages([]);
     if (!isMobileScreen) inputRef.current?.focus();
     setAutoScroll(true);
   };
@@ -1185,6 +1187,15 @@ function _Chat() {

           const shouldShowClearContextDivider = i === clearContextIndex - 1;
 
+          let textContent: string = "";
+
+          if (message.attachFiles && message.attachFiles.length > 0) {
+            textContent += message.attachFiles
+              .map((f: AttachFile) => `![${f.filename}](${f.base64})\n`)
+              .join("");
+          }
+          if (message.content) textContent += message.content;
+
           return (
             <Fragment key={message.id}>
               <div
@@ -1279,7 +1290,7 @@ function _Chat() {
                 )}
                 <div className={styles["chat-message-item"]}>
                   <Markdown
-                    content={message.content}
+                    content={textContent}
                     loading={
                       (message.preview || message.streaming) &&
                       message.content.length === 0 &&
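
On the display side, attachments are rendered by prepending standard Markdown image syntax, with the data URL as the image source, to the message text before it reaches the Markdown component. For a message with one attachment named screenshot.png and the text "What is this?", textContent would be roughly as follows (placeholder data URL):

// Sketch of the string passed to <Markdown content={textContent} />.
const textContent =
  "![screenshot.png](data:image/png;base64,iVBORw0KGgo...)\n" +
  "What is this?";

Because the base64 data URL is embedded directly, no separate upload or object URL is needed to show the image.
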
28 changes: 25 additions & 3 deletions app/store/chat.ts
@@ -11,13 +11,18 @@ import {
   StoreKey,
   SUMMARIZE_MODEL,
 } from "../constant";
-import { api, RequestMessage } from "../client/api";
+import { api, RequestMessage, TypedContent } from "../client/api";
 import { ChatControllerPool } from "../client/controller";
 import { prettyObject } from "../utils/format";
 import { estimateTokenLength } from "../utils/token";
 import { nanoid } from "nanoid";
 import { createPersistStore } from "../utils/store";
 
+export type AttachFile = {
+  filename: string;
+  base64: string;
+};
+
 export type ChatMessage = RequestMessage & {
   date: string;
   streaming?: boolean;
@@ -266,16 +271,18 @@ export const useChatStore = createPersistStore(
         get().summarizeSession();
       },
 
-      async onUserInput(content: string, files: File[] = []) {
+      async onUserInput(content: string, files: AttachFile[] = []) {
         const session = get().currentSession();
         const modelConfig = session.mask.modelConfig;
 
         const userContent = fillTemplateWith(content, modelConfig);
 
         console.log("[User Input] after template: ", userContent);
 
         const userMessage: ChatMessage = createMessage({
           role: "user",
           content: userContent,
+          attachFiles: files,
         });
 
         const botMessage: ChatMessage = createMessage({
@@ -286,7 +293,22 @@

         // get recent messages
         const recentMessages = get().getMessagesWithMemory();
-        const sendMessages = recentMessages.concat(userMessage);
+
+        let sendMessages: ChatMessage[];
+        const allFiles = recentMessages
+          .flatMap((m) => m.attachFiles ?? [])
+          .concat(files);
+        if (allFiles.length > 0) {
+          sendMessages = [
+            {
+              ...userMessage,
+              attachFiles: allFiles,
+            },
+          ];
+        } else {
+          sendMessages = recentMessages.concat(userMessage);
+        }
+
         const messageIndex = get().currentSession().messages.length + 1;
 
         // save user's and bot's message
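
The new branch changes which messages are sent once images are involved: if the new input or any recent message carries attachments, only the current user message is sent, with every attachment gathered onto it; otherwise the usual history-plus-new-message list is used. A standalone sketch of that rule with simplified, hypothetical types (not the app's real ChatMessage):

type Attachment = { filename: string; base64: string };
type Msg = { role: string; content: string; attachFiles?: Attachment[] };

// Mirrors the selection logic above: once any attachment exists, only the
// current user message is sent, carrying all attachments seen so far.
function selectSendMessages(
  recent: Msg[],
  userMessage: Msg,
  files: Attachment[] = [],
): Msg[] {
  const allFiles = recent.flatMap((m) => m.attachFiles ?? []).concat(files);
  return allFiles.length > 0
    ? [{ ...userMessage, attachFiles: allFiles }]
    : recent.concat(userMessage);
}

One consequence worth noting: as soon as any image is attached, earlier text turns are dropped from the request, so the model sees only the current prompt plus the accumulated images.
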
