From 98b61ba7cadab39033728ed1d2a149a15a093585 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E9=BB=8E=E8=B4=B5=E5=A5=87?= Date: Thu, 18 Jan 2024 16:09:29 +0800 Subject: [PATCH] =?UTF-8?q?feat:=20=E6=9B=B4=E6=96=B0=E6=A8=A1=E5=9E=8B?= =?UTF-8?q?=E6=98=A0=E5=B0=84?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Change-Id: I4230181b6d2bc8326f8165c3f71e45ef7a59f7b2 --- modules/ai/chat.ts | 4 ++-- modules/ai/constants.ts | 8 ++++++++ 2 files changed, 10 insertions(+), 2 deletions(-) diff --git a/modules/ai/chat.ts b/modules/ai/chat.ts index c3b070f..a369dc7 100644 --- a/modules/ai/chat.ts +++ b/modules/ai/chat.ts @@ -6,7 +6,7 @@ import { createFailResponse } from '../backend-node'; import { handleRequestError } from '../session/api-helper'; import { countToken, countTokenForString } from './encoding'; -import { ChatModel, DEFAULT_MAX_TOKEN, MAX_TOKENS, ChatOptions, ErrorResponse } from './constants'; +import { ChatModel, DEFAULT_MAX_TOKEN, MAX_TOKENS, ChatOptions, ErrorResponse, ModelPointer } from './constants'; import { ChatCompletion, ChatCompletionInStream } from './types'; import { getChatCompletionSupport } from './platform'; import { checkRequest, RequestControlError } from './request-control'; @@ -68,7 +68,7 @@ export async function chat(options: ChatOptions) { const content = { stream: true, - model, + model: ModelPointer[model] ?? model, user: support.user, // max_tokens: maxToken - token, ...other, diff --git a/modules/ai/constants.ts b/modules/ai/constants.ts index 628e1ce..915bcf1 100644 --- a/modules/ai/constants.ts +++ b/modules/ai/constants.ts @@ -98,6 +98,14 @@ export enum ChatModel { GPT_4_32K = 'gpt-4-32k', } +/** + * 模型指针 + */ +export const ModelPointer: Partial<Record<ChatModel, string>> = { + [ChatModel.GPT_4]: 'gpt-4-1106-preview', + [ChatModel.GPT_4_32K]: 'gpt-4-1106-preview', +}; + export const ALL_SUPPORTED_CHAT_MODEL = [ ChatModel.GPT3_5_TURBO, ChatModel.GPT3_5_TURBO_16K,