
Commit ce8430d

#10,etc
katsumiar committed May 19, 2024
1 parent 9a2e0ed commit ce8430d
Showing 3 changed files with 62 additions and 27 deletions.
WiseOwlChat/ChatViewModel.cs (81 changes: 55 additions & 26 deletions)
@@ -10,6 +10,7 @@
 using System.Windows;
 using static WiseOwlChat.DirectionsFileManager;
 using System.Windows.Input;
+using Newtonsoft.Json.Linq;
 
 namespace WiseOwlChat
 {
@@ -58,13 +59,33 @@ public ObservableCollection<PluginInfo> PluginInfos
             get => openAIChat.FunctionCallingRegistry.PluginInfos;
         }
 
-        public MODEL_TYPE ModelType
+        public string ModelType
         {
-            get { return openAIChat.ModelType; }
+            get {
+                string? modelName = EnumHelper.GetDescription(openAIChat.ModelType);
+                if (modelName == null)
+                {
+                    return openAIChat.ModelType.ToString();
+                }
+                return modelName;
+            }
             set
             {
-                openAIChat.ModelType = value;
-                OnPropertyChanged();
+                foreach (MODEL_TYPE model in Enum.GetValues(typeof(MODEL_TYPE)))
+                {
+                    string? modelName = EnumHelper.GetDescription(model);
+                    if (modelName == value)
+                    {
+                        openAIChat.ModelType = model;
+                        OnPropertyChanged();
+                        return;
+                    }
+                }
+                if (Enum.TryParse<MODEL_TYPE>(value, out MODEL_TYPE mode))
+                {
+                    openAIChat.ModelType = (MODEL_TYPE)mode;
+                    OnPropertyChanged();
+                }
             }
         }
 
@@ -230,7 +251,11 @@ public ChatViewModel()
 
             foreach (MODEL_TYPE model in Enum.GetValues(typeof(MODEL_TYPE)))
            {
-                ModelTypeItems.Add(model.ToString());
+                string? modelName = EnumHelper.GetDescription(model);
+                if (modelName != null)
+                {
+                    ModelTypeItems.Add(modelName);
+                }
             }
 
             forbiddenExpressionChecker = new ForbiddenExpressionChecker();
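
Both the reworked ModelType property and the selector-populating loop above lean on EnumHelper.GetDescription, which is not part of this diff. As a rough sketch only (an assumed implementation, not code from this repository), such a helper typically reads the [Description] attribute off the enum member via reflection:

    using System;
    using System.ComponentModel;
    using System.Reflection;

    // Assumed sketch of an EnumHelper like the one referenced above; the real
    // implementation is not shown in this commit.
    public static class EnumHelper
    {
        // Returns the [Description] text of an enum member, or null when the
        // member carries no DescriptionAttribute.
        public static string? GetDescription(Enum value)
        {
            FieldInfo? field = value.GetType().GetField(value.ToString());
            DescriptionAttribute? attribute = field?.GetCustomAttribute<DescriptionAttribute>();
            return attribute?.Description;
        }
    }

With a helper of that shape, the getter surfaces API-facing names such as "gpt-3.5-turbo", while the setter accepts either such a name (matched against each member's description) or a literal member name via the Enum.TryParse fallback.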
@@ -459,9 +484,10 @@ private async Task query(Action update, Action<string?>? callback, string messag
 
             if (FunctionMode)
             {
-                if (ModelType == MODEL_TYPE.GPT_35_TURBO)
+                string? modelName = EnumHelper.GetDescription(MODEL_TYPE.GPT_35_TURBO);
+                if (ModelType == modelName)
                 {
-                    ModelType = MODEL_TYPE.GPT_35_TURBO_16K;
+                    ModelType = modelName;
                     popupMessageAction("Changed MODEL_TYPE to GPT_35_TURBO_16K.");
                 }
             }
@@ -771,25 +797,8 @@ private async Task queryPipeline(List<Task> delayedTaskList, Action update, Acti
                     continue;
                 }
 
-                await openAIChat.SystemRequestStreamForPipeline(update,
-                    (content) =>
-                    {
-                        if (content != null)
-                        {
-                            logMessage.content = content;
-                            delayedTranslation(delayedTaskList, logMessage);
-                        }
-                    }, viewThinkingMessage, tempRequest, ModelType);
-
-                string? elaboration = QueryManager.Instance.GetNameAndText("$Elaboration:");
-                if (elaboration != null)
+                if (Enum.TryParse<MODEL_TYPE>(ModelType, out MODEL_TYPE mode))
                 {
-                    // 推敲 (polish the response)
-                    List<ConversationEntry> elaborationRequest = openAIChat.MakeRequest(
-                        false,
-                        elaboration.Replace("{response}", logMessage.content),
-                        null, true);
-
                     await openAIChat.SystemRequestStreamForPipeline(update,
                         (content) =>
                         {
@@ -798,7 +807,27 @@ await openAIChat.SystemRequestStreamForPipeline(update,
                                 logMessage.content = content;
                                 delayedTranslation(delayedTaskList, logMessage);
                             }
-                        }, viewThinkingMessage, elaborationRequest, ModelType);
+                        }, viewThinkingMessage, tempRequest, mode);
+
+                    string? elaboration = QueryManager.Instance.GetNameAndText("$Elaboration:");
+                    if (elaboration != null)
+                    {
+                        // 推敲 (polish the response)
+                        List<ConversationEntry> elaborationRequest = openAIChat.MakeRequest(
+                            false,
+                            elaboration.Replace("{response}", logMessage.content),
+                            null, true);
+
+                        await openAIChat.SystemRequestStreamForPipeline(update,
+                            (content) =>
+                            {
+                                if (content != null)
+                                {
+                                    logMessage.content = content;
+                                    delayedTranslation(delayedTaskList, logMessage);
+                                }
+                            }, viewThinkingMessage, elaborationRequest, mode);
+                    }
                 }
 
                 if (logMessage == null || logMessage.content == null)
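
A note on the Enum.TryParse fallback used in queryPipeline and in the ModelType setter above: TryParse matches enum member names (or numeric values), not [Description] text, which is why the description-matching loop runs first. A minimal stand-alone illustration (not part of the commit, assuming the MODEL_TYPE enum from OpenAIChat.cs below):

    // Member names parse...
    bool byName = Enum.TryParse<MODEL_TYPE>("GPT_4_TURBO", out MODEL_TYPE parsed);   // true

    // ...but the [Description] strings used for display and for the API do not:
    bool byDescription = Enum.TryParse<MODEL_TYPE>("gpt-4-turbo", out _);             // false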
WiseOwlChat/Control/ChatControl.xaml.cs (2 changes: 1 addition & 1 deletion)
@@ -48,7 +48,7 @@ public ChatControl()
 
             modelSelector.Dispatcher.BeginInvoke(new Action(() =>
             {
-                chatViewModel.ModelType = OpenAIChat.MODEL_TYPE.GPT_35_TURBO;
+                chatViewModel.ModelType = OpenAIChat.MODEL_TYPE.GPT_35_TURBO.ToString();
             }), DispatcherPriority.Loaded);
 
             Instance = this;
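
Worth noting (an observation, not part of the diff): ToString() on an enum value yields the member name rather than its [Description], so this default assignment reaches the viewmodel as "GPT_35_TURBO" and is resolved by the Enum.TryParse branch of the new ModelType setter:

    // Illustration only, assuming the MODEL_TYPE enum from OpenAIChat.cs below.
    string initial = OpenAIChat.MODEL_TYPE.GPT_35_TURBO.ToString();   // "GPT_35_TURBO", not "gpt-3.5-turbo"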
WiseOwlChat/OpenAIChat.cs (6 changes: 6 additions & 0 deletions)
@@ -49,9 +49,15 @@ public enum MODEL_TYPE
            [Description("gpt-3.5-turbo")]
            GPT_35_TURBO,
 
+           [Description("gpt-4o")]
+           GPT_4o,
+
            [Description("gpt-3.5-turbo-16k")]
            GPT_35_TURBO_16K,
 
+           [Description("gpt-4-turbo")]
+           GPT_4_TURBO,
+
            [Description("gpt-4")]
            GPT_4,
 
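
The Description strings double as the model identifiers the OpenAI API expects, while the enum names remain valid C# identifiers. A hypothetical sketch of how such a description could end up as the "model" field of a chat request (how OpenAIChat.cs actually builds its payload is not shown in this diff):

    using System.Text.Json;

    // Hypothetical payload; field names follow the OpenAI chat completions API.
    string? model = EnumHelper.GetDescription(MODEL_TYPE.GPT_4o);             // "gpt-4o"
    var payload = new
    {
        model,
        messages = new[] { new { role = "user", content = "Hello" } }
    };
    string json = JsonSerializer.Serialize(payload);
    // {"model":"gpt-4o","messages":[{"role":"user","content":"Hello"}]}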
