diff --git a/ChatGPT.sln b/ChatGPT.sln index 07a90d9c..5082498c 100644 --- a/ChatGPT.sln +++ b/ChatGPT.sln @@ -56,6 +56,8 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ChatGPT.UI.Game", "samples\ EndProject Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "samples", "samples", "{5EDF3913-3E89-44F6-A11F-52DA003AD315}" EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "ChatGPT.CLI.FunctionCalling", "samples\ChatGPT.CLI.FunctionCalling\ChatGPT.CLI.FunctionCalling.csproj", "{0590592D-CDF7-4705-9F34-D8E779B66644}" +EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution Debug|Any CPU = Debug|Any CPU @@ -122,6 +124,10 @@ Global {B0A8A296-575A-443A-BD55-12713EAFE506}.Debug|Any CPU.Build.0 = Debug|Any CPU {B0A8A296-575A-443A-BD55-12713EAFE506}.Release|Any CPU.ActiveCfg = Release|Any CPU {B0A8A296-575A-443A-BD55-12713EAFE506}.Release|Any CPU.Build.0 = Release|Any CPU + {0590592D-CDF7-4705-9F34-D8E779B66644}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {0590592D-CDF7-4705-9F34-D8E779B66644}.Debug|Any CPU.Build.0 = Debug|Any CPU + {0590592D-CDF7-4705-9F34-D8E779B66644}.Release|Any CPU.ActiveCfg = Release|Any CPU + {0590592D-CDF7-4705-9F34-D8E779B66644}.Release|Any CPU.Build.0 = Release|Any CPU EndGlobalSection GlobalSection(SolutionProperties) = preSolution HideSolutionNode = FALSE @@ -142,5 +148,6 @@ Global {A9793A98-235E-4877-A3FC-C7C3DB4852FA} = {62EB4E5F-59EB-4D59-83A0-2A8B5B2397ED} {F9D8D17B-228B-4D5E-8126-39E8F41B87A2} = {D3B27217-AB31-4AB0-8FF9-C7528DA03FDE} {B0A8A296-575A-443A-BD55-12713EAFE506} = {5EDF3913-3E89-44F6-A11F-52DA003AD315} + {0590592D-CDF7-4705-9F34-D8E779B66644} = {5EDF3913-3E89-44F6-A11F-52DA003AD315} EndGlobalSection EndGlobal diff --git a/samples/ChatGPT.CLI.FunctionCalling/ChatGPT.CLI.FunctionCalling.csproj b/samples/ChatGPT.CLI.FunctionCalling/ChatGPT.CLI.FunctionCalling.csproj new file mode 100644 index 00000000..039e6a94 --- /dev/null +++ b/samples/ChatGPT.CLI.FunctionCalling/ChatGPT.CLI.FunctionCalling.csproj @@ -0,0 +1,19 @@ + + + Exe + net7.0 + win-x64;linux-x64;linux-arm64;osx-x64;osx-arm64 + enable + enable + full + False + ChatGPT.CLI + + + True + false + + + + + diff --git a/samples/ChatGPT.CLI.FunctionCalling/Program.cs b/samples/ChatGPT.CLI.FunctionCalling/Program.cs new file mode 100644 index 00000000..6d219f3f --- /dev/null +++ b/samples/ChatGPT.CLI.FunctionCalling/Program.cs @@ -0,0 +1,113 @@ +using ChatGPT; +using ChatGPT.ViewModels.Chat; + +Defaults.ConfigureDefaultServices(); + +var directions = +""" +You are a helpful assistant. +Write answers in plain text. +Do not use markdown. +Only use the functions you have been provided with. +"""; + +if (args.Length == 1) +{ + directions = args[0]; +} + +using var cts = new CancellationTokenSource(); + +var functions = GetFunctions(); + +var chat = new ChatViewModel(new ChatSettingsViewModel +{ + MaxTokens = 2000, + Model = "gpt-3.5-turbo-0613", + Functions = functions, + FunctionCall = "auto" + // Force function call by setting FunctionCall property. + // FunctionCall = new { name = "GetCurrentWeather" } +}); + +// Enable to debug json requests and responses. 
+// chat.Debug = true; + +chat.AddSystemMessage(directions); + +while (true) +{ + Console.Write("> "); + + var input = Console.ReadLine(); + if (string.IsNullOrWhiteSpace(input) || input == Environment.NewLine) + { + continue; + } + + try + { + chat.AddUserMessage(input); + var result = await chat.SendAsync(chat.CreateChatMessages(), cts.Token); + + chat.AddAssistantMessage(result?.Message); + + if (result?.Message is { }) + { + Console.WriteLine(result.Message); + } + + if (result?.FunctionCall is { } functionCall) + { + if (functionCall.Name == "GetCurrentWeather" && functionCall.Arguments is { }) + { + functionCall.Arguments.TryGetValue("location", out var location); + functionCall.Arguments.TryGetValue("unit", out var unit); + var functionCallResult = GetCurrentWeather(location, unit ?? "celsius"); + chat.AddFunctionMessage(functionCallResult, functionCall.Name); + + Console.WriteLine(functionCallResult); + } + } + } + catch (Exception ex) + { + Console.WriteLine("Error: " + ex.Message); + } +} + +string GetCurrentWeather(string? location, string? unit) +{ + Console.WriteLine($"Weather for {location} [{unit}]."); + return "Cloudy."; +} + +object GetFunctions() +{ + return new[] + { + new + { + name = "GetCurrentWeather", + description = "Get the current weather in a given location", + parameters = new + { + type = "object", + properties = new + { + location = new + { + type = "string", + description = "The city and state, e.g. San Francisco, CA" + }, + unit = new + { + type = "string", + @enum = new[] {"celsius", "fahrenheit"} + }, + }, + required = new[] {"location"} + }, + } + }; +} diff --git a/src/ChatGPT.Core/ViewModels/Chat/ChatFunctionCallViewModel.cs b/src/ChatGPT.Core/ViewModels/Chat/ChatFunctionCallViewModel.cs new file mode 100644 index 00000000..69438586 --- /dev/null +++ b/src/ChatGPT.Core/ViewModels/Chat/ChatFunctionCallViewModel.cs @@ -0,0 +1,35 @@ +using System.Text.Json.Serialization; +using CommunityToolkit.Mvvm.ComponentModel; + +namespace ChatGPT.ViewModels.Chat; + +public class ChatFunctionCallViewModel : ObservableObject +{ + private string? _name; + + [JsonConstructor] + public ChatFunctionCallViewModel() + { + } + + public ChatFunctionCallViewModel(string name) + : this() + { + _name = name; + } + + [JsonPropertyName("name")] + public string? Name + { + get => _name; + set => SetProperty(ref _name, value); + } + + public ChatFunctionCallViewModel Copy() + { + return new ChatFunctionCallViewModel + { + Name = _name, + }; + } +} diff --git a/src/ChatGPT.Core/ViewModels/Chat/ChatFunctionViewModel.cs b/src/ChatGPT.Core/ViewModels/Chat/ChatFunctionViewModel.cs new file mode 100644 index 00000000..7066046a --- /dev/null +++ b/src/ChatGPT.Core/ViewModels/Chat/ChatFunctionViewModel.cs @@ -0,0 +1,63 @@ +using System.Text.Json.Serialization; +using CommunityToolkit.Mvvm.ComponentModel; + +namespace ChatGPT.ViewModels.Chat; + +public class ChatFunctionViewModel : ObservableObject +{ + private string? _name; + private string? _description; + private object? _parameters; + + [JsonConstructor] + public ChatFunctionViewModel() + { + } + + public ChatFunctionViewModel(string name, string description) + : this() + { + _name = name; + _description = description; + } + + public ChatFunctionViewModel(string name, string description, object parameters) + : this() + { + _name = name; + _description = description; + _parameters = parameters; + } + + [JsonPropertyName("name")] + public string? 
Name + { + get => _name; + set => SetProperty(ref _name, value); + } + + [JsonPropertyName("description")] + public string? Description + { + get => _description; + set => SetProperty(ref _description, value); + } + + [JsonPropertyName("parameters")] + public object? Parameters + { + get => _parameters; + set => SetProperty(ref _parameters, value); + } + + public ChatFunctionViewModel Copy() + { + return new ChatFunctionViewModel + { + Name = _name, + Description = _description, + // TODO: Copy Parameters if type is reference. + Parameters = _parameters + }; + } +} diff --git a/src/ChatGPT.Core/ViewModels/Chat/ChatMessageFunctionCallViewModel.cs b/src/ChatGPT.Core/ViewModels/Chat/ChatMessageFunctionCallViewModel.cs new file mode 100644 index 00000000..6ef78675 --- /dev/null +++ b/src/ChatGPT.Core/ViewModels/Chat/ChatMessageFunctionCallViewModel.cs @@ -0,0 +1,52 @@ +using System.Collections.Generic; +using System.Linq; +using System.Text.Json.Serialization; +using CommunityToolkit.Mvvm.ComponentModel; + +namespace ChatGPT.ViewModels.Chat; + +public class ChatMessageFunctionCallViewModel : ObservableObject +{ + private string? _name; + private Dictionary? _arguments; + + [JsonConstructor] + public ChatMessageFunctionCallViewModel() + { + } + + public ChatMessageFunctionCallViewModel(string role, Dictionary arguments) + : this() + { + _name = role; + _arguments = arguments; + } + + [JsonPropertyName("name")] + public string? Name + { + get => _name; + set => SetProperty(ref _name, value); + } + + [JsonPropertyName("arguments")] + public Dictionary? Arguments + { + get => _arguments; + set => SetProperty(ref _arguments, value); + } + + public ChatMessageFunctionCallViewModel Copy() + { + var functionCall = new ChatMessageFunctionCallViewModel + { + Name = _name, + // TODO: Copy entry Value if it's reference value. + Arguments = _arguments?.ToDictionary( + e => e.Key, + e => e.Value) + }; + + return functionCall; + } +} diff --git a/src/ChatGPT.Core/ViewModels/Chat/ChatMessageViewModel.cs b/src/ChatGPT.Core/ViewModels/Chat/ChatMessageViewModel.cs index 77240b07..20d47698 100644 --- a/src/ChatGPT.Core/ViewModels/Chat/ChatMessageViewModel.cs +++ b/src/ChatGPT.Core/ViewModels/Chat/ChatMessageViewModel.cs @@ -13,6 +13,8 @@ public class ChatMessageViewModel : ObservableObject { private string? _role; private string? _message; + private string? _name; + private ChatMessageFunctionCallViewModel? _functionCall; private string? _format; private bool _isSent; private bool _isAwaiting; @@ -52,6 +54,26 @@ public ChatMessageViewModel(string role, string message) _message = message; } + public ChatMessageViewModel(string role, string message, string name) + : this() + { + _role = role; + _message = message; + _name = name; + } + + public ChatMessageViewModel(string role, string? message, string name, ChatMessageFunctionCallViewModel functionCall) + : this() + { + _role = role; + _message = message; + _name = name; + _functionCall = functionCall; + } + + /// + /// The role of the messages author. One of system, user, assistant, or function. + /// [JsonPropertyName("role")] public string? Role { @@ -59,6 +81,9 @@ public string? Role set => SetProperty(ref _role, value); } + /// + /// The contents of the message. content is required for all messages, and may be null for assistant messages with function calls. + /// [JsonPropertyName("message")] public string? Message { @@ -66,6 +91,26 @@ public string? Message set => SetProperty(ref _message, value); } + /// + /// The name of the author of this message. 
name is required if role is function, and it should be the name of the function whose response is in the content. May contain a-z, A-Z, 0-9, and underscores, with a maximum length of 64 characters. + /// + [JsonPropertyName("name")] + public string? Name + { + get => _name; + set => SetProperty(ref _name, value); + } + + /// + /// The name and arguments of a function that should be called, as generated by the model. + /// + [JsonPropertyName("function_call")] + public ChatMessageFunctionCallViewModel? FunctionCall + { + get => _functionCall; + set => SetProperty(ref _functionCall, value); + } + [JsonPropertyName("format")] public string? Format { @@ -299,6 +344,8 @@ public ChatMessageViewModel Copy() { Role = _role, Message = _message, + Name = _name, + FunctionCall = _functionCall?.Copy(), Format = _format, IsSent = _isSent, IsAwaiting = _isAwaiting, diff --git a/src/ChatGPT.Core/ViewModels/Chat/ChatResultViewModel.cs b/src/ChatGPT.Core/ViewModels/Chat/ChatResultViewModel.cs index be701e72..829e71c4 100644 --- a/src/ChatGPT.Core/ViewModels/Chat/ChatResultViewModel.cs +++ b/src/ChatGPT.Core/ViewModels/Chat/ChatResultViewModel.cs @@ -7,6 +7,7 @@ public class ChatResultViewModel : ObservableObject { private string? _message; private bool _isError; + private ChatMessageFunctionCallViewModel? _functionCall; [JsonPropertyName("name")] public string? Message @@ -21,4 +22,11 @@ public bool IsError get => _isError; set => SetProperty(ref _isError, value); } + + [JsonPropertyName("function_call")] + public ChatMessageFunctionCallViewModel? FunctionCall + { + get => _functionCall; + set => SetProperty(ref _functionCall, value); + } } diff --git a/src/ChatGPT.Core/ViewModels/Chat/ChatSettingsViewModel.cs b/src/ChatGPT.Core/ViewModels/Chat/ChatSettingsViewModel.cs index 31b9e36f..c3bfb28a 100644 --- a/src/ChatGPT.Core/ViewModels/Chat/ChatSettingsViewModel.cs +++ b/src/ChatGPT.Core/ViewModels/Chat/ChatSettingsViewModel.cs @@ -5,6 +5,8 @@ namespace ChatGPT.ViewModels.Chat; public partial class ChatSettingsViewModel : ObservableObject { + private object? _functions; + private object? _functionCall; private decimal _temperature; private decimal _topP; private decimal _presencePenalty; @@ -19,6 +21,8 @@ public partial class ChatSettingsViewModel : ObservableObject [JsonConstructor] public ChatSettingsViewModel() { + _functions = null; + _functionCall = null; _temperature = 0.7m; _topP = 1m; _presencePenalty = 0m; @@ -30,6 +34,32 @@ public ChatSettingsViewModel() _apiUrl = null; } + /// + /// A list of functions the model may generate JSON inputs for. + /// + [JsonPropertyName("functions")] + public object? Functions + { + get => _functions; + set => SetProperty(ref _functions, value); + } + + /// + /// Controls how the model responds to function calls. "none" means the model does not call a function, and responds to the end-user. "auto" means the model can pick between an end-user or calling a function. Specifying a particular function via {"name":\ "my_function"} forces the model to call that function. "none" is the default when no functions are present. "auto" is the default if functions are present. + /// + [JsonPropertyName("function_call")] + public object? FunctionCall + { + get => _functionCall; + set => SetProperty(ref _functionCall, value); + } + + /// + /// What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic. 
+ /// + /// + /// We generally recommend altering this or top_p but not both. + /// [JsonPropertyName("temperature")] public decimal Temperature { @@ -37,6 +67,12 @@ public decimal Temperature set => SetProperty(ref _temperature, value); } + /// + /// An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered. + /// + /// + /// We generally recommend altering this or temperature but not both. + /// [JsonPropertyName("top_p")] public decimal TopP { @@ -44,6 +80,9 @@ public decimal TopP set => SetProperty(ref _topP, value); } + /// + /// Number between -2.0 and 2.0. Positive values penalize new tokens based on whether they appear in the text so far, increasing the model's likelihood to talk about new topics. + /// [JsonPropertyName("presence_penalty")] public decimal PresencePenalty { @@ -51,6 +90,9 @@ public decimal PresencePenalty set => SetProperty(ref _presencePenalty, value); } + /// + /// Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim. + /// [JsonPropertyName("frequency_penalty")] public decimal FrequencyPenalty { @@ -104,6 +146,12 @@ public ChatSettingsViewModel Copy() { return new ChatSettingsViewModel { + // TODO: Copy Functions object. + Functions = _functions, + // TODO: Copy FunctionCall object. + FunctionCall = _functionCall is ChatFunctionCallViewModel functionCall + ? functionCall.Copy() + : _functionCall, Temperature = _temperature, TopP = _topP, PresencePenalty = _presencePenalty, diff --git a/src/ChatGPT.Core/ViewModels/Chat/ChatViewModel.cs b/src/ChatGPT.Core/ViewModels/Chat/ChatViewModel.cs index b71cb3c0..f5befcf8 100644 --- a/src/ChatGPT.Core/ViewModels/Chat/ChatViewModel.cs +++ b/src/ChatGPT.Core/ViewModels/Chat/ChatViewModel.cs @@ -20,6 +20,7 @@ public class ChatViewModel : ObservableObject private ObservableCollection _messages; private ChatMessageViewModel? _currentMessage; private bool _isEnabled; + private bool _debug; private CancellationTokenSource? 
_cts; [JsonConstructor] @@ -92,6 +93,13 @@ public bool IsEnabled set => SetProperty(ref _isEnabled, value); } + [JsonIgnore] + public bool Debug + { + get => _debug; + set => SetProperty(ref _debug, value); + } + public void SetMessageActions(ChatMessageViewModel message) { message.SetSendAction(SendAsync); @@ -282,7 +290,9 @@ public ChatMessage[] CreateChatMessages() chatMessages.Add(new ChatMessage { Role = message.Role, - Content = content + Content = content, + Name = message.Name + // TODO: FunctionCall }); continue; @@ -293,7 +303,9 @@ public ChatMessage[] CreateChatMessages() chatMessages.Add(new ChatMessage { Role = message.Role, - Content = message.Message + Content = message.Message, + Name = message.Name, + // TODO: FunctionCall }); } } @@ -396,12 +408,15 @@ public ChatMessage[] CreateChatMessages() { Model = Settings.Model, Messages = messages, + Functions = Settings.Functions, + FunctionCall = Settings.FunctionCall, Suffix = null, Temperature = Settings.Temperature, MaxTokens = Settings.MaxTokens, TopP = 1.0m, Stop = null, ApiUrl = Settings.ApiUrl, + Debug = Debug }; var result = new ChatResultViewModel @@ -424,9 +439,24 @@ public ChatMessage[] CreateChatMessages() } else if (responseData is ChatResponseSuccess success) { - var message = success.Choices?.FirstOrDefault()?.Message?.Content?.Trim(); - result.Message = message ?? ""; + var choice = success.Choices?.FirstOrDefault(); + var message = choice?.Message?.Content?.Trim(); + result.Message = message; result.IsError = false; + + if (choice is { } && choice.Message?.FunctionCall is { } functionCall) + { + var serializer = Defaults.Locator.GetService(); + var arguments = functionCall.Arguments is { } + ? serializer?.Deserialize>(functionCall.Arguments) + : null; + + result.FunctionCall = new () + { + Name = functionCall.Name, + Arguments = arguments + }; + } } return result; @@ -462,6 +492,17 @@ public ChatViewModel AddAssistantMessage(string? message) return this; } + public ChatViewModel AddFunctionMessage(string? message, string? name) + { + Messages.Add(new ChatMessageViewModel + { + Role = "function", + Message = message, + Name = name + }); + return this; + } + private ObservableCollection CopyMessages(out ChatMessageViewModel? 
currentMessage) { var messages = new ObservableCollection(); diff --git a/src/ChatGPT.Core/ViewModels/MainViewModelJsonContext.cs b/src/ChatGPT.Core/ViewModels/MainViewModelJsonContext.cs index 273d75f0..e963ee65 100644 --- a/src/ChatGPT.Core/ViewModels/MainViewModelJsonContext.cs +++ b/src/ChatGPT.Core/ViewModels/MainViewModelJsonContext.cs @@ -9,6 +9,9 @@ namespace ChatGPT.ViewModels; [JsonSerializable(typeof(ChatMessageViewModel))] [JsonSerializable(typeof(ChatViewModel))] +[JsonSerializable(typeof(ChatFunctionCallViewModel))] +[JsonSerializable(typeof(ChatFunctionViewModel))] +[JsonSerializable(typeof(ChatMessageFunctionCallViewModel))] [JsonSerializable(typeof(ObservableCollection))] [JsonSerializable(typeof(ChatSettingsViewModel))] [JsonSerializable(typeof(ObservableCollection))] diff --git a/src/ChatGPT.UI/Themes/Icons.axaml b/src/ChatGPT.UI/Themes/Icons.axaml index aafd8d36..fb247ff8 100644 --- a/src/ChatGPT.UI/Themes/Icons.axaml +++ b/src/ChatGPT.UI/Themes/Icons.axaml @@ -26,4 +26,5 @@ M4.75 5h14.5A2.75 2.75 0 0 1 22 7.75v8.5A2.75 2.75 0 0 1 19.25 19H4.75A2.75 2.75 0 0 1 2 16.25v-8.5A2.75 2.75 0 0 1 4.75 5Zm0 1.5c-.69 0-1.25.56-1.25 1.25v8.5c0 .69.56 1.25 1.25 1.25h14.5c.69 0 1.25-.56 1.25-1.25v-8.5c0-.69-.56-1.25-1.25-1.25H4.75Z m16.242 2.932 4.826 4.826a2.75 2.75 0 0 1-.715 4.404l-4.87 2.435a.75.75 0 0 0-.374.426l-1.44 4.166a1.25 1.25 0 0 1-2.065.476L8.5 16.561 4.06 21H3v-1.06l4.44-4.44-3.105-3.104a1.25 1.25 0 0 1 .476-2.066l4.166-1.44a.75.75 0 0 0 .426-.373l2.435-4.87a2.75 2.75 0 0 1 4.405-.715Zm3.766 5.886-4.826-4.826a1.25 1.25 0 0 0-2.002.325l-2.435 4.871a2.25 2.25 0 0 1-1.278 1.12l-3.789 1.31 6.705 6.704 1.308-3.789a2.25 2.25 0 0 1 1.12-1.277l4.872-2.436a1.25 1.25 0 0 0 .325-2.002Z M3.28 2.22a.75.75 0 0 0-1.06 1.06l5.905 5.905L4.81 10.33a1.25 1.25 0 0 0-.476 2.065L7.439 15.5 3 19.94V21h1.06l4.44-4.44 3.105 3.105a1.25 1.25 0 0 0 2.065-.476l1.145-3.313 5.905 5.904a.75.75 0 0 0 1.06-1.06L3.28 2.22Zm10.355 12.476-1.252 3.626-6.705-6.705 3.626-1.252 4.331 4.331Zm6.048-3.876-3.787 1.894 1.118 1.118 3.34-1.67a2.75 2.75 0 0 0 .714-4.404l-4.825-4.826a2.75 2.75 0 0 0-4.405.715l-1.67 3.34 1.118 1.117 1.894-3.787a1.25 1.25 0 0 1 2.002-.325l4.826 4.826a1.25 1.25 0 0 1-.325 2.002Z + m8.086 18.611 5.996-14.004a1 1 0 0 1 1.878.677l-.04.11-5.996 14.004a1 1 0 0 1-1.878-.677l.04-.11 5.996-14.004L8.086 18.61Zm-5.793-7.318 4-4a1 1 0 0 1 1.497 1.32l-.083.094L4.414 12l3.293 3.293a1 1 0 0 1-1.32 1.498l-.094-.084-4-4a1 1 0 0 1-.083-1.32l.083-.094 4-4-4 4Zm14-4.001a1 1 0 0 1 1.32-.083l.093.083 4.001 4.001a1 1 0 0 1 .083 1.32l-.083.095-4.001 3.995a1 1 0 0 1-1.497-1.32l.084-.095L19.584 12l-3.293-3.294a1 1 0 0 1 0-1.414Z diff --git a/src/ChatGPT.UI/Views/Chat/ChatMessageRoleView.axaml b/src/ChatGPT.UI/Views/Chat/ChatMessageRoleView.axaml index 1cab87e8..6153034e 100644 --- a/src/ChatGPT.UI/Views/Chat/ChatMessageRoleView.axaml +++ b/src/ChatGPT.UI/Views/Chat/ChatMessageRoleView.axaml @@ -79,6 +79,16 @@ Foreground="{DynamicResource ActionIconBrush}"/> + + + + + diff --git a/src/ChatGPT/Model/Json/Chat/ChatFunctionCall.cs b/src/ChatGPT/Model/Json/Chat/ChatFunctionCall.cs new file mode 100644 index 00000000..cf31c5e2 --- /dev/null +++ b/src/ChatGPT/Model/Json/Chat/ChatFunctionCall.cs @@ -0,0 +1,12 @@ +using System.Runtime.Serialization; +using System.Text.Json.Serialization; + +namespace AI.Model.Json.Chat; + +[DataContract] +public class ChatFunctionCall +{ + [DataMember(Name = "name")] + [JsonPropertyName("name")] + public string? 
Name { get; set; } +} diff --git a/src/ChatGPT/Model/Json/Chat/ChatFunctionsJsonConverter.cs b/src/ChatGPT/Model/Json/Chat/ChatFunctionsJsonConverter.cs new file mode 100644 index 00000000..a3fa79d2 --- /dev/null +++ b/src/ChatGPT/Model/Json/Chat/ChatFunctionsJsonConverter.cs @@ -0,0 +1,26 @@ +using System; +using System.Text.Json; +using System.Text.Json.Serialization; + +namespace AI.Model.Json.Chat; + +internal class ChatFunctionsJsonConverter : JsonConverter +{ + public override object Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options) + { + throw new NotImplementedException(); + } + + public override void Write(Utf8JsonWriter writer, object? value, JsonSerializerOptions options) + { + if (value is null) + { + writer.WriteNullValue(); + } + else + { + var json = JsonSerializer.Serialize(value, new JsonSerializerOptions { WriteIndented = true }); + writer.WriteRawValue(json); + } + } +} diff --git a/src/ChatGPT/Model/Json/Chat/ChatJsonContext.cs b/src/ChatGPT/Model/Json/Chat/ChatJsonContext.cs index 6084b609..79f1218c 100644 --- a/src/ChatGPT/Model/Json/Chat/ChatJsonContext.cs +++ b/src/ChatGPT/Model/Json/Chat/ChatJsonContext.cs @@ -1,3 +1,4 @@ +using System.Collections.Generic; using System.Text.Json.Serialization; namespace AI.Model.Json.Chat; @@ -6,6 +7,9 @@ namespace AI.Model.Json.Chat; [JsonSerializable(typeof(ChatResponseSuccess))] [JsonSerializable(typeof(ChatChoice))] [JsonSerializable(typeof(ChatMessage))] +[JsonSerializable(typeof(ChatFunctionCall))] +[JsonSerializable(typeof(ChatMessageFunctionCall))] +[JsonSerializable(typeof(Dictionary))] [JsonSerializable(typeof(ChatUsage))] [JsonSerializable(typeof(ChatResponseError))] [JsonSerializable(typeof(ChatError))] diff --git a/src/ChatGPT/Model/Json/Chat/ChatMessage.cs b/src/ChatGPT/Model/Json/Chat/ChatMessage.cs index b988a5e5..634f38f4 100644 --- a/src/ChatGPT/Model/Json/Chat/ChatMessage.cs +++ b/src/ChatGPT/Model/Json/Chat/ChatMessage.cs @@ -3,14 +3,37 @@ namespace AI.Model.Json.Chat; +/// +/// Chat completion request message model. +/// [DataContract] public class ChatMessage { + /// + /// The role of the messages author. One of system, user, assistant, or function. + /// [DataMember(Name = "role")] [JsonPropertyName("role")] public string? Role { get; set; } + /// + /// The contents of the message. content is required for all messages, and may be null for assistant messages with function calls. + /// [DataMember(Name = "content")] [JsonPropertyName("content")] public string? Content { get; set; } + + /// + /// The name of the author of this message. name is required if role is function, and it should be the name of the function whose response is in the content. May contain a-z, A-Z, 0-9, and underscores, with a maximum length of 64 characters. + /// + [DataMember(Name = "name")] + [JsonPropertyName("name")] + public string? Name { get; set; } + + /// + /// The name and arguments of a function that should be called, as generated by the model. + /// + [DataMember(Name = "function_call")] + [JsonPropertyName("function_call")] + public ChatMessageFunctionCall? 
FunctionCall { get; set; } } diff --git a/src/ChatGPT/Model/Json/Chat/ChatMessageFunctionCall.cs b/src/ChatGPT/Model/Json/Chat/ChatMessageFunctionCall.cs new file mode 100644 index 00000000..a90317ca --- /dev/null +++ b/src/ChatGPT/Model/Json/Chat/ChatMessageFunctionCall.cs @@ -0,0 +1,16 @@ +using System.Runtime.Serialization; +using System.Text.Json.Serialization; + +namespace AI.Model.Json.Chat; + +[DataContract] +public class ChatMessageFunctionCall +{ + [DataMember(Name = "name")] + [JsonPropertyName("name")] + public string? Name { get; set; } + + [DataMember(Name = "arguments")] + [JsonPropertyName("arguments")] + public string? Arguments { get; set; } +} diff --git a/src/ChatGPT/Model/Json/Chat/ChatRequestBody.cs b/src/ChatGPT/Model/Json/Chat/ChatRequestBody.cs index f14f391d..d837dce0 100644 --- a/src/ChatGPT/Model/Json/Chat/ChatRequestBody.cs +++ b/src/ChatGPT/Model/Json/Chat/ChatRequestBody.cs @@ -4,53 +4,122 @@ namespace AI.Model.Json.Chat; +/// +/// Chat completion request body model. +/// [DataContract] public class ChatRequestBody { + /// + /// ID of the model to use. See the model endpoint compatibility table for details on which models work with the Chat API. + /// + /// + /// https://platform.openai.com/docs/models/model-endpoint-compatibility + /// [DataMember(Name = "model")] [JsonPropertyName("model")] public string? Model { get; set; } + /// + /// A list of messages comprising the conversation so far. + /// [DataMember(Name = "messages")] [JsonPropertyName("messages")] public ChatMessage[]? Messages { get; set; } + /// + /// A list of functions the model may generate JSON inputs for. + /// + [DataMember(Name = "functions")] + [JsonPropertyName("functions")] + [JsonConverter(typeof(ChatFunctionsJsonConverter))] + public object? Functions { get; set; } + + /// + /// Controls how the model responds to function calls. "none" means the model does not call a function, and responds to the end-user. "auto" means the model can pick between an end-user or calling a function. Specifying a particular function via {"name":\ "my_function"} forces the model to call that function. "none" is the default when no functions are present. "auto" is the default if functions are present. + /// + [DataMember(Name = "function_call")] + [JsonPropertyName("function_call")] + public object? FunctionCall { get; set; } + + /// + /// What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic. + /// + /// + /// We generally recommend altering this or top_p but not both. + /// [DataMember(Name = "temperature")] [JsonPropertyName("temperature")] public decimal Temperature { get; set; } = 1; + /// + /// An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered. + /// + /// + /// We generally recommend altering this or temperature but not both. + /// [DataMember(Name = "top_p")] [JsonPropertyName("top_p")] public decimal TopP { get; set; } = 1; + /// + /// How many chat completion choices to generate for each input message. + /// [DataMember(Name = "n")] [JsonPropertyName("n")] public int N { get; set; } = 1; + /// + /// If set, partial message deltas will be sent, like in ChatGPT. Tokens will be sent as data-only server-sent events as they become available, with the stream terminated by a data: [DONE] message. 
+ /// [DataMember(Name = "stream")] [JsonPropertyName("stream")] public bool Stream { get; set; } + /// + /// Up to 4 sequences where the API will stop generating further tokens. + /// [DataMember(Name = "stop")] [JsonPropertyName("stop")] public string? Stop { get; set; } + /// + /// The maximum number of tokens to generate in the chat completion. + /// + /// + /// The total length of input tokens and generated tokens is limited by the model's context length. + /// [DataMember(Name = "max_tokens")] [JsonPropertyName("max_tokens")] public int MaxTokens { get; set; } = 16; + /// + /// Number between -2.0 and 2.0. Positive values penalize new tokens based on whether they appear in the text so far, increasing the model's likelihood to talk about new topics. + /// [DataMember(Name = "presence_penalty")] [JsonPropertyName("presence_penalty")] public decimal PresencePenalty { get; set; } + /// + /// Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim. + /// [DataMember(Name = "frequency_penalty")] [JsonPropertyName("frequency_penalty")] public decimal FrequencyPenalty { get; set; } + /// + /// Modify the likelihood of specified tokens appearing in the completion. + /// + /// + /// Accepts a json object that maps tokens (specified by their token ID in the tokenizer) to an associated bias value from -100 to 100. Mathematically, the bias is added to the logits generated by the model prior to sampling. The exact effect will vary per model, but values between -1 and 1 should decrease or increase likelihood of selection; values like -100 or 100 should result in a ban or exclusive selection of the relevant token. + /// [DataMember(Name = "logit_bias")] [JsonPropertyName("logit_bias")] public Dictionary? LogitBias { get; set; } + /// + /// A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse. + /// [DataMember(Name = "user")] [JsonPropertyName("user")] public string? User { get; set; } diff --git a/src/ChatGPT/Model/Services/ChatServiceSettings.cs b/src/ChatGPT/Model/Services/ChatServiceSettings.cs index cb767ce3..b4c6d620 100644 --- a/src/ChatGPT/Model/Services/ChatServiceSettings.cs +++ b/src/ChatGPT/Model/Services/ChatServiceSettings.cs @@ -7,6 +7,8 @@ public class ChatServiceSettings public string? ApiUrl { get; set; } public string? Model { get; set; } public ChatMessage[]? Messages { get; set; } + public object? Functions { get; set; } + public object? FunctionCall { get; set; } public string? Suffix { get; set; } public decimal Temperature { get; set; } public int MaxTokens { get; set; } @@ -14,4 +16,5 @@ public class ChatServiceSettings public decimal PresencePenalty { get; set; } public decimal FrequencyPenalty { get; set; } public string? 
Stop { get; set; } + public bool Debug { get; set; } } diff --git a/src/ChatGPT/Services/ChatService.cs b/src/ChatGPT/Services/ChatService.cs index 83759f6e..5146f3ee 100644 --- a/src/ChatGPT/Services/ChatService.cs +++ b/src/ChatGPT/Services/ChatService.cs @@ -38,6 +38,8 @@ private string GetRequestBodyJson(ChatServiceSettings settings) { Model = model, Messages = settings.Messages, + Functions = settings.Functions, + FunctionCall = settings.FunctionCall, MaxTokens = settings.MaxTokens, Temperature = settings.Temperature, TopP = settings.TopP, @@ -53,7 +55,7 @@ private string GetRequestBodyJson(ChatServiceSettings settings) return _serializer.Serialize(requestBody); } - private async Task SendApiRequestAsync(string apiUrl, string apiKey, string requestBodyJson, CancellationToken token) + private async Task SendApiRequestAsync(string apiUrl, string apiKey, string requestBodyJson, bool debug, CancellationToken token) { // Create a new HttpClient for making the API request @@ -66,6 +68,10 @@ private string GetRequestBodyJson(ChatServiceSettings settings) // Create a new StringContent object with the JSON payload and the correct content type var content = new StringContent(requestBodyJson, Encoding.UTF8, "application/json"); + if (debug) + { + Console.WriteLine($"RequestBody:{Environment.NewLine}{requestBodyJson}"); + } // Send the API request and get the response var response = await s_client.PostAsync(apiUrl, content, token); @@ -76,8 +82,12 @@ private string GetRequestBodyJson(ChatServiceSettings settings) #else var responseBody = await response.Content.ReadAsStringAsync(token); #endif - // Console.WriteLine($"Status code: {response.StatusCode}"); - // Console.WriteLine($"Response body:{Environment.NewLine}{responseBody}"); + if (debug) + { + Console.WriteLine($"Status code: {response.StatusCode}"); + Console.WriteLine($"Response body:{Environment.NewLine}{responseBody}"); + } + switch (response.StatusCode) { case HttpStatusCode.Unauthorized: @@ -130,7 +140,9 @@ private string GetRequestBodyJson(ChatServiceSettings settings) return null; } + var debug = settings.Debug; + // Send the API request and get the response data - return await SendApiRequestAsync(apiUrl, apiKey, requestBodyJson, token); + return await SendApiRequestAsync(apiUrl, apiKey, requestBodyJson, debug, token); } } diff --git a/src/ChatGPT/Services/SystemTextJsonChatSerializer.cs b/src/ChatGPT/Services/SystemTextJsonChatSerializer.cs index 0fc50379..7533790f 100644 --- a/src/ChatGPT/Services/SystemTextJsonChatSerializer.cs +++ b/src/ChatGPT/Services/SystemTextJsonChatSerializer.cs @@ -16,7 +16,8 @@ static SystemTextJsonChatSerializer() new JsonSerializerOptions { DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull, - IgnoreReadOnlyProperties = true + IgnoreReadOnlyProperties = true, + WriteIndented = true }); }
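
A note on declaring functions: the sample passes anonymous objects through ChatSettingsViewModel.Functions, but the ChatFunctionViewModel added above carries the same name/description/parameters shape. A small sketch of the typed alternative follows, assuming ChatFunctionsJsonConverter's reflection-based serialization of the attributed properties; the weather schema is copied from the sample and is otherwise illustrative only.

using ChatGPT.ViewModels.Chat;

// Typed equivalent of the anonymous-object schema used in the sample.
// ChatFunctionViewModel serializes through ChatFunctionsJsonConverter as
// { "name": ..., "description": ..., "parameters": ... }.
var weatherFunction = new ChatFunctionViewModel(
    "GetCurrentWeather",
    "Get the current weather in a given location",
    new
    {
        type = "object",
        properties = new
        {
            location = new { type = "string", description = "The city and state, e.g. San Francisco, CA" },
            unit = new { type = "string", @enum = new[] { "celsius", "fahrenheit" } }
        },
        required = new[] { "location" }
    });

var settings = new ChatSettingsViewModel
{
    MaxTokens = 2000,
    Model = "gpt-3.5-turbo-0613",
    Functions = new[] { weatherFunction },
    FunctionCall = "auto"
};

Either shape ends up as the functions array in the request body, since ChatRequestBody.Functions is written verbatim by the converter.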
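
Usage note: the sample prints the raw function result after AddFunctionMessage and then waits for the next user prompt, so the function output is only sent back to the model on the following turn. A minimal sketch of an immediate follow-up round trip is below, assuming the ChatViewModel API added in this patch and reusing the chat, cts, result, and GetCurrentWeather names from samples/ChatGPT.CLI.FunctionCalling/Program.cs; the control flow is illustrative, not part of the patch.

// Sketch: complete the function-call round trip in one turn. Assumes the
// sample's top-level context (chat, cts, result, GetCurrentWeather).
if (result?.FunctionCall is { Name: "GetCurrentWeather", Arguments: { } args })
{
    args.TryGetValue("location", out var location);
    args.TryGetValue("unit", out var unit);

    // Run the local function and feed its output back as a "function" message.
    var functionResult = GetCurrentWeather(location, unit ?? "celsius");
    chat.AddFunctionMessage(functionResult, "GetCurrentWeather");

    // Second request: the model now answers using the function output.
    var followUp = await chat.SendAsync(chat.CreateChatMessages(), cts.Token);
    chat.AddAssistantMessage(followUp?.Message);

    if (followUp?.Message is { } answer)
    {
        Console.WriteLine(answer);
    }
}

The same pattern extends to several functions by dispatching on result.FunctionCall.Name before the second SendAsync.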