feat: refactor code. Add ChatGPT text message support. Add a generic LLMService class.
arthuridea committed Dec 21, 2023
1 parent 99b64e5 commit 8472bef
Showing 50 changed files with 1,788 additions and 273 deletions.
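The headline change is a generic ChatServiceBase that both the refactored Baidu ERNIE service and the new LLMService.OpenAI.ChatGPT project build on. As a reading aid for the diffs below, here is how the base class's seven type parameters are closed over for the Baidu case (taken from the new class declaration in BaiduWenxinApiService.cs; the ChatServiceBase body itself is not part of this commit page):

public class BaiduErniebotLLMService :
    ChatServiceBase<
        ChatRequest,                // TRequestDto: caller-facing request
        ChatApiResponse,            // TResponseDto: caller-facing response
        BaiduApiChatRequest,        // TBackendRequestDto: payload sent to the Baidu API
        BaiduWenxinChatResponse,    // TBackendResponseDto: payload returned by the Baidu API
        ChatMessageBase,            // TChatMessage: conversation message type
        string,                     // TMessageContent: plain-text message content
        OAuth2BackendServiceConfig  // TChatServiceOption: OAuth2 client settings
    >,
    IBaiduErniebotLLMService,
    IAIChatApiService<ChatRequest, ChatApiResponse>
{
    // Overrides shown in the diff: CreateMessageContent, LLMRequestMapping, GetAIMessage.
}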
2 changes: 1 addition & 1 deletion Directory.Build.props
@@ -1,7 +1,7 @@
<Project>

<PropertyGroup>
<Version>0.0.1.4-preview</Version>
<Version>0.0.2.0-preview</Version>
<Authors>arthuridea</Authors>
<LangVersion>latest</LangVersion>
<Description>A dotnet Library for Baidu ernie-bot and ErnievilgV2</Description>
7 changes: 7 additions & 0 deletions NetCore.BaiduAIGC.sln
@@ -24,6 +24,8 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "LLMService.Baidu.ErnieVilg"
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "03.Docs", "03.Docs", "{7E5E6C28-ABB2-4564-AF1B-D9FCC81B8353}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "LLMService.OpenAI.ChatGPT", "src\LLMService.OpenAI.ChatGPT\LLMService.OpenAI.ChatGPT.csproj", "{09322116-1405-47CE-BBB5-35E02167905D}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
@@ -46,6 +48,10 @@ Global
{E3AE89E4-31A0-4EDA-9154-2D089E1FD8A4}.Debug|Any CPU.Build.0 = Debug|Any CPU
{E3AE89E4-31A0-4EDA-9154-2D089E1FD8A4}.Release|Any CPU.ActiveCfg = Release|Any CPU
{E3AE89E4-31A0-4EDA-9154-2D089E1FD8A4}.Release|Any CPU.Build.0 = Release|Any CPU
{09322116-1405-47CE-BBB5-35E02167905D}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{09322116-1405-47CE-BBB5-35E02167905D}.Debug|Any CPU.Build.0 = Debug|Any CPU
{09322116-1405-47CE-BBB5-35E02167905D}.Release|Any CPU.ActiveCfg = Release|Any CPU
{09322116-1405-47CE-BBB5-35E02167905D}.Release|Any CPU.Build.0 = Release|Any CPU
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
@@ -55,6 +61,7 @@ Global
{DC8A11BE-3268-4F1E-B83F-27958041ABF1} = {01705FE5-59E6-4B65-B545-5DFD3AFB1F16}
{E73CB18F-26D3-4A6C-8AA2-E89FF17B65C5} = {01705FE5-59E6-4B65-B545-5DFD3AFB1F16}
{E3AE89E4-31A0-4EDA-9154-2D089E1FD8A4} = {01705FE5-59E6-4B65-B545-5DFD3AFB1F16}
{09322116-1405-47CE-BBB5-35E02167905D} = {01705FE5-59E6-4B65-B545-5DFD3AFB1F16}
EndGlobalSection
GlobalSection(ExtensibilityGlobals) = postSolution
SolutionGuid = {4FD3BE39-36D7-4A01-8EEC-39063ED813B4}
4 changes: 2 additions & 2 deletions src/LLMService.Baidu.ErnieVilg/BaiduErnieVilgApiService.cs
@@ -72,7 +72,7 @@ public async Task<PaintResultResponse> Text2Image(PaintApplyRequest request)

var _client = GetClient();

string paintApiEndpoint = BaiduApiDefaults.ErnieVilgV2ApiEndpoint;
string paintApiEndpoint = LLMApiDefaults.ErnieVilgV2ApiEndpoint;
var response = await _client.PostAsJsonAsync(paintApiEndpoint, request);
response.EnsureSuccessStatusCode();

@@ -91,7 +91,7 @@ public async Task<PaintResultResponse> Text2Image(PaintApplyRequest request)
private async Task<PaintResultResponse> challengePaintResult(HttpClient client, string id, int initDelayInSeconds = 3000)
{
PaintResultResponse result = new();
string paintResultApiEndpoint = BaiduApiDefaults.ErnieVilgV2ResultApiEndpoint;
string paintResultApiEndpoint = LLMApiDefaults.ErnieVilgV2ResultApiEndpoint;
bool taskFinished = false;
int retrys = 0;
Thread.Sleep(initDelayInSeconds);
@@ -26,7 +26,7 @@ public static class ErnieVilgDependencyInjection
public static IServiceCollection AddErnieVilg(this IServiceCollection services, IConfiguration configuration, string ernieVilgConfigKey = "BaiduErnieVilgSettings")
{
// Baidu configuration
var ernieVilgSettings = configuration.GetSection(ernieVilgConfigKey).Get<ClientCredentials>();
var ernieVilgSettings = configuration.GetSection(ernieVilgConfigKey).Get<OAuth2BackendServiceConfig>();

// AI painting client
services.AddHttpClient(LLMServiceConsts.BaiduErnieVilgApiClientName, client =>
227 changes: 61 additions & 166 deletions src/LLMService.Baidu.Wenxinworkshop/BaiduWenxinApiService.cs
@@ -6,203 +6,98 @@
using Microsoft.Extensions.Logging;
using System.Net.Http.Json;
using System.Text.Json;
using LLMService.Shared.ChatService;
using LLMService.Shared.Authentication.Models;
using Microsoft.Extensions.Options;
using Microsoft.VisualBasic;

namespace LLMService.Baidu.Wenxinworkshop
{
/// <summary>
/// Baidu Wenxin (ERNIE) API service
/// </summary>
public class BaiduWenxinApiService : IAIChatApiService<ChatRequest, ChatApiResponse>
public interface IBaiduErniebotLLMService
{
const string _api_client_key = LLMServiceConsts.BaiduWenxinApiClientName;
/// <summary>
/// The HTTP client factory
/// Chats the specified request.
/// </summary>
private readonly IHttpClientFactory _httpClientFactory;
/// <summary>
/// The chat data provider
/// </summary>
private readonly IChatDataProvider<BaiduWenxinMessage> _chatDataProvider;

/// <summary>
/// The HTTP context
/// </summary>
private readonly IHttpContextAccessor _context;
/// <param name="request">The request.</param>
/// <param name="cancellationToken">The cancellation token.</param>
/// <returns></returns>
Task Chat(ChatRequest request, CancellationToken cancellationToken = default);
}

///// <summary>
///// The serve sent event handler
///// </summary>
//private readonly IServeSentEventHandler _serveSentEventHandler;
/// <summary>
/// The logger
/// </summary>
private readonly ILogger _logger;
/// <summary>
///
/// </summary>
/// <seealso cref="ChatServiceBase{TRequestDto, TResponseDto, TBackendRequestDto, TBackendResponseDto, TChatMessage, TMessageContent, TChatServiceOption}" />
/// <seealso cref="IBaiduErniebotLLMService" />
public class BaiduErniebotLLMService :
ChatServiceBase<ChatRequest, ChatApiResponse,
BaiduApiChatRequest, BaiduWenxinChatResponse,
ChatMessageBase, string,
OAuth2BackendServiceConfig>, IBaiduErniebotLLMService, IAIChatApiService<ChatRequest, ChatApiResponse>
{
/// <summary>
/// Initializes a new instance of the <see cref="BaiduWenxinApiService"/> class.
/// Initializes a new instance of the <see cref="BaiduErniebotLLMService"/> class.
/// </summary>
/// <param name="factory">The factory.</param>
/// <param name="context"></param>
/// <param name="chatDataProvider">数据持久化服务接口,此处实现了一个简单的Memorycache记录当前会话</param>
/// <param name="context">The context.</param>
/// <param name="chatDataProvider">The chat data provider.</param>
/// <param name="chatOption">The chat option.</param>
/// <param name="logger">The logger.</param>
public BaiduWenxinApiService(
public BaiduErniebotLLMService(
IHttpClientFactory factory,
IHttpContextAccessor context,
IChatDataProvider<BaiduWenxinMessage> chatDataProvider,
ILogger<BaiduWenxinApiService> logger)
{
_httpClientFactory = factory;
_context = context;
_chatDataProvider = chatDataProvider;
_logger = logger;
}
IChatDataProvider<ChatMessageBase, string> chatDataProvider,
IOptionsSnapshot<OAuth2BackendServiceConfig> chatOption,
ILogger<BaiduErniebotLLMService> logger)
: base(factory, context, chatDataProvider, chatOption, logger) { }


/// <summary>
/// Gets the client.
/// Creates the API message.
/// </summary>
/// <param name="content">The message content.</param>
/// <param name="type">The type.</param>
/// <param name="role">The role.</param>
/// <returns></returns>
private HttpClient GetClient()
protected override string CreateMessageContent(string content, string type = "text", string role = "user")
{
var client = _httpClientFactory.CreateClient(_api_client_key);
_logger.LogDebug($"[API CLIENT]{_api_client_key} -> {client.BaseAddress}");
return client;
return content;
}

/// <summary>
/// Starts a conversation
/// LLMs the request mapping.
/// </summary>
/// <param name="request">请求实体</param>
/// <param name="cancellationToken"></param>
/// <param name="source">The source.</param>
/// <returns></returns>
/// <seealso cref="ChatRequest" />
/// <example>
/// {
/// "temperature": 0.95,
/// "top_p": 0.8,
/// "penalty_score": 1.5,
/// "stream": true,
/// "message": "我是一位擅长儿童发展心理、行为分析的资深教育实践家,我具有丰富的教学经验,具备儿童发展心理学、教育学的丰富知识,并且具有丰富的撰写儿童观察记录报告的经验, 今天在益智区,观察了一会孩子们的活动,洋洋提出问题天上的云为什么不掉下来?,孩子的年龄是3-4岁, 请以发展的眼光观察评价儿童、依据提供的行为数据进行客观分析、加入反思或发展建议, 请为我生成针对于以上的建议观察要点,行为分析,教育建议,家园指导建议,观察记录报告",
/// "model": 2,
/// "user_id": "7ffe3194-2bf0-48ba-8dbd-e888d7d556d3"
/// }
/// </example>
public async Task Chat(ChatRequest request, CancellationToken cancellationToken = default)
protected override BaiduApiChatRequest LLMRequestMapping(ChatRequest source)
{
var response = _context.HttpContext.Response;
#region Initialize the conversation: read the history and add it to the current session
if (string.IsNullOrEmpty(request.ConversationId))
{
request.ConversationId = Guid.NewGuid().ToString();
}

var conversation = await _chatDataProvider.GetConversationHistory(request.ConversationId);

await _chatDataProvider.AddChatMessage(conversation, request.Message, "user");

#if DEBUG
_logger.LogDebug(@$"【CALL {request.ModelSchema}{JsonSerializer.Serialize(conversation, new JsonSerializerOptions
{
Encoder = System.Text.Encodings.Web.JavaScriptEncoder.UnsafeRelaxedJsonEscaping
})}");
#endif
#endregion

#region Prepare the HttpClient and request entity
var _client = GetClient();
string chatApiEndpoint = BaiduApiDefaults.LLM_Models[request.ModelSchema];
var postdata = new BaiduApiChatRequest
return new BaiduApiChatRequest
{
Messages = conversation,
Temperature = request.Temperature,
TopP = request.TopP,
PenaltyScore = request.PenaltyScore,
Stream = request.Stream,
UserId = request.UserId,
Temperature = source.Temperature,
TopP = source.TopP,
PenaltyScore = source.PenaltyScore,
Stream = source.Stream,
UserId = source.UserId,
};
#endregion

#region Set response headers

#endregion

if (!request.Stream) // Not SSE streaming: read the returned JSON normally
{
#region Non-streaming request
ChatApiResponse result = new();
var apiResponse = await _client.PostAsJsonAsync(chatApiEndpoint, postdata, cancellationToken: cancellationToken);
apiResponse.EnsureSuccessStatusCode();

result = await apiResponse.DeserializeAsync<ChatApiResponse>(logger: _logger);
result.ConversationId = request.ConversationId;
result.ModelSchema = request.ModelSchema;

// Only streaming mode returns an end-of-stream flag; for non-streaming requests set it to true directly.
result.IsEnd = !request.Stream || result.IsEnd;

if (!result.NeedClearHistory)
{
await _chatDataProvider.AddChatMessage(conversation, result.Result, "assistant");
await _chatDataProvider.SaveChat(request.ConversationId, conversation);
}
else
{
_chatDataProvider.ResetSession(request.ConversationId);
}
response.BuildAIGeneratedResponseFeature();
await response.WriteAsJsonAsync(result, cancellationToken: cancellationToken);

#endregion
}

}
else // SSE streaming format
/// <summary>
/// Gets the ai message.
/// </summary>
/// <param name="response">The response.</param>
/// <returns></returns>
protected override ChatMessageBase GetAIMessage(BaiduWenxinChatResponse response)
{
var data = response.Result;
return new ChatMessageBase
{
#region Return the SSE stream
var content = JsonContent.Create(postdata);

// HttpClient must be set to HttpCompletionOption.ResponseHeadersRead so that it returns as soon as the response headers end; otherwise it still waits for the entire body before returning, which defeats the purpose.
// Therefore call SendAsync, which allows these extra parameters to be set.

// Prepare the request message content
var requestHttpMessage = new HttpRequestMessage
{
Method = HttpMethod.Post,
RequestUri = new Uri($"{_client.BaseAddress}{chatApiEndpoint[1..]}"),
Content = content
};
var apiResponse = await _client.SendAsync(requestHttpMessage, HttpCompletionOption.ResponseHeadersRead, cancellationToken);
apiResponse.EnsureSuccessStatusCode();

// Set response headers
// Set response headers: return SSE format
response.BuildAIGeneratedResponseFeature(true);
// Start flushing output before reading the SSE stream!
await response.Body.FlushAsync(cancellationToken);

using var stream = await apiResponse.Content.ReadAsStreamAsync(cancellationToken);
using var reader = new StreamReader(stream);
string sseSection = string.Empty;
bool isEnd = false;
// SSE output is one message per line, each line formatted as: data: {json} \n
// So loop over the SSE stream line by line, pushing to the HTTP response every 100 ms
while (!isEnd)
{
sseSection = await reader.ReadLineAsync();
if (!string.IsNullOrEmpty(sseSection))
{
await response.WriteAsync($"{sseSection} \n", cancellationToken: cancellationToken);
await response.WriteAsync("\n", cancellationToken: cancellationToken);
await response.Body.FlushAsync(cancellationToken);
if (sseSection.Contains("\"is_end\":true"))
{
isEnd = true;
break;
}
await Task.Delay(100, cancellationToken);
}
}
#endregion

}

Content = data,
Role = "assistant"
};
}
}

}
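Note that Chat writes its result (a JSON body, or an SSE stream when request.Stream is true) straight to the current HTTP response via IHttpContextAccessor, so callers do not receive a return value. A hypothetical consumer — the controller and route are invented for illustration; only IBaiduErniebotLLMService.Chat comes from this commit:

[ApiController]
[Route("api/chat")]
public class ChatController : ControllerBase
{
    private readonly IBaiduErniebotLLMService _llm;

    public ChatController(IBaiduErniebotLLMService llm) => _llm = llm;

    [HttpPost]
    public Task Post([FromBody] ChatRequest request, CancellationToken ct)
        => _llm.Chat(request, ct); // streams SSE to the response when request.Stream is true
}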
@@ -26,7 +26,7 @@ public static class BaiduWenxinworkshopDependencyInjection
public static IServiceCollection AddWenxinworkshop(this IServiceCollection services, IConfiguration configuration, string wenxinworkshopConfigKey = "BaiduWenxinSettings")
{
// Baidu configuration
var wenxinSettings = configuration.GetSection(wenxinworkshopConfigKey).Get<ClientCredentials>();
var wenxinSettings = configuration.GetSection(wenxinworkshopConfigKey).Get<OAuth2BackendServiceConfig>();

// Wenxin (ERNIE) LLM client

@@ -39,7 +39,9 @@ public static IServiceCollection AddWenxinworkshop(this IServiceCollection services
LLMServiceConsts.BaiduWenxinApiAuthority);


services.AddTransient<IAIChatApiService<ChatRequest, ChatApiResponse>, BaiduWenxinApiService>();
//services.AddTransient<IAIChatApiService<ChatRequest, ChatApiResponse>, BaiduWenxinApiService>();
//services.AddTransient<IBaiduWenxinApiService, BaiduWenxinApiService>();
services.AddTransient<IBaiduErniebotLLMService, BaiduErniebotLLMService>();

return services;
}
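Wiring this up remains a one-liner. A minimal sketch, assuming a typical appsettings.json layout under the default "BaiduWenxinSettings" key (the members of OAuth2BackendServiceConfig are not shown in this commit, so the field names below are placeholders):

// Program.cs — binds the "BaiduWenxinSettings" section and registers IBaiduErniebotLLMService.
builder.Services.AddWenxinworkshop(builder.Configuration);

// appsettings.json sketch; the key names inside the section are assumptions:
// {
//   "BaiduWenxinSettings": {
//     "ClientId": "<baidu api key>",
//     "ClientSecret": "<baidu secret key>"
//   }
// }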
@@ -23,4 +23,8 @@
<ProjectReference Include="..\LLMService.Shared\LLMService.Shared.csproj" />
</ItemGroup>

<ItemGroup>
<Folder Include="Model\" />
</ItemGroup>

</Project>