feat:完成ai网关搭建

This commit is contained in:
ccnetcore
2025-06-21 01:08:14 +08:00
parent 6abcc49ed4
commit 3b74dfd49a
10 changed files with 236 additions and 173 deletions

View File

@@ -1,19 +1,28 @@
using Microsoft.AspNetCore.Http;
using System.Text;
using Microsoft.AspNetCore.Http;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Options;
using Newtonsoft.Json;
using Newtonsoft.Json.Serialization;
using OpenAI.Chat;
using Volo.Abp.Application.Services;
using Yi.Framework.AiHub.Application.Contracts.Dtos;
using Yi.Framework.AiHub.Application.Contracts.Options;
using Yi.Framework.AiHub.Domain.Managers;
namespace Yi.Framework.AiHub.Application.Services;
public class AiService : ApplicationService
{
// Gateway configuration (direct-connection chat endpoints) and per-request HTTP context access.
private readonly AiGateWayOptions _options;
private readonly IHttpContextAccessor _httpContextAccessor;

/// <summary>
/// Creates the AI service.
/// </summary>
/// <param name="options">Bound <see cref="AiGateWayOptions"/> describing the configured chat gateways.</param>
/// <param name="httpContextAccessor">Accessor for the current HTTP request, used to stream SSE responses.</param>
public AiService(IOptions<AiGateWayOptions> options, IHttpContextAccessor httpContextAccessor)
{
    _options = options.Value;
    _httpContextAccessor = httpContextAccessor;
}
/// <summary>
/// Lists the chat models exposed by the gateway configuration.
/// </summary>
/// <returns>One <see cref="ModelGetListOutput"/> per model id configured under any chat endpoint.</returns>
public Task<List<ModelGetListOutput>> GetModelAsync()
{
    // Flatten every configured chat endpoint's model ids into a single catalogue.
    var output = _options.Chats.SelectMany(x => x.Value.ModelIds)
        .Select((modelId, index) => new ModelGetListOutput
        {
            // Sequential ids: the previous hard-coded list used a distinct id per model
            // (001, 002, ...), so emitting the same id for every entry would break
            // any client that keys on Id.
            Id = index + 1,
            Category = "chat",
            ModelName = modelId,
            ModelDescribe = "这是一个直连模型",
            ModelPrice = 4,
            ModelType = "1",
            ModelShow = "0",
            SystemPrompt = "",
            ApiHost = "",
            ApiKey = "",
            Remark = "直连模型"
        }).ToList();

    // No asynchronous work is performed (avoids CS1998); callers still await a Task.
    return Task.FromResult(output);
}
/// <summary>
/// Streams a chat completion back to the caller as Server-Sent Events (SSE).
/// </summary>
/// <param name="input">The target model id and the conversation transcript to complete.</param>
/// <param name="cancelToken">Cancels the stream, e.g. when the client disconnects.</param>
public async Task PostSendAsync(SendMessageInput input, CancellationToken cancelToken)
{
    var httpContext = _httpContextAccessor.HttpContext
        ?? throw new InvalidOperationException("PostSendAsync requires an active HTTP request context.");
    var response = httpContext.Response;

    // Declare the response as an SSE stream so the browser and intermediaries keep it open.
    response.ContentType = "text/event-stream";
    response.Headers.Append("Cache-Control", "no-cache");
    response.Headers.Append("Connection", "keep-alive");

    // Rebuild the provider chat history from the client's transcript.
    var history = new List<ChatMessage>();
    foreach (var aiChatContextDto in input.Messages)
    {
        if (aiChatContextDto.Role == "ai")
        {
            history.Add(ChatMessage.CreateAssistantMessage(aiChatContextDto.Content));
        }
        else if (aiChatContextDto.Role == "user")
        {
            history.Add(ChatMessage.CreateUserMessage(aiChatContextDto.Content));
        }
        // NOTE(review): any other role (e.g. "system") is silently dropped — confirm intended.
    }

    var gateWay = LazyServiceProvider.GetRequiredService<AiGateWayManager>();
    var completeChatResponse = gateWay.CompleteChatAsync(input.Model, history);

    // leaveOpen: the response body stream is owned by ASP.NET Core, not this writer.
    await using var writer = new StreamWriter(response.Body, Encoding.UTF8, leaveOpen: true);
    await foreach (var data in completeChatResponse)
    {
        var model = MapToMessage(input.Model, data);
        var message = JsonConvert.SerializeObject(model, new JsonSerializerSettings
        {
            ContractResolver = new CamelCasePropertyNamesContractResolver()
        });

        // "data: {json}\n" plus WriteLine's own newline produces the blank line that terminates an SSE event.
        await writer.WriteLineAsync($"data: {message}\n".AsMemory(), cancelToken);
        await writer.FlushAsync(cancelToken); // push each chunk to the client immediately
    }
}
private SendMessageOutputDto MapToMessage(string modelId, string content)
{
var output = new SendMessageOutputDto
{