using System.Text;
using Microsoft.AspNetCore.Http;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Options;
using Newtonsoft.Json;
using Newtonsoft.Json.Serialization;
using OpenAI.Chat;
using Volo.Abp.Application.Services;
using Volo.Abp.Users;
using Yi.Framework.AiHub.Application.Contracts.Dtos;
using Yi.Framework.AiHub.Application.Contracts.Options;
using Yi.Framework.AiHub.Domain.Managers;

namespace Yi.Framework.AiHub.Application.Services;

/// <summary>
/// AI chat service: lists the directly-connected chat models configured in
/// <see cref="AiGateWayOptions"/> and streams chat completions to the client
/// as Server-Sent Events (SSE).
/// </summary>
public class AiChatService : ApplicationService
{
    private readonly AiGateWayOptions _options;
    private readonly IHttpContextAccessor _httpContextAccessor;
    private readonly AiMessageManager _aiMessageManager;

    public AiChatService(
        IOptions<AiGateWayOptions> options,
        IHttpContextAccessor httpContextAccessor,
        AiMessageManager aiMessageManager)
    {
        _options = options.Value;
        _httpContextAccessor = httpContextAccessor;
        _aiMessageManager = aiMessageManager;
    }

    /// <summary>
    /// Gets the list of available chat models.
    /// Flattens every model id of every configured chat gateway into a flat
    /// list of descriptors with fixed placeholder metadata.
    /// </summary>
    /// <returns>One <see cref="ModelGetListOutput"/> per configured model id.</returns>
    public Task<List<ModelGetListOutput>> GetModelAsync()
    {
        var output = _options.Chats
            .SelectMany(x => x.Value.ModelIds)
            .Select(modelId => new ModelGetListOutput
            {
                // NOTE(review): all descriptors share Id = 1 and placeholder
                // pricing/flags — confirm whether callers rely on these values.
                Id = 1,
                Category = "chat",
                ModelName = modelId,
                ModelDescribe = "这是一个直连模型",
                ModelPrice = 4,
                ModelType = "1",
                ModelShow = "0",
                Remark = "直连模型"
            })
            .ToList();

        // No awaited work here; avoid the async state machine (fixes CS1998).
        return Task.FromResult(output);
    }

    /// <summary>
    /// Sends a chat message and streams the completion back over SSE.
    /// Each model chunk is serialized as a camelCase JSON payload in a
    /// "data: ..." frame; a final "data: done" frame marks end of stream.
    /// </summary>
    /// <param name="input">Model id, session id and the prior conversation turns.</param>
    /// <param name="cancellationToken">Aborts the upstream completion and the response writes.</param>
    public async Task PostSendAsync(SendMessageInput input, CancellationToken cancellationToken)
    {
        var httpContext = _httpContextAccessor.HttpContext;
        var response = httpContext.Response;

        // Declare an SSE stream and disable buffering-friendly caching.
        response.ContentType = "text/event-stream";
        response.Headers.Append("Cache-Control", "no-cache");
        response.Headers.Append("Connection", "keep-alive");

        // Rebuild the chat history in the order the client supplied it.
        // Roles other than "ai"/"user" are silently dropped (original behavior).
        var history = new List<ChatMessage>();
        foreach (var turn in input.Messages)
        {
            if (turn.Role == "ai")
            {
                history.Add(ChatMessage.CreateAssistantMessage(turn.Content));
            }
            else if (turn.Role == "user")
            {
                history.Add(ChatMessage.CreateUserMessage(turn.Content));
            }
        }

        // NOTE(review): generic argument was lost in the source dump —
        // reconstructed as the gateway manager; confirm the exact service type.
        var gateWay = LazyServiceProvider.GetRequiredService<AiGateWayManager>();
        var completeChatResponse = gateWay.CompleteChatAsync(input.Model, history, cancellationToken);

        // leaveOpen: the response body outlives this writer.
        await using var writer = new StreamWriter(response.Body, Encoding.UTF8, leaveOpen: true);
        var serializerSettings = new JsonSerializerSettings
        {
            ContractResolver = new CamelCasePropertyNamesContractResolver()
        };

        await foreach (var data in completeChatResponse)
        {
            var model = MapToMessage(input.Model, data);
            var message = JsonConvert.SerializeObject(model, serializerSettings);
            // WriteLine + "\n" yields the blank line that terminates an SSE event.
            await writer.WriteLineAsync($"data: {message}\n".AsMemory(), cancellationToken);
            await writer.FlushAsync(cancellationToken); // push the chunk immediately
        }

        // Signal end of stream to the client.
        await writer.WriteLineAsync("data: done\n".AsMemory(), cancellationToken);
        await writer.FlushAsync(cancellationToken);

        if (CurrentUser.IsAuthenticated && input.SessionId.HasValue)
        {
            // 等待接入token — persistence deferred until token accounting is wired up.
            // await _aiMessageManager.CreateMessageAsync(CurrentUser.GetId(), input.SessionId.Value, new MessageInputDto
            // {
            //     Content = null,
            //     Role = null,
            //     DeductCost = 0,
            //     TotalTokens = 0,
            //     ModelId = null,
            //     Remark = null
            // });
        }
    }

    /// <summary>
    /// Wraps one raw content chunk in an OpenAI-compatible
    /// "chat.completion.chunk" envelope for the SSE stream.
    /// Filter results and token usage are static placeholders.
    /// </summary>
    /// <param name="modelId">Model id echoed back to the client.</param>
    /// <param name="content">Delta text produced by the gateway.</param>
    private SendMessageOutputDto MapToMessage(string modelId, string content)
    {
        return new SendMessageOutputDto
        {
            Id = 1,
            Object = "chat.completion.chunk",
            // Real emission time instead of the previous hard-coded constant.
            Created = DateTimeOffset.UtcNow.ToUnixTimeSeconds(),
            Model = modelId,
            Choices = new()
            {
                new Choice
                {
                    Index = 0,
                    Delta = new Delta
                    {
                        Content = content,
                        Role = "assistant"
                    },
                    FinishReason = null,
                    ContentFilterResults = new()
                    {
                        Hate = new() { Filtered = false, Detected = null },
                        SelfHarm = new() { Filtered = false, Detected = null },
                        Sexual = new() { Filtered = false, Detected = null },
                        Violence = new() { Filtered = false, Detected = null },
                        Jailbreak = new() { Filtered = false, Detected = false },
                        Profanity = new() { Filtered = false, Detected = false },
                    }
                }
            },
            SystemFingerprint = "",
            // NOTE(review): usage numbers are placeholders, not real token counts.
            Usage = new Usage
            {
                PromptTokens = 75,
                CompletionTokens = 25,
                TotalTokens = 100,
                PromptTokensDetails = new()
                {
                    AudioTokens = 0,
                    CachedTokens = 0
                },
                CompletionTokensDetails = new()
                {
                    AudioTokens = 0,
                    ReasoningTokens = 0,
                    AcceptedPredictionTokens = 0,
                    RejectedPredictionTokens = 0
                }
            }
        };
    }
}