feat: 完成ai接口
This commit is contained in:
@@ -0,0 +1,195 @@
|
||||
using System.Text;
|
||||
using Azure;
|
||||
using Microsoft.AspNetCore.Http;
|
||||
using Microsoft.SemanticKernel.ChatCompletion;
|
||||
using Microsoft.SemanticKernel.Connectors.AzureOpenAI;
|
||||
using Newtonsoft.Json;
|
||||
using Newtonsoft.Json.Serialization;
|
||||
using Volo.Abp.Application.Services;
|
||||
using Yi.Framework.AiHub.Application.Contracts.Dtos;
|
||||
using Yi.Framework.SemanticKernel;
|
||||
|
||||
namespace Yi.Framework.AiHub.Application.Services;
|
||||
|
||||
public class AiService : ApplicationService
{
    private readonly SemanticKernelClient _skClient;

    // Grants access to the current request/response so PostSendAsync can
    // write a Server-Sent Events stream directly to the response body.
    private readonly IHttpContextAccessor _httpContextAccessor;

    public AiService(SemanticKernelClient skClient, IHttpContextAccessor httpContextAccessor)
    {
        _skClient = skClient;
        _httpContextAccessor = httpContextAccessor;
    }

    /// <summary>
    /// Gets the list of available AI models.
    /// </summary>
    /// <returns>A hard-coded catalogue of chat models.</returns>
    public Task<List<ModelGetListOutput>> GetModelAsync()
    {
        // No asynchronous work is performed here, so return a completed task
        // instead of declaring the method 'async' with no 'await' (CS1998).
        return Task.FromResult(new List<ModelGetListOutput>
        {
            new ModelGetListOutput
            {
                Id = 1,
                Category = "chat",
                ModelName = "gpt-4.1-mini",
                ModelDescribe = "gpt下的ai",
                ModelPrice = 4,
                ModelType = "1",
                ModelShow = "0",
                SystemPrompt = "",
                ApiHost = "",
                ApiKey = "",
                Remark = "牛逼"
            },
            new ModelGetListOutput
            {
                Id = 2,
                Category = "chat",
                ModelName = "grok-3-mini",
                ModelDescribe = "马斯克的ai",
                ModelPrice = 5,
                ModelType = "1",
                ModelShow = "0",
                SystemPrompt = "",
                ApiHost = "",
                ApiKey = "",
                Remark = "牛逼啊"
            }
        });
    }

    /// <summary>
    /// Sends a chat request and streams the model's reply back to the caller
    /// as Server-Sent Events (SSE), one JSON chunk per event.
    /// </summary>
    /// <param name="input">The target model id and the conversation history.</param>
    public async Task PostSendAsync(SendMessageInput input)
    {
        var httpContext = _httpContextAccessor.HttpContext
            ?? throw new InvalidOperationException("No active HTTP context is available.");
        var response = httpContext.Response;

        // Declare the response as an SSE stream. Use the header indexer rather
        // than Headers.Add: Add throws ArgumentException if the key already
        // exists, the indexer simply overwrites.
        response.ContentType = "text/event-stream";
        response.Headers["Cache-Control"] = "no-cache";
        response.Headers["Connection"] = "keep-alive";

        var chatCompletionService = _skClient.Kernel.GetRequiredService<IChatCompletionService>(input.Model);

        // Rebuild the conversation history from the client's message list;
        // any role other than "ai"/"user" is intentionally ignored.
        var history = new ChatHistory();
        foreach (var aiChatContextDto in input.Messages)
        {
            if (aiChatContextDto.Role == "ai")
            {
                history.AddAssistantMessage(aiChatContextDto.Content);
            }
            else if (aiChatContextDto.Role == "user")
            {
                history.AddUserMessage(aiChatContextDto.Content);
            }
        }

        var openSettings = new AzureOpenAIPromptExecutionSettings
        {
            MaxTokens = 3000
        };

        var results = chatCompletionService.GetStreamingChatMessageContentsAsync(
            chatHistory: history,
            executionSettings: openSettings,
            kernel: _skClient.Kernel);

        // UTF8Encoding(false): Encoding.UTF8 emits a BOM, which would corrupt
        // the first SSE event on the wire, so suppress it. leaveOpen keeps the
        // framework-owned response body stream usable after the writer is gone.
        await using var writer = new StreamWriter(response.Body, new UTF8Encoding(false), leaveOpen: true);
        await foreach (var result in results)
        {
            // Skip role-only / null-content deltas — they carry no text.
            if (string.IsNullOrEmpty(result.Content))
            {
                continue;
            }

            var message = JsonConvert.SerializeObject(
                GetMessage(input.Model, result.Content),
                new JsonSerializerSettings
                {
                    ContractResolver = new CamelCasePropertyNamesContractResolver()
                });

            // SSE framing is "data: <payload>\n\n". WriteLineAsync would append
            // Environment.NewLine ("\r\n" on Windows), so write the terminator
            // explicitly to keep the frame byte-exact on every platform.
            await writer.WriteAsync($"data: {message}\n\n");
            await writer.FlushAsync(); // push each chunk to the client immediately
        }
    }

    /// <summary>
    /// Builds an OpenAI-compatible "chat.completion.chunk" payload wrapping a
    /// single streamed content delta.
    /// </summary>
    /// <param name="modelId">The model identifier echoed back to the client.</param>
    /// <param name="content">The delta text produced by the model.</param>
    /// <returns>The chunk DTO for one SSE event.</returns>
    private SendMessageOutputDto GetMessage(string modelId, string content)
    {
        return new SendMessageOutputDto
        {
            Id = 1,
            Object = "chat.completion.chunk",
            // NOTE(review): hard-coded creation timestamp — should likely be
            // the current Unix time; confirm the DTO's type (int vs long) first.
            Created = 1750336171,
            Model = modelId,
            Choices = new()
            {
                new Choice
                {
                    Index = 0,
                    Delta = new Delta
                    {
                        Content = content,
                        Role = "assistant"
                    },
                    // Streaming deltas carry no finish reason until the final chunk.
                    FinishReason = null,
                    // All content-filter flags are static "not filtered" values;
                    // no real moderation is performed here.
                    ContentFilterResults = new()
                    {
                        Hate = new() { Filtered = false, Detected = null },
                        SelfHarm = new() { Filtered = false, Detected = null },
                        Sexual = new() { Filtered = false, Detected = null },
                        Violence = new() { Filtered = false, Detected = null },
                        Jailbreak = new() { Filtered = false, Detected = false },
                        Profanity = new() { Filtered = false, Detected = false },
                    }
                }
            },
            SystemFingerprint = "",
            // NOTE(review): token counts are placeholders — the streaming call
            // does not report usage here; do not bill or meter on these values.
            Usage = new Usage
            {
                PromptTokens = 75,
                CompletionTokens = 25,
                TotalTokens = 100,
                PromptTokensDetails = new()
                {
                    AudioTokens = 0,
                    CachedTokens = 0
                },
                CompletionTokensDetails = new()
                {
                    AudioTokens = 0,
                    ReasoningTokens = 0,
                    AcceptedPredictionTokens = 0,
                    RejectedPredictionTokens = 0
                }
            }
        };
    }
}
|
||||
Reference in New Issue
Block a user