fix: Normalize yi- prefixed models and correct usage statistics and billing records

- Strip the yi- prefix before calling the model, so the actual upstream request no longer targets a mismatched model ID
- Use the original model ID consistently for message storage, usage statistics, and premium package deduction (a sketch of the pattern follows below)
- Add gpt-5.2, gemini-3, and related models to the premium package constants
- Add the missing Element Plus ElSubMenu type declaration on the frontend
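The same pattern recurs in every gateway code path touched below: capture the incoming model ID before the request is mutated, strip the yi- prefix only for the upstream call, and keep the original ID for everything that is stored, counted, or billed. A minimal sketch of that split in C# (the helper name and tuple shape are illustrative, not the gateway's actual API; the commit inlines this logic directly in AiGateWayManager):

using System;

public static class ModelIdNormalizer
{
    // Returns the original ID (kept for message storage, usage statistics and
    // premium package deduction) and the ID to send upstream (yi- prefix removed).
    public static (string SourceModelId, string UpstreamModelId) Normalize(string modelId)
    {
        var sourceModelId = modelId;
        var upstreamModelId = modelId;

        if (!string.IsNullOrEmpty(modelId) &&
            modelId.StartsWith("yi-", StringComparison.OrdinalIgnoreCase))
        {
            // e.g. "yi-gpt-5.2" -> "gpt-5.2"
            upstreamModelId = modelId[3..];
        }

        return (sourceModelId, upstreamModelId);
    }
}

In the diff below, sourceModelId plays the role of SourceModelId, and the mutated request.Model is what Normalize would return as UpstreamModelId.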
@@ -21,5 +21,11 @@ public class PremiumPackageConst
     "yi-claude-sonnet-4-5-20250929",
     "yi-claude-haiku-4-5-20251001",
     "yi-claude-opus-4-5-20251101",
+
+    "yi-gpt-5.2",
+    "yi-gpt-5.2-codex",
+    "yi-gemini-3-pro-high",
+    "yi-gemini-3-pro",
+
 ];
 }
@@ -126,6 +126,14 @@ public class AiGateWayManager : DomainService
 var modelDescribe = await GetModelAsync(ModelApiTypeEnum.OpenAi, request.Model);
 var chatService =
     LazyServiceProvider.GetRequiredKeyedService<IChatCompletionService>(modelDescribe.HandlerName);
+
+var sourceModelId = request.Model;
+if (!string.IsNullOrEmpty(request.Model) &&
+    request.Model.StartsWith("yi-", StringComparison.OrdinalIgnoreCase))
+{
+    request.Model = request.Model[3..];
+}
+
 var data = await chatService.CompleteChatAsync(modelDescribe, request, cancellationToken);
 data.SupplementalMultiplier(modelDescribe.Multiplier);
 if (userId is not null)
@@ -134,7 +142,7 @@ public class AiGateWayManager : DomainService
 new MessageInputDto
 {
     Content = sessionId is null ? "不予存储" : request.Messages?.LastOrDefault().Content ?? string.Empty,
-    ModelId = request.Model,
+    ModelId = sourceModelId,
     TokenUsage = data.Usage,
 }, tokenId);
@@ -143,14 +151,14 @@ public class AiGateWayManager : DomainService
 {
     Content =
         sessionId is null ? "不予存储" : data.Choices?.FirstOrDefault()?.Delta.Content ?? string.Empty,
-    ModelId = request.Model,
+    ModelId = sourceModelId,
     TokenUsage = data.Usage
 }, tokenId);

-await _usageStatisticsManager.SetUsageAsync(userId.Value, request.Model, data.Usage, tokenId);
+await _usageStatisticsManager.SetUsageAsync(userId.Value, sourceModelId, data.Usage, tokenId);

 // 扣减尊享token包用量
-if (PremiumPackageConst.ModeIds.Contains(request.Model))
+if (PremiumPackageConst.ModeIds.Contains(sourceModelId))
 {
     var totalTokens = data.Usage?.TotalTokens ?? 0;
     if (totalTokens > 0)
@@ -194,6 +202,13 @@ public class AiGateWayManager : DomainService
 var chatService =
     LazyServiceProvider.GetRequiredKeyedService<IChatCompletionService>(modelDescribe.HandlerName);

+var sourceModelId = request.Model;
+if (!string.IsNullOrEmpty(request.Model) &&
+    request.Model.StartsWith("yi-", StringComparison.OrdinalIgnoreCase))
+{
+    request.Model = request.Model[3..];
+}
+
 var completeChatResponse = chatService.CompleteChatStreamAsync(modelDescribe, request, cancellationToken);
 var tokenUsage = new ThorUsageResponse();
@@ -285,7 +300,7 @@ public class AiGateWayManager : DomainService
 new MessageInputDto
 {
     Content = sessionId is null ? "不予存储" : request.Messages?.LastOrDefault()?.MessagesStore ?? string.Empty,
-    ModelId = request.Model,
+    ModelId = sourceModelId,
     TokenUsage = tokenUsage,
 }, tokenId);
@@ -293,14 +308,14 @@ public class AiGateWayManager : DomainService
 new MessageInputDto
 {
     Content = sessionId is null ? "不予存储" : backupSystemContent.ToString(),
-    ModelId = request.Model,
+    ModelId = sourceModelId,
     TokenUsage = tokenUsage
 }, tokenId);

-await _usageStatisticsManager.SetUsageAsync(userId, request.Model, tokenUsage, tokenId);
+await _usageStatisticsManager.SetUsageAsync(userId, sourceModelId, tokenUsage, tokenId);

 // 扣减尊享token包用量
-if (userId is not null && PremiumPackageConst.ModeIds.Contains(request.Model))
+if (userId is not null && PremiumPackageConst.ModeIds.Contains(sourceModelId))
 {
     var totalTokens = tokenUsage.TotalTokens ?? 0;
     if (totalTokens > 0)
@@ -677,6 +692,13 @@ public class AiGateWayManager : DomainService
 var chatService =
     LazyServiceProvider.GetRequiredKeyedService<IOpenAiResponseService>(modelDescribe.HandlerName);
+var sourceModelId = request.Model;
+if (!string.IsNullOrEmpty(request.Model) &&
+    request.Model.StartsWith("yi-", StringComparison.OrdinalIgnoreCase))
+{
+    request.Model = request.Model[3..];
+}
+
 var data = await chatService.ResponsesAsync(modelDescribe, request, cancellationToken);

 data.SupplementalMultiplier(modelDescribe.Multiplier);
@@ -693,7 +715,7 @@ public class AiGateWayManager : DomainService
 new MessageInputDto
 {
     Content = "不予存储",
-    ModelId = request.Model,
+    ModelId = sourceModelId,
     TokenUsage = tokenUsage,
 }, tokenId);
@@ -701,11 +723,11 @@ public class AiGateWayManager : DomainService
 new MessageInputDto
 {
     Content = "不予存储",
-    ModelId = request.Model,
+    ModelId = sourceModelId,
     TokenUsage = tokenUsage
 }, tokenId);

-await _usageStatisticsManager.SetUsageAsync(userId.Value, request.Model, tokenUsage, tokenId);
+await _usageStatisticsManager.SetUsageAsync(userId.Value, sourceModelId, tokenUsage, tokenId);

 // 扣减尊享token包用量
 var totalTokens = tokenUsage.TotalTokens ?? 0;
@@ -746,7 +768,12 @@ public class AiGateWayManager : DomainService
 var modelDescribe = await GetModelAsync(ModelApiTypeEnum.Response, request.Model);
 var chatService =
     LazyServiceProvider.GetRequiredKeyedService<IOpenAiResponseService>(modelDescribe.HandlerName);

+var sourceModelId = request.Model;
+if (!string.IsNullOrEmpty(request.Model) &&
+    request.Model.StartsWith("yi-", StringComparison.OrdinalIgnoreCase))
+{
+    request.Model = request.Model[3..];
+}
 var completeChatResponse = chatService.ResponsesStreamAsync(modelDescribe, request, cancellationToken);
 ThorUsageResponse? tokenUsage = null;
 try
@@ -786,7 +813,7 @@ public class AiGateWayManager : DomainService
 new MessageInputDto
 {
     Content = "不予存储",
-    ModelId = request.Model,
+    ModelId = sourceModelId,
     TokenUsage = tokenUsage,
 }, tokenId);
@@ -794,11 +821,11 @@ public class AiGateWayManager : DomainService
 new MessageInputDto
 {
     Content = "不予存储",
-    ModelId = request.Model,
+    ModelId = sourceModelId,
     TokenUsage = tokenUsage
 }, tokenId);

-await _usageStatisticsManager.SetUsageAsync(userId, request.Model, tokenUsage, tokenId);
+await _usageStatisticsManager.SetUsageAsync(userId, sourceModelId, tokenUsage, tokenId);

 // 扣减尊享token包用量
 if (userId.HasValue && tokenUsage is not null)