fix: normalize yi-prefixed model IDs and correct usage statistics and billing records
- Strip the yi- prefix before invoking the model, so the actual upstream request no longer targets a mismatched model
- Use the original model ID consistently for stored messages, usage statistics, and premium package deduction
- Add gpt-5.2, gemini-3, and related models to the premium package constants
- Add the Element Plus ElSubMenu type declaration on the frontend
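
For illustration only, a minimal standalone C# sketch of the prefix-handling pattern the bullets describe; the ModelIdHelper/NormalizeModelId names are hypothetical and not part of this commit:

    using System;

    public static class ModelIdHelper
    {
        // Returns the model id to send to the upstream provider: the "yi-" prefix is
        // stripped, while the caller keeps the original id (sourceModelId) for message
        // storage, usage statistics, and premium package deduction.
        public static string NormalizeModelId(string modelId)
        {
            if (!string.IsNullOrEmpty(modelId) &&
                modelId.StartsWith("yi-", StringComparison.OrdinalIgnoreCase))
            {
                return modelId[3..];
            }

            return modelId;
        }
    }

    // Usage sketch:
    //   var sourceModelId = request.Model;                              // kept for records
    //   request.Model = ModelIdHelper.NormalizeModelId(request.Model);  // sent upstream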
@@ -21,5 +21,11 @@ public class PremiumPackageConst
         "yi-claude-sonnet-4-5-20250929",
         "yi-claude-haiku-4-5-20251001",
         "yi-claude-opus-4-5-20251101",
+
+        "yi-gpt-5.2",
+        "yi-gpt-5.2-codex",
+        "yi-gemini-3-pro-high",
+        "yi-gemini-3-pro",
+
     ];
 }
@@ -126,6 +126,14 @@ public class AiGateWayManager : DomainService
         var modelDescribe = await GetModelAsync(ModelApiTypeEnum.OpenAi, request.Model);
         var chatService =
             LazyServiceProvider.GetRequiredKeyedService<IChatCompletionService>(modelDescribe.HandlerName);
+
+        var sourceModelId = request.Model;
+        if (!string.IsNullOrEmpty(request.Model) &&
+            request.Model.StartsWith("yi-", StringComparison.OrdinalIgnoreCase))
+        {
+            request.Model = request.Model[3..];
+        }
+
         var data = await chatService.CompleteChatAsync(modelDescribe, request, cancellationToken);
         data.SupplementalMultiplier(modelDescribe.Multiplier);
         if (userId is not null)
@@ -134,7 +142,7 @@ public class AiGateWayManager : DomainService
                 new MessageInputDto
                 {
                     Content = sessionId is null ? "不予存储" : request.Messages?.LastOrDefault().Content ?? string.Empty,
-                    ModelId = request.Model,
+                    ModelId = sourceModelId,
                     TokenUsage = data.Usage,
                 }, tokenId);
 
@@ -143,14 +151,14 @@ public class AiGateWayManager : DomainService
                 {
                     Content =
                         sessionId is null ? "不予存储" : data.Choices?.FirstOrDefault()?.Delta.Content ?? string.Empty,
-                    ModelId = request.Model,
+                    ModelId = sourceModelId,
                     TokenUsage = data.Usage
                 }, tokenId);
 
-            await _usageStatisticsManager.SetUsageAsync(userId.Value, request.Model, data.Usage, tokenId);
+            await _usageStatisticsManager.SetUsageAsync(userId.Value, sourceModelId, data.Usage, tokenId);
 
             // 扣减尊享token包用量
-            if (PremiumPackageConst.ModeIds.Contains(request.Model))
+            if (PremiumPackageConst.ModeIds.Contains(sourceModelId))
             {
                 var totalTokens = data.Usage?.TotalTokens ?? 0;
                 if (totalTokens > 0)
@@ -194,6 +202,13 @@ public class AiGateWayManager : DomainService
         var chatService =
             LazyServiceProvider.GetRequiredKeyedService<IChatCompletionService>(modelDescribe.HandlerName);
 
+        var sourceModelId = request.Model;
+        if (!string.IsNullOrEmpty(request.Model) &&
+            request.Model.StartsWith("yi-", StringComparison.OrdinalIgnoreCase))
+        {
+            request.Model = request.Model[3..];
+        }
+
         var completeChatResponse = chatService.CompleteChatStreamAsync(modelDescribe, request, cancellationToken);
         var tokenUsage = new ThorUsageResponse();
 
@@ -285,7 +300,7 @@ public class AiGateWayManager : DomainService
                 new MessageInputDto
                 {
                     Content = sessionId is null ? "不予存储" : request.Messages?.LastOrDefault()?.MessagesStore ?? string.Empty,
-                    ModelId = request.Model,
+                    ModelId = sourceModelId,
                     TokenUsage = tokenUsage,
                 }, tokenId);
 
@@ -293,14 +308,14 @@ public class AiGateWayManager : DomainService
                 new MessageInputDto
                 {
                     Content = sessionId is null ? "不予存储" : backupSystemContent.ToString(),
-                    ModelId = request.Model,
+                    ModelId = sourceModelId,
                     TokenUsage = tokenUsage
                 }, tokenId);
 
-            await _usageStatisticsManager.SetUsageAsync(userId, request.Model, tokenUsage, tokenId);
+            await _usageStatisticsManager.SetUsageAsync(userId, sourceModelId, tokenUsage, tokenId);
 
             // 扣减尊享token包用量
-            if (userId is not null && PremiumPackageConst.ModeIds.Contains(request.Model))
+            if (userId is not null && PremiumPackageConst.ModeIds.Contains(sourceModelId))
             {
                 var totalTokens = tokenUsage.TotalTokens ?? 0;
                 if (totalTokens > 0)
@@ -677,6 +692,13 @@ public class AiGateWayManager : DomainService
 
         var chatService =
             LazyServiceProvider.GetRequiredKeyedService<IOpenAiResponseService>(modelDescribe.HandlerName);
+        var sourceModelId = request.Model;
+        if (!string.IsNullOrEmpty(request.Model) &&
+            request.Model.StartsWith("yi-", StringComparison.OrdinalIgnoreCase))
+        {
+            request.Model = request.Model[3..];
+        }
+
         var data = await chatService.ResponsesAsync(modelDescribe, request, cancellationToken);
 
         data.SupplementalMultiplier(modelDescribe.Multiplier);
@@ -693,7 +715,7 @@ public class AiGateWayManager : DomainService
                 new MessageInputDto
                 {
                     Content = "不予存储",
-                    ModelId = request.Model,
+                    ModelId = sourceModelId,
                     TokenUsage = tokenUsage,
                 }, tokenId);
 
@@ -701,11 +723,11 @@ public class AiGateWayManager : DomainService
                 new MessageInputDto
                 {
                     Content = "不予存储",
-                    ModelId = request.Model,
+                    ModelId = sourceModelId,
                     TokenUsage = tokenUsage
                 }, tokenId);
 
-            await _usageStatisticsManager.SetUsageAsync(userId.Value, request.Model, tokenUsage, tokenId);
+            await _usageStatisticsManager.SetUsageAsync(userId.Value, sourceModelId, tokenUsage, tokenId);
 
             // 扣减尊享token包用量
             var totalTokens = tokenUsage.TotalTokens ?? 0;
@@ -746,7 +768,12 @@ public class AiGateWayManager : DomainService
         var modelDescribe = await GetModelAsync(ModelApiTypeEnum.Response, request.Model);
         var chatService =
             LazyServiceProvider.GetRequiredKeyedService<IOpenAiResponseService>(modelDescribe.HandlerName);
+        var sourceModelId = request.Model;
+        if (!string.IsNullOrEmpty(request.Model) &&
+            request.Model.StartsWith("yi-", StringComparison.OrdinalIgnoreCase))
+        {
+            request.Model = request.Model[3..];
+        }
         var completeChatResponse = chatService.ResponsesStreamAsync(modelDescribe, request, cancellationToken);
         ThorUsageResponse? tokenUsage = null;
         try
@@ -786,7 +813,7 @@ public class AiGateWayManager : DomainService
                 new MessageInputDto
                 {
                     Content = "不予存储",
-                    ModelId = request.Model,
+                    ModelId = sourceModelId,
                     TokenUsage = tokenUsage,
                 }, tokenId);
 
@@ -794,11 +821,11 @@ public class AiGateWayManager : DomainService
                 new MessageInputDto
                 {
                     Content = "不予存储",
-                    ModelId = request.Model,
+                    ModelId = sourceModelId,
                     TokenUsage = tokenUsage
                 }, tokenId);
 
-            await _usageStatisticsManager.SetUsageAsync(userId, request.Model, tokenUsage, tokenId);
+            await _usageStatisticsManager.SetUsageAsync(userId, sourceModelId, tokenUsage, tokenId);
 
             // 扣减尊享token包用量
             if (userId.HasValue && tokenUsage is not null)
Yi.Ai.Vue3/types/components.d.ts (vendored)
@@ -49,6 +49,7 @@ declare module 'vue' {
     ElSegmented: typeof import('element-plus/es')['ElSegmented']
     ElSelect: typeof import('element-plus/es')['ElSelect']
     ElSkeleton: typeof import('element-plus/es')['ElSkeleton']
+    ElSubMenu: typeof import('element-plus/es')['ElSubMenu']
     ElSwitch: typeof import('element-plus/es')['ElSwitch']
     ElTable: typeof import('element-plus/es')['ElTable']
     ElTableColumn: typeof import('element-plus/es')['ElTableColumn']