fix: 修正消息与Anthropic返回的Token统计逻辑,避免零值覆盖并支持缓存Token计算

This commit is contained in:
chenchun
2025-10-11 23:27:46 +08:00
parent 2b12e18e6c
commit 593b3a4cdd
2 changed files with 25 additions and 16 deletions

View File

@@ -21,12 +21,13 @@ public class AnthropicStreamDto
public ThorUsageResponse TokenUsage => new ThorUsageResponse public ThorUsageResponse TokenUsage => new ThorUsageResponse
{ {
PromptTokens = Usage?.InputTokens, PromptTokens = Usage?.InputTokens + Usage?.CacheCreationInputTokens + Usage?.CacheReadInputTokens,
InputTokens = Usage?.InputTokens, InputTokens = Usage?.InputTokens + Usage?.CacheCreationInputTokens + Usage?.CacheReadInputTokens,
OutputTokens = Usage?.OutputTokens, OutputTokens = Usage?.OutputTokens,
InputTokensDetails = null, InputTokensDetails = null,
CompletionTokens = Usage?.OutputTokens, CompletionTokens = Usage?.OutputTokens,
TotalTokens = Usage?.InputTokens + Usage?.OutputTokens, TotalTokens = Usage?.InputTokens + Usage?.CacheCreationInputTokens + Usage?.CacheReadInputTokens +
Usage?.OutputTokens,
PromptTokensDetails = null, PromptTokensDetails = null,
CompletionTokensDetails = null CompletionTokensDetails = null
}; };
@@ -95,12 +96,13 @@ public class AnthropicChatCompletionDto
public ThorUsageResponse TokenUsage => new ThorUsageResponse public ThorUsageResponse TokenUsage => new ThorUsageResponse
{ {
PromptTokens = Usage?.InputTokens, PromptTokens = Usage?.InputTokens + Usage?.CacheCreationInputTokens + Usage?.CacheReadInputTokens,
InputTokens = Usage?.InputTokens, InputTokens = Usage?.InputTokens + Usage?.CacheCreationInputTokens + Usage?.CacheReadInputTokens,
OutputTokens = Usage?.OutputTokens, OutputTokens = Usage?.OutputTokens,
InputTokensDetails = null, InputTokensDetails = null,
CompletionTokens = Usage?.OutputTokens, CompletionTokens = Usage?.OutputTokens,
TotalTokens = Usage?.InputTokens + Usage?.OutputTokens, TotalTokens = Usage?.InputTokens + Usage?.CacheCreationInputTokens + Usage?.CacheReadInputTokens +
Usage?.OutputTokens,
PromptTokensDetails = null, PromptTokensDetails = null,
CompletionTokensDetails = null CompletionTokensDetails = null
}; };

View File

@@ -29,13 +29,20 @@ public class MessageAggregateRoot : FullAuditedAggregateRoot<Guid>
ModelId = modelId; ModelId = modelId;
if (tokenUsage is not null) if (tokenUsage is not null)
{ {
long inputTokenCount = tokenUsage.PromptTokens long inputTokenCount =
?? tokenUsage.InputTokens (tokenUsage.PromptTokens.HasValue && tokenUsage.PromptTokens.Value != 0)
?? 0; ? tokenUsage.PromptTokens.Value
: (tokenUsage.InputTokens.HasValue && tokenUsage.InputTokens.Value != 0)
? tokenUsage.InputTokens.Value
: 0;
long outputTokenCount =
(tokenUsage.CompletionTokens.HasValue && tokenUsage.CompletionTokens.Value != 0)
? tokenUsage.CompletionTokens.Value
: (tokenUsage.OutputTokens.HasValue && tokenUsage.OutputTokens.Value != 0)
? tokenUsage.OutputTokens.Value
: 0;
long outputTokenCount = tokenUsage.CompletionTokens
?? tokenUsage.OutputTokens
?? 0;
this.TokenUsage = new TokenUsageValueObject this.TokenUsage = new TokenUsageValueObject
{ {