fix: correct token usage accounting for messages and Anthropic responses, avoiding zero-value overwrites and including cache tokens
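In short, the commit does two things: it prefers a non-zero PromptTokens/CompletionTokens value before falling back to InputTokens/OutputTokens (so a zero reported by one field no longer overwrites a real value from the other), and it folds Anthropic's cache-related token counts into the prompt and total counts. Below is a minimal, self-contained sketch of that accounting logic; the property names mirror the diff, `CacheReadInputTokens` (Anthropic's `cache_read_input_tokens`) is assumed to exist on the usage DTO, and the `long?` types are illustrative rather than the project's actual ones.

```csharp
// Sketch only: restates the selection and summation rules from this commit
// with stand-in types, not the project's real DTOs.
public sealed class UsageSketch
{
    public long? InputTokens { get; init; }
    public long? OutputTokens { get; init; }
    public long? CacheCreationInputTokens { get; init; } // cache_creation_input_tokens
    public long? CacheReadInputTokens { get; init; }     // cache_read_input_tokens (assumed name)
}

public static class TokenAccounting
{
    // Take the first candidate that is present AND non-zero, so a zero from one
    // field cannot overwrite a real value carried by the other.
    public static long FirstNonZero(long? preferred, long? fallback) =>
        preferred.HasValue && preferred.Value != 0
            ? preferred.Value
            : fallback.HasValue && fallback.Value != 0
                ? fallback.Value
                : 0;

    // Cache-aware input count: regular input tokens plus cache creation and cache reads.
    public static long InputTokenCount(UsageSketch u) =>
        (u.InputTokens ?? 0) + (u.CacheCreationInputTokens ?? 0) + (u.CacheReadInputTokens ?? 0);

    public static long TotalTokenCount(UsageSketch u) =>
        InputTokenCount(u) + (u.OutputTokens ?? 0);
}
```

For example, with PromptTokens = 0 and InputTokens = 123, the previous `tokenUsage.PromptTokens ?? tokenUsage.InputTokens ?? 0` expression returned 0, whereas `FirstNonZero(0, 123)` returns 123.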
@@ -18,15 +18,16 @@ public class AnthropicStreamDto
     [JsonPropertyName("usage")] public AnthropicCompletionDtoUsage? Usage { get; set; }

     [JsonPropertyName("error")] public AnthropicStreamErrorDto? Error { get; set; }

     public ThorUsageResponse TokenUsage => new ThorUsageResponse
     {
-        PromptTokens = Usage?.InputTokens,
-        InputTokens = Usage?.InputTokens,
+        PromptTokens = Usage?.InputTokens + Usage?.CacheCreationInputTokens + Usage?.CacheReadInputTokens,
+        InputTokens = Usage?.InputTokens + Usage?.CacheCreationInputTokens + Usage?.CacheReadInputTokens,
         OutputTokens = Usage?.OutputTokens,
         InputTokensDetails = null,
         CompletionTokens = Usage?.OutputTokens,
-        TotalTokens = Usage?.InputTokens + Usage?.OutputTokens,
+        TotalTokens = Usage?.InputTokens + Usage?.CacheCreationInputTokens + Usage?.CacheReadInputTokens +
+                      Usage?.OutputTokens,
         PromptTokensDetails = null,
         CompletionTokensDetails = null
     };
@@ -95,12 +96,13 @@ public class AnthropicChatCompletionDto

     public ThorUsageResponse TokenUsage => new ThorUsageResponse
     {
-        PromptTokens = Usage?.InputTokens,
-        InputTokens = Usage?.InputTokens,
+        PromptTokens = Usage?.InputTokens + Usage?.CacheCreationInputTokens + Usage?.CacheReadInputTokens,
+        InputTokens = Usage?.InputTokens + Usage?.CacheCreationInputTokens + Usage?.CacheReadInputTokens,
         OutputTokens = Usage?.OutputTokens,
         InputTokensDetails = null,
         CompletionTokens = Usage?.OutputTokens,
-        TotalTokens = Usage?.InputTokens + Usage?.OutputTokens,
+        TotalTokens = Usage?.InputTokens + Usage?.CacheCreationInputTokens + Usage?.CacheReadInputTokens +
+                      Usage?.OutputTokens,
         PromptTokensDetails = null,
         CompletionTokensDetails = null
     };
@@ -119,9 +121,9 @@ public class AnthropicChatCompletionDtoContent
     public object? input { get; set; }

     [JsonPropertyName("thinking")] public string? Thinking { get; set; }

     [JsonPropertyName("partial_json")] public string? PartialJson { get; set; }

     public string? signature { get; set; }
 }
@@ -142,6 +144,6 @@ public class AnthropicCompletionDtoUsage

 public class AnthropicServerToolUse
 {
     [JsonPropertyName("web_search_requests")]
     public int? WebSearchRequests { get; set; }
 }
@@ -29,13 +29,20 @@ public class MessageAggregateRoot : FullAuditedAggregateRoot<Guid>
         ModelId = modelId;
         if (tokenUsage is not null)
         {
-            long inputTokenCount = tokenUsage.PromptTokens
-                                   ?? tokenUsage.InputTokens
-                                   ?? 0;
+            long inputTokenCount =
+                (tokenUsage.PromptTokens.HasValue && tokenUsage.PromptTokens.Value != 0)
+                    ? tokenUsage.PromptTokens.Value
+                    : (tokenUsage.InputTokens.HasValue && tokenUsage.InputTokens.Value != 0)
+                        ? tokenUsage.InputTokens.Value
+                        : 0;

-            long outputTokenCount = tokenUsage.CompletionTokens
-                                    ?? tokenUsage.OutputTokens
-                                    ?? 0;
+            long outputTokenCount =
+                (tokenUsage.CompletionTokens.HasValue && tokenUsage.CompletionTokens.Value != 0)
+                    ? tokenUsage.CompletionTokens.Value
+                    : (tokenUsage.OutputTokens.HasValue && tokenUsage.OutputTokens.Value != 0)
+                        ? tokenUsage.OutputTokens.Value
+                        : 0;

             this.TokenUsage = new TokenUsageValueObject
             {