fix(executor): ignore null OpenAI stream usage chunks

- Added validation so OpenAI-style usage parsing only accepts object payloads with token fields.
- Prevented streaming usage:null chunks from publishing zero-token records before the final usage chunk arrives.
- Reused the shared OpenAI-style parser for stream usage to support both chat completions and responses token field names.
- Added tests covering null usage chunks and input/output token usage fields in streaming responses.
This commit is contained in:
mochenya
2026-05-05 12:30:03 +08:00
parent da6c599efd
commit 99dfbaef61
2 changed files with 58 additions and 16 deletions

View File

@@ -248,7 +248,7 @@ func resolveUsageAuthType(auth *cliproxyauth.Auth) string {
func ParseCodexUsage(data []byte) (usage.Detail, bool) {
usageNode := gjson.ParseBytes(data).Get("response.usage")
if !usageNode.Exists() {
if !hasOpenAIStyleUsageTokenFields(usageNode) {
return usage.Detail{}, false
}
return parseOpenAIStyleUsageNode(usageNode), true
@@ -256,7 +256,7 @@ func ParseCodexUsage(data []byte) (usage.Detail, bool) {
func ParseCodexImageToolUsage(data []byte) (usage.Detail, bool) {
usageNode := gjson.ParseBytes(data).Get("response.tool_usage.image_gen")
if !usageNode.Exists() || !usageNode.IsObject() {
if !hasOpenAIStyleUsageTokenFields(usageNode) {
return usage.Detail{}, false
}
return parseOpenAIStyleUsageNode(usageNode), true
@@ -264,12 +264,27 @@ func ParseCodexImageToolUsage(data []byte) (usage.Detail, bool) {
func ParseOpenAIUsage(data []byte) usage.Detail {
usageNode := gjson.ParseBytes(data).Get("usage")
if !usageNode.Exists() {
if !hasOpenAIStyleUsageTokenFields(usageNode) {
return usage.Detail{}
}
return parseOpenAIStyleUsageNode(usageNode)
}
// hasOpenAIStyleUsageTokenFields reports whether usageNode is a JSON object
// that carries at least one OpenAI-style token accounting field. Both the
// chat-completions names (prompt_tokens/completion_tokens) and the
// responses-API names (input_tokens/output_tokens) are recognized, along
// with their nested cached/reasoning detail fields. A missing node, a
// null value, or a non-object payload yields false.
func hasOpenAIStyleUsageTokenFields(usageNode gjson.Result) bool {
	if !usageNode.Exists() || !usageNode.IsObject() {
		return false
	}
	// Any one of these paths being present marks the node as a real
	// usage object rather than an empty/placeholder chunk.
	tokenFieldPaths := []string{
		"prompt_tokens",
		"input_tokens",
		"completion_tokens",
		"output_tokens",
		"total_tokens",
		"prompt_tokens_details.cached_tokens",
		"input_tokens_details.cached_tokens",
		"completion_tokens_details.reasoning_tokens",
		"output_tokens_details.reasoning_tokens",
	}
	for _, path := range tokenFieldPaths {
		if usageNode.Get(path).Exists() {
			return true
		}
	}
	return false
}
func parseOpenAIStyleUsageNode(usageNode gjson.Result) usage.Detail {
inputNode := usageNode.Get("prompt_tokens")
if !inputNode.Exists() {
@@ -307,21 +322,10 @@ func ParseOpenAIStreamUsage(line []byte) (usage.Detail, bool) {
return usage.Detail{}, false
}
usageNode := gjson.GetBytes(payload, "usage")
if !usageNode.Exists() {
if !hasOpenAIStyleUsageTokenFields(usageNode) {
return usage.Detail{}, false
}
detail := usage.Detail{
InputTokens: usageNode.Get("prompt_tokens").Int(),
OutputTokens: usageNode.Get("completion_tokens").Int(),
TotalTokens: usageNode.Get("total_tokens").Int(),
}
if cached := usageNode.Get("prompt_tokens_details.cached_tokens"); cached.Exists() {
detail.CachedTokens = cached.Int()
}
if reasoning := usageNode.Get("completion_tokens_details.reasoning_tokens"); reasoning.Exists() {
detail.ReasoningTokens = reasoning.Int()
}
return detail, true
return parseOpenAIStyleUsageNode(usageNode), true
}
func ParseClaudeUsage(data []byte) usage.Detail {

View File

@@ -48,6 +48,44 @@ func TestParseOpenAIUsageResponses(t *testing.T) {
}
}
// TestParseOpenAIUsageIgnoresNullUsage verifies that a payload whose usage
// field is JSON null produces the zero-value detail instead of a spurious
// zero-token record.
func TestParseOpenAIUsageIgnoresNullUsage(t *testing.T) {
	payload := []byte(`{"usage":null}`)
	got := ParseOpenAIUsage(payload)
	if got != (usage.Detail{}) {
		t.Fatalf("detail = %+v, want zero detail", got)
	}
}
// TestParseOpenAIStreamUsageIgnoresNullUsage verifies that a streaming chunk
// carrying "usage":null is rejected (ok == false) rather than being parsed
// into an all-zero usage record.
func TestParseOpenAIStreamUsageIgnoresNullUsage(t *testing.T) {
	chunk := []byte(`data: {"id":"chunk_1","object":"chat.completion.chunk","choices":[{"index":0,"delta":{"content":"hi"},"finish_reason":null}],"usage":null}`)
	detail, ok := ParseOpenAIStreamUsage(chunk)
	if ok {
		t.Fatalf("ParseOpenAIStreamUsage() = (%+v, true), want false for null usage", detail)
	}
}
// TestParseOpenAIStreamUsageResponsesFields verifies that a streaming chunk
// using responses-API field names (input_tokens/output_tokens and their
// *_details variants) is parsed into the corresponding usage detail fields.
//
// The ok-gate still uses t.Fatal (nothing to check without a detail), but the
// per-field comparisons use t.Errorf so that a single run reports every
// mismatched field instead of stopping at the first one.
func TestParseOpenAIStreamUsageResponsesFields(t *testing.T) {
	line := []byte(`data: {"id":"chunk_1","object":"chat.completion.chunk","choices":[],"usage":{"input_tokens":8,"output_tokens":5,"total_tokens":13,"input_tokens_details":{"cached_tokens":3},"output_tokens_details":{"reasoning_tokens":2}}}`)
	detail, ok := ParseOpenAIStreamUsage(line)
	if !ok {
		t.Fatal("ParseOpenAIStreamUsage() ok = false, want true")
	}
	if detail.InputTokens != 8 {
		t.Errorf("input tokens = %d, want %d", detail.InputTokens, 8)
	}
	if detail.OutputTokens != 5 {
		t.Errorf("output tokens = %d, want %d", detail.OutputTokens, 5)
	}
	if detail.TotalTokens != 13 {
		t.Errorf("total tokens = %d, want %d", detail.TotalTokens, 13)
	}
	if detail.CachedTokens != 3 {
		t.Errorf("cached tokens = %d, want %d", detail.CachedTokens, 3)
	}
	if detail.ReasoningTokens != 2 {
		t.Errorf("reasoning tokens = %d, want %d", detail.ReasoningTokens, 2)
	}
}
func TestParseGeminiCLIUsage_TopLevelUsageMetadata(t *testing.T) {
data := []byte(`{"usageMetadata":{"promptTokenCount":11,"candidatesTokenCount":7,"thoughtsTokenCount":3,"totalTokenCount":21,"cachedContentTokenCount":5}}`)
detail := ParseGeminiCLIUsage(data)