From f9bebeb4b5eb8262e43c17db93331c188e191b72 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E5=B0=8F=E6=B5=B7?= <7836246@qq.com> Date: Wed, 18 Mar 2026 14:06:53 +0800 Subject: [PATCH] =?UTF-8?q?fix:=20thinking=20=E6=A3=80=E6=B5=8B=E4=BD=8D?= =?UTF-8?q?=E7=BD=AE=E7=BA=A6=E6=9D=9F=EF=BC=8C=E9=98=B2=E6=AD=A2=E6=AD=A3?= =?UTF-8?q?=E6=96=87=E5=AD=97=E9=9D=A2=E9=87=8F=E8=AF=AF=E8=A7=A6=E5=8F=91?= =?UTF-8?q?=20(Issue=20#64)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit 将所有 includes('<thinking>') 替换为 hasLeadingThinking(), 只在 <thinking> 出现在响应开头时才触发提取, 防止用户消息或模型正文中的字面量标签误触发 extractThinking 导致内容丢失。 --- src/handler.ts | 12 ++++++------ src/openai-handler.ts | 16 ++++++++-------- src/streaming-text.ts | 12 ++++++++++++ 3 files changed, 26 insertions(+), 14 deletions(-) diff --git a/src/handler.ts b/src/handler.ts index ce18dca..1d04383 100644 --- a/src/handler.ts +++ b/src/handler.ts @@ -20,7 +20,7 @@ import { convertToCursorRequest, parseToolCalls, hasToolCalls } from './converte import { sendCursorRequest, sendCursorRequestFull } from './cursor-client.js'; import { getConfig } from './config.js'; import { createRequestLogger, type RequestLogger } from './logger.js'; -import { createIncrementalTextStreamer, splitLeadingThinkingBlocks, stripThinkingTags } from './streaming-text.js'; +import { createIncrementalTextStreamer, hasLeadingThinking, splitLeadingThinkingBlocks, stripThinkingTags } from './streaming-text.js'; function msgId(): string { return 'msg_' + uuidv4().replace(/-/g, '').substring(0, 24); @@ -1072,7 +1072,7 @@ async function handleDirectTextStream( hasTools: false, }); - if (!finalThinkingContent && finalRawResponse.includes('<thinking>')) { + if (!finalThinkingContent && hasLeadingThinking(finalRawResponse)) { const { thinkingContent: extracted } = extractThinking(finalRawResponse); if (extracted) { finalThinkingContent = extracted; @@ -1360,7 +1360,7 @@ async function handleStream(res: Response, cursorReq: CursorChatRequest, body: A // ★ Thinking 
提取(在拒绝检测之前,防止 thinking 内容触发 isRefusal 误判) // 混合流式阶段可能已经提取了 thinking,优先使用 let thinkingContent = hybridThinkingContent || ''; - if (fullResponse.includes('<thinking>')) { + if (hasLeadingThinking(fullResponse)) { const { thinkingContent: extracted, strippedText } = extractThinking(fullResponse); if (extracted) { if (!thinkingContent) thinkingContent = extracted; @@ -1393,7 +1393,7 @@ async function handleStream(res: Response, cursorReq: CursorChatRequest, body: A activeCursorReq = await convertToCursorRequest(retryBody); await executeStream(true); // 重试不传回调(纯缓冲模式) // 重试后也需要剥离 thinking 标签 - if (fullResponse.includes('<thinking>')) { + if (hasLeadingThinking(fullResponse)) { const { thinkingContent: retryThinking, strippedText: retryStripped } = extractThinking(fullResponse); if (retryThinking) { thinkingContent = retryThinking; @@ -1798,7 +1798,7 @@ async function handleNonStream(res: Response, cursorReq: CursorChatRequest, body // ★ Thinking 提取(在拒绝检测之前) // 始终剥离 thinking 标签,避免泄漏到最终文本中 let thinkingContent = ''; - if (fullText.includes('<thinking>')) { + if (hasLeadingThinking(fullText)) { const { thinkingContent: extracted, strippedText } = extractThinking(fullText); if (extracted) { thinkingContent = extracted; @@ -1826,7 +1826,7 @@ async function handleNonStream(res: Response, cursorReq: CursorChatRequest, body activeCursorReq = await convertToCursorRequest(retryBody); fullText = await sendCursorRequestFull(activeCursorReq); // 重试后也需要剥离 thinking 标签 - if (fullText.includes('<thinking>')) { + if (hasLeadingThinking(fullText)) { const { thinkingContent: retryThinking, strippedText: retryStripped } = extractThinking(fullText); if (retryThinking) { thinkingContent = retryThinking; diff --git a/src/openai-handler.ts b/src/openai-handler.ts index a7f3867..993006d 100644 --- a/src/openai-handler.ts +++ b/src/openai-handler.ts @@ -28,7 +28,7 @@ import { convertToCursorRequest, parseToolCalls, hasToolCalls } from './converte import { sendCursorRequest, sendCursorRequestFull } from './cursor-client.js'; import { 
getConfig } from './config.js'; import { createRequestLogger } from './logger.js'; -import { createIncrementalTextStreamer, splitLeadingThinkingBlocks, stripThinkingTags } from './streaming-text.js'; +import { createIncrementalTextStreamer, hasLeadingThinking, splitLeadingThinkingBlocks, stripThinkingTags } from './streaming-text.js'; import { autoContinueCursorToolResponseFull, autoContinueCursorToolResponseStream, @@ -907,7 +907,7 @@ async function handleOpenAIStream( // ★ Thinking 提取(在拒绝检测之前) let reasoningContent: string | undefined = hybridThinkingContent || undefined; - if (fullResponse.includes('<thinking>')) { + if (hasLeadingThinking(fullResponse)) { const { thinkingContent: extracted, strippedText } = extractThinking(fullResponse); if (extracted) { if (thinkingEnabled && !reasoningContent) { @@ -1118,7 +1118,7 @@ async function handleOpenAINonStream( // ★ Thinking 提取必须在拒绝检测之前 — 否则 thinking 内容中的关键词会触发 isRefusal 误判 const thinkingEnabled = anthropicReq.thinking?.type === 'enabled'; let reasoningContent: string | undefined; - if (fullText.includes('<thinking>')) { + if (hasLeadingThinking(fullText)) { const { thinkingContent: extracted, strippedText } = extractThinking(fullText); if (extracted) { if (thinkingEnabled) { @@ -1140,7 +1140,7 @@ async function handleOpenAINonStream( activeCursorReq = retryCursorReq; fullText = await sendCursorRequestFull(activeCursorReq); // 重试响应也需要先剥离 thinking - if (fullText.includes('<thinking>')) { + if (hasLeadingThinking(fullText)) { fullText = extractThinking(fullText).strippedText; } if (!shouldRetry()) break; @@ -1510,7 +1510,7 @@ async function handleResponsesStream( await executeStream(); // Thinking 提取 - if (fullResponse.includes('<thinking>')) { + if (hasLeadingThinking(fullResponse)) { const { strippedText } = extractThinking(fullResponse); fullResponse = strippedText; } @@ -1527,7 +1527,7 @@ async function handleResponsesStream( const retryBody = buildRetryRequest(anthropicReq, retryCount - 1); activeCursorReq = await convertToCursorRequest(retryBody); 
await executeStream(); - if (fullResponse.includes('<thinking>')) { + if (hasLeadingThinking(fullResponse)) { fullResponse = extractThinking(fullResponse).strippedText; } } @@ -1713,7 +1713,7 @@ async function handleResponsesNonStream( const hasTools = (anthropicReq.tools?.length ?? 0) > 0; // Thinking 提取 - if (fullText.includes('<thinking>')) { + if (hasLeadingThinking(fullText)) { fullText = extractThinking(fullText).strippedText; } @@ -1725,7 +1725,7 @@ async function handleResponsesNonStream( const retryCursorReq = await convertToCursorRequest(retryBody); activeCursorReq = retryCursorReq; fullText = await sendCursorRequestFull(activeCursorReq); - if (fullText.includes('<thinking>')) { + if (hasLeadingThinking(fullText)) { fullText = extractThinking(fullText).strippedText; } if (!shouldRetry()) break; diff --git a/src/streaming-text.ts b/src/streaming-text.ts index b9da55b..8016b98 100644 --- a/src/streaming-text.ts +++ b/src/streaming-text.ts @@ -52,6 +52,18 @@ export function stripThinkingTags(text: string): string { return text.slice(0, startIdx).trim(); } +/** + * 检测文本是否以 <thinking> 开头(允许前导空白)。 + * + * ★ 修复 Issue #64:用位置约束替代宽松的 includes('<thinking>'), + * 防止用户消息或模型正文中的字面量 <thinking> 误触发 extractThinking, + * 导致正文内容被错误截断或丢失。 + */ +export function hasLeadingThinking(text: string): boolean { + if (!text) return false; + return /^\s*<thinking>/.test(text); +} + /** * 只解析“前导 thinking 块”。 *