fix(ui): take account of reasoning in token count calculation (#8324)

We were skipping reasoning traces when counting tokens, which resulted
in an incorrect total token count.

Signed-off-by: Ettore Di Giacinto <mudler@localai.io>
This commit is contained in:
Ettore Di Giacinto
2026-02-01 10:48:31 +01:00
committed by GitHub
parent 234072769c
commit 397f7f0862

View File

@@ -1404,6 +1404,11 @@ async function promptGPT(systemPrompt, input) {
case "reasoning":
hasReasoningFromAPI = true; // Mark that we're receiving reasoning from API
if (eventData.content) {
// Count tokens for rate calculation (thinking/reasoning)
const reasoningRequest = activeRequests.get(chatId);
if (reasoningRequest) {
reasoningRequest.tokensReceived += Math.ceil(eventData.content.length / 4);
}
const currentChat = chatStore.getChat(chatId);
if (!currentChat) break; // Chat was deleted
const isMCPMode = currentChat.mcpMode || false;
@@ -1959,6 +1964,11 @@ async function promptGPT(systemPrompt, input) {
if (reasoningDelta && reasoningDelta.trim() !== "") {
hasReasoningFromAPI = true; // Mark that we're receiving reasoning from API
reasoningContent += reasoningDelta;
// Count tokens for rate calculation (thinking/reasoning)
const reasoningRequest = activeRequests.get(chatId);
if (reasoningRequest) {
reasoningRequest.tokensReceived += Math.ceil(reasoningDelta.length / 4);
}
const currentChat = chatStore.getChat(chatId);
if (!currentChat) {
// Chat was deleted, skip this line