refactor(llm): update chat saving logic to prevent race conditions between client and server
@@ -951,9 +951,9 @@ export default class LlmChatPanel extends BasicWidget {
                     }
                 }

-                // Save the updated data to the note
-                this.saveCurrentData()
-                    .catch(err => console.error("Failed to save data after streaming completed:", err));
+                // DON'T save here - let the server handle saving the complete conversation
+                // to avoid race conditions between client and server saves
+                console.log("Updated metadata after streaming completion, server should save");
             })
             .catch(err => console.error("Error fetching session data after streaming:", err));
     }
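The race this hunk removes: after streaming finished, both the client panel and the server pipeline persisted the chat, and whichever write landed last won, so a partial client copy could clobber the server's complete conversation. A minimal sketch of that last-write-wins failure mode (hypothetical Chat/store/save names, not Trilium's actual API):

// Hypothetical illustration of the last-write-wins race, not Trilium code.
type Chat = { messages: string[] };
const store: { chat: Chat } = { chat: { messages: [] } };

async function serverSave(fullConversation: string[]): Promise<void> {
    // Server persists the complete conversation, including tool results.
    store.chat = { messages: fullConversation };
}

async function clientSave(partialConversation: string[]): Promise<void> {
    // Client persists only what it has seen so far.
    store.chat = { messages: partialConversation };
}

(async () => {
    await serverSave(["user: hi", "assistant: hello", "tool: lookup result"]);
    // The client's save resolves last and overwrites the complete copy.
    await clientSave(["user: hi", "assistant: hello"]);
    console.log(store.chat.messages.length); // 2 - the tool result is lost
})();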
@@ -991,11 +991,9 @@ export default class LlmChatPanel extends BasicWidget {

                     console.log(`Cached tool execution for ${toolData.tool} to be saved later`);

-                    // Save immediately after receiving a tool execution
-                    // This ensures we don't lose tool execution data if streaming fails
-                    this.saveCurrentData().catch(err => {
-                        console.error("Failed to save tool execution data:", err);
-                    });
+                    // DON'T save immediately during streaming - let the server handle saving
+                    // to avoid race conditions between client and server saves
+                    console.log(`Tool execution cached, will be saved by server`);
                 }
             },
             // Complete handler
@@ -1078,10 +1076,36 @@ export default class LlmChatPanel extends BasicWidget {
                 // Hide loading indicator
                 hideLoadingIndicator(this.loadingIndicator);

-                // Save the final state to the Chat Note
-                this.saveCurrentData().catch(err => {
-                    console.error("Failed to save assistant response to note:", err);
-                });
+                // DON'T save here immediately - let the server save the accumulated response first
+                // to avoid race conditions. We'll reload the data from the server after a short delay.
+                console.log("Stream completed, waiting for server to save then reloading data...");
+                setTimeout(async () => {
+                    try {
+                        console.log("About to reload data from server...");
+                        const currentMessageCount = this.messages.length;
+                        console.log(`Current client message count before reload: ${currentMessageCount}`);
+
+                        // Reload the data from the server which should have the complete conversation
+                        const reloadSuccess = await this.loadSavedData();
+
+                        const newMessageCount = this.messages.length;
+                        console.log(`Reload success: ${reloadSuccess}, message count after reload: ${newMessageCount}`);
+
+                        if (reloadSuccess && newMessageCount > currentMessageCount) {
+                            console.log("Successfully reloaded data with more complete conversation");
+                        } else if (!reloadSuccess) {
+                            console.warn("Reload failed, keeping current client state");
+                        } else {
+                            console.warn("Reload succeeded but message count didn't increase");
+                        }
+                    } catch (error) {
+                        console.error("Failed to reload data after stream completion:", error);
+                        // Fallback: save our current state if reload fails
+                        this.saveCurrentData().catch(err => {
+                            console.error("Failed to save assistant response to note:", err);
+                        });
+                    }
+                }, 3000); // Wait 3 seconds for server to complete its save
             }

             // Scroll to bottom
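The fixed 3-second setTimeout above is a heuristic: it assumes the server finishes its save within that window. A hedged sketch of the same defer-then-reload idea as a small polling helper (the helper and its retry parameters are illustrative only; loadSavedData/saveCurrentData mirror the panel's methods):

// Hypothetical polling variant of the defer-then-reload pattern above.
async function reloadWithFallback(
    loadSavedData: () => Promise<boolean>,
    saveCurrentData: () => Promise<void>,
    attempts = 3,
    delayMs = 1000
): Promise<void> {
    for (let i = 0; i < attempts; i++) {
        await new Promise(resolve => setTimeout(resolve, delayMs));
        try {
            if (await loadSavedData()) {
                return; // server copy arrived - client state replaced
            }
        } catch (err) {
            console.error("Reload attempt failed:", err);
        }
    }
    // Server never delivered a newer copy - persist the client state instead.
    await saveCurrentData().catch(err =>
        console.error("Fallback save failed:", err));
}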
@@ -150,7 +150,7 @@ class RestChatService {

         // Import WebSocket service for streaming
         const wsService = await import('../../ws.js');
-        let accumulatedContent = '';
+        const accumulatedContentRef = { value: '' };

         const pipelineInput: ChatPipelineInput = {
             messages: chat.messages.map(msg => ({
@@ -162,8 +162,7 @@ class RestChatService {
             showThinking: showThinking,
             options: pipelineOptions,
             streamCallback: req.method === 'GET' ? (data, done, rawChunk) => {
-                this.handleStreamCallback(data, done, rawChunk, wsService.default, chatNoteId, res);
-                if (data) accumulatedContent += data;
+                this.handleStreamCallback(data, done, rawChunk, wsService.default, chatNoteId, res, accumulatedContentRef);
             } : undefined
         };

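Switching from a bare "let accumulatedContent" to a "{ value: '' }" ref object matters because accumulation moves into handleStreamCallback, a separate method: a string argument is passed by value, so the callee could never update the caller's local. A stripped-down sketch of the shared mutable-ref pattern, independent of Trilium:

// Minimal sketch of the shared mutable-ref pattern.
function accumulate(chunk: string, ref: { value: string }): void {
    ref.value += chunk; // mutating the object is visible to the caller
}

const ref = { value: '' };
accumulate("Hello, ", ref);
accumulate("world", ref);
console.log(ref.value); // "Hello, world"

// A plain string parameter would not work: reassignment inside the callee
// would be invisible to the caller.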
@@ -194,13 +193,15 @@ class RestChatService {
             };
         } else {
             // For streaming, response is already sent via WebSocket/SSE
-            // Save the accumulated content
-            if (accumulatedContent) {
+            // Save the accumulated content - prefer accumulated content over response.text
+            const finalContent = accumulatedContentRef.value || response.text || '';
+            if (finalContent) {
                 chat.messages.push({
                     role: 'assistant',
-                    content: accumulatedContent
+                    content: finalContent
                 });
                 await chatStorageService.updateChat(chat.id, chat.messages, chat.title);
+                log.info(`Saved accumulated streaming content: ${finalContent.length} characters`);
             }
             return null;
         }
@@ -219,7 +220,8 @@ class RestChatService {
         rawChunk: any,
         wsService: any,
         chatNoteId: string,
-        res: Response
+        res: Response,
+        accumulatedContentRef: { value: string }
     ) {
         const message: LLMStreamMessage = {
             type: 'llm-stream',
@@ -229,6 +231,15 @@ class RestChatService {

         if (data) {
             message.content = data;
+            // Handle accumulation carefully - if this appears to be a complete response
+            // (done=true and data is much longer than current accumulated), replace rather than append
+            if (done && data.length > accumulatedContentRef.value.length && data.includes(accumulatedContentRef.value)) {
+                // This looks like a complete final response that includes what we've accumulated
+                accumulatedContentRef.value = data;
+            } else {
+                // Normal incremental accumulation
+                accumulatedContentRef.value += data;
+            }
         }

         if (rawChunk && 'thinking' in rawChunk && rawChunk.thinking) {
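The replace-vs-append branch guards against providers whose final chunk repeats the entire response rather than just the last delta. The same heuristic expressed as a pure function, with a few worked cases (a sketch, not the committed code):

// Sketch of the accumulation heuristic as a pure function.
function accumulateChunk(current: string, data: string, done: boolean): string {
    // A final chunk that is longer than, and contains, everything accumulated
    // so far is treated as the complete response and replaces the buffer.
    if (done && data.length > current.length && data.includes(current)) {
        return data;
    }
    // Otherwise treat it as an ordinary incremental delta.
    return current + data;
}

console.log(accumulateChunk("Hel", "lo", false));   // "Hello"
console.log(accumulateChunk("Hel", "Hello", true)); // "Hello" (replaced)
console.log(accumulateChunk("Hello", "!", true));   // "Hello!" (appended)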