mirror of
https://github.com/zadam/trilium.git
synced 2026-03-30 00:30:22 +02:00
Compare commits
68 Commits
main
...
experiment
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
abbe6437a9 | ||
|
|
f2d67d4128 | ||
|
|
7c9e02996e | ||
|
|
c43e10c4af | ||
|
|
25037324ab | ||
|
|
b8f9916d13 | ||
|
|
ed8b9cc943 | ||
|
|
efbe7e0a21 | ||
|
|
46dd500d37 | ||
|
|
261c95fb06 | ||
|
|
41a122f722 | ||
|
|
490406e12a | ||
|
|
d12677094d | ||
|
|
3c69792744 | ||
|
|
395e79adbf | ||
|
|
d5e56d8e29 | ||
|
|
e4c4873aa7 | ||
|
|
293da1d4ef | ||
|
|
d1c206a05a | ||
|
|
37b370511f | ||
|
|
734ef5533a | ||
|
|
0eb9b9fdac | ||
|
|
7817890cfe | ||
|
|
23dbedd139 | ||
|
|
2c8e2251fa | ||
|
|
4c27ed9997 | ||
|
|
d2fd1362c0 | ||
|
|
45e57f0d5e | ||
|
|
660facea96 | ||
|
|
9fa2e940d6 | ||
|
|
0ffcfb8f43 | ||
|
|
ad1b3df74e | ||
|
|
0ccf10bbbb | ||
|
|
59c007e801 | ||
|
|
0654bc1049 | ||
|
|
9fabefc847 | ||
|
|
e70ded0be1 | ||
|
|
16806275e0 | ||
|
|
e8214c3aae | ||
|
|
3a8e148301 | ||
|
|
a0b546614f | ||
|
|
5fcea86b94 | ||
|
|
d8c00ed6c0 | ||
|
|
863e68ec88 | ||
|
|
046ee343dc | ||
|
|
2db9e376d5 | ||
|
|
9458128ad6 | ||
|
|
89638e3f56 | ||
|
|
8d492d7d4b | ||
|
|
246c561b64 | ||
|
|
88295f2462 | ||
|
|
d2d4e1cbac | ||
|
|
261e5b59e0 | ||
|
|
fa7ec01329 | ||
|
|
4c4a29f9cf | ||
|
|
9ddcaf4552 | ||
|
|
c806a99fbc | ||
|
|
ad91d360ce | ||
|
|
cf8d7cd71f | ||
|
|
f370799b1d | ||
|
|
f8655b5de4 | ||
|
|
b551f0fe2d | ||
|
|
f6e8bdb0fd | ||
|
|
9029ea8085 | ||
|
|
d61ade9fe9 | ||
|
|
aa1fe549c7 | ||
|
|
e3701bbcb4 | ||
|
|
fb7fc4bf0c |
@@ -125,6 +125,15 @@ Trilium provides powerful user scripting capabilities:
|
||||
- OpenID and TOTP authentication support
|
||||
- Sanitization of user-generated content
|
||||
|
||||
### Client-Side API Restrictions
|
||||
- **Do not use `crypto.randomUUID()`** or other Web Crypto APIs that require secure contexts - Trilium can run over HTTP, not just HTTPS
|
||||
- Use `randomString()` from `apps/client/src/services/utils.ts` for generating IDs instead
|
||||
|
||||
### Shared Types Policy
|
||||
- Types shared between client and server belong in `@triliumnext/commons` (`packages/commons/src/lib/`)
|
||||
- Import shared types directly from `@triliumnext/commons` - do not re-export them from app-specific modules
|
||||
- Keep app-specific types (e.g., `LlmProvider` for server, `StreamCallbacks` for client) in their respective apps
|
||||
|
||||
## Common Development Tasks
|
||||
|
||||
### Adding New Note Types
|
||||
|
||||
@@ -508,7 +508,7 @@ type EventMappings = {
|
||||
contentSafeMarginChanged: {
|
||||
top: number;
|
||||
noteContext: NoteContext;
|
||||
}
|
||||
};
|
||||
};
|
||||
|
||||
export type EventListener<T extends EventNames> = {
|
||||
|
||||
@@ -18,7 +18,7 @@ const RELATION = "relation";
|
||||
* end user. Those types should be used only for checking against, they are
|
||||
* not for direct use.
|
||||
*/
|
||||
export type NoteType = "file" | "image" | "search" | "noteMap" | "launcher" | "doc" | "contentWidget" | "text" | "relationMap" | "render" | "canvas" | "mermaid" | "book" | "webView" | "code" | "mindMap" | "spreadsheet";
|
||||
export type NoteType = "file" | "image" | "search" | "noteMap" | "launcher" | "doc" | "contentWidget" | "text" | "relationMap" | "render" | "canvas" | "mermaid" | "book" | "webView" | "code" | "mindMap" | "spreadsheet" | "llmChat";
|
||||
|
||||
export interface NotePathRecord {
|
||||
isArchived: boolean;
|
||||
|
||||
@@ -84,6 +84,55 @@ async function createSearchNote(opts = {}) {
|
||||
return await froca.getNote(note.noteId);
|
||||
}
|
||||
|
||||
async function createLlmChat() {
|
||||
const note = await server.post<FNoteRow>("special-notes/llm-chat");
|
||||
|
||||
await ws.waitForMaxKnownEntityChangeId();
|
||||
|
||||
return await froca.getNote(note.noteId);
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the most recently modified LLM chat.
|
||||
* Returns null if no chat exists.
|
||||
*/
|
||||
async function getMostRecentLlmChat() {
|
||||
const note = await server.get<FNoteRow | null>("special-notes/most-recent-llm-chat");
|
||||
|
||||
if (!note) {
|
||||
return null;
|
||||
}
|
||||
|
||||
await ws.waitForMaxKnownEntityChangeId();
|
||||
|
||||
return await froca.getNote(note.noteId);
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the most recent LLM chat, or creates a new one if none exists.
|
||||
* Used by sidebar chat for persistent conversations across page refreshes.
|
||||
*/
|
||||
async function getOrCreateLlmChat() {
|
||||
const note = await server.get<FNoteRow>("special-notes/get-or-create-llm-chat");
|
||||
|
||||
await ws.waitForMaxKnownEntityChangeId();
|
||||
|
||||
return await froca.getNote(note.noteId);
|
||||
}
|
||||
|
||||
/** Summary row for one LLM chat note, as returned by the recent-chats endpoint. */
export interface RecentLlmChat {
    noteId: string; // ID of the underlying chat note
    title: string; // note title shown in the history popup
    dateModified: string; // last-modified timestamp; rendered via toLocaleDateString in the sidebar
}
|
||||
|
||||
/**
|
||||
* Gets a list of recent LLM chats for the history popup.
|
||||
*/
|
||||
async function getRecentLlmChats(limit: number = 10): Promise<RecentLlmChat[]> {
|
||||
return await server.get<RecentLlmChat[]>(`special-notes/recent-llm-chats?limit=${limit}`);
|
||||
}
|
||||
|
||||
export default {
|
||||
getInboxNote,
|
||||
getTodayNote,
|
||||
@@ -94,5 +143,9 @@ export default {
|
||||
getMonthNote,
|
||||
getYearNote,
|
||||
createSqlConsole,
|
||||
createSearchNote
|
||||
createSearchNote,
|
||||
createLlmChat,
|
||||
getMostRecentLlmChat,
|
||||
getOrCreateLlmChat,
|
||||
getRecentLlmChats
|
||||
};
|
||||
|
||||
@@ -19,7 +19,8 @@ export const byNoteType: Record<Exclude<NoteType, "book">, string | null> = {
|
||||
search: null,
|
||||
text: null,
|
||||
webView: null,
|
||||
spreadsheet: null
|
||||
spreadsheet: null,
|
||||
llmChat: null
|
||||
};
|
||||
|
||||
export const byBookType: Record<ViewTypeOptions, string | null> = {
|
||||
|
||||
109
apps/client/src/services/llm_chat.ts
Normal file
109
apps/client/src/services/llm_chat.ts
Normal file
@@ -0,0 +1,109 @@
|
||||
import type { LlmMessage, LlmCitation, LlmChatConfig, LlmUsage, LlmModelInfo } from "@triliumnext/commons";
|
||||
import server from "./server.js";
|
||||
|
||||
/**
|
||||
* Fetch available models for a provider.
|
||||
*/
|
||||
export async function getAvailableModels(provider: string = "anthropic"): Promise<LlmModelInfo[]> {
|
||||
const response = await server.get<{ models?: LlmModelInfo[] }>(`llm-chat/models?provider=${encodeURIComponent(provider)}`);
|
||||
return response.models ?? [];
|
||||
}
|
||||
|
||||
/**
 * Callbacks invoked while streaming a chat completion over SSE.
 * Required callbacks fire for every stream; optional ones only when the
 * corresponding event type arrives.
 */
export interface StreamCallbacks {
    /** Receives each incremental chunk of assistant text. */
    onChunk: (text: string) => void;
    /** Receives incremental "thinking" (reasoning) text, if emitted. */
    onThinking?: (text: string) => void;
    /** Fired when the model invokes a tool, with the tool's name and input payload. */
    onToolUse?: (toolName: string, input: Record<string, unknown>) => void;
    /** Fired when a tool call returns its result. */
    onToolResult?: (toolName: string, result: string) => void;
    /** Fired for each citation attached to the response. */
    onCitation?: (citation: LlmCitation) => void;
    /** Fired with token-usage accounting for the exchange. */
    onUsage?: (usage: LlmUsage) => void;
    /** Fired on HTTP failure, missing body, or a server-sent "error" event. */
    onError: (error: string) => void;
    /** Fired when the server signals the stream is complete. */
    onDone: () => void;
}
|
||||
|
||||
/**
|
||||
* Stream a chat completion from the LLM API using Server-Sent Events.
|
||||
*/
|
||||
export async function streamChatCompletion(
|
||||
messages: LlmMessage[],
|
||||
config: LlmChatConfig,
|
||||
callbacks: StreamCallbacks
|
||||
): Promise<void> {
|
||||
const headers = await server.getHeaders();
|
||||
|
||||
const response = await fetch(`${window.glob.baseApiUrl}llm-chat/stream`, {
|
||||
method: "POST",
|
||||
headers: {
|
||||
...headers,
|
||||
"Content-Type": "application/json"
|
||||
} as HeadersInit,
|
||||
body: JSON.stringify({ messages, config })
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
callbacks.onError(`HTTP ${response.status}: ${response.statusText}`);
|
||||
return;
|
||||
}
|
||||
|
||||
const reader = response.body?.getReader();
|
||||
if (!reader) {
|
||||
callbacks.onError("No response body");
|
||||
return;
|
||||
}
|
||||
|
||||
const decoder = new TextDecoder();
|
||||
let buffer = "";
|
||||
|
||||
try {
|
||||
while (true) {
|
||||
const { done, value } = await reader.read();
|
||||
if (done) break;
|
||||
|
||||
buffer += decoder.decode(value, { stream: true });
|
||||
const lines = buffer.split("\n");
|
||||
buffer = lines.pop() || "";
|
||||
|
||||
for (const line of lines) {
|
||||
if (line.startsWith("data: ")) {
|
||||
try {
|
||||
const data = JSON.parse(line.slice(6));
|
||||
|
||||
switch (data.type) {
|
||||
case "text":
|
||||
callbacks.onChunk(data.content);
|
||||
break;
|
||||
case "thinking":
|
||||
callbacks.onThinking?.(data.content);
|
||||
break;
|
||||
case "tool_use":
|
||||
callbacks.onToolUse?.(data.toolName, data.toolInput);
|
||||
break;
|
||||
case "tool_result":
|
||||
callbacks.onToolResult?.(data.toolName, data.result);
|
||||
break;
|
||||
case "citation":
|
||||
if (data.citation) {
|
||||
callbacks.onCitation?.(data.citation);
|
||||
}
|
||||
break;
|
||||
case "usage":
|
||||
if (data.usage) {
|
||||
callbacks.onUsage?.(data.usage);
|
||||
}
|
||||
break;
|
||||
case "error":
|
||||
callbacks.onError(data.error);
|
||||
break;
|
||||
case "done":
|
||||
callbacks.onDone();
|
||||
break;
|
||||
}
|
||||
} catch {
|
||||
// Ignore JSON parse errors for partial data
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
reader.releaseLock();
|
||||
}
|
||||
}
|
||||
@@ -41,6 +41,7 @@ export const NOTE_TYPES: NoteTypeMapping[] = [
|
||||
{ type: "relationMap", mime: "application/json", title: t("note_types.relation-map"), icon: "bxs-network-chart" },
|
||||
|
||||
// Misc note types
|
||||
{ type: "llmChat", mime: "application/json", title: t("note_types.llm-chat"), icon: "bx-message-square-dots" },
|
||||
{ type: "render", mime: "", title: t("note_types.render-note"), icon: "bx-extension" },
|
||||
{ type: "search", title: t("note_types.saved-search"), icon: "bx-file-find", static: true },
|
||||
{ type: "webView", mime: "", title: t("note_types.web-view"), icon: "bx-globe-alt" },
|
||||
|
||||
@@ -1599,6 +1599,7 @@
|
||||
"geo-map": "Geo Map",
|
||||
"beta-feature": "Beta",
|
||||
"ai-chat": "AI Chat",
|
||||
"llm-chat": "AI Chat",
|
||||
"task-list": "Task List",
|
||||
"new-feature": "New",
|
||||
"collections": "Collections",
|
||||
@@ -1610,6 +1611,43 @@
|
||||
"toggle-on-hint": "Note is not protected, click to make it protected",
|
||||
"toggle-off-hint": "Note is protected, click to make it unprotected"
|
||||
},
|
||||
"llm_chat": {
|
||||
"placeholder": "Type a message...",
|
||||
"send": "Send",
|
||||
"sending": "Sending...",
|
||||
"empty_state": "Start a conversation by typing a message below.",
|
||||
"searching_web": "Searching the web...",
|
||||
"web_search": "Web search",
|
||||
"note_tools": "Note access",
|
||||
"sources": "Sources",
|
||||
"extended_thinking": "Extended thinking",
|
||||
"legacy_models": "Legacy models",
|
||||
"thinking": "Thinking...",
|
||||
"thought_process": "Thought process",
|
||||
"tool_calls": "{{count}} tool call(s)",
|
||||
"input": "Input",
|
||||
"result": "Result",
|
||||
"tokens_used": "{{prompt}} prompt + {{completion}} completion = {{total}} tokens",
|
||||
"tokens_used_with_cost": "{{prompt}} prompt + {{completion}} completion = {{total}} tokens (~${{cost}})",
|
||||
"tokens_used_with_model": "{{model}}: {{prompt}} prompt + {{completion}} completion = {{total}} tokens",
|
||||
"tokens_used_with_model_and_cost": "{{model}}: {{prompt}} prompt + {{completion}} completion = {{total}} tokens (~${{cost}})",
|
||||
"tokens": "tokens",
|
||||
"context_used": "{{percentage}}% used",
|
||||
"note_context_enabled": "Click to disable note context: {{title}}",
|
||||
"note_context_disabled": "Click to include current note in context",
|
||||
"no_provider_message": "No AI provider configured. Add one to start chatting.",
|
||||
"add_provider": "Add AI Provider"
|
||||
},
|
||||
"sidebar_chat": {
|
||||
"title": "AI Chat",
|
||||
"launcher_title": "Open AI Chat",
|
||||
"new_chat": "Start new chat",
|
||||
"save_chat": "Save chat to notes",
|
||||
"empty_state": "Start a conversation",
|
||||
"history": "Chat history",
|
||||
"recent_chats": "Recent chats",
|
||||
"no_chats": "No previous chats"
|
||||
},
|
||||
"shared_switch": {
|
||||
"shared": "Shared",
|
||||
"toggle-on-title": "Share the note",
|
||||
@@ -2230,5 +2268,21 @@
|
||||
"sample_xy": "XY",
|
||||
"sample_venn": "Venn",
|
||||
"sample_ishikawa": "Ishikawa"
|
||||
},
|
||||
"llm": {
|
||||
"settings_title": "AI / LLM",
|
||||
"settings_description": "Configure AI and Large Language Model integrations.",
|
||||
"add_provider": "Add Provider",
|
||||
"add_provider_title": "Add AI Provider",
|
||||
"configured_providers": "Configured Providers",
|
||||
"no_providers_configured": "No providers configured yet.",
|
||||
"provider_name": "Name",
|
||||
"provider_type": "Provider",
|
||||
"actions": "Actions",
|
||||
"delete_provider": "Delete",
|
||||
"delete_provider_confirmation": "Are you sure you want to delete the provider \"{{name}}\"?",
|
||||
"api_key": "API Key",
|
||||
"api_key_placeholder": "Enter your API key",
|
||||
"cancel": "Cancel"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -12,6 +12,7 @@ import HistoryNavigationButton from "./HistoryNavigation";
|
||||
import { LaunchBarContext } from "./launch_bar_widgets";
|
||||
import { CommandButton, CustomWidget, NoteLauncher, QuickSearchLauncherWidget, ScriptLauncher, TodayLauncher } from "./LauncherDefinitions";
|
||||
import ProtectedSessionStatusWidget from "./ProtectedSessionStatusWidget";
|
||||
import SidebarChatButton from "./SidebarChatButton";
|
||||
import SpacerWidget from "./SpacerWidget";
|
||||
import SyncStatus from "./SyncStatus";
|
||||
|
||||
@@ -98,6 +99,8 @@ function initBuiltinWidget(note: FNote, isHorizontalLayout: boolean) {
|
||||
return <QuickSearchLauncherWidget />;
|
||||
case "mobileTabSwitcher":
|
||||
return <TabSwitcher />;
|
||||
case "sidebarChat":
|
||||
return <SidebarChatButton />;
|
||||
default:
|
||||
console.warn(`Unrecognized builtin widget ${builtinWidget} for launcher ${note.noteId} "${note.title}"`);
|
||||
}
|
||||
|
||||
24
apps/client/src/widgets/launch_bar/SidebarChatButton.tsx
Normal file
24
apps/client/src/widgets/launch_bar/SidebarChatButton.tsx
Normal file
@@ -0,0 +1,24 @@
|
||||
import { useCallback } from "preact/hooks";
|
||||
|
||||
import appContext from "../../components/app_context";
|
||||
import { t } from "../../services/i18n";
|
||||
import { LaunchBarActionButton } from "./launch_bar_widgets";
|
||||
|
||||
/**
|
||||
* Launcher button to open the sidebar (which contains the chat).
|
||||
* The chat widget is always visible in the sidebar for non-chat notes.
|
||||
*/
|
||||
export default function SidebarChatButton() {
|
||||
const handleClick = useCallback(() => {
|
||||
// Open right pane if hidden, or toggle it if visible
|
||||
appContext.triggerEvent("toggleRightPane", {});
|
||||
}, []);
|
||||
|
||||
return (
|
||||
<LaunchBarActionButton
|
||||
icon="bx bx-message-square-dots"
|
||||
text={t("sidebar_chat.launcher_title")}
|
||||
onClick={handleClick}
|
||||
/>
|
||||
);
|
||||
}
|
||||
@@ -12,7 +12,7 @@ import { TypeWidgetProps } from "./type_widgets/type_widget";
|
||||
* A `NoteType` altered by the note detail widget, taking into consideration whether the note is editable or not and adding special note types such as an empty one,
|
||||
* for protected session or attachment information.
|
||||
*/
|
||||
export type ExtendedNoteType = Exclude<NoteType, "launcher" | "text" | "code"> | "empty" | "readOnlyCode" | "readOnlyText" | "editableText" | "editableCode" | "attachmentDetail" | "attachmentList" | "protectedSession" | "sqlConsole";
|
||||
export type ExtendedNoteType = Exclude<NoteType, "launcher" | "text" | "code" | "llmChat"> | "empty" | "readOnlyCode" | "readOnlyText" | "editableText" | "editableCode" | "attachmentDetail" | "attachmentList" | "protectedSession" | "sqlConsole" | "llmChat";
|
||||
|
||||
export type TypeWidget = ((props: TypeWidgetProps) => VNode | JSX.Element | undefined);
|
||||
type NoteTypeView = () => (Promise<{ default: TypeWidget } | TypeWidget> | TypeWidget);
|
||||
@@ -147,5 +147,11 @@ export const TYPE_MAPPINGS: Record<ExtendedNoteType, NoteTypeMapping> = {
|
||||
className: "note-detail-spreadsheet",
|
||||
printable: true,
|
||||
isFullHeight: true
|
||||
},
|
||||
llmChat: {
|
||||
view: () => import("./type_widgets/llm_chat/LlmChat"),
|
||||
className: "note-detail-llm-chat",
|
||||
printable: true,
|
||||
isFullHeight: true
|
||||
}
|
||||
};
|
||||
|
||||
@@ -5,16 +5,27 @@ interface FormDropdownList<T> extends Omit<DropdownProps, "children"> {
|
||||
values: T[];
|
||||
keyProperty: keyof T;
|
||||
titleProperty: keyof T;
|
||||
/** Property to show as a small suffix next to the title */
|
||||
titleSuffixProperty?: keyof T;
|
||||
descriptionProperty?: keyof T;
|
||||
currentValue: string;
|
||||
onChange(newValue: string): void;
|
||||
}
|
||||
|
||||
export default function FormDropdownList<T>({ values, keyProperty, titleProperty, descriptionProperty, currentValue, onChange, ...restProps }: FormDropdownList<T>) {
|
||||
export default function FormDropdownList<T>({ values, keyProperty, titleProperty, titleSuffixProperty, descriptionProperty, currentValue, onChange, ...restProps }: FormDropdownList<T>) {
|
||||
const currentValueData = values.find(value => value[keyProperty] === currentValue);
|
||||
|
||||
const renderTitle = (item: T) => {
|
||||
const title = item[titleProperty] as string;
|
||||
const suffix = titleSuffixProperty ? item[titleSuffixProperty] as string : null;
|
||||
if (suffix) {
|
||||
return <>{title} <small>{suffix}</small></>;
|
||||
}
|
||||
return title;
|
||||
};
|
||||
|
||||
return (
|
||||
<Dropdown text={currentValueData?.[titleProperty] ?? ""} {...restProps}>
|
||||
<Dropdown text={currentValueData ? renderTitle(currentValueData) : ""} {...restProps}>
|
||||
{values.map(item => (
|
||||
<FormListItem
|
||||
onClick={() => onChange(item[keyProperty] as string)}
|
||||
@@ -22,9 +33,9 @@ export default function FormDropdownList<T>({ values, keyProperty, titleProperty
|
||||
description={descriptionProperty && item[descriptionProperty] as string}
|
||||
selected={currentValue === item[keyProperty]}
|
||||
>
|
||||
{item[titleProperty] as string}
|
||||
{renderTitle(item)}
|
||||
</FormListItem>
|
||||
))}
|
||||
</Dropdown>
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -85,7 +85,7 @@ export function NoteContextMenu({ note, noteContext, itemsAtStart, itemsNearNote
|
||||
);
|
||||
const isElectron = getIsElectron();
|
||||
const isMac = getIsMac();
|
||||
const hasSource = ["text", "code", "relationMap", "mermaid", "canvas", "mindMap", "spreadsheet"].includes(noteType);
|
||||
const hasSource = ["text", "code", "relationMap", "mermaid", "canvas", "mindMap", "spreadsheet", "llmChat"].includes(noteType);
|
||||
const isSearchOrBook = ["search", "book"].includes(noteType);
|
||||
const isHelpPage = note.noteId.startsWith("_help");
|
||||
const [syncServerHost] = useTriliumOption("syncServerHost");
|
||||
|
||||
@@ -19,6 +19,7 @@ import PdfAttachments from "./pdf/PdfAttachments";
|
||||
import PdfLayers from "./pdf/PdfLayers";
|
||||
import PdfPages from "./pdf/PdfPages";
|
||||
import RightPanelWidget from "./RightPanelWidget";
|
||||
import SidebarChat from "./SidebarChat";
|
||||
import TableOfContents from "./TableOfContents";
|
||||
|
||||
const MIN_WIDTH_PERCENT = 5;
|
||||
@@ -91,6 +92,11 @@ function useItems(rightPaneVisible: boolean, widgetsByParent: WidgetsByParent) {
|
||||
el: <HighlightsList />,
|
||||
enabled: noteType === "text" && highlightsList.length > 0,
|
||||
},
|
||||
{
|
||||
el: <SidebarChat />,
|
||||
enabled: noteType !== "llmChat",
|
||||
position: 1000
|
||||
},
|
||||
...widgetsByParent.getLegacyWidgets("right-pane").map((widget) => ({
|
||||
el: <CustomLegacyWidget key={widget._noteId} originalWidget={widget as LegacyRightPanelWidget} />,
|
||||
enabled: true,
|
||||
|
||||
@@ -51,7 +51,7 @@ export default function RightPanelWidget({ id, title, buttons, children, contain
|
||||
>
|
||||
<ActionButton icon="bx bx-chevron-down" text="" />
|
||||
<div class="card-header-title">{title}</div>
|
||||
<div class="card-header-buttons">
|
||||
<div class="card-header-buttons" onClick={e => e.stopPropagation()}>
|
||||
{buttons}
|
||||
{contextMenuItems && (
|
||||
<ActionButton
|
||||
|
||||
118
apps/client/src/widgets/sidebar/SidebarChat.css
Normal file
118
apps/client/src/widgets/sidebar/SidebarChat.css
Normal file
@@ -0,0 +1,118 @@
|
||||
/* Sidebar Chat Widget Styles */
|
||||
|
||||
.sidebar-chat-container {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
flex: 1;
|
||||
min-height: 0; /* Allow shrinking in flex context */
|
||||
overflow: hidden; /* Contain children within available space */
|
||||
}
|
||||
|
||||
.sidebar-chat-container .llm-chat-input-form {
|
||||
flex-shrink: 0; /* Keep input bar from shrinking */
|
||||
|
||||
.llm-chat-input {
|
||||
font-size: 0.9em;
|
||||
padding: 0.5em;
|
||||
}
|
||||
}
|
||||
|
||||
.sidebar-chat-messages {
|
||||
flex: 1;
|
||||
min-height: 0; /* Allow flex shrinking for scroll containment */
|
||||
overflow-y: auto;
|
||||
padding: 0.5rem;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 0.5rem;
|
||||
}
|
||||
|
||||
/* Reuse llm-chat-message styles but make them more compact */
|
||||
.sidebar-chat-messages .llm-chat-message {
|
||||
padding: 0.5rem 0.75rem;
|
||||
margin-bottom: 0;
|
||||
max-width: 100%;
|
||||
font-size: 0.9rem;
|
||||
}
|
||||
|
||||
.sidebar-chat-messages .llm-chat-message-role {
|
||||
font-size: 0.75rem;
|
||||
}
|
||||
|
||||
.sidebar-chat-messages .llm-chat-tool-activity {
|
||||
font-size: 0.85rem;
|
||||
padding: 0.375rem 0.75rem;
|
||||
margin-bottom: 0;
|
||||
max-width: 100%;
|
||||
}
|
||||
|
||||
/* Make the sidebar chat widget grow to fill available space when expanded */
|
||||
#right-pane .widget.grow:not(.collapsed) {
|
||||
flex: 1;
|
||||
flex-shrink: 1; /* Override flex-shrink: 0 from main styles */
|
||||
min-height: 0;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
}
|
||||
|
||||
#right-pane .widget.grow:not(.collapsed) .body-wrapper {
|
||||
flex: 1;
|
||||
min-height: 0;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
overflow: hidden; /* Override overflow: auto from main styles */
|
||||
}
|
||||
|
||||
#right-pane .widget.grow:not(.collapsed) .card-body {
|
||||
flex: 1;
|
||||
min-height: 0;
|
||||
overflow: hidden; /* Override overflow: auto - let child handle scrolling */
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
}
|
||||
|
||||
/* Compact markdown in sidebar */
|
||||
.sidebar-chat-messages .llm-chat-markdown {
|
||||
font-size: 0.9rem;
|
||||
line-height: 1.5;
|
||||
}
|
||||
|
||||
.sidebar-chat-messages .llm-chat-markdown p {
|
||||
margin: 0 0 0.5em 0;
|
||||
}
|
||||
|
||||
.sidebar-chat-messages .llm-chat-markdown pre {
|
||||
padding: 0.5rem;
|
||||
font-size: 0.8rem;
|
||||
}
|
||||
|
||||
.sidebar-chat-messages .llm-chat-markdown code {
|
||||
font-size: 0.85em;
|
||||
}
|
||||
|
||||
/* History dropdown */
|
||||
.sidebar-chat-history-empty {
|
||||
padding: 0.75rem 1rem;
|
||||
text-align: center;
|
||||
color: var(--muted-text-color);
|
||||
font-size: 0.9rem;
|
||||
}
|
||||
|
||||
.sidebar-chat-history-item-content {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
min-width: 0;
|
||||
}
|
||||
|
||||
.sidebar-chat-history-item-content span,
|
||||
.sidebar-chat-history-item-content strong {
|
||||
overflow: hidden;
|
||||
text-overflow: ellipsis;
|
||||
white-space: nowrap;
|
||||
}
|
||||
|
||||
.sidebar-chat-history-date {
|
||||
font-size: 0.75rem;
|
||||
color: var(--muted-text-color);
|
||||
margin-top: 0.125rem;
|
||||
}
|
||||
315
apps/client/src/widgets/sidebar/SidebarChat.tsx
Normal file
315
apps/client/src/widgets/sidebar/SidebarChat.tsx
Normal file
@@ -0,0 +1,315 @@
|
||||
import type { Dropdown as BootstrapDropdown } from "bootstrap";
|
||||
import { useCallback, useEffect, useRef, useState } from "preact/hooks";
|
||||
|
||||
import dateNoteService, { type RecentLlmChat } from "../../services/date_notes.js";
|
||||
import { t } from "../../services/i18n.js";
|
||||
import server from "../../services/server.js";
|
||||
import ActionButton from "../react/ActionButton.js";
|
||||
import Dropdown from "../react/Dropdown.js";
|
||||
import { FormListItem } from "../react/FormList.js";
|
||||
import { useActiveNoteContext } from "../react/hooks.js";
|
||||
import NoItems from "../react/NoItems.js";
|
||||
import ChatInputBar from "../type_widgets/llm_chat/ChatInputBar.js";
|
||||
import ChatMessage from "../type_widgets/llm_chat/ChatMessage.js";
|
||||
import type { LlmChatContent } from "../type_widgets/llm_chat/llm_chat_types.js";
|
||||
import { useLlmChat } from "../type_widgets/llm_chat/useLlmChat.js";
|
||||
import RightPanelWidget from "./RightPanelWidget.js";
|
||||
import "./SidebarChat.css";
|
||||
|
||||
/**
|
||||
* Sidebar chat widget that appears in the right panel.
|
||||
* Uses a hidden LLM chat note for persistence across all notes.
|
||||
* The same chat persists when switching between notes.
|
||||
*/
|
||||
export default function SidebarChat() {
|
||||
const [chatNoteId, setChatNoteId] = useState<string | null>(null);
|
||||
const [shouldSave, setShouldSave] = useState(false);
|
||||
const [recentChats, setRecentChats] = useState<RecentLlmChat[]>([]);
|
||||
const saveTimeoutRef = useRef<ReturnType<typeof setTimeout>>();
|
||||
const historyDropdownRef = useRef<BootstrapDropdown | null>(null);
|
||||
|
||||
// Get the current active note context
|
||||
const { noteId: activeNoteId, note: activeNote } = useActiveNoteContext();
|
||||
|
||||
// Use shared chat hook with sidebar-specific options
|
||||
const chat = useLlmChat(
|
||||
// onMessagesChange - trigger save
|
||||
() => setShouldSave(true),
|
||||
{ defaultEnableNoteTools: true, supportsExtendedThinking: true }
|
||||
);
|
||||
|
||||
// Update chat context when active note changes
|
||||
useEffect(() => {
|
||||
chat.setContextNoteId(activeNoteId ?? undefined);
|
||||
}, [activeNoteId, chat.setContextNoteId]);
|
||||
|
||||
// Ref to access chat methods in effects without triggering re-runs
|
||||
const chatRef = useRef(chat);
|
||||
chatRef.current = chat;
|
||||
|
||||
// Handle debounced save when shouldSave is triggered
|
||||
useEffect(() => {
|
||||
if (!shouldSave || !chatNoteId) {
|
||||
setShouldSave(false);
|
||||
return;
|
||||
}
|
||||
|
||||
setShouldSave(false);
|
||||
|
||||
if (saveTimeoutRef.current) {
|
||||
clearTimeout(saveTimeoutRef.current);
|
||||
}
|
||||
|
||||
saveTimeoutRef.current = setTimeout(async () => {
|
||||
const content = chat.getContent();
|
||||
try {
|
||||
await server.put(`notes/${chatNoteId}/data`, {
|
||||
content: JSON.stringify(content)
|
||||
});
|
||||
} catch (err) {
|
||||
console.error("Failed to save chat:", err);
|
||||
}
|
||||
}, 500);
|
||||
}, [shouldSave, chatNoteId, chat]);
|
||||
|
||||
// Load the most recent chat on mount (runs once)
|
||||
useEffect(() => {
|
||||
let cancelled = false;
|
||||
|
||||
const loadMostRecentChat = async () => {
|
||||
try {
|
||||
const existingChat = await dateNoteService.getMostRecentLlmChat();
|
||||
|
||||
if (cancelled) return;
|
||||
|
||||
if (existingChat) {
|
||||
setChatNoteId(existingChat.noteId);
|
||||
// Load content inline to avoid dependency issues
|
||||
try {
|
||||
const blob = await server.get<{ content: string }>(`notes/${existingChat.noteId}/blob`);
|
||||
if (!cancelled && blob?.content) {
|
||||
const parsed: LlmChatContent = JSON.parse(blob.content);
|
||||
chatRef.current.loadFromContent(parsed);
|
||||
}
|
||||
} catch (err) {
|
||||
console.error("Failed to load chat content:", err);
|
||||
}
|
||||
} else {
|
||||
// No existing chat - will create on first message
|
||||
setChatNoteId(null);
|
||||
chatRef.current.clearMessages();
|
||||
}
|
||||
} catch (err) {
|
||||
console.error("Failed to load sidebar chat:", err);
|
||||
}
|
||||
};
|
||||
|
||||
loadMostRecentChat();
|
||||
|
||||
return () => {
|
||||
cancelled = true;
|
||||
};
|
||||
}, []);
|
||||
|
||||
// Custom submit handler that ensures chat note exists first
|
||||
const handleSubmit = useCallback(async (e: Event) => {
|
||||
e.preventDefault();
|
||||
if (!chat.input.trim() || chat.isStreaming) return;
|
||||
|
||||
// Ensure chat note exists before sending (lazy creation)
|
||||
let noteId = chatNoteId;
|
||||
if (!noteId) {
|
||||
try {
|
||||
const note = await dateNoteService.getOrCreateLlmChat();
|
||||
if (note) {
|
||||
setChatNoteId(note.noteId);
|
||||
noteId = note.noteId;
|
||||
}
|
||||
} catch (err) {
|
||||
console.error("Failed to create sidebar chat:", err);
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
if (!noteId) {
|
||||
console.error("Cannot send message: no chat note available");
|
||||
return;
|
||||
}
|
||||
|
||||
// Delegate to shared handler
|
||||
await chat.handleSubmit(e);
|
||||
}, [chatNoteId, chat]);
|
||||
|
||||
const handleKeyDown = useCallback((e: KeyboardEvent) => {
|
||||
if (e.key === "Enter" && !e.shiftKey) {
|
||||
e.preventDefault();
|
||||
handleSubmit(e);
|
||||
}
|
||||
}, [handleSubmit]);
|
||||
|
||||
const handleNewChat = useCallback(async () => {
|
||||
try {
|
||||
const note = await dateNoteService.createLlmChat();
|
||||
if (note) {
|
||||
setChatNoteId(note.noteId);
|
||||
chat.clearMessages();
|
||||
}
|
||||
} catch (err) {
|
||||
console.error("Failed to create new chat:", err);
|
||||
}
|
||||
}, [chat]);
|
||||
|
||||
const handleSaveChat = useCallback(async () => {
|
||||
if (!chatNoteId) return;
|
||||
try {
|
||||
await server.post("special-notes/save-llm-chat", { llmChatNoteId: chatNoteId });
|
||||
// Create a new empty chat after saving
|
||||
const note = await dateNoteService.createLlmChat();
|
||||
if (note) {
|
||||
setChatNoteId(note.noteId);
|
||||
chat.clearMessages();
|
||||
}
|
||||
} catch (err) {
|
||||
console.error("Failed to save chat to permanent location:", err);
|
||||
}
|
||||
}, [chatNoteId, chat]);
|
||||
|
||||
const loadRecentChats = useCallback(async () => {
|
||||
try {
|
||||
const chats = await dateNoteService.getRecentLlmChats(10);
|
||||
setRecentChats(chats);
|
||||
} catch (err) {
|
||||
console.error("Failed to load recent chats:", err);
|
||||
}
|
||||
}, []);
|
||||
|
||||
const handleSelectChat = useCallback(async (noteId: string) => {
|
||||
historyDropdownRef.current?.hide();
|
||||
|
||||
if (noteId === chatNoteId) return;
|
||||
|
||||
try {
|
||||
const blob = await server.get<{ content: string }>(`notes/${noteId}/blob`);
|
||||
if (blob?.content) {
|
||||
const parsed: LlmChatContent = JSON.parse(blob.content);
|
||||
setChatNoteId(noteId);
|
||||
chat.loadFromContent(parsed);
|
||||
}
|
||||
} catch (err) {
|
||||
console.error("Failed to load selected chat:", err);
|
||||
}
|
||||
}, [chatNoteId, chat]);
|
||||
|
||||
return (
|
||||
<RightPanelWidget
|
||||
id="sidebar-chat"
|
||||
title={t("sidebar_chat.title")}
|
||||
grow
|
||||
buttons={
|
||||
<>
|
||||
<ActionButton
|
||||
icon="bx bx-plus"
|
||||
text=""
|
||||
title={t("sidebar_chat.new_chat")}
|
||||
onClick={handleNewChat}
|
||||
/>
|
||||
<Dropdown
|
||||
text=""
|
||||
buttonClassName="bx bx-history"
|
||||
title={t("sidebar_chat.history")}
|
||||
iconAction
|
||||
hideToggleArrow
|
||||
dropdownContainerClassName="tn-dropdown-menu-scrollable"
|
||||
dropdownOptions={{ popperConfig: { strategy: "fixed" } }}
|
||||
dropdownRef={historyDropdownRef}
|
||||
onShown={loadRecentChats}
|
||||
>
|
||||
{recentChats.length === 0 ? (
|
||||
<li className="sidebar-chat-history-empty">
|
||||
{t("sidebar_chat.no_chats")}
|
||||
</li>
|
||||
) : (
|
||||
recentChats.map(chatItem => (
|
||||
<FormListItem
|
||||
key={chatItem.noteId}
|
||||
icon="bx bx-message-square-dots"
|
||||
className={chatItem.noteId === chatNoteId ? "active" : ""}
|
||||
onClick={() => handleSelectChat(chatItem.noteId)}
|
||||
>
|
||||
<div className="sidebar-chat-history-item-content">
|
||||
{chatItem.noteId === chatNoteId
|
||||
? <strong>{chatItem.title}</strong>
|
||||
: <span>{chatItem.title}</span>}
|
||||
<span className="sidebar-chat-history-date">
|
||||
{new Date(chatItem.dateModified).toLocaleDateString()}
|
||||
</span>
|
||||
</div>
|
||||
</FormListItem>
|
||||
))
|
||||
)}
|
||||
</Dropdown>
|
||||
<ActionButton
|
||||
icon="bx bx-save"
|
||||
text=""
|
||||
title={t("sidebar_chat.save_chat")}
|
||||
onClick={handleSaveChat}
|
||||
disabled={chat.messages.length === 0}
|
||||
/>
|
||||
</>
|
||||
}
|
||||
>
|
||||
<div className="sidebar-chat-container">
|
||||
<div className="sidebar-chat-messages">
|
||||
{chat.messages.length === 0 && !chat.isStreaming && (
|
||||
<NoItems
|
||||
icon="bx bx-conversation"
|
||||
text={t("sidebar_chat.empty_state")}
|
||||
/>
|
||||
)}
|
||||
{chat.messages.map(msg => (
|
||||
<ChatMessage key={msg.id} message={msg} />
|
||||
))}
|
||||
{chat.toolActivity && !chat.streamingThinking && (
|
||||
<div className="llm-chat-tool-activity">
|
||||
<span className="llm-chat-tool-spinner" />
|
||||
{chat.toolActivity}
|
||||
</div>
|
||||
)}
|
||||
{chat.isStreaming && chat.streamingThinking && (
|
||||
<ChatMessage
|
||||
message={{
|
||||
id: "streaming-thinking",
|
||||
role: "assistant",
|
||||
content: chat.streamingThinking,
|
||||
createdAt: new Date().toISOString(),
|
||||
type: "thinking"
|
||||
}}
|
||||
isStreaming
|
||||
/>
|
||||
)}
|
||||
{chat.isStreaming && chat.streamingContent && (
|
||||
<ChatMessage
|
||||
message={{
|
||||
id: "streaming",
|
||||
role: "assistant",
|
||||
content: chat.streamingContent,
|
||||
createdAt: new Date().toISOString(),
|
||||
citations: chat.pendingCitations.length > 0 ? chat.pendingCitations : undefined
|
||||
}}
|
||||
isStreaming
|
||||
/>
|
||||
)}
|
||||
<div ref={chat.messagesEndRef} />
|
||||
</div>
|
||||
<ChatInputBar
|
||||
chat={chat}
|
||||
rows={2}
|
||||
activeNoteId={activeNoteId ?? undefined}
|
||||
activeNoteTitle={activeNote?.title}
|
||||
onSubmit={handleSubmit}
|
||||
onKeyDown={handleKeyDown}
|
||||
/>
|
||||
</div>
|
||||
</RightPanelWidget>
|
||||
);
|
||||
}
|
||||
@@ -14,11 +14,12 @@ import SyncOptions from "./options/sync";
|
||||
import OtherSettings from "./options/other";
|
||||
import InternationalizationOptions from "./options/i18n";
|
||||
import AdvancedSettings from "./options/advanced";
|
||||
import LlmSettings from "./options/llm";
|
||||
import "./ContentWidget.css";
|
||||
import { t } from "../../services/i18n";
|
||||
import BackendLog from "./code/BackendLog";
|
||||
|
||||
export type OptionPages = "_optionsAppearance" | "_optionsShortcuts" | "_optionsTextNotes" | "_optionsCodeNotes" | "_optionsImages" | "_optionsSpellcheck" | "_optionsPassword" | "_optionsMFA" | "_optionsEtapi" | "_optionsBackup" | "_optionsSync" | "_optionsOther" | "_optionsLocalization" | "_optionsAdvanced";
|
||||
export type OptionPages = "_optionsAppearance" | "_optionsShortcuts" | "_optionsTextNotes" | "_optionsCodeNotes" | "_optionsImages" | "_optionsSpellcheck" | "_optionsPassword" | "_optionsMFA" | "_optionsEtapi" | "_optionsBackup" | "_optionsSync" | "_optionsOther" | "_optionsLocalization" | "_optionsAdvanced" | "_optionsLlm";
|
||||
|
||||
const CONTENT_WIDGETS: Record<OptionPages | "_backendLog", (props: TypeWidgetProps) => JSX.Element> = {
|
||||
_optionsAppearance: AppearanceSettings,
|
||||
@@ -35,6 +36,7 @@ const CONTENT_WIDGETS: Record<OptionPages | "_backendLog", (props: TypeWidgetPro
|
||||
_optionsOther: OtherSettings,
|
||||
_optionsLocalization: InternationalizationOptions,
|
||||
_optionsAdvanced: AdvancedSettings,
|
||||
_optionsLlm: LlmSettings,
|
||||
_backendLog: BackendLog
|
||||
}
|
||||
|
||||
|
||||
238
apps/client/src/widgets/type_widgets/llm_chat/ChatInputBar.tsx
Normal file
238
apps/client/src/widgets/type_widgets/llm_chat/ChatInputBar.tsx
Normal file
@@ -0,0 +1,238 @@
|
||||
import type { RefObject } from "preact";
|
||||
import { useState, useCallback } from "preact/hooks";
|
||||
|
||||
import { t } from "../../../services/i18n.js";
|
||||
import ActionButton from "../../react/ActionButton.js";
|
||||
import Button from "../../react/Button.js";
|
||||
import Dropdown from "../../react/Dropdown.js";
|
||||
import { FormDropdownDivider, FormDropdownSubmenu, FormListItem, FormListToggleableItem } from "../../react/FormList.js";
|
||||
import type { UseLlmChatReturn } from "./useLlmChat.js";
|
||||
import AddProviderModal, { type LlmProviderConfig } from "../options/llm/AddProviderModal.js";
|
||||
import options from "../../../services/options.js";
|
||||
|
||||
/** Format token count with thousands separators */
|
||||
function formatTokenCount(tokens: number): string {
|
||||
return tokens.toLocaleString();
|
||||
}
|
||||
|
||||
interface ChatInputBarProps {
|
||||
/** The chat hook result */
|
||||
chat: UseLlmChatReturn;
|
||||
/** Number of rows for the textarea (default: 3) */
|
||||
rows?: number;
|
||||
/** Current active note ID (for note context toggle) */
|
||||
activeNoteId?: string;
|
||||
/** Current active note title (for note context toggle) */
|
||||
activeNoteTitle?: string;
|
||||
/** Custom submit handler (overrides chat.handleSubmit) */
|
||||
onSubmit?: (e: Event) => void;
|
||||
/** Custom key down handler (overrides chat.handleKeyDown) */
|
||||
onKeyDown?: (e: KeyboardEvent) => void;
|
||||
/** Callback when web search toggle changes */
|
||||
onWebSearchChange?: () => void;
|
||||
/** Callback when note tools toggle changes */
|
||||
onNoteToolsChange?: () => void;
|
||||
/** Callback when extended thinking toggle changes */
|
||||
onExtendedThinkingChange?: () => void;
|
||||
/** Callback when model changes */
|
||||
onModelChange?: (model: string) => void;
|
||||
}
|
||||
|
||||
export default function ChatInputBar({
|
||||
chat,
|
||||
rows = 3,
|
||||
activeNoteId,
|
||||
activeNoteTitle,
|
||||
onSubmit,
|
||||
onKeyDown,
|
||||
onWebSearchChange,
|
||||
onNoteToolsChange,
|
||||
onExtendedThinkingChange,
|
||||
onModelChange
|
||||
}: ChatInputBarProps) {
|
||||
const [showAddProviderModal, setShowAddProviderModal] = useState(false);
|
||||
|
||||
const handleSubmit = onSubmit ?? chat.handleSubmit;
|
||||
const handleKeyDown = onKeyDown ?? chat.handleKeyDown;
|
||||
|
||||
const handleWebSearchToggle = (newValue: boolean) => {
|
||||
chat.setEnableWebSearch(newValue);
|
||||
onWebSearchChange?.();
|
||||
};
|
||||
|
||||
const handleNoteToolsToggle = (newValue: boolean) => {
|
||||
chat.setEnableNoteTools(newValue);
|
||||
onNoteToolsChange?.();
|
||||
};
|
||||
|
||||
const handleExtendedThinkingToggle = (newValue: boolean) => {
|
||||
chat.setEnableExtendedThinking(newValue);
|
||||
onExtendedThinkingChange?.();
|
||||
};
|
||||
|
||||
const handleModelSelect = (model: string) => {
|
||||
chat.setSelectedModel(model);
|
||||
onModelChange?.(model);
|
||||
};
|
||||
|
||||
const handleNoteContextToggle = () => {
|
||||
if (chat.contextNoteId) {
|
||||
chat.setContextNoteId(undefined);
|
||||
} else if (activeNoteId) {
|
||||
chat.setContextNoteId(activeNoteId);
|
||||
}
|
||||
};
|
||||
|
||||
const handleAddProvider = useCallback(async (provider: LlmProviderConfig) => {
|
||||
// Get current providers and add the new one
|
||||
const currentProviders = options.getJson("llmProviders") || [];
|
||||
const newProviders = [...currentProviders, provider];
|
||||
await options.save("llmProviders", JSON.stringify(newProviders));
|
||||
// Refresh models to pick up the new provider
|
||||
chat.refreshModels();
|
||||
}, [chat]);
|
||||
|
||||
const isNoteContextEnabled = !!chat.contextNoteId && !!activeNoteId;
|
||||
|
||||
const currentModel = chat.availableModels.find(m => m.id === chat.selectedModel);
|
||||
const currentModels = chat.availableModels.filter(m => !m.isLegacy);
|
||||
const legacyModels = chat.availableModels.filter(m => m.isLegacy);
|
||||
const contextWindow = currentModel?.contextWindow || 200000;
|
||||
const percentage = Math.min((chat.lastPromptTokens / contextWindow) * 100, 100);
|
||||
const isWarning = percentage > 75;
|
||||
const isCritical = percentage > 90;
|
||||
const pieColor = isCritical ? "var(--danger-color, #d9534f)" : isWarning ? "var(--warning-color, #f0ad4e)" : "var(--main-selection-color, #007bff)";
|
||||
|
||||
// Show setup prompt if no provider is configured
|
||||
if (!chat.isCheckingProvider && !chat.hasProvider) {
|
||||
return (
|
||||
<div className="llm-chat-no-provider">
|
||||
<div className="llm-chat-no-provider-content">
|
||||
<span className="bx bx-bot llm-chat-no-provider-icon" />
|
||||
<p>{t("llm_chat.no_provider_message")}</p>
|
||||
<Button
|
||||
text={t("llm_chat.add_provider")}
|
||||
icon="bx bx-plus"
|
||||
onClick={() => setShowAddProviderModal(true)}
|
||||
/>
|
||||
</div>
|
||||
<AddProviderModal
|
||||
show={showAddProviderModal}
|
||||
onHidden={() => setShowAddProviderModal(false)}
|
||||
onSave={handleAddProvider}
|
||||
/>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
return (
|
||||
<form className="llm-chat-input-form" onSubmit={handleSubmit}>
|
||||
<textarea
|
||||
ref={chat.textareaRef as RefObject<HTMLTextAreaElement>}
|
||||
className="llm-chat-input"
|
||||
value={chat.input}
|
||||
onInput={(e) => chat.setInput((e.target as HTMLTextAreaElement).value)}
|
||||
placeholder={t("llm_chat.placeholder")}
|
||||
disabled={chat.isStreaming}
|
||||
onKeyDown={handleKeyDown}
|
||||
rows={rows}
|
||||
/>
|
||||
<div className="llm-chat-options">
|
||||
<div className="llm-chat-model-selector">
|
||||
<span className="bx bx-chip" />
|
||||
<Dropdown
|
||||
text={<>{currentModel?.name}</>}
|
||||
disabled={chat.isStreaming}
|
||||
buttonClassName="llm-chat-model-select"
|
||||
>
|
||||
{currentModels.map(model => (
|
||||
<FormListItem
|
||||
key={model.id}
|
||||
onClick={() => handleModelSelect(model.id)}
|
||||
checked={chat.selectedModel === model.id}
|
||||
>
|
||||
{model.name} <small>({model.costDescription})</small>
|
||||
</FormListItem>
|
||||
))}
|
||||
{legacyModels.length > 0 && (
|
||||
<>
|
||||
<FormDropdownDivider />
|
||||
<FormDropdownSubmenu
|
||||
icon="bx bx-history"
|
||||
title={t("llm_chat.legacy_models")}
|
||||
>
|
||||
{legacyModels.map(model => (
|
||||
<FormListItem
|
||||
key={model.id}
|
||||
onClick={() => handleModelSelect(model.id)}
|
||||
checked={chat.selectedModel === model.id}
|
||||
>
|
||||
{model.name} <small>({model.costDescription})</small>
|
||||
</FormListItem>
|
||||
))}
|
||||
</FormDropdownSubmenu>
|
||||
</>
|
||||
)}
|
||||
<FormDropdownDivider />
|
||||
<FormListToggleableItem
|
||||
icon="bx bx-globe"
|
||||
title={t("llm_chat.web_search")}
|
||||
currentValue={chat.enableWebSearch}
|
||||
onChange={handleWebSearchToggle}
|
||||
disabled={chat.isStreaming}
|
||||
/>
|
||||
<FormListToggleableItem
|
||||
icon="bx bx-note"
|
||||
title={t("llm_chat.note_tools")}
|
||||
currentValue={chat.enableNoteTools}
|
||||
onChange={handleNoteToolsToggle}
|
||||
disabled={chat.isStreaming}
|
||||
/>
|
||||
<FormListToggleableItem
|
||||
icon="bx bx-brain"
|
||||
title={t("llm_chat.extended_thinking")}
|
||||
currentValue={chat.enableExtendedThinking}
|
||||
onChange={handleExtendedThinkingToggle}
|
||||
disabled={chat.isStreaming}
|
||||
/>
|
||||
</Dropdown>
|
||||
{activeNoteId && activeNoteTitle && (
|
||||
<Button
|
||||
text={activeNoteTitle}
|
||||
icon={isNoteContextEnabled ? "bx-file" : "bx-hide"}
|
||||
kind="lowProfile"
|
||||
size="micro"
|
||||
className={`llm-chat-note-context ${isNoteContextEnabled ? "active" : ""}`}
|
||||
onClick={handleNoteContextToggle}
|
||||
disabled={chat.isStreaming}
|
||||
title={isNoteContextEnabled
|
||||
? t("llm_chat.note_context_enabled", { title: activeNoteTitle })
|
||||
: t("llm_chat.note_context_disabled")}
|
||||
/>
|
||||
)}
|
||||
{chat.lastPromptTokens > 0 && (
|
||||
<div
|
||||
className="llm-chat-context-indicator"
|
||||
title={`${formatTokenCount(chat.lastPromptTokens)} / ${formatTokenCount(contextWindow)} ${t("llm_chat.tokens")}`}
|
||||
>
|
||||
<div
|
||||
className="llm-chat-context-pie"
|
||||
style={{
|
||||
background: `conic-gradient(${pieColor} ${percentage}%, var(--accented-background-color) ${percentage}%)`
|
||||
}}
|
||||
/>
|
||||
<span className="llm-chat-context-text">{t("llm_chat.context_used", { percentage: percentage.toFixed(0) })}</span>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
<ActionButton
|
||||
icon={chat.isStreaming ? "bx bx-loader-alt bx-spin" : "bx bx-send"}
|
||||
text={chat.isStreaming ? t("llm_chat.sending") : t("llm_chat.send")}
|
||||
onClick={handleSubmit}
|
||||
disabled={chat.isStreaming || !chat.input.trim()}
|
||||
className="llm-chat-send-btn"
|
||||
/>
|
||||
</div>
|
||||
</form>
|
||||
);
|
||||
}
|
||||
211
apps/client/src/widgets/type_widgets/llm_chat/ChatMessage.tsx
Normal file
211
apps/client/src/widgets/type_widgets/llm_chat/ChatMessage.tsx
Normal file
@@ -0,0 +1,211 @@
|
||||
import type { LlmCitation, LlmUsage } from "@triliumnext/commons";
|
||||
import { useMemo } from "preact/hooks";
|
||||
import { marked } from "marked";
|
||||
import { t } from "../../../services/i18n.js";
|
||||
import "./LlmChat.css";
|
||||
|
||||
// Configure marked for safe rendering
|
||||
marked.setOptions({
|
||||
breaks: true, // Convert \n to <br>
|
||||
gfm: true // GitHub Flavored Markdown
|
||||
});
|
||||
|
||||
type MessageType = "message" | "error" | "thinking";
|
||||
|
||||
interface ToolCall {
|
||||
id: string;
|
||||
toolName: string;
|
||||
input: Record<string, unknown>;
|
||||
result?: string;
|
||||
}
|
||||
|
||||
interface StoredMessage {
|
||||
id: string;
|
||||
role: "user" | "assistant" | "system";
|
||||
content: string;
|
||||
createdAt: string;
|
||||
citations?: LlmCitation[];
|
||||
/** Message type for special rendering. Defaults to "message" if omitted. */
|
||||
type?: MessageType;
|
||||
/** Tool calls made during this response */
|
||||
toolCalls?: ToolCall[];
|
||||
/** Token usage for this response */
|
||||
usage?: LlmUsage;
|
||||
}
|
||||
|
||||
interface Props {
|
||||
message: StoredMessage;
|
||||
isStreaming?: boolean;
|
||||
}
|
||||
|
||||
export default function ChatMessage({ message, isStreaming }: Props) {
|
||||
const roleLabel = message.role === "user" ? "You" : "Assistant";
|
||||
const isError = message.type === "error";
|
||||
const isThinking = message.type === "thinking";
|
||||
|
||||
// Render markdown for assistant messages (not errors or thinking)
|
||||
const renderedContent = useMemo(() => {
|
||||
if (message.role === "assistant" && !isError && !isThinking) {
|
||||
return marked.parse(message.content) as string;
|
||||
}
|
||||
return null;
|
||||
}, [message.content, message.role, isError, isThinking]);
|
||||
|
||||
const messageClasses = [
|
||||
"llm-chat-message",
|
||||
`llm-chat-message-${message.role}`,
|
||||
isError && "llm-chat-message-error",
|
||||
isThinking && "llm-chat-message-thinking"
|
||||
].filter(Boolean).join(" ");
|
||||
|
||||
// Render thinking messages in a collapsible details element
|
||||
if (isThinking) {
|
||||
return (
|
||||
<details className={messageClasses}>
|
||||
<summary className="llm-chat-thinking-summary">
|
||||
<span className="bx bx-brain" />
|
||||
{t("llm_chat.thought_process")}
|
||||
</summary>
|
||||
<div className="llm-chat-message-content llm-chat-thinking-content">
|
||||
{message.content}
|
||||
{isStreaming && <span className="llm-chat-cursor" />}
|
||||
</div>
|
||||
</details>
|
||||
);
|
||||
}
|
||||
|
||||
return (
|
||||
<div className={messageClasses}>
|
||||
<div className="llm-chat-message-role">
|
||||
{isError ? "Error" : roleLabel}
|
||||
</div>
|
||||
<div className="llm-chat-message-content">
|
||||
{message.role === "assistant" && !isError ? (
|
||||
<>
|
||||
<div
|
||||
className="llm-chat-markdown"
|
||||
dangerouslySetInnerHTML={{ __html: renderedContent || "" }}
|
||||
/>
|
||||
{isStreaming && <span className="llm-chat-cursor" />}
|
||||
</>
|
||||
) : (
|
||||
message.content
|
||||
)}
|
||||
</div>
|
||||
{message.toolCalls && message.toolCalls.length > 0 && (
|
||||
<details className="llm-chat-tool-calls">
|
||||
<summary className="llm-chat-tool-calls-summary">
|
||||
<span className="bx bx-wrench" />
|
||||
{t("llm_chat.tool_calls", { count: message.toolCalls.length })}
|
||||
</summary>
|
||||
<div className="llm-chat-tool-calls-list">
|
||||
{message.toolCalls.map((tool) => (
|
||||
<div key={tool.id} className="llm-chat-tool-call">
|
||||
<div className="llm-chat-tool-call-name">
|
||||
{tool.toolName}
|
||||
</div>
|
||||
<div className="llm-chat-tool-call-input">
|
||||
<strong>{t("llm_chat.input")}:</strong>
|
||||
<pre>{JSON.stringify(tool.input, null, 2)}</pre>
|
||||
</div>
|
||||
{tool.result && (
|
||||
<div className="llm-chat-tool-call-result">
|
||||
<strong>{t("llm_chat.result")}:</strong>
|
||||
<pre>{(() => {
|
||||
if (typeof tool.result === "string" && (tool.result.startsWith("{") || tool.result.startsWith("["))) {
|
||||
try {
|
||||
return JSON.stringify(JSON.parse(tool.result), null, 2);
|
||||
} catch {
|
||||
return tool.result;
|
||||
}
|
||||
}
|
||||
return tool.result;
|
||||
})()}</pre>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
</details>
|
||||
)}
|
||||
{message.citations && message.citations.length > 0 && (
|
||||
<div className="llm-chat-citations">
|
||||
<div className="llm-chat-citations-label">
|
||||
<span className="bx bx-link" />
|
||||
{t("llm_chat.sources")}
|
||||
</div>
|
||||
<ul className="llm-chat-citations-list">
|
||||
{message.citations.map((citation, idx) => {
|
||||
// Determine display text: title, URL hostname, or cited text
|
||||
let displayText = citation.title;
|
||||
if (!displayText && citation.url) {
|
||||
try {
|
||||
displayText = new URL(citation.url).hostname;
|
||||
} catch {
|
||||
displayText = citation.url;
|
||||
}
|
||||
}
|
||||
if (!displayText) {
|
||||
displayText = citation.citedText?.slice(0, 50) || `Source ${idx + 1}`;
|
||||
}
|
||||
|
||||
return (
|
||||
<li key={idx}>
|
||||
{citation.url ? (
|
||||
<a
|
||||
href={citation.url}
|
||||
target="_blank"
|
||||
rel="noopener noreferrer"
|
||||
title={citation.citedText || citation.url}
|
||||
>
|
||||
{displayText}
|
||||
</a>
|
||||
) : (
|
||||
<span title={citation.citedText}>
|
||||
{displayText}
|
||||
</span>
|
||||
)}
|
||||
</li>
|
||||
);
|
||||
})}
|
||||
</ul>
|
||||
</div>
|
||||
)}
|
||||
{message.usage && typeof message.usage.promptTokens === "number" && (
|
||||
<div className="llm-chat-usage">
|
||||
<span className="bx bx-chip" />
|
||||
<span className="llm-chat-usage-text">
|
||||
{message.usage.model && message.usage.cost != null
|
||||
? t("llm_chat.tokens_used_with_model_and_cost", {
|
||||
model: message.usage.model,
|
||||
prompt: message.usage.promptTokens.toLocaleString(),
|
||||
completion: message.usage.completionTokens.toLocaleString(),
|
||||
total: message.usage.totalTokens.toLocaleString(),
|
||||
cost: message.usage.cost.toFixed(4)
|
||||
})
|
||||
: message.usage.model
|
||||
? t("llm_chat.tokens_used_with_model", {
|
||||
model: message.usage.model,
|
||||
prompt: message.usage.promptTokens.toLocaleString(),
|
||||
completion: message.usage.completionTokens.toLocaleString(),
|
||||
total: message.usage.totalTokens.toLocaleString()
|
||||
})
|
||||
: message.usage.cost != null
|
||||
? t("llm_chat.tokens_used_with_cost", {
|
||||
prompt: message.usage.promptTokens.toLocaleString(),
|
||||
completion: message.usage.completionTokens.toLocaleString(),
|
||||
total: message.usage.totalTokens.toLocaleString(),
|
||||
cost: message.usage.cost.toFixed(4)
|
||||
})
|
||||
: t("llm_chat.tokens_used", {
|
||||
prompt: message.usage.promptTokens.toLocaleString(),
|
||||
completion: message.usage.completionTokens.toLocaleString(),
|
||||
total: message.usage.totalTokens.toLocaleString()
|
||||
})
|
||||
}
|
||||
</span>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
592
apps/client/src/widgets/type_widgets/llm_chat/LlmChat.css
Normal file
592
apps/client/src/widgets/type_widgets/llm_chat/LlmChat.css
Normal file
@@ -0,0 +1,592 @@
|
||||
.llm-chat-container {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
height: 100%;
|
||||
padding: 1rem;
|
||||
box-sizing: border-box;
|
||||
}
|
||||
|
||||
.llm-chat-messages {
|
||||
flex: 1;
|
||||
overflow-y: auto;
|
||||
padding-bottom: 1rem;
|
||||
}
|
||||
|
||||
.llm-chat-message {
|
||||
margin-bottom: 1rem;
|
||||
padding: 0.75rem 1rem;
|
||||
border-radius: 8px;
|
||||
max-width: 85%;
|
||||
user-select: text;
|
||||
}
|
||||
|
||||
.llm-chat-message-user {
|
||||
background: var(--accented-background-color);
|
||||
margin-left: auto;
|
||||
}
|
||||
|
||||
.llm-chat-message-assistant {
|
||||
background: var(--main-background-color);
|
||||
border: 1px solid var(--main-border-color);
|
||||
margin-right: auto;
|
||||
}
|
||||
|
||||
.llm-chat-message-role {
|
||||
font-weight: 600;
|
||||
margin-bottom: 0.25rem;
|
||||
font-size: 0.8rem;
|
||||
color: var(--muted-text-color);
|
||||
}
|
||||
|
||||
.llm-chat-message-content {
|
||||
word-wrap: break-word;
|
||||
line-height: 1.5;
|
||||
}
|
||||
|
||||
/* Preserve whitespace only for user messages (plain text) */
|
||||
.llm-chat-message-user .llm-chat-message-content {
|
||||
white-space: pre-wrap;
|
||||
}
|
||||
|
||||
.llm-chat-cursor {
|
||||
display: inline-block;
|
||||
width: 8px;
|
||||
height: 1.1em;
|
||||
background: currentColor;
|
||||
margin-left: 2px;
|
||||
vertical-align: text-bottom;
|
||||
animation: llm-chat-blink 1s infinite;
|
||||
}
|
||||
|
||||
@keyframes llm-chat-blink {
|
||||
0%, 50% { opacity: 1; }
|
||||
51%, 100% { opacity: 0; }
|
||||
}
|
||||
|
||||
/* Tool activity indicator */
|
||||
.llm-chat-tool-activity {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 0.5rem;
|
||||
padding: 0.5rem 1rem;
|
||||
margin-bottom: 1rem;
|
||||
border-radius: 8px;
|
||||
background: var(--accented-background-color);
|
||||
color: var(--muted-text-color);
|
||||
font-size: 0.9rem;
|
||||
max-width: 85%;
|
||||
}
|
||||
|
||||
.llm-chat-tool-spinner {
|
||||
width: 16px;
|
||||
height: 16px;
|
||||
border: 2px solid var(--muted-text-color);
|
||||
border-top-color: transparent;
|
||||
border-radius: 50%;
|
||||
animation: llm-chat-spin 0.8s linear infinite;
|
||||
}
|
||||
|
||||
@keyframes llm-chat-spin {
|
||||
to { transform: rotate(360deg); }
|
||||
}
|
||||
|
||||
/* Citations */
|
||||
.llm-chat-citations {
|
||||
margin-top: 0.75rem;
|
||||
padding-top: 0.75rem;
|
||||
border-top: 1px solid var(--main-border-color);
|
||||
}
|
||||
|
||||
.llm-chat-citations-label {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 0.25rem;
|
||||
font-size: 0.8rem;
|
||||
font-weight: 600;
|
||||
color: var(--muted-text-color);
|
||||
margin-bottom: 0.25rem;
|
||||
}
|
||||
|
||||
.llm-chat-citations-list {
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
list-style: none;
|
||||
display: flex;
|
||||
flex-wrap: wrap;
|
||||
gap: 0.5rem;
|
||||
}
|
||||
|
||||
.llm-chat-citations-list li {
|
||||
font-size: 0.8rem;
|
||||
}
|
||||
|
||||
.llm-chat-citations-list a {
|
||||
color: var(--link-color, #007bff);
|
||||
text-decoration: none;
|
||||
padding: 0.125rem 0.5rem;
|
||||
background: var(--accented-background-color);
|
||||
border-radius: 4px;
|
||||
display: inline-block;
|
||||
}
|
||||
|
||||
.llm-chat-citations-list a:hover {
|
||||
text-decoration: underline;
|
||||
}
|
||||
|
||||
/* Error */
|
||||
.llm-chat-error {
|
||||
padding: 0.75rem 1rem;
|
||||
margin-bottom: 1rem;
|
||||
border-radius: 8px;
|
||||
background: var(--danger-background-color, #fee);
|
||||
border: 1px solid var(--danger-border-color, #fcc);
|
||||
color: var(--danger-text-color, #c00);
|
||||
user-select: text;
|
||||
}
|
||||
|
||||
/* Error message (persisted in conversation) */
|
||||
.llm-chat-message-error {
|
||||
background: var(--danger-background-color, #fee);
|
||||
border: 1px solid var(--danger-border-color, #fcc);
|
||||
color: var(--danger-text-color, #c00);
|
||||
}
|
||||
|
||||
.llm-chat-message-error .llm-chat-message-role {
|
||||
color: var(--danger-text-color, #c00);
|
||||
}
|
||||
|
||||
/* Thinking message (collapsible) */
|
||||
.llm-chat-message-thinking {
|
||||
background: var(--accented-background-color);
|
||||
border: 1px dashed var(--main-border-color);
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
.llm-chat-thinking-summary {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 0.5rem;
|
||||
font-size: 0.85rem;
|
||||
font-weight: 500;
|
||||
color: var(--muted-text-color);
|
||||
padding: 0.25rem 0;
|
||||
list-style: none;
|
||||
}
|
||||
|
||||
.llm-chat-thinking-summary::-webkit-details-marker {
|
||||
display: none;
|
||||
}
|
||||
|
||||
.llm-chat-thinking-summary::before {
|
||||
content: "▶";
|
||||
font-size: 0.7em;
|
||||
transition: transform 0.2s ease;
|
||||
}
|
||||
|
||||
.llm-chat-message-thinking[open] .llm-chat-thinking-summary::before {
|
||||
transform: rotate(90deg);
|
||||
}
|
||||
|
||||
.llm-chat-thinking-summary .bx {
|
||||
font-size: 1rem;
|
||||
}
|
||||
|
||||
.llm-chat-thinking-content {
|
||||
margin-top: 0.5rem;
|
||||
padding-top: 0.5rem;
|
||||
border-top: 1px solid var(--main-border-color);
|
||||
font-size: 0.9rem;
|
||||
color: var(--muted-text-color);
|
||||
white-space: pre-wrap;
|
||||
}
|
||||
|
||||
/* Input form */
|
||||
.llm-chat-input-form {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 0.5rem;
|
||||
padding-top: 1rem;
|
||||
border-top: 1px solid var(--main-border-color);
|
||||
}
|
||||
|
||||
.llm-chat-input {
|
||||
flex: 1;
|
||||
min-height: 60px;
|
||||
max-height: 200px;
|
||||
resize: vertical;
|
||||
padding: 0.75rem;
|
||||
border: 1px solid var(--main-border-color);
|
||||
border-radius: 8px;
|
||||
font-family: inherit;
|
||||
font-size: inherit;
|
||||
background: var(--main-background-color);
|
||||
color: var(--main-text-color);
|
||||
}
|
||||
|
||||
.llm-chat-input:focus {
|
||||
outline: none;
|
||||
border-color: var(--main-selection-color);
|
||||
box-shadow: 0 0 0 2px var(--main-selection-color-soft, rgba(0, 123, 255, 0.25));
|
||||
}
|
||||
|
||||
.llm-chat-input:disabled {
|
||||
opacity: 0.6;
|
||||
cursor: not-allowed;
|
||||
}
|
||||
|
||||
/* Options row */
|
||||
.llm-chat-options {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 0.75rem;
|
||||
}
|
||||
|
||||
.llm-chat-send-btn {
|
||||
margin-left: auto;
|
||||
font-size: 1.25rem;
|
||||
}
|
||||
|
||||
.llm-chat-send-btn.disabled {
|
||||
opacity: 0.4;
|
||||
}
|
||||
|
||||
/* Model selector */
|
||||
.llm-chat-model-selector {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 0.375rem;
|
||||
font-size: 0.85rem;
|
||||
color: var(--muted-text-color);
|
||||
}
|
||||
|
||||
.llm-chat-model-selector .bx {
|
||||
font-size: 1rem;
|
||||
}
|
||||
|
||||
.llm-chat-model-selector .dropdown {
|
||||
display: flex;
|
||||
|
||||
small {
|
||||
margin-left: 0.5em;
|
||||
color: var(--muted-text-color);
|
||||
}
|
||||
|
||||
/* Position legacy models submenu to open upward */
|
||||
.dropdown-submenu .dropdown-menu {
|
||||
bottom: 0;
|
||||
top: auto;
|
||||
}
|
||||
}
|
||||
|
||||
.llm-chat-model-select.select-button {
|
||||
padding: 0.25rem 0.5rem;
|
||||
border: 1px solid var(--main-border-color);
|
||||
border-radius: 4px;
|
||||
background: var(--main-background-color);
|
||||
color: var(--main-text-color);
|
||||
font-family: inherit;
|
||||
font-size: 0.85rem;
|
||||
cursor: pointer;
|
||||
min-width: 140px;
|
||||
text-align: left;
|
||||
}
|
||||
|
||||
.llm-chat-model-select.select-button:focus {
|
||||
outline: none;
|
||||
border-color: var(--main-selection-color);
|
||||
}
|
||||
|
||||
.llm-chat-model-select.select-button:disabled {
|
||||
opacity: 0.5;
|
||||
cursor: not-allowed;
|
||||
}
|
||||
|
||||
/* Note context toggle */
|
||||
.llm-chat-note-context.tn-low-profile {
|
||||
max-width: 150px;
|
||||
overflow: hidden;
|
||||
text-overflow: ellipsis;
|
||||
white-space: nowrap;
|
||||
opacity: 0.5;
|
||||
background: none;
|
||||
border: none;
|
||||
}
|
||||
|
||||
.llm-chat-note-context.tn-low-profile:hover:not(:disabled) {
|
||||
opacity: 0.8;
|
||||
background: none;
|
||||
}
|
||||
|
||||
.llm-chat-note-context.tn-low-profile.active {
|
||||
opacity: 1;
|
||||
}
|
||||
|
||||
/* Markdown styles */
|
||||
.llm-chat-markdown {
|
||||
line-height: 1.6;
|
||||
}
|
||||
|
||||
.llm-chat-markdown p {
|
||||
margin: 0 0 0.75em 0;
|
||||
}
|
||||
|
||||
.llm-chat-markdown p:last-child {
|
||||
margin-bottom: 0;
|
||||
}
|
||||
|
||||
/* ---- Rendered-markdown typography inside chat bubbles ---- */

.llm-chat-markdown h1,
.llm-chat-markdown h2,
.llm-chat-markdown h3,
.llm-chat-markdown h4,
.llm-chat-markdown h5,
.llm-chat-markdown h6 {
    margin: 1em 0 0.5em 0;
    font-weight: 600;
    line-height: 1.3;
}

/* A heading that opens a message should not push the bubble content down. */
.llm-chat-markdown h1:first-child,
.llm-chat-markdown h2:first-child,
.llm-chat-markdown h3:first-child {
    margin-top: 0;
}

/* Headings are scaled down relative to the page so chat text stays compact. */
.llm-chat-markdown h1 { font-size: 1.4em; }
.llm-chat-markdown h2 { font-size: 1.25em; }
.llm-chat-markdown h3 { font-size: 1.1em; }

.llm-chat-markdown ul,
.llm-chat-markdown ol {
    margin: 0.5em 0;
    padding-left: 1.5em;
}

.llm-chat-markdown li {
    margin: 0.25em 0;
}

/* Inline code chip. */
.llm-chat-markdown code {
    background: var(--accented-background-color);
    padding: 0.15em 0.4em;
    border-radius: 4px;
    font-family: var(--monospace-font-family, monospace);
    font-size: 0.9em;
}

/* Fenced code block; horizontal scroll instead of wrapping. */
.llm-chat-markdown pre {
    background: var(--accented-background-color);
    padding: 0.75em 1em;
    border-radius: 6px;
    overflow-x: auto;
    margin: 0.75em 0;
}

/* Code inside <pre> must not double up the chip background/padding. */
.llm-chat-markdown pre code {
    background: none;
    padding: 0;
    font-size: 0.85em;
}

.llm-chat-markdown blockquote {
    margin: 0.75em 0;
    padding: 0.5em 1em;
    border-left: 3px solid var(--main-border-color);
    background: var(--accented-background-color);
}

.llm-chat-markdown blockquote p {
    margin: 0;
}

.llm-chat-markdown a {
    color: var(--link-color, #007bff);
    text-decoration: none;
}

.llm-chat-markdown a:hover {
    text-decoration: underline;
}

.llm-chat-markdown hr {
    border: none;
    border-top: 1px solid var(--main-border-color);
    margin: 1em 0;
}

.llm-chat-markdown table {
    border-collapse: collapse;
    width: 100%;
    margin: 0.75em 0;
}

.llm-chat-markdown th,
.llm-chat-markdown td {
    border: 1px solid var(--main-border-color);
    padding: 0.5em 0.75em;
    text-align: left;
}

.llm-chat-markdown th {
    background: var(--accented-background-color);
    font-weight: 600;
}

.llm-chat-markdown strong {
    font-weight: 600;
}

.llm-chat-markdown em {
    font-style: italic;
}

/* ---- Tool calls display (collapsible <details> under a response) ---- */
.llm-chat-tool-calls {
    margin-top: 0.75rem;
    padding-top: 0.75rem;
    border-top: 1px solid var(--main-border-color);
}

.llm-chat-tool-calls-summary {
    display: flex;
    align-items: center;
    gap: 0.5rem;
    font-size: 0.85rem;
    font-weight: 500;
    color: var(--muted-text-color);
    padding: 0.25rem 0;
    cursor: pointer;
    list-style: none; /* hides the default disclosure marker in Firefox */
}

/* Hide the native disclosure triangle in WebKit; we draw our own below. */
.llm-chat-tool-calls-summary::-webkit-details-marker {
    display: none;
}

/* Custom disclosure arrow that rotates when the <details> is open. */
.llm-chat-tool-calls-summary::before {
    content: "▶";
    font-size: 0.7em;
    transition: transform 0.2s ease;
}

.llm-chat-tool-calls[open] .llm-chat-tool-calls-summary::before {
    transform: rotate(90deg);
}

.llm-chat-tool-calls-summary .bx {
    font-size: 1rem;
}

.llm-chat-tool-calls-list {
    margin-top: 0.5rem;
    display: flex;
    flex-direction: column;
    gap: 0.75rem;
}

.llm-chat-tool-call {
    background: var(--accented-background-color);
    border-radius: 6px;
    padding: 0.75rem;
    font-size: 0.85rem;
}

.llm-chat-tool-call-name {
    font-weight: 600;
    margin-bottom: 0.5rem;
    color: var(--main-text-color);
    font-family: var(--monospace-font-family, monospace);
}

.llm-chat-tool-call-input,
.llm-chat-tool-call-result {
    margin-top: 0.5rem;
}

/* "Input:" / "Result:" labels above each payload. */
.llm-chat-tool-call-input strong,
.llm-chat-tool-call-result strong {
    display: block;
    font-size: 0.75rem;
    color: var(--muted-text-color);
    margin-bottom: 0.25rem;
}

/* Tool payloads are capped in height and scroll both ways. */
.llm-chat-tool-call pre {
    margin: 0;
    padding: 0.5rem;
    background: var(--main-background-color);
    border-radius: 4px;
    overflow-x: auto;
    font-size: 0.8rem;
    font-family: var(--monospace-font-family, monospace);
    max-height: 200px;
    overflow-y: auto;
}

/* ---- Token usage display (footer line under a response) ---- */
.llm-chat-usage {
    display: flex;
    align-items: center;
    gap: 0.375rem;
    margin-top: 0.75rem;
    padding-top: 0.5rem;
    border-top: 1px solid var(--main-border-color);
    font-size: 0.75rem;
    color: var(--muted-text-color);
}

.llm-chat-usage .bx {
    font-size: 0.875rem;
}

.llm-chat-usage-text {
    font-family: var(--monospace-font-family, monospace);
}

/* ---- Context window indicator (pie + percentage next to the model picker) ---- */
.llm-chat-context-indicator {
    display: flex;
    align-items: center;
    gap: 0.375rem;
    margin-left: 0.5rem;
    cursor: help; /* details are in the tooltip */
}

.llm-chat-context-pie {
    width: 14px;
    height: 14px;
    border-radius: 50%;
    flex-shrink: 0;
}

.llm-chat-context-text {
    font-size: 0.75rem;
    color: var(--muted-text-color);
}

/* ---- "No provider configured" empty state ---- */
.llm-chat-no-provider {
    display: flex;
    align-items: center;
    justify-content: center;
    padding: 1rem;
    border-top: 1px solid var(--main-border-color);
}

.llm-chat-no-provider-content {
    display: flex;
    flex-direction: column;
    align-items: center;
    gap: 0.75rem;
    text-align: center;
    color: var(--muted-text-color);
}

.llm-chat-no-provider-icon {
    font-size: 2rem;
    opacity: 0.5;
}

.llm-chat-no-provider-content p {
    margin: 0;
    font-size: 0.9rem;
}
|
||||
110
apps/client/src/widgets/type_widgets/llm_chat/LlmChat.tsx
Normal file
110
apps/client/src/widgets/type_widgets/llm_chat/LlmChat.tsx
Normal file
@@ -0,0 +1,110 @@
|
||||
import { useCallback, useEffect, useState } from "preact/hooks";
|
||||
|
||||
import { t } from "../../../services/i18n.js";
|
||||
import NoItems from "../../react/NoItems.js";
|
||||
import { useEditorSpacedUpdate } from "../../react/hooks.js";
|
||||
import { TypeWidgetProps } from "../type_widget.js";
|
||||
import ChatInputBar from "./ChatInputBar.js";
|
||||
import ChatMessage from "./ChatMessage.js";
|
||||
import type { LlmChatContent } from "./llm_chat_types.js";
|
||||
import { useLlmChat } from "./useLlmChat.js";
|
||||
import "./LlmChat.css";
|
||||
|
||||
/**
 * Type widget for "llmChat" notes: renders the message history plus the
 * input bar, and persists the conversation into the note's content as JSON
 * (see LlmChatContent) via the spaced-update save scheduler.
 */
export default function LlmChat({ note, ntxId, noteContext }: TypeWidgetProps) {
    // Set to true whenever chat state changed and the note needs re-saving;
    // the effect below picks it up after the state update has been applied.
    const [savePending, setSavePending] = useState(false);

    const markDirty = useCallback(() => {
        setSavePending(true);
    }, []);

    const chat = useLlmChat(
        // any change to the message list schedules a save
        markDirty,
        { defaultEnableNoteTools: false, supportsExtendedThinking: true }
    );

    const spacedUpdate = useEditorSpacedUpdate({
        note,
        noteType: "llmChat",
        noteContext,
        getData: () => ({ content: JSON.stringify(chat.getContent()) }),
        onContentChange: (content) => {
            // An empty note means a fresh chat.
            if (!content) {
                chat.clearMessages();
                return;
            }
            try {
                const stored: LlmChatContent = JSON.parse(content);
                chat.loadFromContent(stored);
            } catch (err) {
                // Corrupt/non-JSON content: log and start from an empty chat
                // rather than crashing the widget.
                console.error("Failed to parse LLM chat content:", err);
                chat.clearMessages();
            }
        }
    });

    // Deferred save: runs after the render that consumed the state change,
    // so getData() sees the fresh values.
    useEffect(() => {
        if (savePending) {
            setSavePending(false);
            spacedUpdate.scheduleUpdate();
        }
    }, [savePending, spacedUpdate]);

    const isChatEmpty = chat.messages.length === 0 && !chat.isStreaming;

    return (
        <div className="llm-chat-container">
            <div className="llm-chat-messages">
                {isChatEmpty && (
                    <NoItems
                        icon="bx bx-conversation"
                        text={t("llm_chat.empty_state")}
                    />
                )}
                {chat.messages.map(msg => (
                    <ChatMessage key={msg.id} message={msg} />
                ))}
                {/* transient tool-activity line (hidden while a thinking trace streams) */}
                {chat.toolActivity && !chat.streamingThinking && (
                    <div className="llm-chat-tool-activity">
                        <span className="llm-chat-tool-spinner" />
                        {chat.toolActivity}
                    </div>
                )}
                {/* in-flight thinking trace, rendered as an ephemeral message */}
                {chat.isStreaming && chat.streamingThinking && (
                    <ChatMessage
                        message={{
                            id: "streaming-thinking",
                            role: "assistant",
                            content: chat.streamingThinking,
                            createdAt: new Date().toISOString(),
                            type: "thinking"
                        }}
                        isStreaming
                    />
                )}
                {/* in-flight answer text, rendered as an ephemeral message */}
                {chat.isStreaming && chat.streamingContent && (
                    <ChatMessage
                        message={{
                            id: "streaming",
                            role: "assistant",
                            content: chat.streamingContent,
                            createdAt: new Date().toISOString(),
                            citations: chat.pendingCitations.length > 0 ? chat.pendingCitations : undefined
                        }}
                        isStreaming
                    />
                )}
                {/* scroll anchor */}
                <div ref={chat.messagesEndRef} />
            </div>
            <ChatInputBar
                chat={chat}
                onWebSearchChange={markDirty}
                onNoteToolsChange={markDirty}
                onExtendedThinkingChange={markDirty}
                onModelChange={markDirty}
            />
        </div>
    );
}
|
||||
@@ -0,0 +1,33 @@
|
||||
import type { LlmCitation, LlmUsage } from "@triliumnext/commons";

/** How a stored message should be rendered: normal text, error banner, or a collapsed thinking trace. */
export type MessageType = "message" | "error" | "thinking";

/** A single tool invocation recorded during an assistant response. */
export interface ToolCall {
    // client-generated id (produced with randomString() — TODO confirm against useLlmChat)
    id: string;
    // name of the invoked tool, e.g. "web_search"
    toolName: string;
    // raw input arguments passed to the tool
    input: Record<string, unknown>;
    // tool output; absent while the call is still in flight or was never answered
    result?: string;
}

/**
 * One persisted chat message. This is the on-disk shape stored inside the
 * note content (see LlmChatContent), not the wire format sent to the LLM.
 */
export interface StoredMessage {
    id: string;
    role: "user" | "assistant" | "system";
    content: string;
    // ISO-8601 timestamp of when the message was created client-side
    createdAt: string;
    // citations attached to an assistant response, if any
    citations?: LlmCitation[];
    /** Message type for special rendering. Defaults to "message" if omitted. */
    type?: MessageType;
    /** Tool calls made during this response */
    toolCalls?: ToolCall[];
    /** Token usage for this response */
    usage?: LlmUsage;
}

/**
 * Full serialized state of an LLM chat note: message history plus the
 * per-chat toggles. Stored as the note's JSON content.
 */
export interface LlmChatContent {
    // schema version for forward migration; currently always 1
    version: 1;
    messages: StoredMessage[];
    // model id last selected in this chat, if any
    selectedModel?: string;
    enableWebSearch?: boolean;
    enableNoteTools?: boolean;
    enableExtendedThinking?: boolean;
}
|
||||
369
apps/client/src/widgets/type_widgets/llm_chat/useLlmChat.ts
Normal file
369
apps/client/src/widgets/type_widgets/llm_chat/useLlmChat.ts
Normal file
@@ -0,0 +1,369 @@
|
||||
import type { LlmCitation, LlmMessage, LlmModelInfo, LlmUsage } from "@triliumnext/commons";
|
||||
import { useCallback, useEffect, useRef, useState } from "preact/hooks";
|
||||
|
||||
import { t } from "../../../services/i18n.js";
|
||||
import { getAvailableModels, streamChatCompletion } from "../../../services/llm_chat.js";
|
||||
import { randomString } from "../../../services/utils.js";
|
||||
import type { LlmChatContent, StoredMessage, ToolCall } from "./llm_chat_types.js";
|
||||
|
||||
/** A selectable model, with an optional human-readable cost label (e.g. "2x"). */
export interface ModelOption extends LlmModelInfo {
    costDescription?: string;
}

/** Configuration accepted by useLlmChat(). */
export interface LlmChatOptions {
    /** Default value for enableNoteTools */
    defaultEnableNoteTools?: boolean;
    /** Whether extended thinking is supported */
    supportsExtendedThinking?: boolean;
    /** Initial context note ID (the note the user is viewing) */
    contextNoteId?: string;
}

/** Everything exposed by the useLlmChat() hook. */
export interface UseLlmChatReturn {
    // State
    /** Persisted conversation history. */
    messages: StoredMessage[];
    /** Current contents of the input box. */
    input: string;
    /** True while a completion is streaming in. */
    isStreaming: boolean;
    /** Partial answer text accumulated so far during streaming. */
    streamingContent: string;
    /** Partial thinking trace accumulated so far during streaming. */
    streamingThinking: string;
    /** Human-readable description of the tool currently running, or null. */
    toolActivity: string | null;
    /** Citations received so far for the in-flight response. */
    pendingCitations: LlmCitation[];
    availableModels: ModelOption[];
    selectedModel: string;
    enableWebSearch: boolean;
    enableNoteTools: boolean;
    enableExtendedThinking: boolean;
    contextNoteId: string | undefined;
    /** Prompt token count of the most recent completion (for the context indicator). */
    lastPromptTokens: number;
    // NOTE(review): this file imports only preact hooks; "React.RefObject" relies on
    // a React type alias being globally available — confirm this resolves under the
    // project's tsconfig (preact/compat).
    messagesEndRef: React.RefObject<HTMLDivElement>;
    textareaRef: React.RefObject<HTMLTextAreaElement>;
    /** Whether a provider is configured and available */
    hasProvider: boolean;
    /** Whether we're still checking for providers */
    isCheckingProvider: boolean;

    // Setters
    setInput: (value: string) => void;
    setMessages: (messages: StoredMessage[]) => void;
    setSelectedModel: (model: string) => void;
    setEnableWebSearch: (value: boolean) => void;
    setEnableNoteTools: (value: boolean) => void;
    setEnableExtendedThinking: (value: boolean) => void;
    setContextNoteId: (noteId: string | undefined) => void;

    // Actions
    /** Submit the current input as a user message and stream the reply. */
    handleSubmit: (e: Event) => Promise<void>;
    /** Enter-to-send handler for the textarea (Shift+Enter inserts a newline). */
    handleKeyDown: (e: KeyboardEvent) => void;
    /** Restore chat state from a persisted LlmChatContent. */
    loadFromContent: (content: LlmChatContent) => void;
    /** Snapshot current chat state as a persistable LlmChatContent. */
    getContent: () => LlmChatContent;
    /** Drop all messages (also notifies onMessagesChange). */
    clearMessages: () => void;
    /** Refresh the provider/models list */
    refreshModels: () => void;
}
|
||||
|
||||
export function useLlmChat(
|
||||
onMessagesChange?: (messages: StoredMessage[]) => void,
|
||||
options: LlmChatOptions = {}
|
||||
): UseLlmChatReturn {
|
||||
const { defaultEnableNoteTools = false, supportsExtendedThinking = false, contextNoteId: initialContextNoteId } = options;
|
||||
|
||||
const [messages, setMessagesInternal] = useState<StoredMessage[]>([]);
|
||||
const [input, setInput] = useState("");
|
||||
const [isStreaming, setIsStreaming] = useState(false);
|
||||
const [streamingContent, setStreamingContent] = useState("");
|
||||
const [streamingThinking, setStreamingThinking] = useState("");
|
||||
const [toolActivity, setToolActivity] = useState<string | null>(null);
|
||||
const [pendingCitations, setPendingCitations] = useState<LlmCitation[]>([]);
|
||||
const [availableModels, setAvailableModels] = useState<ModelOption[]>([]);
|
||||
const [selectedModel, setSelectedModel] = useState<string>("");
|
||||
const [enableWebSearch, setEnableWebSearch] = useState(true);
|
||||
const [enableNoteTools, setEnableNoteTools] = useState(defaultEnableNoteTools);
|
||||
const [enableExtendedThinking, setEnableExtendedThinking] = useState(false);
|
||||
const [contextNoteId, setContextNoteId] = useState<string | undefined>(initialContextNoteId);
|
||||
const [lastPromptTokens, setLastPromptTokens] = useState<number>(0);
|
||||
const [hasProvider, setHasProvider] = useState<boolean>(true); // Assume true initially
|
||||
const [isCheckingProvider, setIsCheckingProvider] = useState<boolean>(true);
|
||||
const messagesEndRef = useRef<HTMLDivElement>(null);
|
||||
const textareaRef = useRef<HTMLTextAreaElement>(null);
|
||||
|
||||
// Refs to get fresh values in getContent (avoids stale closures)
|
||||
const messagesRef = useRef(messages);
|
||||
messagesRef.current = messages;
|
||||
const selectedModelRef = useRef(selectedModel);
|
||||
selectedModelRef.current = selectedModel;
|
||||
const enableWebSearchRef = useRef(enableWebSearch);
|
||||
enableWebSearchRef.current = enableWebSearch;
|
||||
const enableNoteToolsRef = useRef(enableNoteTools);
|
||||
enableNoteToolsRef.current = enableNoteTools;
|
||||
const enableExtendedThinkingRef = useRef(enableExtendedThinking);
|
||||
enableExtendedThinkingRef.current = enableExtendedThinking;
|
||||
const contextNoteIdRef = useRef(contextNoteId);
|
||||
contextNoteIdRef.current = contextNoteId;
|
||||
|
||||
// Wrapper to call onMessagesChange when messages update
|
||||
const setMessages = useCallback((newMessages: StoredMessage[]) => {
|
||||
setMessagesInternal(newMessages);
|
||||
onMessagesChange?.(newMessages);
|
||||
}, [onMessagesChange]);
|
||||
|
||||
// Fetch available models on mount
|
||||
const refreshModels = useCallback(() => {
|
||||
setIsCheckingProvider(true);
|
||||
getAvailableModels().then(models => {
|
||||
const modelsWithDescription = models.map(m => ({
|
||||
...m,
|
||||
costDescription: m.costMultiplier ? `${m.costMultiplier}x` : undefined
|
||||
}));
|
||||
setAvailableModels(modelsWithDescription);
|
||||
setHasProvider(models.length > 0);
|
||||
setIsCheckingProvider(false);
|
||||
if (!selectedModel) {
|
||||
const defaultModel = models.find(m => m.isDefault) || models[0];
|
||||
if (defaultModel) {
|
||||
setSelectedModel(defaultModel.id);
|
||||
}
|
||||
}
|
||||
}).catch(err => {
|
||||
console.error("Failed to fetch available models:", err);
|
||||
setHasProvider(false);
|
||||
setIsCheckingProvider(false);
|
||||
});
|
||||
}, [selectedModel]);
|
||||
|
||||
useEffect(() => {
|
||||
refreshModels();
|
||||
}, []);
|
||||
|
||||
// Scroll to bottom when content changes
|
||||
const scrollToBottom = useCallback(() => {
|
||||
messagesEndRef.current?.scrollIntoView({ behavior: "smooth" });
|
||||
}, []);
|
||||
|
||||
useEffect(() => {
|
||||
scrollToBottom();
|
||||
}, [messages, streamingContent, streamingThinking, toolActivity, scrollToBottom]);
|
||||
|
||||
// Load state from content object
|
||||
const loadFromContent = useCallback((content: LlmChatContent) => {
|
||||
setMessagesInternal(content.messages || []);
|
||||
if (content.selectedModel) {
|
||||
setSelectedModel(content.selectedModel);
|
||||
}
|
||||
if (typeof content.enableWebSearch === "boolean") {
|
||||
setEnableWebSearch(content.enableWebSearch);
|
||||
}
|
||||
if (typeof content.enableNoteTools === "boolean") {
|
||||
setEnableNoteTools(content.enableNoteTools);
|
||||
}
|
||||
if (supportsExtendedThinking && typeof content.enableExtendedThinking === "boolean") {
|
||||
setEnableExtendedThinking(content.enableExtendedThinking);
|
||||
}
|
||||
// Restore last prompt tokens from the most recent message with usage
|
||||
const lastUsage = [...(content.messages || [])].reverse().find(m => m.usage)?.usage;
|
||||
if (lastUsage) {
|
||||
setLastPromptTokens(lastUsage.promptTokens);
|
||||
}
|
||||
}, [supportsExtendedThinking]);
|
||||
|
||||
// Get current state as content object (uses refs to avoid stale closures)
|
||||
const getContent = useCallback((): LlmChatContent => {
|
||||
const content: LlmChatContent = {
|
||||
version: 1,
|
||||
messages: messagesRef.current,
|
||||
selectedModel: selectedModelRef.current || undefined,
|
||||
enableWebSearch: enableWebSearchRef.current,
|
||||
enableNoteTools: enableNoteToolsRef.current
|
||||
};
|
||||
if (supportsExtendedThinking) {
|
||||
content.enableExtendedThinking = enableExtendedThinkingRef.current;
|
||||
}
|
||||
return content;
|
||||
}, [supportsExtendedThinking]);
|
||||
|
||||
const clearMessages = useCallback(() => {
|
||||
setMessages([]);
|
||||
}, [setMessages]);
|
||||
|
||||
const handleSubmit = useCallback(async (e: Event) => {
|
||||
e.preventDefault();
|
||||
if (!input.trim() || isStreaming) return;
|
||||
|
||||
setToolActivity(null);
|
||||
setPendingCitations([]);
|
||||
|
||||
const userMessage: StoredMessage = {
|
||||
id: randomString(),
|
||||
role: "user",
|
||||
content: input.trim(),
|
||||
createdAt: new Date().toISOString()
|
||||
};
|
||||
|
||||
const newMessages = [...messages, userMessage];
|
||||
setMessagesInternal(newMessages);
|
||||
setInput("");
|
||||
setIsStreaming(true);
|
||||
setStreamingContent("");
|
||||
setStreamingThinking("");
|
||||
|
||||
let assistantContent = "";
|
||||
let thinkingContent = "";
|
||||
const citations: LlmCitation[] = [];
|
||||
const toolCalls: ToolCall[] = [];
|
||||
let usage: LlmUsage | undefined;
|
||||
|
||||
const apiMessages: LlmMessage[] = newMessages.map(m => ({
|
||||
role: m.role,
|
||||
content: m.content
|
||||
}));
|
||||
|
||||
const streamOptions: Parameters<typeof streamChatCompletion>[1] = {
|
||||
model: selectedModel || undefined,
|
||||
enableWebSearch,
|
||||
enableNoteTools,
|
||||
contextNoteId
|
||||
};
|
||||
if (supportsExtendedThinking) {
|
||||
streamOptions.enableExtendedThinking = enableExtendedThinking;
|
||||
}
|
||||
|
||||
await streamChatCompletion(
|
||||
apiMessages,
|
||||
streamOptions,
|
||||
{
|
||||
onChunk: (text) => {
|
||||
assistantContent += text;
|
||||
setStreamingContent(assistantContent);
|
||||
setToolActivity(null);
|
||||
},
|
||||
onThinking: (text) => {
|
||||
thinkingContent += text;
|
||||
setStreamingThinking(thinkingContent);
|
||||
setToolActivity(t("llm_chat.thinking"));
|
||||
},
|
||||
onToolUse: (toolName, toolInput) => {
|
||||
const toolLabel = toolName === "web_search"
|
||||
? t("llm_chat.searching_web")
|
||||
: `Using ${toolName}...`;
|
||||
setToolActivity(toolLabel);
|
||||
toolCalls.push({
|
||||
id: randomString(),
|
||||
toolName,
|
||||
input: toolInput
|
||||
});
|
||||
},
|
||||
onToolResult: (toolName, result) => {
|
||||
const toolCall = [...toolCalls].reverse().find(tc => tc.toolName === toolName && !tc.result);
|
||||
if (toolCall) {
|
||||
toolCall.result = result;
|
||||
}
|
||||
},
|
||||
onCitation: (citation) => {
|
||||
citations.push(citation);
|
||||
setPendingCitations([...citations]);
|
||||
},
|
||||
onUsage: (u) => {
|
||||
usage = u;
|
||||
setLastPromptTokens(u.promptTokens);
|
||||
},
|
||||
onError: (errorMsg) => {
|
||||
console.error("Chat error:", errorMsg);
|
||||
const errorMessage: StoredMessage = {
|
||||
id: randomString(),
|
||||
role: "assistant",
|
||||
content: errorMsg,
|
||||
createdAt: new Date().toISOString(),
|
||||
type: "error"
|
||||
};
|
||||
const finalMessages = [...newMessages, errorMessage];
|
||||
setMessages(finalMessages);
|
||||
setStreamingContent("");
|
||||
setStreamingThinking("");
|
||||
setIsStreaming(false);
|
||||
setToolActivity(null);
|
||||
},
|
||||
onDone: () => {
|
||||
const finalNewMessages: StoredMessage[] = [];
|
||||
|
||||
if (thinkingContent) {
|
||||
finalNewMessages.push({
|
||||
id: randomString(),
|
||||
role: "assistant",
|
||||
content: thinkingContent,
|
||||
createdAt: new Date().toISOString(),
|
||||
type: "thinking"
|
||||
});
|
||||
}
|
||||
|
||||
if (assistantContent || toolCalls.length > 0) {
|
||||
finalNewMessages.push({
|
||||
id: randomString(),
|
||||
role: "assistant",
|
||||
content: assistantContent,
|
||||
createdAt: new Date().toISOString(),
|
||||
citations: citations.length > 0 ? citations : undefined,
|
||||
toolCalls: toolCalls.length > 0 ? toolCalls : undefined,
|
||||
usage
|
||||
});
|
||||
}
|
||||
|
||||
if (finalNewMessages.length > 0) {
|
||||
const allMessages = [...newMessages, ...finalNewMessages];
|
||||
setMessages(allMessages);
|
||||
}
|
||||
|
||||
setStreamingContent("");
|
||||
setStreamingThinking("");
|
||||
setPendingCitations([]);
|
||||
setIsStreaming(false);
|
||||
setToolActivity(null);
|
||||
}
|
||||
}
|
||||
);
|
||||
}, [input, isStreaming, messages, selectedModel, enableWebSearch, enableNoteTools, enableExtendedThinking, contextNoteId, supportsExtendedThinking, setMessages]);
|
||||
|
||||
const handleKeyDown = useCallback((e: KeyboardEvent) => {
|
||||
if (e.key === "Enter" && !e.shiftKey) {
|
||||
e.preventDefault();
|
||||
handleSubmit(e);
|
||||
}
|
||||
}, [handleSubmit]);
|
||||
|
||||
return {
|
||||
// State
|
||||
messages,
|
||||
input,
|
||||
isStreaming,
|
||||
streamingContent,
|
||||
streamingThinking,
|
||||
toolActivity,
|
||||
pendingCitations,
|
||||
availableModels,
|
||||
selectedModel,
|
||||
enableWebSearch,
|
||||
enableNoteTools,
|
||||
enableExtendedThinking,
|
||||
contextNoteId,
|
||||
lastPromptTokens,
|
||||
messagesEndRef,
|
||||
textareaRef,
|
||||
hasProvider,
|
||||
isCheckingProvider,
|
||||
|
||||
// Setters
|
||||
setInput,
|
||||
setMessages,
|
||||
setSelectedModel,
|
||||
setEnableWebSearch,
|
||||
setEnableNoteTools,
|
||||
setEnableExtendedThinking,
|
||||
setContextNoteId,
|
||||
|
||||
// Actions
|
||||
handleSubmit,
|
||||
handleKeyDown,
|
||||
loadFromContent,
|
||||
getContent,
|
||||
clearMessages,
|
||||
refreshModels
|
||||
};
|
||||
}
|
||||
104
apps/client/src/widgets/type_widgets/options/llm.tsx
Normal file
104
apps/client/src/widgets/type_widgets/options/llm.tsx
Normal file
@@ -0,0 +1,104 @@
|
||||
import { useCallback, useMemo, useState } from "preact/hooks";
|
||||
import { t } from "../../../services/i18n";
|
||||
import Button from "../../react/Button";
|
||||
import OptionsSection from "./components/OptionsSection";
|
||||
import AddProviderModal, { type LlmProviderConfig, PROVIDER_TYPES } from "./llm/AddProviderModal";
|
||||
import ActionButton from "../../react/ActionButton";
|
||||
import dialog from "../../../services/dialog";
|
||||
import { useTriliumOption } from "../../react/hooks";
|
||||
|
||||
/**
 * Options panel for AI/LLM providers. Providers are persisted as a JSON
 * array in the "llmProviders" Trilium option; this component parses it,
 * renders the list, and handles add/delete.
 */
export default function LlmSettings() {
    const [providersJson, setProvidersJson] = useTriliumOption("llmProviders");
    const [showAddModal, setShowAddModal] = useState(false);

    // Parsed view of the option; an unparseable value degrades to an empty list.
    const providers = useMemo<LlmProviderConfig[]>(() => {
        if (!providersJson) {
            return [];
        }
        try {
            return JSON.parse(providersJson);
        } catch {
            return [];
        }
    }, [providersJson]);

    // Write-back: serialize and store the whole list.
    const setProviders = useCallback((updated: LlmProviderConfig[]) => {
        setProvidersJson(JSON.stringify(updated));
    }, [setProvidersJson]);

    const appendProvider = useCallback((created: LlmProviderConfig) => {
        setProviders([...providers, created]);
    }, [providers, setProviders]);

    const removeProvider = useCallback(async (providerId: string, providerName: string) => {
        const confirmed = await dialog.confirm(t("llm.delete_provider_confirmation", { name: providerName }));
        if (!confirmed) {
            return;
        }
        setProviders(providers.filter(p => p.id !== providerId));
    }, [providers, setProviders]);

    return (
        <OptionsSection title={t("llm.settings_title")}>
            <p>{t("llm.settings_description")}</p>

            <Button
                size="small"
                icon="bx bx-plus"
                text={t("llm.add_provider")}
                onClick={() => setShowAddModal(true)}
            />

            <hr />

            <h5>{t("llm.configured_providers")}</h5>
            <ProviderList
                providers={providers}
                onDelete={removeProvider}
            />

            <AddProviderModal
                show={showAddModal}
                onHidden={() => setShowAddModal(false)}
                onSave={appendProvider}
            />
        </OptionsSection>
    );
}
|
||||
|
||||
interface ProviderListProps {
    /** Providers to display. */
    providers: LlmProviderConfig[];
    /** Delete handler; receives both id (for lookup) and name (for the confirmation text). */
    onDelete: (providerId: string, providerName: string) => Promise<void>;
}

/**
 * Table of configured LLM providers with a delete action per row, or a
 * placeholder line when no provider has been added yet.
 */
function ProviderList({ providers, onDelete }: ProviderListProps) {
    if (!providers.length) {
        return <div>{t("llm.no_providers_configured")}</div>;
    }

    return (
        <div style={{ overflow: "auto" }}>
            {/* FIX: Bootstrap's class is "table-striped"; the original "table-stripped"
                was a typo and the striping style never applied. */}
            <table className="table table-striped">
                <thead>
                    <tr>
                        <th>{t("llm.provider_name")}</th>
                        <th>{t("llm.provider_type")}</th>
                        <th>{t("llm.actions")}</th>
                    </tr>
                </thead>
                <tbody>
                    {providers.map((provider) => {
                        // Resolve the display name of the provider type; fall back to the raw id.
                        const providerType = PROVIDER_TYPES.find(p => p.id === provider.provider);
                        return (
                            <tr key={provider.id}>
                                <td>{provider.name}</td>
                                <td>{providerType?.name || provider.provider}</td>
                                <td>
                                    <ActionButton
                                        icon="bx bx-trash"
                                        text={t("llm.delete_provider")}
                                        onClick={() => onDelete(provider.id, provider.name)}
                                    />
                                </td>
                            </tr>
                        );
                    })}
                </tbody>
            </table>
        </div>
    );
}
|
||||
@@ -0,0 +1,106 @@
|
||||
import { createPortal } from "preact/compat";
|
||||
import { useState, useRef } from "preact/hooks";
|
||||
import Modal from "../../../react/Modal";
|
||||
import FormGroup from "../../../react/FormGroup";
|
||||
import FormSelect from "../../../react/FormSelect";
|
||||
import FormTextBox from "../../../react/FormTextBox";
|
||||
import { t } from "../../../../services/i18n";
|
||||
|
||||
/** One configured LLM provider as stored in the "llmProviders" option. */
export interface LlmProviderConfig {
    id: string;
    name: string;
    provider: string;
    apiKey: string;
}

/** A selectable provider kind (id is the wire value, name the display label). */
export interface ProviderType {
    id: string;
    name: string;
}

/** Provider kinds the user can add. */
export const PROVIDER_TYPES: ProviderType[] = [
    { id: "anthropic", name: "Anthropic" }
];

interface AddProviderModalProps {
    show: boolean;
    onHidden: () => void;
    onSave: (provider: LlmProviderConfig) => void;
}

/**
 * Modal dialog for adding a provider: pick a provider type, paste an API key.
 * The new provider is handed to onSave; the form resets on save and cancel.
 */
export default function AddProviderModal({ show, onHidden, onSave }: AddProviderModalProps) {
    const [selectedProvider, setSelectedProvider] = useState(PROVIDER_TYPES[0].id);
    const [apiKey, setApiKey] = useState("");
    const formRef = useRef<HTMLFormElement>(null);

    const resetForm = () => {
        setSelectedProvider(PROVIDER_TYPES[0].id);
        setApiKey("");
    };

    const handleCancel = () => {
        resetForm();
        onHidden();
    };

    const handleSubmit = () => {
        const trimmedKey = apiKey.trim();
        if (!trimmedKey) {
            return;
        }

        const providerType = PROVIDER_TYPES.find(p => p.id === selectedProvider);
        // id combines provider kind and timestamp to stay unique per addition
        onSave({
            id: `${selectedProvider}_${Date.now()}`,
            name: providerType?.name || selectedProvider,
            provider: selectedProvider,
            apiKey: trimmedKey
        });
        resetForm();
        onHidden();
    };

    return createPortal(
        <Modal
            show={show}
            onHidden={handleCancel}
            onSubmit={handleSubmit}
            formRef={formRef}
            title={t("llm.add_provider_title")}
            className="add-provider-modal"
            size="md"
            footer={
                <>
                    <button type="button" className="btn btn-secondary" onClick={handleCancel}>
                        {t("llm.cancel")}
                    </button>
                    <button type="submit" className="btn btn-primary" disabled={!apiKey.trim()}>
                        {t("llm.add_provider")}
                    </button>
                </>
            }
        >
            <FormGroup name="provider-type" label={t("llm.provider_type")}>
                <FormSelect
                    values={PROVIDER_TYPES}
                    keyProperty="id"
                    titleProperty="name"
                    currentValue={selectedProvider}
                    onChange={setSelectedProvider}
                />
            </FormGroup>

            <FormGroup name="api-key" label={t("llm.api_key")}>
                <FormTextBox
                    type="password"
                    currentValue={apiKey}
                    onChange={setApiKey}
                    placeholder={t("llm.api_key_placeholder")}
                    autoFocus
                />
            </FormGroup>
        </Modal>,
        document.body
    );
}
|
||||
@@ -30,6 +30,8 @@
|
||||
"proxy-nginx-subdir": "docker run --name trilium-nginx-subdir --rm --network=host -v ./docker/nginx.conf:/etc/nginx/conf.d/default.conf:ro nginx:latest"
|
||||
},
|
||||
"dependencies": {
|
||||
"@ai-sdk/anthropic": "^2.0.0",
|
||||
"ai": "^5.0.0",
|
||||
"better-sqlite3": "12.8.0",
|
||||
"html-to-text": "9.0.5",
|
||||
"node-html-parser": "7.1.0",
|
||||
|
||||
@@ -55,7 +55,16 @@ export default async function buildApp() {
|
||||
});
|
||||
|
||||
if (!utils.isElectron) {
|
||||
app.use(compression()); // HTTP compression
|
||||
app.use(compression({
|
||||
// Skip compression for SSE endpoints to enable real-time streaming
|
||||
filter: (req, res) => {
|
||||
// Skip compression for LLM chat streaming endpoint
|
||||
if (req.path === "/api/llm-chat/stream") {
|
||||
return false;
|
||||
}
|
||||
return compression.filter(req, res);
|
||||
}
|
||||
}));
|
||||
}
|
||||
|
||||
let resourcePolicy = config["Network"]["corsResourcePolicy"] as 'same-origin' | 'same-site' | 'cross-origin' | undefined;
|
||||
|
||||
@@ -297,7 +297,8 @@
|
||||
},
|
||||
"quarterNumber": "Quarter {quarterNumber}",
|
||||
"special_notes": {
|
||||
"search_prefix": "Search:"
|
||||
"search_prefix": "Search:",
|
||||
"llm_chat_prefix": "Chat:"
|
||||
},
|
||||
"test_sync": {
|
||||
"not-configured": "Sync server host is not configured. Please configure sync first.",
|
||||
@@ -308,6 +309,7 @@
|
||||
"search-history-title": "Search History",
|
||||
"note-map-title": "Note Map",
|
||||
"sql-console-history-title": "SQL Console History",
|
||||
"llm-chat-history-title": "AI Chat History",
|
||||
"shared-notes-title": "Shared Notes",
|
||||
"bulk-action-title": "Bulk Action",
|
||||
"backend-log-title": "Backend Log",
|
||||
@@ -351,11 +353,13 @@
|
||||
"sync-title": "Sync",
|
||||
"other": "Other",
|
||||
"advanced-title": "Advanced",
|
||||
"llm-title": "AI / LLM",
|
||||
"visible-launchers-title": "Visible Launchers",
|
||||
"user-guide": "User Guide",
|
||||
"localization": "Language & Region",
|
||||
"inbox-title": "Inbox",
|
||||
"tab-switcher-title": "Tab Switcher"
|
||||
"tab-switcher-title": "Tab Switcher",
|
||||
"sidebar-chat-title": "AI Chat"
|
||||
},
|
||||
"notes": {
|
||||
"new-note": "New note",
|
||||
|
||||
5
apps/server/src/express.d.ts
vendored
5
apps/server/src/express.d.ts
vendored
@@ -17,6 +17,11 @@ export declare module "express-serve-static-core" {
|
||||
"user-agent"?: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface Response {
|
||||
/** Set to true to prevent apiResultHandler from double-handling the response (e.g., for SSE streams) */
|
||||
triliumResponseHandled?: boolean;
|
||||
}
|
||||
}
|
||||
|
||||
export declare module "express-session" {
|
||||
|
||||
92
apps/server/src/routes/api/llm_chat.ts
Normal file
92
apps/server/src/routes/api/llm_chat.ts
Normal file
@@ -0,0 +1,92 @@
|
||||
import type { Request, Response } from "express";
|
||||
import type { LlmMessage } from "@triliumnext/commons";
|
||||
|
||||
import { getProviderByType, hasConfiguredProviders, type LlmProviderConfig } from "../../services/llm/index.js";
|
||||
import { streamToChunks } from "../../services/llm/stream.js";
|
||||
|
||||
/** Shape of the POST body accepted by the streaming chat endpoint. */
interface ChatRequest {
    /** Conversation so far, including the latest user message. */
    messages: LlmMessage[];
    /** Optional per-request provider/model configuration. */
    config?: LlmProviderConfig;
}
|
||||
|
||||
/**
|
||||
* SSE endpoint for streaming chat completions.
|
||||
*
|
||||
* Response format (Server-Sent Events):
|
||||
* data: {"type":"text","content":"Hello"}
|
||||
* data: {"type":"text","content":" world"}
|
||||
* data: {"type":"done"}
|
||||
*
|
||||
* On error:
|
||||
* data: {"type":"error","error":"Error message"}
|
||||
*/
|
||||
async function streamChat(req: Request, res: Response) {
    const { messages, config = {} } = req.body as ChatRequest;

    // Validate before committing to SSE — this is the last point where a
    // plain HTTP error status can still be sent.
    if (!messages || !Array.isArray(messages) || messages.length === 0) {
        res.status(400).json({ error: "messages array is required" });
        return;
    }

    // Set up SSE headers - disable compression and buffering for real-time streaming
    res.setHeader("Content-Type", "text/event-stream");
    res.setHeader("Cache-Control", "no-cache, no-transform");
    res.setHeader("Connection", "keep-alive");
    res.setHeader("X-Accel-Buffering", "no"); // Disable nginx buffering
    res.setHeader("Content-Encoding", "none"); // Disable compression
    res.flushHeaders();

    // Mark response as handled to prevent double-handling by apiResultHandler
    res.triliumResponseHandled = true;

    // Type assertion for flush method (available when compression is used)
    const flushableRes = res as Response & { flush?: () => void };

    try {
        // From here on, errors must be reported as SSE "error" events because
        // the 200 status and headers have already been flushed.
        if (!hasConfiguredProviders()) {
            res.write(`data: ${JSON.stringify({ type: "error", error: "No LLM providers configured. Please add a provider in Options → AI / LLM." })}\n\n`);
            res.end();
            return;
        }

        const provider = getProviderByType(config.provider || "anthropic");
        const result = provider.chat(messages, config);

        // Get pricing from provider for cost calculation
        // NOTE(review): this fallback model id differs from the Anthropic
        // provider's DEFAULT_MODEL ("claude-sonnet-4-6"); when config.model is
        // unset the pricing/usage label may not match the model actually used
        // for generation — confirm and align the defaults.
        const model = config.model || "claude-sonnet-4-20250514";
        const pricing = provider.getModelPricing(model);
        for await (const chunk of streamToChunks(result, { model, pricing })) {
            res.write(`data: ${JSON.stringify(chunk)}\n\n`);
            // Flush immediately to ensure real-time streaming
            if (typeof flushableRes.flush === "function") {
                flushableRes.flush();
            }
        }
        // NOTE(review): nothing aborts the upstream stream if the client
        // disconnects mid-generation — consider listening for req "close" to
        // stop consuming (and paying for) the rest of the completion.
    } catch (error) {
        const errorMessage = error instanceof Error ? error.message : "Unknown error";
        res.write(`data: ${JSON.stringify({ type: "error", error: errorMessage })}\n\n`);
    } finally {
        res.end();
    }
}
|
||||
|
||||
/**
|
||||
* Get available models for a provider.
|
||||
*/
|
||||
function getModels(req: Request, _res: Response) {
|
||||
const providerType = req.query.provider as string || "anthropic";
|
||||
|
||||
// Return empty array when no providers configured - client handles this gracefully
|
||||
if (!hasConfiguredProviders()) {
|
||||
return { models: [] };
|
||||
}
|
||||
|
||||
const llmProvider = getProviderByType(providerType);
|
||||
const models = llmProvider.getAvailableModels();
|
||||
return { models };
|
||||
}
|
||||
|
||||
export default {
|
||||
streamChat,
|
||||
getModels
|
||||
};
|
||||
@@ -104,7 +104,8 @@ const ALLOWED_OPTIONS = new Set<OptionNames>([
|
||||
"experimentalFeatures",
|
||||
"newLayout",
|
||||
"mfaEnabled",
|
||||
"mfaMethod"
|
||||
"mfaMethod",
|
||||
"llmProviders"
|
||||
]);
|
||||
|
||||
function getOptions() {
|
||||
|
||||
@@ -86,6 +86,29 @@ function createSearchNote(req: Request) {
|
||||
return specialNotesService.createSearchNote(searchString, ancestorNoteId);
|
||||
}
|
||||
|
||||
function createLlmChat() {
|
||||
return specialNotesService.createLlmChat();
|
||||
}
|
||||
|
||||
function getMostRecentLlmChat() {
|
||||
const chat = specialNotesService.getMostRecentLlmChat();
|
||||
// Return null explicitly if no chat found (not undefined)
|
||||
return chat || null;
|
||||
}
|
||||
|
||||
function getOrCreateLlmChat() {
|
||||
return specialNotesService.getOrCreateLlmChat();
|
||||
}
|
||||
|
||||
function getRecentLlmChats(req: Request) {
|
||||
const limit = parseInt(req.query.limit as string) || 10;
|
||||
return specialNotesService.getRecentLlmChats(limit);
|
||||
}
|
||||
|
||||
function saveLlmChat(req: Request) {
|
||||
return specialNotesService.saveLlmChat(req.body.llmChatNoteId);
|
||||
}
|
||||
|
||||
function getHoistedNote() {
|
||||
return becca.getNote(cls.getHoistedNoteId());
|
||||
}
|
||||
@@ -119,6 +142,11 @@ export default {
|
||||
saveSqlConsole,
|
||||
createSearchNote,
|
||||
saveSearchNote,
|
||||
createLlmChat,
|
||||
getMostRecentLlmChat,
|
||||
getOrCreateLlmChat,
|
||||
getRecentLlmChats,
|
||||
saveLlmChat,
|
||||
createLauncher,
|
||||
resetLauncher,
|
||||
createOrUpdateScriptLauncherFromApi
|
||||
|
||||
@@ -115,6 +115,7 @@ class FakeResponse extends EventEmitter implements Pick<Response<any, Record<str
|
||||
}
|
||||
|
||||
json(obj) {
|
||||
this.respHeaders["Content-Type"] = "application/json";
|
||||
this.send(JSON.stringify(obj));
|
||||
return this as unknown as MockedResponse;
|
||||
}
|
||||
|
||||
@@ -145,7 +145,7 @@ function internalRoute<P extends ParamsDictionary>(method: HttpMethod, path: str
|
||||
|
||||
function handleResponse(resultHandler: ApiResultHandler, req: express.Request, res: express.Response, result: unknown, start: number) {
|
||||
// Skip result handling if the response has already been handled
|
||||
if ((res as any).triliumResponseHandled) {
|
||||
if (res.triliumResponseHandled) {
|
||||
// Just log the request without additional processing
|
||||
log.request(req, res, Date.now() - start, 0);
|
||||
return;
|
||||
@@ -161,7 +161,7 @@ function handleException(e: unknown | Error, method: HttpMethod, path: string, r
|
||||
log.error(`${method} ${path} threw exception: '${errMessage}', stack: ${errStack}`);
|
||||
|
||||
// Skip sending response if it's already been handled by the route handler
|
||||
if ((res as unknown as { triliumResponseHandled?: boolean }).triliumResponseHandled || res.headersSent) {
|
||||
if (res.triliumResponseHandled || res.headersSent) {
|
||||
return;
|
||||
}
|
||||
|
||||
|
||||
@@ -34,6 +34,7 @@ import fontsRoute from "./api/fonts.js";
|
||||
import imageRoute from "./api/image.js";
|
||||
import importRoute from "./api/import.js";
|
||||
import keysRoute from "./api/keys.js";
|
||||
import llmChatRoute from "./api/llm_chat.js";
|
||||
import loginApiRoute from "./api/login.js";
|
||||
import metricsRoute from "./api/metrics.js";
|
||||
import noteMapRoute from "./api/note_map.js";
|
||||
@@ -291,6 +292,11 @@ function register(app: express.Application) {
|
||||
asyncApiRoute(PST, "/api/special-notes/save-sql-console", specialNotesRoute.saveSqlConsole);
|
||||
apiRoute(PST, "/api/special-notes/search-note", specialNotesRoute.createSearchNote);
|
||||
apiRoute(PST, "/api/special-notes/save-search-note", specialNotesRoute.saveSearchNote);
|
||||
apiRoute(PST, "/api/special-notes/llm-chat", specialNotesRoute.createLlmChat);
|
||||
apiRoute(GET, "/api/special-notes/most-recent-llm-chat", specialNotesRoute.getMostRecentLlmChat);
|
||||
apiRoute(GET, "/api/special-notes/get-or-create-llm-chat", specialNotesRoute.getOrCreateLlmChat);
|
||||
apiRoute(GET, "/api/special-notes/recent-llm-chats", specialNotesRoute.getRecentLlmChats);
|
||||
apiRoute(PST, "/api/special-notes/save-llm-chat", specialNotesRoute.saveLlmChat);
|
||||
apiRoute(PST, "/api/special-notes/launchers/:noteId/reset", specialNotesRoute.resetLauncher);
|
||||
apiRoute(PST, "/api/special-notes/launchers/:parentNoteId/:launcherType", specialNotesRoute.createLauncher);
|
||||
apiRoute(PUT, "/api/special-notes/api-script-launcher", specialNotesRoute.createOrUpdateScriptLauncherFromApi);
|
||||
@@ -323,6 +329,10 @@ function register(app: express.Application) {
|
||||
apiRoute(PST, "/api/script/bundle/:noteId", scriptRoute.getBundle);
|
||||
apiRoute(GET, "/api/script/relation/:noteId/:relationName", scriptRoute.getRelationBundles);
|
||||
|
||||
// LLM chat endpoints
|
||||
asyncRoute(PST, "/api/llm-chat/stream", [auth.checkApiAuthOrElectron, csrfMiddleware], llmChatRoute.streamChat, null);
|
||||
apiRoute(GET, "/api/llm-chat/models", llmChatRoute.getModels);
|
||||
|
||||
// no CSRF since this is called from android app
|
||||
route(PST, "/api/sender/login", [loginRateLimiter], loginApiRoute.token, apiResultHandler);
|
||||
asyncRoute(PST, "/api/sender/image", [auth.checkEtapiToken, uploadMiddlewareWithErrorHandling], senderRoute.uploadImage, apiResultHandler);
|
||||
|
||||
@@ -66,6 +66,12 @@ function buildHiddenSubtreeDefinition(helpSubtree: HiddenSubtreeItem[]): HiddenS
|
||||
type: "doc",
|
||||
icon: "bx-data"
|
||||
},
|
||||
{
|
||||
id: "_llmChat",
|
||||
title: t("hidden-subtree.llm-chat-history-title"),
|
||||
type: "doc",
|
||||
icon: "bx-message-square-dots"
|
||||
},
|
||||
{
|
||||
id: "_share",
|
||||
title: t("hidden-subtree.shared-notes-title"),
|
||||
@@ -247,6 +253,7 @@ function buildHiddenSubtreeDefinition(helpSubtree: HiddenSubtreeItem[]): HiddenS
|
||||
{ id: "_optionsEtapi", title: t("hidden-subtree.etapi-title"), type: "contentWidget", icon: "bx-extension" },
|
||||
{ id: "_optionsBackup", title: t("hidden-subtree.backup-title"), type: "contentWidget", icon: "bx-data" },
|
||||
{ id: "_optionsSync", title: t("hidden-subtree.sync-title"), type: "contentWidget", icon: "bx-wifi" },
|
||||
{ id: "_optionsLlm", title: t("hidden-subtree.llm-title"), type: "contentWidget", icon: "bx-bot" },
|
||||
{ id: "_optionsAi", title: "AI Chat", type: "contentWidget", enforceDeleted: true },
|
||||
{ id: "_optionsOther", title: t("hidden-subtree.other"), type: "contentWidget", icon: "bx-dots-horizontal" },
|
||||
{ id: "_optionsLocalization", title: t("hidden-subtree.localization"), type: "contentWidget", icon: "bx-world" },
|
||||
|
||||
@@ -78,6 +78,13 @@ export default function buildLaunchBarConfig() {
|
||||
type: "launcher",
|
||||
command: "toggleZenMode",
|
||||
icon: "bx bxs-yin-yang"
|
||||
},
|
||||
{
|
||||
id: "_lbSidebarChat",
|
||||
title: t("hidden-subtree.sidebar-chat-title"),
|
||||
type: "launcher",
|
||||
builtinWidget: "sidebarChat",
|
||||
icon: "bx bx-message-square-dots"
|
||||
}
|
||||
];
|
||||
|
||||
|
||||
105
apps/server/src/services/llm/index.ts
Normal file
105
apps/server/src/services/llm/index.ts
Normal file
@@ -0,0 +1,105 @@
|
||||
import type { LlmProvider } from "./types.js";
|
||||
import { AnthropicProvider } from "./providers/anthropic.js";
|
||||
import optionService from "../options.js";
|
||||
import log from "../log.js";
|
||||
|
||||
/**
 * Configuration for a single LLM provider instance.
 * This matches the structure stored in the llmProviders option.
 */
export interface LlmProviderSetup {
    // Unique ID of this configuration entry; also used as the cache key.
    id: string;
    // User-facing display name for this configuration.
    name: string;
    // Provider type key; must match a key of providerFactories (e.g. "anthropic").
    provider: string;
    // API key handed to the provider SDK.
    apiKey: string;
}

/** Factory functions for creating provider instances, keyed by provider type. */
const providerFactories: Record<string, (apiKey: string) => LlmProvider> = {
    anthropic: (apiKey) => new AnthropicProvider(apiKey)
};

/** Cache of instantiated providers by their config ID; reset via clearProviderCache(). */
let cachedProviders: Record<string, LlmProvider> = {};
|
||||
|
||||
/**
|
||||
* Get configured providers from the options.
|
||||
*/
|
||||
function getConfiguredProviders(): LlmProviderSetup[] {
|
||||
try {
|
||||
const providersJson = optionService.getOptionOrNull("llmProviders");
|
||||
if (!providersJson) {
|
||||
return [];
|
||||
}
|
||||
return JSON.parse(providersJson) as LlmProviderSetup[];
|
||||
} catch (e) {
|
||||
log.error(`Failed to parse llmProviders option: ${e}`);
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a provider instance by its configuration ID.
|
||||
* If no ID is provided, returns the first configured provider.
|
||||
*/
|
||||
export function getProvider(providerId?: string): LlmProvider {
|
||||
const configs = getConfiguredProviders();
|
||||
|
||||
if (configs.length === 0) {
|
||||
throw new Error("No LLM providers configured. Please add a provider in Options → AI / LLM.");
|
||||
}
|
||||
|
||||
// Find the requested provider or use the first one
|
||||
const config = providerId
|
||||
? configs.find(c => c.id === providerId)
|
||||
: configs[0];
|
||||
|
||||
if (!config) {
|
||||
throw new Error(`LLM provider not found: ${providerId}`);
|
||||
}
|
||||
|
||||
// Check cache
|
||||
if (cachedProviders[config.id]) {
|
||||
return cachedProviders[config.id];
|
||||
}
|
||||
|
||||
// Create new provider instance
|
||||
const factory = providerFactories[config.provider];
|
||||
if (!factory) {
|
||||
throw new Error(`Unknown LLM provider type: ${config.provider}. Available: ${Object.keys(providerFactories).join(", ")}`);
|
||||
}
|
||||
|
||||
const provider = factory(config.apiKey);
|
||||
cachedProviders[config.id] = provider;
|
||||
return provider;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the first configured provider of a specific type (e.g., "anthropic").
|
||||
*/
|
||||
export function getProviderByType(providerType: string): LlmProvider {
|
||||
const configs = getConfiguredProviders();
|
||||
const config = configs.find(c => c.provider === providerType);
|
||||
|
||||
if (!config) {
|
||||
throw new Error(`No ${providerType} provider configured. Please add one in Options → AI / LLM.`);
|
||||
}
|
||||
|
||||
return getProvider(config.id);
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if any providers are configured.
|
||||
*/
|
||||
export function hasConfiguredProviders(): boolean {
|
||||
return getConfiguredProviders().length > 0;
|
||||
}
|
||||
|
||||
/**
|
||||
* Clear the provider cache. Call this when provider configurations change.
|
||||
*/
|
||||
export function clearProviderCache(): void {
|
||||
cachedProviders = {};
|
||||
}
|
||||
|
||||
export type { LlmProvider, LlmProviderConfig, ModelInfo, ModelPricing } from "./types.js";
|
||||
225
apps/server/src/services/llm/providers/anthropic.ts
Normal file
225
apps/server/src/services/llm/providers/anthropic.ts
Normal file
@@ -0,0 +1,225 @@
|
||||
import { createAnthropic, type AnthropicProvider as AnthropicSDKProvider } from "@ai-sdk/anthropic";
|
||||
import { streamText, stepCountIs, type CoreMessage } from "ai";
|
||||
import type { LlmMessage } from "@triliumnext/commons";
|
||||
|
||||
import becca from "../../../becca/becca.js";
|
||||
import { noteTools } from "../tools.js";
|
||||
import type { LlmProvider, LlmProviderConfig, ModelInfo, ModelPricing, StreamResult } from "../types.js";
|
||||
|
||||
// Model used when the request does not specify one.
const DEFAULT_MODEL = "claude-sonnet-4-6";
// Default cap on generated tokens per completion.
// NOTE(review): 8096 is an unusual value — 8192 (8k) may have been intended; confirm.
const DEFAULT_MAX_TOKENS = 8096;
|
||||
|
||||
/**
|
||||
* Calculate effective cost for comparison (weighted average: 1 input + 3 output).
|
||||
* Output is weighted more heavily as it's typically the dominant cost factor.
|
||||
*/
|
||||
function effectiveCost(pricing: ModelPricing): number {
|
||||
return (pricing.input + 3 * pricing.output) / 4;
|
||||
}
|
||||
|
||||
/**
 * Available Anthropic models with pricing (USD per million tokens).
 * Source: https://docs.anthropic.com/en/docs/about-claude/models
 *
 * costMultiplier is intentionally omitted here; it is derived below relative
 * to the default (baseline) model.
 *
 * NOTE(review): these literals set `isLegacy`, which is not declared on
 * ModelInfo in types.ts as reviewed — confirm the field exists there, or
 * excess-property checking will reject this array.
 */
const BASE_MODELS: Omit<ModelInfo, "costMultiplier">[] = [
    // ===== Current Models =====
    {
        id: "claude-sonnet-4-6",
        name: "Claude Sonnet 4.6",
        pricing: { input: 3, output: 15 },
        contextWindow: 1000000,
        isDefault: true
    },
    {
        id: "claude-opus-4-6",
        name: "Claude Opus 4.6",
        pricing: { input: 5, output: 25 },
        contextWindow: 1000000
    },
    {
        id: "claude-haiku-4-5-20251001",
        name: "Claude Haiku 4.5",
        pricing: { input: 1, output: 5 },
        contextWindow: 200000
    },
    // ===== Legacy Models =====
    {
        id: "claude-sonnet-4-5-20250929",
        name: "Claude Sonnet 4.5",
        pricing: { input: 3, output: 15 },
        contextWindow: 200000, // 1M available with beta header
        isLegacy: true
    },
    {
        id: "claude-opus-4-5-20251101",
        name: "Claude Opus 4.5",
        pricing: { input: 5, output: 25 },
        contextWindow: 200000,
        isLegacy: true
    },
    {
        id: "claude-opus-4-1-20250805",
        name: "Claude Opus 4.1",
        pricing: { input: 15, output: 75 },
        contextWindow: 200000,
        isLegacy: true
    },
    {
        id: "claude-sonnet-4-20250514",
        name: "Claude Sonnet 4.0",
        pricing: { input: 3, output: 15 },
        contextWindow: 200000, // 1M available with beta header
        isLegacy: true
    },
    {
        id: "claude-opus-4-20250514",
        name: "Claude Opus 4.0",
        pricing: { input: 15, output: 75 },
        contextWindow: 200000,
        isLegacy: true
    }
];

// Use default model (Sonnet) as baseline for cost multiplier
const baselineModel = BASE_MODELS.find(m => m.isDefault) || BASE_MODELS[0];
const baselineCost = effectiveCost(baselineModel.pricing);

// Build models with cost multipliers (rounded to one decimal place)
const AVAILABLE_MODELS: ModelInfo[] = BASE_MODELS.map(m => ({
    ...m,
    costMultiplier: Math.round((effectiveCost(m.pricing) / baselineCost) * 10) / 10
}));

// Build pricing lookup from available models, keyed by model ID
const MODEL_PRICING: Record<string, ModelPricing> = Object.fromEntries(
    AVAILABLE_MODELS.map(m => [m.id, m.pricing])
);
|
||||
|
||||
/**
|
||||
* Build context string from the current note being viewed.
|
||||
*/
|
||||
function buildNoteContext(noteId: string): string | null {
|
||||
const note = becca.getNote(noteId);
|
||||
if (!note) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const parts: string[] = [];
|
||||
parts.push(`The user is currently viewing a note titled "${note.title}" (ID: ${noteId}).`);
|
||||
|
||||
// Add note type context
|
||||
if (note.type !== "text") {
|
||||
parts.push(`Note type: ${note.type}`);
|
||||
}
|
||||
|
||||
// Add content for text notes (truncate if too long)
|
||||
if (note.type === "text" || note.type === "code") {
|
||||
try {
|
||||
const content = note.getContent();
|
||||
if (typeof content === "string" && content.trim()) {
|
||||
const maxLength = 4000;
|
||||
const truncated = content.length > maxLength
|
||||
? content.substring(0, maxLength) + "\n... (content truncated)"
|
||||
: content;
|
||||
parts.push(`\nNote content:\n\`\`\`\n${truncated}\n\`\`\``);
|
||||
}
|
||||
} catch {
|
||||
// Content not available
|
||||
}
|
||||
}
|
||||
|
||||
return parts.join("\n");
|
||||
}
|
||||
|
||||
/**
 * LLM provider backed by the Anthropic Messages API via the AI SDK.
 * One instance is created per configured API key and cached by the provider
 * registry in services/llm/index.ts.
 */
export class AnthropicProvider implements LlmProvider {
    name = "anthropic";
    // Configured AI SDK factory; calling it with a model ID yields a model handle.
    private anthropic: AnthropicSDKProvider;

    /**
     * @param apiKey Anthropic API key; must be non-empty.
     * @throws Error when the key is missing.
     */
    constructor(apiKey: string) {
        if (!apiKey) {
            throw new Error("API key is required for Anthropic provider");
        }
        this.anthropic = createAnthropic({ apiKey });
    }

    /**
     * Start a streaming chat completion.
     *
     * System-prompt precedence: config.systemPrompt wins over any "system"
     * message in the conversation; note context (config.contextNoteId) is
     * appended to whichever one is used. Returns the raw AI SDK stream result;
     * conversion to SSE chunks happens in stream.ts.
     */
    chat(messages: LlmMessage[], config: LlmProviderConfig): StreamResult {
        let systemPrompt = config.systemPrompt || messages.find(m => m.role === "system")?.content;
        const chatMessages = messages.filter(m => m.role !== "system");

        // Add note context if viewing a note
        if (config.contextNoteId) {
            const noteContext = buildNoteContext(config.contextNoteId);
            if (noteContext) {
                systemPrompt = systemPrompt
                    ? `${systemPrompt}\n\n${noteContext}`
                    : noteContext;
            }
        }

        // Convert to AI SDK message format (system messages were stripped above)
        const coreMessages: CoreMessage[] = chatMessages.map(m => ({
            role: m.role as "user" | "assistant",
            content: m.content
        }));

        const model = this.anthropic(config.model || DEFAULT_MODEL);

        // Build options for streamText
        const streamOptions: Parameters<typeof streamText>[0] = {
            model,
            messages: coreMessages,
            maxOutputTokens: config.maxTokens || DEFAULT_MAX_TOKENS,
            system: systemPrompt
        };

        // Enable extended thinking for deeper reasoning
        if (config.enableExtendedThinking) {
            const thinkingBudget = config.thinkingBudget || 10000;
            streamOptions.providerOptions = {
                anthropic: {
                    thinking: {
                        type: "enabled",
                        budgetTokens: thinkingBudget
                    }
                }
            };
            // The thinking budget counts against the output budget, so raise
            // the cap enough to leave ~4000 tokens for the visible answer.
            streamOptions.maxOutputTokens = Math.max(
                streamOptions.maxOutputTokens || DEFAULT_MAX_TOKENS,
                thinkingBudget + 4000
            );
        }

        // Build tools object
        const tools: Record<string, unknown> = {};

        if (config.enableWebSearch) {
            tools.web_search = this.anthropic.tools.webSearch_20250305({
                maxUses: 5
            });
        }

        if (config.enableNoteTools) {
            Object.assign(tools, noteTools);
        }

        if (Object.keys(tools).length > 0) {
            streamOptions.tools = tools;
            // Allow multiple tool use cycles before final response
            // NOTE(review): both maxSteps and stopWhen are set — depending on
            // the installed "ai" package version only one of these is a valid
            // option; confirm against the SDK in use.
            streamOptions.maxSteps = 5;
            // Override default stopWhen which stops after 1 step
            streamOptions.stopWhen = stepCountIs(5);
            // Let model decide when to use tools vs respond with text
            streamOptions.toolChoice = "auto";
        }

        return streamText(streamOptions);
    }

    /** Pricing for a model ID, or undefined when the model is unknown. */
    getModelPricing(model: string): ModelPricing | undefined {
        return MODEL_PRICING[model];
    }

    /** All models this provider offers, with derived cost multipliers. */
    getAvailableModels(): ModelInfo[] {
        return AVAILABLE_MODELS;
    }
}
|
||||
102
apps/server/src/services/llm/stream.ts
Normal file
102
apps/server/src/services/llm/stream.ts
Normal file
@@ -0,0 +1,102 @@
|
||||
/**
|
||||
* Shared streaming utilities for converting AI SDK streams to SSE chunks.
|
||||
*/
|
||||
|
||||
import type { LlmStreamChunk } from "@triliumnext/commons";
|
||||
|
||||
import type { ModelPricing, StreamResult } from "./types.js";
|
||||
|
||||
/**
|
||||
* Calculate estimated cost in USD based on token usage and pricing.
|
||||
*/
|
||||
function calculateCost(inputTokens: number, outputTokens: number, pricing?: ModelPricing): number | undefined {
|
||||
if (!pricing) return undefined;
|
||||
|
||||
const inputCost = (inputTokens / 1_000_000) * pricing.input;
|
||||
const outputCost = (outputTokens / 1_000_000) * pricing.output;
|
||||
|
||||
return inputCost + outputCost;
|
||||
}
|
||||
|
||||
export interface StreamOptions {
    /** Model identifier for display */
    model?: string;
    /** Model pricing for cost calculation (from provider) */
    pricing?: ModelPricing;
}

/**
 * Convert an AI SDK StreamResult to an async iterable of LlmStreamChunk.
 * This is provider-agnostic - works with any AI SDK provider.
 *
 * Emission order: per-part chunks (text / thinking / tool_use / tool_result /
 * citation / error) as they arrive, then a single "usage" chunk once token
 * counts are known, then a final "done". Any thrown error is converted into
 * an "error" chunk rather than propagating to the caller.
 */
export async function* streamToChunks(result: StreamResult, options: StreamOptions = {}): AsyncIterable<LlmStreamChunk> {
    try {
        for await (const part of result.fullStream) {
            switch (part.type) {
                case "text-delta":
                    yield { type: "text", content: part.text };
                    break;

                case "reasoning-delta":
                    // Extended-thinking tokens, kept separate from answer text.
                    yield { type: "thinking", content: part.text };
                    break;

                case "tool-call":
                    yield {
                        type: "tool_use",
                        toolName: part.toolName,
                        toolInput: part.input as Record<string, unknown>
                    };
                    break;

                case "tool-result":
                    yield {
                        type: "tool_result",
                        toolName: part.toolName,
                        // Tool outputs are serialized so the chunk stays JSON-safe.
                        result: typeof part.output === "string"
                            ? part.output
                            : JSON.stringify(part.output)
                    };
                    break;

                case "source":
                    // Citation from web search (only URL sources have url property)
                    if (part.sourceType === "url") {
                        yield {
                            type: "citation",
                            citation: {
                                url: part.url,
                                title: part.title
                            }
                        };
                    }
                    break;

                case "error":
                    yield { type: "error", error: String(part.error) };
                    break;
            }
        }

        // Get usage information after stream completes; skipped when the
        // provider does not report numeric token counts.
        const usage = await result.usage;
        if (usage && typeof usage.inputTokens === "number" && typeof usage.outputTokens === "number") {
            const cost = calculateCost(usage.inputTokens, usage.outputTokens, options.pricing);
            yield {
                type: "usage",
                usage: {
                    promptTokens: usage.inputTokens,
                    completionTokens: usage.outputTokens,
                    totalTokens: usage.inputTokens + usage.outputTokens,
                    cost,
                    model: options.model
                }
            };
        }

        yield { type: "done" };
    } catch (error) {
        const message = error instanceof Error ? error.message : "Unknown error";
        yield { type: "error", error: message };
    }
}
|
||||
189
apps/server/src/services/llm/tools.ts
Normal file
189
apps/server/src/services/llm/tools.ts
Normal file
@@ -0,0 +1,189 @@
|
||||
/**
|
||||
* LLM tools that wrap existing Trilium services.
|
||||
* These reuse the same logic as ETAPI without any HTTP overhead.
|
||||
*/
|
||||
|
||||
import { tool } from "ai";
|
||||
import { z } from "zod";
|
||||
|
||||
import becca from "../../becca/becca.js";
|
||||
import noteService from "../notes.js";
|
||||
import SearchContext from "../search/search_context.js";
|
||||
import searchService from "../search/services/search.js";
|
||||
|
||||
/**
|
||||
* Search for notes in the knowledge base.
|
||||
*/
|
||||
export const searchNotes = tool({
|
||||
description: "Search for notes in the user's knowledge base. Returns note metadata including title, type, and IDs.",
|
||||
inputSchema: z.object({
|
||||
query: z.string().describe("Search query (supports Trilium search syntax)")
|
||||
}),
|
||||
execute: async ({ query }) => {
|
||||
const searchContext = new SearchContext({});
|
||||
const results = searchService.findResultsWithQuery(query, searchContext);
|
||||
|
||||
return results.slice(0, 10).map(sr => {
|
||||
const note = becca.notes[sr.noteId];
|
||||
if (!note) return null;
|
||||
return {
|
||||
noteId: note.noteId,
|
||||
title: note.title,
|
||||
type: note.type
|
||||
};
|
||||
}).filter(Boolean);
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* Read the content of a specific note.
|
||||
*/
|
||||
export const readNote = tool({
|
||||
description: "Read the full content of a note by its ID. Use search_notes first to find relevant note IDs.",
|
||||
inputSchema: z.object({
|
||||
noteId: z.string().describe("The ID of the note to read")
|
||||
}),
|
||||
execute: async ({ noteId }) => {
|
||||
const note = becca.getNote(noteId);
|
||||
if (!note) {
|
||||
return { error: "Note not found" };
|
||||
}
|
||||
if (note.isProtected) {
|
||||
return { error: "Note is protected" };
|
||||
}
|
||||
|
||||
const content = note.getContent();
|
||||
return {
|
||||
noteId: note.noteId,
|
||||
title: note.title,
|
||||
type: note.type,
|
||||
content: typeof content === "string" ? content : "[binary content]"
|
||||
};
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* Update the content of a note.
|
||||
*/
|
||||
export const updateNoteContent = tool({
|
||||
description: "Replace the entire content of a note. Use this to completely rewrite a note's content. For text notes, provide HTML content.",
|
||||
inputSchema: z.object({
|
||||
noteId: z.string().describe("The ID of the note to update"),
|
||||
content: z.string().describe("The new content for the note (HTML for text notes, plain text for code notes)")
|
||||
}),
|
||||
execute: async ({ noteId, content }) => {
|
||||
const note = becca.getNote(noteId);
|
||||
if (!note) {
|
||||
return { error: "Note not found" };
|
||||
}
|
||||
if (note.isProtected) {
|
||||
return { error: "Note is protected and cannot be modified" };
|
||||
}
|
||||
if (note.type !== "text" && note.type !== "code") {
|
||||
return { error: `Cannot update content for note type: ${note.type}` };
|
||||
}
|
||||
|
||||
note.setContent(content);
|
||||
return {
|
||||
success: true,
|
||||
noteId: note.noteId,
|
||||
title: note.title
|
||||
};
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* Append content to a note.
|
||||
*/
|
||||
export const appendToNote = tool({
|
||||
description: "Append content to the end of an existing note. For text notes, the content will be added as a new paragraph.",
|
||||
inputSchema: z.object({
|
||||
noteId: z.string().describe("The ID of the note to append to"),
|
||||
content: z.string().describe("The content to append (HTML for text notes, plain text for code notes)")
|
||||
}),
|
||||
execute: async ({ noteId, content }) => {
|
||||
const note = becca.getNote(noteId);
|
||||
if (!note) {
|
||||
return { error: "Note not found" };
|
||||
}
|
||||
if (note.isProtected) {
|
||||
return { error: "Note is protected and cannot be modified" };
|
||||
}
|
||||
if (note.type !== "text" && note.type !== "code") {
|
||||
return { error: `Cannot append to note type: ${note.type}` };
|
||||
}
|
||||
|
||||
const existingContent = note.getContent();
|
||||
if (typeof existingContent !== "string") {
|
||||
return { error: "Note has binary content" };
|
||||
}
|
||||
|
||||
let newContent: string;
|
||||
if (note.type === "text") {
|
||||
// For text notes, wrap in paragraph if not already HTML
|
||||
const contentToAppend = content.startsWith("<") ? content : `<p>${content}</p>`;
|
||||
newContent = existingContent + contentToAppend;
|
||||
} else {
|
||||
// For code notes, just append with newline
|
||||
newContent = existingContent + (existingContent.endsWith("\n") ? "" : "\n") + content;
|
||||
}
|
||||
|
||||
note.setContent(newContent);
|
||||
return {
|
||||
success: true,
|
||||
noteId: note.noteId,
|
||||
title: note.title
|
||||
};
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* Create a new note.
|
||||
*/
|
||||
export const createNote = tool({
|
||||
description: "Create a new note in the user's knowledge base. Returns the created note's ID and title.",
|
||||
inputSchema: z.object({
|
||||
parentNoteId: z.string().describe("The ID of the parent note where the new note will be created. Use 'root' for top-level notes."),
|
||||
title: z.string().describe("The title of the new note"),
|
||||
content: z.string().describe("The content of the note (HTML for text notes, plain text for code notes)"),
|
||||
type: z.enum(["text", "code"]).optional().describe("The type of note to create. Defaults to 'text'.")
|
||||
}),
|
||||
execute: async ({ parentNoteId, title, content, type = "text" }) => {
|
||||
const parentNote = becca.getNote(parentNoteId);
|
||||
if (!parentNote) {
|
||||
return { error: "Parent note not found" };
|
||||
}
|
||||
if (parentNote.isProtected) {
|
||||
return { error: "Cannot create note under a protected parent" };
|
||||
}
|
||||
|
||||
try {
|
||||
const { note } = noteService.createNewNote({
|
||||
parentNoteId,
|
||||
title,
|
||||
content,
|
||||
type
|
||||
});
|
||||
|
||||
return {
|
||||
success: true,
|
||||
noteId: note.noteId,
|
||||
title: note.title,
|
||||
type: note.type
|
||||
};
|
||||
} catch (err) {
|
||||
return { error: err instanceof Error ? err.message : "Failed to create note" };
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
/**
 * Registry of all note tools, keyed by the snake_case tool name exposed to
 * the model. These keys are what the LLM sees and must remain stable.
 */
export const noteTools = {
    search_notes: searchNotes,
    read_note: readNote,
    update_note_content: updateNoteContent,
    append_to_note: appendToNote,
    create_note: createNote
};
|
||||
72
apps/server/src/services/llm/types.ts
Normal file
72
apps/server/src/services/llm/types.ts
Normal file
@@ -0,0 +1,72 @@
|
||||
/**
|
||||
* Server-specific LLM Provider types.
|
||||
* Shared types (LlmMessage, LlmCitation, LlmStreamChunk, LlmChatConfig)
|
||||
* should be imported from @triliumnext/commons.
|
||||
*/
|
||||
|
||||
import type { LlmChatConfig, LlmMessage } from "@triliumnext/commons";
|
||||
import type { streamText } from "ai";
|
||||
|
||||
/**
|
||||
* Extended provider config with server-specific options.
|
||||
*/
|
||||
export interface LlmProviderConfig extends LlmChatConfig {
|
||||
maxTokens?: number;
|
||||
temperature?: number;
|
||||
}
|
||||
|
||||
/**
|
||||
* Result type from streamText - the AI SDK's unified streaming interface.
|
||||
*/
|
||||
export type StreamResult = ReturnType<typeof streamText>;
|
||||
|
||||
/**
|
||||
* Pricing per million tokens for a model.
|
||||
*/
|
||||
export interface ModelPricing {
|
||||
/** Cost per million input tokens in USD */
|
||||
input: number;
|
||||
/** Cost per million output tokens in USD */
|
||||
output: number;
|
||||
}
|
||||
|
||||
/**
|
||||
* Information about an available model.
|
||||
*/
|
||||
export interface ModelInfo {
|
||||
/** Model identifier (e.g., "claude-sonnet-4-20250514") */
|
||||
id: string;
|
||||
/** Human-readable name (e.g., "Claude Sonnet 4") */
|
||||
name: string;
|
||||
/** Pricing per million tokens */
|
||||
pricing: ModelPricing;
|
||||
/** Whether this is the default model */
|
||||
isDefault?: boolean;
|
||||
/** Cost multiplier relative to the cheapest model (1x = cheapest) */
|
||||
costMultiplier?: number;
|
||||
/** Maximum context window size in tokens */
|
||||
contextWindow?: number;
|
||||
}
|
||||
|
||||
export interface LlmProvider {
|
||||
name: string;
|
||||
|
||||
/**
|
||||
* Create a streaming chat completion.
|
||||
* Returns the AI SDK StreamResult which is provider-agnostic.
|
||||
*/
|
||||
chat(
|
||||
messages: LlmMessage[],
|
||||
config: LlmProviderConfig
|
||||
): StreamResult;
|
||||
|
||||
/**
|
||||
* Get pricing for a model. Returns undefined if pricing is not available.
|
||||
*/
|
||||
getModelPricing(model: string): ModelPricing | undefined;
|
||||
|
||||
/**
|
||||
* Get list of available models for this provider.
|
||||
*/
|
||||
getAvailableModels(): ModelInfo[];
|
||||
}
|
||||
@@ -15,7 +15,8 @@ const noteTypes = [
|
||||
{ type: "doc", defaultMime: "" },
|
||||
{ type: "contentWidget", defaultMime: "" },
|
||||
{ type: "mindMap", defaultMime: "application/json" },
|
||||
{ type: "spreadsheet", defaultMime: "application/json" }
|
||||
{ type: "spreadsheet", defaultMime: "application/json" },
|
||||
{ type: "llmChat", defaultMime: "application/json" }
|
||||
];
|
||||
|
||||
function getDefaultMimeForNoteType(typeName: string) {
|
||||
|
||||
@@ -209,7 +209,10 @@ const defaultOptions: DefaultOption[] = [
|
||||
]),
|
||||
isSynced: true
|
||||
},
|
||||
{ name: "experimentalFeatures", value: "[]", isSynced: true }
|
||||
{ name: "experimentalFeatures", value: "[]", isSynced: true },
|
||||
|
||||
// AI / LLM
|
||||
{ name: "llmProviders", value: "[]", isSynced: false }
|
||||
];
|
||||
|
||||
/**
|
||||
|
||||
@@ -10,7 +10,7 @@ import SearchContext from "./search/search_context.js";
|
||||
import { LBTPL_NOTE_LAUNCHER, LBTPL_CUSTOM_WIDGET, LBTPL_SPACER, LBTPL_SCRIPT } from "./hidden_subtree.js";
|
||||
import { t } from "i18next";
|
||||
import BNote from '../becca/entities/bnote.js';
|
||||
import { SaveSearchNoteResponse, SaveSqlConsoleResponse } from "@triliumnext/commons";
|
||||
import { SaveSearchNoteResponse, SaveSqlConsoleResponse, SaveLlmChatResponse } from "@triliumnext/commons";
|
||||
|
||||
function getInboxNote(date: string) {
|
||||
const workspaceNote = hoistedNoteService.getWorkspaceNote();
|
||||
@@ -123,6 +123,114 @@ function saveSearchNote(searchNoteId: string) {
|
||||
return result satisfies SaveSearchNoteResponse;
|
||||
}
|
||||
|
||||
function createLlmChat() {
|
||||
const { note } = noteService.createNewNote({
|
||||
parentNoteId: getMonthlyParentNoteId("_llmChat", "llmChat"),
|
||||
title: `${t("special_notes.llm_chat_prefix")} ${dateUtils.localNowDateTime()}`,
|
||||
content: JSON.stringify({
|
||||
version: 1,
|
||||
messages: []
|
||||
}),
|
||||
type: "llmChat",
|
||||
mime: "application/json"
|
||||
});
|
||||
|
||||
note.setLabel("iconClass", "bx bx-message-square-dots");
|
||||
note.setLabel("keepCurrentHoisting");
|
||||
|
||||
return note;
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the most recently modified LLM chat note.
|
||||
* Used by sidebar chat to persist conversation across page refreshes.
|
||||
* Returns null if no chat exists.
|
||||
*/
|
||||
function getMostRecentLlmChat() {
|
||||
// Search for all llmChat notes and return the most recently modified
|
||||
const results = searchService.searchNotes(
|
||||
"note.type = llmChat",
|
||||
new SearchContext({
|
||||
ancestorNoteId: "_llmChat",
|
||||
limit: 1,
|
||||
orderBy: "note.utcDateModified",
|
||||
orderDirection: "desc"
|
||||
})
|
||||
);
|
||||
|
||||
return results.length > 0 ? results[0] : null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the most recent LLM chat or creates a new one if none exists.
|
||||
* Used by sidebar chat for persistent conversations.
|
||||
*/
|
||||
function getOrCreateLlmChat() {
|
||||
const existingChat = getMostRecentLlmChat();
|
||||
|
||||
if (existingChat) {
|
||||
return existingChat;
|
||||
}
|
||||
|
||||
return createLlmChat();
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets a list of recent LLM chat notes.
|
||||
* Used by sidebar chat history popup.
|
||||
*/
|
||||
function getRecentLlmChats(limit: number = 10) {
|
||||
const results = searchService.searchNotes(
|
||||
"note.type = llmChat",
|
||||
new SearchContext({
|
||||
ancestorNoteId: "_llmChat",
|
||||
limit,
|
||||
orderBy: "note.utcDateModified",
|
||||
orderDirection: "desc"
|
||||
})
|
||||
);
|
||||
|
||||
return results.map(note => ({
|
||||
noteId: note.noteId,
|
||||
title: note.title,
|
||||
dateModified: note.utcDateModified
|
||||
}));
|
||||
}
|
||||
|
||||
function getLlmChatHome() {
|
||||
const workspaceNote = hoistedNoteService.getWorkspaceNote();
|
||||
if (!workspaceNote) {
|
||||
throw new Error("Unable to find workspace note");
|
||||
}
|
||||
|
||||
if (!workspaceNote.isRoot()) {
|
||||
return workspaceNote.searchNoteInSubtree("#workspaceLlmChatHome") || workspaceNote.searchNoteInSubtree("#llmChatHome") || workspaceNote;
|
||||
} else {
|
||||
const today = dateUtils.localNowDate();
|
||||
|
||||
return workspaceNote.searchNoteInSubtree("#llmChatHome") || dateNoteService.getDayNote(today);
|
||||
}
|
||||
}
|
||||
|
||||
function saveLlmChat(llmChatNoteId: string) {
|
||||
const llmChatNote = becca.getNote(llmChatNoteId);
|
||||
if (!llmChatNote) {
|
||||
throw new Error(`Unable to find LLM chat note ID: ${llmChatNoteId}`);
|
||||
}
|
||||
|
||||
const llmChatHome = getLlmChatHome();
|
||||
|
||||
const result = llmChatNote.cloneTo(llmChatHome.noteId);
|
||||
|
||||
for (const parentBranch of llmChatNote.getParentBranches()) {
|
||||
if (parentBranch.parentNote?.hasAncestor("_hidden")) {
|
||||
parentBranch.markAsDeleted();
|
||||
}
|
||||
}
|
||||
|
||||
return result satisfies SaveLlmChatResponse;
|
||||
}
|
||||
|
||||
function getMonthlyParentNoteId(rootNoteId: string, prefix: string) {
|
||||
const month = dateUtils.localNowDate().substring(0, 7);
|
||||
const labelName = `${prefix}MonthNote`;
|
||||
@@ -282,6 +390,11 @@ export default {
|
||||
saveSqlConsole,
|
||||
createSearchNote,
|
||||
saveSearchNote,
|
||||
createLlmChat,
|
||||
getMostRecentLlmChat,
|
||||
getOrCreateLlmChat,
|
||||
getRecentLlmChats,
|
||||
saveLlmChat,
|
||||
createLauncher,
|
||||
resetLauncher,
|
||||
createOrUpdateScriptLauncherFromApi
|
||||
|
||||
@@ -16,3 +16,4 @@ export * from "./lib/notes.js";
|
||||
export * from "./lib/week_utils.js";
|
||||
export { default as BUILTIN_ATTRIBUTES } from "./lib/builtin_attributes.js";
|
||||
export * from "./lib/spreadsheet/render_to_html.js";
|
||||
export * from "./lib/llm_api.js";
|
||||
|
||||
101
packages/commons/src/lib/llm_api.ts
Normal file
101
packages/commons/src/lib/llm_api.ts
Normal file
@@ -0,0 +1,101 @@
|
||||
/**
|
||||
* Shared LLM types for chat integration.
|
||||
* Used by both client and server for API communication.
|
||||
*/
|
||||
|
||||
/**
|
||||
* A chat message in the conversation.
|
||||
*/
|
||||
export interface LlmMessage {
|
||||
role: "user" | "assistant" | "system";
|
||||
content: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Citation information extracted from LLM responses.
|
||||
* May include URL (for web search) or document metadata (for document citations).
|
||||
*/
|
||||
export interface LlmCitation {
|
||||
/** Source URL (typically from web search) */
|
||||
url?: string;
|
||||
/** Document or page title */
|
||||
title?: string;
|
||||
/** The text that was cited */
|
||||
citedText?: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Configuration for LLM chat requests.
|
||||
*/
|
||||
export interface LlmChatConfig {
|
||||
provider?: string;
|
||||
model?: string;
|
||||
systemPrompt?: string;
|
||||
/** Enable web search tool */
|
||||
enableWebSearch?: boolean;
|
||||
/** Enable note tools (search and read notes) */
|
||||
enableNoteTools?: boolean;
|
||||
/** Enable extended thinking for deeper reasoning */
|
||||
enableExtendedThinking?: boolean;
|
||||
/** Token budget for extended thinking (default: 10000) */
|
||||
thinkingBudget?: number;
|
||||
/** Current note context (note ID the user is viewing) */
|
||||
contextNoteId?: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Pricing per million tokens for a model.
|
||||
*/
|
||||
export interface LlmModelPricing {
|
||||
/** Cost per million input tokens in USD */
|
||||
input: number;
|
||||
/** Cost per million output tokens in USD */
|
||||
output: number;
|
||||
}
|
||||
|
||||
/**
|
||||
* Information about an available LLM model.
|
||||
*/
|
||||
export interface LlmModelInfo {
|
||||
/** Model identifier (e.g., "claude-sonnet-4-20250514") */
|
||||
id: string;
|
||||
/** Human-readable name (e.g., "Claude Sonnet 4") */
|
||||
name: string;
|
||||
/** Pricing per million tokens */
|
||||
pricing: LlmModelPricing;
|
||||
/** Whether this is the default model */
|
||||
isDefault?: boolean;
|
||||
/** Whether this is a legacy/older model */
|
||||
isLegacy?: boolean;
|
||||
/** Cost multiplier relative to the cheapest model (1x = cheapest) */
|
||||
costMultiplier?: number;
|
||||
/** Maximum context window size in tokens */
|
||||
contextWindow?: number;
|
||||
}
|
||||
|
||||
/**
|
||||
* Token usage information from the LLM response.
|
||||
*/
|
||||
export interface LlmUsage {
|
||||
promptTokens: number;
|
||||
completionTokens: number;
|
||||
totalTokens: number;
|
||||
/** Estimated cost in USD (if available) */
|
||||
cost?: number;
|
||||
/** Model identifier used for this response */
|
||||
model?: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Stream chunk types for real-time SSE updates.
|
||||
* Defines the protocol between server and client.
|
||||
*/
|
||||
export type LlmStreamChunk =
|
||||
| { type: "text"; content: string }
|
||||
| { type: "thinking"; content: string }
|
||||
| { type: "tool_use"; toolName: string; toolInput: Record<string, unknown> }
|
||||
| { type: "tool_result"; toolName: string; result: string }
|
||||
| { type: "citation"; citation: LlmCitation }
|
||||
| { type: "usage"; usage: LlmUsage }
|
||||
| { type: "error"; error: string }
|
||||
| { type: "done" };
|
||||
@@ -21,7 +21,8 @@ export const NOTE_TYPE_ICONS = {
|
||||
doc: "bx bxs-file-doc",
|
||||
contentWidget: "bx bxs-widget",
|
||||
mindMap: "bx bx-sitemap",
|
||||
spreadsheet: "bx bx-table"
|
||||
spreadsheet: "bx bx-table",
|
||||
llmChat: "bx bx-message-square-dots"
|
||||
};
|
||||
|
||||
const FILE_MIME_MAPPINGS = {
|
||||
|
||||
@@ -140,6 +140,10 @@ export interface OptionDefinitions extends KeyboardShortcutsOptions<KeyboardActi
|
||||
|
||||
seenCallToActions: string;
|
||||
experimentalFeatures: string;
|
||||
|
||||
// AI / LLM
|
||||
/** JSON array of configured LLM providers with their API keys */
|
||||
llmProviders: string;
|
||||
}
|
||||
|
||||
export type OptionNames = keyof OptionDefinitions;
|
||||
|
||||
@@ -122,7 +122,8 @@ export const ALLOWED_NOTE_TYPES = [
|
||||
"webView",
|
||||
"code",
|
||||
"mindMap",
|
||||
"spreadsheet"
|
||||
"spreadsheet",
|
||||
"llmChat"
|
||||
] as const;
|
||||
export type NoteType = (typeof ALLOWED_NOTE_TYPES)[number];
|
||||
|
||||
|
||||
@@ -214,6 +214,8 @@ export interface ConvertAttachmentToNoteResponse {
|
||||
|
||||
export type SaveSqlConsoleResponse = CloneResponse;
|
||||
|
||||
export type SaveLlmChatResponse = CloneResponse;
|
||||
|
||||
export interface BacklinkCountResponse {
|
||||
count: number;
|
||||
}
|
||||
|
||||
127
pnpm-lock.yaml
generated
127
pnpm-lock.yaml
generated
@@ -552,6 +552,12 @@ importers:
|
||||
|
||||
apps/server:
|
||||
dependencies:
|
||||
'@ai-sdk/anthropic':
|
||||
specifier: ^2.0.0
|
||||
version: 2.0.71(zod@4.1.12)
|
||||
ai:
|
||||
specifier: ^5.0.0
|
||||
version: 5.0.161(zod@4.1.12)
|
||||
better-sqlite3:
|
||||
specifier: 12.8.0
|
||||
version: 12.8.0
|
||||
@@ -1526,6 +1532,28 @@ packages:
|
||||
'@adobe/css-tools@4.4.4':
|
||||
resolution: {integrity: sha512-Elp+iwUx5rN5+Y8xLt5/GRoG20WGoDCQ/1Fb+1LiGtvwbDavuSk0jhD/eZdckHAuzcDzccnkv+rEjyWfRx18gg==}
|
||||
|
||||
'@ai-sdk/anthropic@2.0.71':
|
||||
resolution: {integrity: sha512-JXTtAwlyxGzzRtpiAXk/O93aOTgdfoVX28EoUuRNVqZRgtkoniLQTtqeb8uZ4oXljNJlXzaJLNasS/U90w/wjw==}
|
||||
engines: {node: '>=18'}
|
||||
peerDependencies:
|
||||
zod: ^3.25.76 || ^4.1.8
|
||||
|
||||
'@ai-sdk/gateway@2.0.65':
|
||||
resolution: {integrity: sha512-yaWzvQQWgAzV0m3eidfpRub1+PggDOr2hLnSOI+L2ZispyJ/7EoSzhjKzNCADj6PHnnPaOMH933Xhl1Z/NSxJw==}
|
||||
engines: {node: '>=18'}
|
||||
peerDependencies:
|
||||
zod: ^3.25.76 || ^4.1.8
|
||||
|
||||
'@ai-sdk/provider-utils@3.0.22':
|
||||
resolution: {integrity: sha512-fFT1KfUUKktfAFm5mClJhS1oux9tP2qgzmEZVl5UdwltQ1LO/s8hd7znVrgKzivwv1s1FIPza0s9OpJaNB/vHw==}
|
||||
engines: {node: '>=18'}
|
||||
peerDependencies:
|
||||
zod: ^3.25.76 || ^4.1.8
|
||||
|
||||
'@ai-sdk/provider@2.0.1':
|
||||
resolution: {integrity: sha512-KCUwswvsC5VsW2PWFqF8eJgSCu5Ysj7m1TxiHTVA6g7k360bk0RNQENT8KTMAYEs+8fWPD3Uu4dEmzGHc+jGng==}
|
||||
engines: {node: '>=18'}
|
||||
|
||||
'@aklinker1/rollup-plugin-visualizer@5.12.0':
|
||||
resolution: {integrity: sha512-X24LvEGw6UFmy0lpGJDmXsMyBD58XmX1bbwsaMLhNoM+UMQfQ3b2RtC+nz4b/NoRK5r6QJSKJHBNVeUdwqybaQ==}
|
||||
engines: {node: '>=14'}
|
||||
@@ -7003,6 +7031,10 @@ packages:
|
||||
'@upsetjs/venn.js@2.0.0':
|
||||
resolution: {integrity: sha512-WbBhLrooyePuQ1VZxrJjtLvTc4NVfpOyKx0sKqioq9bX1C1m7Jgykkn8gLrtwumBioXIqam8DLxp88Adbue6Hw==}
|
||||
|
||||
'@vercel/oidc@3.1.0':
|
||||
resolution: {integrity: sha512-Fw28YZpRnA3cAHHDlkt7xQHiJ0fcL+NRcIqsocZQUSmbzeIKRpwttJjik5ZGanXP+vlA4SbTg+AbA3bP363l+w==}
|
||||
engines: {node: '>= 20'}
|
||||
|
||||
'@vitest/browser-webdriverio@4.1.2':
|
||||
resolution: {integrity: sha512-5VKfMSq6ZoEAmvVu3sJGkDjEjGuxwk72tOgoNJfJYv+c+UQX1D4UqSdL8kXUMJcTQx1tKeWwQ9Zym0gRdMfyrA==}
|
||||
peerDependencies:
|
||||
@@ -7313,6 +7345,12 @@ packages:
|
||||
resolution: {integrity: sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA==}
|
||||
engines: {node: '>=8'}
|
||||
|
||||
ai@5.0.161:
|
||||
resolution: {integrity: sha512-CVANs7auUNEi/hRhdJDKcPYaCLWXveIfmoiekNSRel3i8WUieB6iEncDS5smcubWsx7hGtTgXxNRTg0YG0ljtA==}
|
||||
engines: {node: '>=18'}
|
||||
peerDependencies:
|
||||
zod: ^3.25.76 || ^4.1.8
|
||||
|
||||
ajv-draft-04@1.0.0:
|
||||
resolution: {integrity: sha512-mv00Te6nmYbRp5DCwclxtt7yV/joXJPGS7nM+97GdxvuttCOfgI3K4U25zboyeX0O+myI8ERluxQe5wljMmVIw==}
|
||||
peerDependencies:
|
||||
@@ -9465,6 +9503,10 @@ packages:
|
||||
resolution: {integrity: sha512-6RxOBZ/cYgd8usLwsEl+EC09Au/9BcmCKYF2/xbml6DNczf7nv0MQb+7BA2F+li6//I+28VNlQR37XfQtcAJuA==}
|
||||
engines: {node: '>=18.0.0'}
|
||||
|
||||
eventsource-parser@3.0.6:
|
||||
resolution: {integrity: sha512-Vo1ab+QXPzZ4tCa8SwIHJFaSzy4R6SHf7BY79rFBDf0idraZWAkYrDjDj8uWaSm3S2TK+hJ7/t1CEmZ7jXw+pg==}
|
||||
engines: {node: '>=18.0.0'}
|
||||
|
||||
execa@1.0.0:
|
||||
resolution: {integrity: sha512-adbxcyWV46qiHyvSp50TKt05tB4tK3HcmF7/nxfAdhnox83seTDbwnaqKO4sXRy7roHAIFqJP/Rw/AuEbX61LA==}
|
||||
engines: {node: '>=6'}
|
||||
@@ -11001,6 +11043,9 @@ packages:
|
||||
json-schema-traverse@1.0.0:
|
||||
resolution: {integrity: sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==}
|
||||
|
||||
json-schema@0.4.0:
|
||||
resolution: {integrity: sha512-es94M3nTIfsEPisRafak+HDLfHXnKBhV3vU5eqPcS3flIWqcxJWgXHXiey3YrpaNsanY5ei1VoYEbOzijuq9BA==}
|
||||
|
||||
json-stable-stringify-without-jsonify@1.0.1:
|
||||
resolution: {integrity: sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==}
|
||||
|
||||
@@ -15977,6 +16022,30 @@ snapshots:
|
||||
|
||||
'@adobe/css-tools@4.4.4': {}
|
||||
|
||||
'@ai-sdk/anthropic@2.0.71(zod@4.1.12)':
|
||||
dependencies:
|
||||
'@ai-sdk/provider': 2.0.1
|
||||
'@ai-sdk/provider-utils': 3.0.22(zod@4.1.12)
|
||||
zod: 4.1.12
|
||||
|
||||
'@ai-sdk/gateway@2.0.65(zod@4.1.12)':
|
||||
dependencies:
|
||||
'@ai-sdk/provider': 2.0.1
|
||||
'@ai-sdk/provider-utils': 3.0.22(zod@4.1.12)
|
||||
'@vercel/oidc': 3.1.0
|
||||
zod: 4.1.12
|
||||
|
||||
'@ai-sdk/provider-utils@3.0.22(zod@4.1.12)':
|
||||
dependencies:
|
||||
'@ai-sdk/provider': 2.0.1
|
||||
'@standard-schema/spec': 1.1.0
|
||||
eventsource-parser: 3.0.6
|
||||
zod: 4.1.12
|
||||
|
||||
'@ai-sdk/provider@2.0.1':
|
||||
dependencies:
|
||||
json-schema: 0.4.0
|
||||
|
||||
'@aklinker1/rollup-plugin-visualizer@5.12.0(rollup@4.52.0)':
|
||||
dependencies:
|
||||
open: 8.4.2
|
||||
@@ -16781,6 +16850,8 @@ snapshots:
|
||||
'@ckeditor/ckeditor5-core': 47.6.1
|
||||
'@ckeditor/ckeditor5-upload': 47.6.1
|
||||
ckeditor5: 47.6.1
|
||||
transitivePeerDependencies:
|
||||
- supports-color
|
||||
|
||||
'@ckeditor/ckeditor5-ai@47.6.1(bufferutil@4.0.9)(utf-8-validate@6.0.5)':
|
||||
dependencies:
|
||||
@@ -16922,12 +16993,16 @@ snapshots:
|
||||
'@ckeditor/ckeditor5-utils': 47.6.1
|
||||
'@ckeditor/ckeditor5-widget': 47.6.1
|
||||
es-toolkit: 1.39.5
|
||||
transitivePeerDependencies:
|
||||
- supports-color
|
||||
|
||||
'@ckeditor/ckeditor5-cloud-services@47.6.1':
|
||||
dependencies:
|
||||
'@ckeditor/ckeditor5-core': 47.6.1
|
||||
'@ckeditor/ckeditor5-utils': 47.6.1
|
||||
ckeditor5: 47.6.1
|
||||
transitivePeerDependencies:
|
||||
- supports-color
|
||||
|
||||
'@ckeditor/ckeditor5-code-block@47.6.1(patch_hash=2361d8caad7d6b5bddacc3a3b4aa37dbfba260b1c1b22a450413a79c1bb1ce95)':
|
||||
dependencies:
|
||||
@@ -17125,6 +17200,8 @@ snapshots:
|
||||
'@ckeditor/ckeditor5-utils': 47.6.1
|
||||
ckeditor5: 47.6.1
|
||||
es-toolkit: 1.39.5
|
||||
transitivePeerDependencies:
|
||||
- supports-color
|
||||
|
||||
'@ckeditor/ckeditor5-editor-decoupled@47.6.1':
|
||||
dependencies:
|
||||
@@ -17134,6 +17211,8 @@ snapshots:
|
||||
'@ckeditor/ckeditor5-utils': 47.6.1
|
||||
ckeditor5: 47.6.1
|
||||
es-toolkit: 1.39.5
|
||||
transitivePeerDependencies:
|
||||
- supports-color
|
||||
|
||||
'@ckeditor/ckeditor5-editor-inline@47.6.1':
|
||||
dependencies:
|
||||
@@ -17143,6 +17222,8 @@ snapshots:
|
||||
'@ckeditor/ckeditor5-utils': 47.6.1
|
||||
ckeditor5: 47.6.1
|
||||
es-toolkit: 1.39.5
|
||||
transitivePeerDependencies:
|
||||
- supports-color
|
||||
|
||||
'@ckeditor/ckeditor5-editor-multi-root@47.6.1':
|
||||
dependencies:
|
||||
@@ -17190,8 +17271,6 @@ snapshots:
|
||||
'@ckeditor/ckeditor5-core': 47.6.1
|
||||
'@ckeditor/ckeditor5-engine': 47.6.1
|
||||
'@ckeditor/ckeditor5-utils': 47.6.1
|
||||
transitivePeerDependencies:
|
||||
- supports-color
|
||||
|
||||
'@ckeditor/ckeditor5-essentials@47.6.1':
|
||||
dependencies:
|
||||
@@ -17223,8 +17302,6 @@ snapshots:
|
||||
'@ckeditor/ckeditor5-ui': 47.6.1
|
||||
'@ckeditor/ckeditor5-utils': 47.6.1
|
||||
ckeditor5: 47.6.1
|
||||
transitivePeerDependencies:
|
||||
- supports-color
|
||||
|
||||
'@ckeditor/ckeditor5-export-word@47.6.1':
|
||||
dependencies:
|
||||
@@ -17249,6 +17326,8 @@ snapshots:
|
||||
'@ckeditor/ckeditor5-utils': 47.6.1
|
||||
ckeditor5: 47.6.1
|
||||
es-toolkit: 1.39.5
|
||||
transitivePeerDependencies:
|
||||
- supports-color
|
||||
|
||||
'@ckeditor/ckeditor5-font@47.6.1':
|
||||
dependencies:
|
||||
@@ -17324,6 +17403,8 @@ snapshots:
|
||||
'@ckeditor/ckeditor5-utils': 47.6.1
|
||||
'@ckeditor/ckeditor5-widget': 47.6.1
|
||||
ckeditor5: 47.6.1
|
||||
transitivePeerDependencies:
|
||||
- supports-color
|
||||
|
||||
'@ckeditor/ckeditor5-html-embed@47.6.1':
|
||||
dependencies:
|
||||
@@ -17350,6 +17431,8 @@ snapshots:
|
||||
'@ckeditor/ckeditor5-widget': 47.6.1
|
||||
ckeditor5: 47.6.1
|
||||
es-toolkit: 1.39.5
|
||||
transitivePeerDependencies:
|
||||
- supports-color
|
||||
|
||||
'@ckeditor/ckeditor5-icons@47.6.1': {}
|
||||
|
||||
@@ -17381,8 +17464,6 @@ snapshots:
|
||||
'@ckeditor/ckeditor5-ui': 47.6.1
|
||||
'@ckeditor/ckeditor5-utils': 47.6.1
|
||||
ckeditor5: 47.6.1
|
||||
transitivePeerDependencies:
|
||||
- supports-color
|
||||
|
||||
'@ckeditor/ckeditor5-indent@47.6.1':
|
||||
dependencies:
|
||||
@@ -17522,8 +17603,6 @@ snapshots:
|
||||
'@ckeditor/ckeditor5-widget': 47.6.1
|
||||
ckeditor5: 47.6.1
|
||||
es-toolkit: 1.39.5
|
||||
transitivePeerDependencies:
|
||||
- supports-color
|
||||
|
||||
'@ckeditor/ckeditor5-minimap@47.6.1':
|
||||
dependencies:
|
||||
@@ -17532,8 +17611,6 @@ snapshots:
|
||||
'@ckeditor/ckeditor5-ui': 47.6.1
|
||||
'@ckeditor/ckeditor5-utils': 47.6.1
|
||||
ckeditor5: 47.6.1
|
||||
transitivePeerDependencies:
|
||||
- supports-color
|
||||
|
||||
'@ckeditor/ckeditor5-operations-compressor@47.6.1':
|
||||
dependencies:
|
||||
@@ -17586,8 +17663,6 @@ snapshots:
|
||||
'@ckeditor/ckeditor5-utils': 47.6.1
|
||||
'@ckeditor/ckeditor5-widget': 47.6.1
|
||||
ckeditor5: 47.6.1
|
||||
transitivePeerDependencies:
|
||||
- supports-color
|
||||
|
||||
'@ckeditor/ckeditor5-pagination@47.6.1':
|
||||
dependencies:
|
||||
@@ -17695,8 +17770,6 @@ snapshots:
|
||||
'@ckeditor/ckeditor5-ui': 47.6.1
|
||||
'@ckeditor/ckeditor5-utils': 47.6.1
|
||||
ckeditor5: 47.6.1
|
||||
transitivePeerDependencies:
|
||||
- supports-color
|
||||
|
||||
'@ckeditor/ckeditor5-slash-command@47.6.1':
|
||||
dependencies:
|
||||
@@ -17709,8 +17782,6 @@ snapshots:
|
||||
'@ckeditor/ckeditor5-ui': 47.6.1
|
||||
'@ckeditor/ckeditor5-utils': 47.6.1
|
||||
ckeditor5: 47.6.1
|
||||
transitivePeerDependencies:
|
||||
- supports-color
|
||||
|
||||
'@ckeditor/ckeditor5-source-editing-enhanced@47.6.1':
|
||||
dependencies:
|
||||
@@ -17758,8 +17829,6 @@ snapshots:
|
||||
'@ckeditor/ckeditor5-utils': 47.6.1
|
||||
ckeditor5: 47.6.1
|
||||
es-toolkit: 1.39.5
|
||||
transitivePeerDependencies:
|
||||
- supports-color
|
||||
|
||||
'@ckeditor/ckeditor5-table@47.6.1':
|
||||
dependencies:
|
||||
@@ -17772,8 +17841,6 @@ snapshots:
|
||||
'@ckeditor/ckeditor5-widget': 47.6.1
|
||||
ckeditor5: 47.6.1
|
||||
es-toolkit: 1.39.5
|
||||
transitivePeerDependencies:
|
||||
- supports-color
|
||||
|
||||
'@ckeditor/ckeditor5-template@47.6.1':
|
||||
dependencies:
|
||||
@@ -17883,8 +17950,6 @@ snapshots:
|
||||
'@ckeditor/ckeditor5-engine': 47.6.1
|
||||
'@ckeditor/ckeditor5-utils': 47.6.1
|
||||
es-toolkit: 1.39.5
|
||||
transitivePeerDependencies:
|
||||
- supports-color
|
||||
|
||||
'@ckeditor/ckeditor5-widget@47.6.1':
|
||||
dependencies:
|
||||
@@ -17904,8 +17969,6 @@ snapshots:
|
||||
'@ckeditor/ckeditor5-utils': 47.6.1
|
||||
ckeditor5: 47.6.1
|
||||
es-toolkit: 1.39.5
|
||||
transitivePeerDependencies:
|
||||
- supports-color
|
||||
|
||||
'@codemirror/autocomplete@6.18.6':
|
||||
dependencies:
|
||||
@@ -20234,7 +20297,7 @@ snapshots:
|
||||
detect-libc: 2.1.2
|
||||
is-glob: 4.0.3
|
||||
node-addon-api: 7.1.1
|
||||
picomatch: 4.0.3
|
||||
picomatch: 4.0.4
|
||||
optionalDependencies:
|
||||
'@parcel/watcher-android-arm64': 2.5.6
|
||||
'@parcel/watcher-darwin-arm64': 2.5.6
|
||||
@@ -24047,6 +24110,8 @@ snapshots:
|
||||
d3-selection: 3.0.0
|
||||
d3-transition: 3.0.1(d3-selection@3.0.0)
|
||||
|
||||
'@vercel/oidc@3.1.0': {}
|
||||
|
||||
'@vitest/browser-webdriverio@4.1.2(bufferutil@4.0.9)(msw@2.7.5(@types/node@24.12.0)(typescript@5.9.3))(utf-8-validate@6.0.5)(vite@8.0.3(@types/node@24.12.0)(esbuild@0.27.4)(jiti@2.6.1)(less@4.1.3)(sass-embedded@1.91.0)(sass@1.91.0)(terser@5.44.0)(tsx@4.21.0)(yaml@2.8.2))(vitest@4.1.2)(webdriverio@9.27.0(bufferutil@4.0.9)(utf-8-validate@6.0.5))':
|
||||
dependencies:
|
||||
'@vitest/browser': 4.1.2(bufferutil@4.0.9)(msw@2.7.5(@types/node@24.12.0)(typescript@5.9.3))(utf-8-validate@6.0.5)(vite@8.0.3(@types/node@24.12.0)(esbuild@0.27.4)(jiti@2.6.1)(less@4.1.3)(sass-embedded@1.91.0)(sass@1.91.0)(terser@5.44.0)(tsx@4.21.0)(yaml@2.8.2))(vitest@4.1.2)
|
||||
@@ -24459,6 +24524,14 @@ snapshots:
|
||||
clean-stack: 2.2.0
|
||||
indent-string: 4.0.0
|
||||
|
||||
ai@5.0.161(zod@4.1.12):
|
||||
dependencies:
|
||||
'@ai-sdk/gateway': 2.0.65(zod@4.1.12)
|
||||
'@ai-sdk/provider': 2.0.1
|
||||
'@ai-sdk/provider-utils': 3.0.22(zod@4.1.12)
|
||||
'@opentelemetry/api': 1.9.0
|
||||
zod: 4.1.12
|
||||
|
||||
ajv-draft-04@1.0.0(ajv@8.13.0):
|
||||
optionalDependencies:
|
||||
ajv: 8.13.0
|
||||
@@ -27197,6 +27270,8 @@ snapshots:
|
||||
|
||||
eventsource-parser@3.0.2: {}
|
||||
|
||||
eventsource-parser@3.0.6: {}
|
||||
|
||||
execa@1.0.0:
|
||||
dependencies:
|
||||
cross-spawn: 6.0.6
|
||||
@@ -29011,6 +29086,8 @@ snapshots:
|
||||
|
||||
json-schema-traverse@1.0.0: {}
|
||||
|
||||
json-schema@0.4.0: {}
|
||||
|
||||
json-stable-stringify-without-jsonify@1.0.1: {}
|
||||
|
||||
json-stringify-pretty-compact@4.0.0: {}
|
||||
|
||||
Reference in New Issue
Block a user