Skip to content
Closed
Show file tree
Hide file tree
Changes from 6 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
26 changes: 22 additions & 4 deletions apps/client/src/services/llm_chat.ts
Original file line number Diff line number Diff line change
Expand Up @@ -13,21 +13,38 @@ export async function getAvailableModels(): Promise<LlmModelInfo[]> {
/**
 * Callbacks invoked while streaming a chat completion via streamChatCompletion().
 * Optional members are simply skipped when the consumer does not handle that event.
 */
export interface StreamCallbacks {
    /** Called for each chunk of assistant-visible response text. */
    onChunk: (text: string) => void;
    /** Called for each chunk of extended-thinking text, when the model emits it. */
    onThinking?: (text: string) => void;
    /**
     * Called when the model requests a tool call.
     * `requiresApproval` marks mutating tools that must be confirmed by the user
     * (via executeToolCall) before they run.
     */
    onToolUse?: (toolName: string, input: Record<string, unknown>, requiresApproval?: boolean) => void;
    /** Called with the stringified result of a completed tool call. */
    onToolResult?: (toolName: string, result: string, isError?: boolean) => void;
    /** Called when the response cites a source. */
    onCitation?: (citation: LlmCitation) => void;
    /** Called with token-usage statistics for the completion. */
    onUsage?: (usage: LlmUsage) => void;
    /** Called when the stream fails. */
    onError: (error: string) => void;
    /** Called when the stream finishes. */
    onDone: () => void;
}

/**
 * Execute a mutating tool call after user approval.
 *
 * Posts the tool name and input to the "llm-chat/execute-tool" endpoint and
 * normalizes the response into a plain string result.
 *
 * @param toolName name of the tool to run (as reported by a tool_use stream event)
 * @param toolInput arguments for the tool call
 * @returns the tool output as a string; `isError` is true when the server reported an error
 */
export async function executeToolCall(toolName: string, toolInput: Record<string, unknown>): Promise<{ result: string; isError?: boolean }> {
    // `result` may be a string or a structured object depending on the tool;
    // typing it as `unknown` (rather than `object`) makes the string check below meaningful.
    const response = await server.post<{ result?: unknown; error?: string }>("llm-chat/execute-tool", { toolName, toolInput });

    if (response.error) {
        return { result: response.error, isError: true };
    }

    // JSON.stringify(undefined) returns `undefined` (not a string), which would
    // violate the declared return type — fall back to null for a missing result.
    return {
        result: typeof response.result === "string" ? response.result : JSON.stringify(response.result ?? null)
    };
}

/**
* Stream a chat completion from the LLM API using Server-Sent Events.
* Returns an AbortController that can be used to cancel the stream.
*/
export async function streamChatCompletion(
messages: LlmMessage[],
config: LlmChatConfig,
callbacks: StreamCallbacks
callbacks: StreamCallbacks,
abortSignal?: AbortSignal
): Promise<void> {
const headers = await server.getHeaders();

Expand All @@ -37,7 +54,8 @@ export async function streamChatCompletion(
...headers,
"Content-Type": "application/json"
} as HeadersInit,
body: JSON.stringify({ messages, config })
body: JSON.stringify({ messages, config }),
signal: abortSignal
});

if (!response.ok) {
Expand Down Expand Up @@ -76,7 +94,7 @@ export async function streamChatCompletion(
callbacks.onThinking?.(data.content);
break;
case "tool_use":
callbacks.onToolUse?.(data.toolName, data.toolInput);
callbacks.onToolUse?.(data.toolName, data.toolInput, data.requiresApproval);
// Yield to force Preact to commit the pending tool call
// state before we process the result.
await new Promise((r) => setTimeout(r, 1));
Expand Down
30 changes: 28 additions & 2 deletions apps/client/src/translations/en/translation.json
Original file line number Diff line number Diff line change
Expand Up @@ -1641,6 +1641,10 @@
"sources": "Sources",
"sources_summary": "{{count}} sources from {{sites}} sites",
"extended_thinking": "Extended thinking",
"knowledge_base": "Knowledge base",
"knowledge_base_sources": "Knowledge base sources",
"knowledge_base_add": "Add a note as source...",
"knowledge_base_remove": "Remove source",
"legacy_models": "Legacy models",
"thinking": "Thinking...",
"thought_process": "Thought process",
Expand All @@ -1649,6 +1653,11 @@
"result": "Result",
"error": "Error",
"tool_error": "failed",
"approve": "Approve",
"reject": "Reject",
"pending_approval": "This action requires your approval",
"rejected_by_user": "Rejected by user",
"stop": "Stop",
"total_tokens": "{{total}} tokens",
"tokens_detail": "{{prompt}} prompt + {{completion}} completion",
"tokens_used": "{{prompt}} prompt + {{completion}} completion = {{total}} tokens",
Expand All @@ -1660,7 +1669,8 @@
"note_context_enabled": "Click to disable note context: {{title}}",
"note_context_disabled": "Click to include current note in context",
"no_provider_message": "No AI provider configured. Add one to start chatting.",
"add_provider": "Add AI Provider"
"add_provider": "Add AI Provider",
"free": "Free"
},
"sidebar_chat": {
"title": "AI Chat",
Expand Down Expand Up @@ -2335,7 +2345,19 @@
"delete_provider_confirmation": "Are you sure you want to delete the provider \"{{name}}\"?",
"api_key": "API Key",
"api_key_placeholder": "Enter your API key",
"base_url": "Base URL",
"cancel": "Cancel",
"web_search_title": "Web Search Engine",
"web_search_description": "Choose which search engine the AI agent uses for web searches. Provider default uses the built-in search of each LLM provider (Anthropic, OpenAI, Google). Tavily and SearXNG work with all providers including Ollama.",
"web_search_engine": "Search engine",
"web_search_engine_description": "Select the search engine to use for AI web searches",
"web_search_provider_default": "Provider default (built-in)",
"tavily_api_key": "Tavily API key",
"tavily_api_key_description": "Get a free API key at tavily.com (1,000 searches/month free)",
"searxng_url": "SearXNG instance URL",
"searxng_url_description": "URL of your self-hosted SearXNG instance",
"search_timeout": "Search timeout (seconds)",
"search_timeout_description": "Maximum time to wait for web search results before timing out",
"mcp_title": "MCP (Model Context Protocol)",
"mcp_enabled": "MCP server",
"mcp_enabled_description": "Expose a Model Context Protocol (MCP) endpoint so that AI coding assistants (e.g. Claude Code, GitHub Copilot) can read and modify your notes. The endpoint is only accessible from localhost.",
Expand All @@ -2358,7 +2380,11 @@
"web_search": "Web search",
"note_in_parent": "<Note/> in <Parent/>",
"get_attachment": "Get attachment",
"get_attachment_content": "Read attachment content"
"get_attachment_content": "Read attachment content",
"rename_note": "Rename note",
"delete_note": "Delete note",
"move_note": "Move note",
"clone_note": "Clone note"
}
}
}
7 changes: 6 additions & 1 deletion apps/client/src/widgets/sidebar/SidebarChat.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -287,7 +287,12 @@ export default function SidebarChat() {
/>
)}
{chat.messages.map(msg => (
<ChatMessage key={msg.id} message={msg} />
<ChatMessage
key={msg.id}
message={msg}
onApproveToolCall={chat.approveToolCall}
onRejectToolCall={chat.rejectToolCall}
/>
))}
{chat.isStreaming && chat.streamingThinking && (
<ChatMessage
Expand Down
79 changes: 79 additions & 0 deletions apps/client/src/widgets/type_widgets/llm_chat/ChatInputBar.css
Original file line number Diff line number Diff line change
Expand Up @@ -48,6 +48,10 @@
opacity: 0.4;
}

/* Stop button: rendered in place of the Send button while a response is streaming */
.llm-chat-stop-btn {
color: var(--danger-color, #dc3545);
}

/* Model selector */
.llm-chat-model-selector {
display: flex;
Expand Down Expand Up @@ -167,3 +171,78 @@
margin: 0;
font-size: 0.9rem;
}

/* Knowledge base sources */

/* Container panel listing the notes pinned as knowledge-base sources */
.llm-chat-kb-sources {
display: flex;
flex-direction: column;
gap: 0.375rem;
padding: 0.5rem;
border: 1px solid var(--main-border-color);
border-radius: 6px;
background: var(--accented-background-color);
}

/* Panel header (icon + "Knowledge base sources" label) */
.llm-chat-kb-header {
display: flex;
align-items: center;
gap: 0.375rem;
font-size: 0.8rem;
color: var(--muted-text-color);
font-weight: 600;
}

/* Row of source-note chips; wraps onto multiple lines as sources accumulate */
.llm-chat-kb-chips {
display: flex;
flex-wrap: wrap;
gap: 0.25rem;
}

/* A single source-note chip: title plus a remove button */
.llm-chat-kb-chip {
display: inline-flex;
align-items: center;
gap: 0.25rem;
padding: 0.125rem 0.375rem;
border-radius: 4px;
background: var(--main-background-color);
border: 1px solid var(--main-border-color);
font-size: 0.8rem;
max-width: 200px;
}

/* Truncate long note titles with an ellipsis inside the fixed-width chip */
.llm-chat-kb-chip-title {
overflow: hidden;
text-overflow: ellipsis;
white-space: nowrap;
}

/* Chip remove ("x") button — chromeless so it reads as part of the chip */
.llm-chat-kb-chip-remove {
display: flex;
align-items: center;
justify-content: center;
background: none;
border: none;
cursor: pointer;
padding: 0;
color: var(--muted-text-color);
font-size: 0.9rem;
line-height: 1;
}

/* Danger color on hover, but only while the button is actually clickable */
.llm-chat-kb-chip-remove:hover:not(:disabled) {
color: var(--danger-color, #d9534f);
}

/* Removal is disabled while a response is streaming */
.llm-chat-kb-chip-remove:disabled {
opacity: 0.5;
cursor: not-allowed;
}

/* Tighten the embedded note-autocomplete so it fits the compact panel */
.llm-chat-kb-sources .note-autocomplete-container {
margin-top: 0.25rem;
}

.llm-chat-kb-sources .note-autocomplete-container input {
font-size: 0.8rem;
padding: 0.25rem 0.5rem;
}
97 changes: 89 additions & 8 deletions apps/client/src/widgets/type_widgets/llm_chat/ChatInputBar.tsx
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
import "./ChatInputBar.css";

import type { RefObject } from "preact";
import { useState, useCallback } from "preact/hooks";
import { useState, useCallback, useEffect } from "preact/hooks";

import { t } from "../../../services/i18n.js";
import ActionButton from "../../react/ActionButton.js";
Expand All @@ -10,6 +10,8 @@ import Dropdown from "../../react/Dropdown.js";
import { FormDropdownDivider, FormDropdownSubmenu, FormListItem, FormListToggleableItem } from "../../react/FormList.js";
import type { UseLlmChatReturn } from "./useLlmChat.js";
import AddProviderModal, { type LlmProviderConfig } from "../options/llm/AddProviderModal.js";
import NoteAutocomplete from "../../react/NoteAutocomplete.js";
import froca from "../../../services/froca.js";
import options from "../../../services/options.js";

/** Format token count with thousands separators */
Expand Down Expand Up @@ -96,6 +98,36 @@ export default function ChatInputBar({

const isNoteContextEnabled = !!chat.contextNoteId && !!activeNoteId;

const [sourceTitles, setSourceTitles] = useState<Record<string, string>>({});
const [kbPanelOpen, setKbPanelOpen] = useState(chat.sourceNoteIds.length > 0);

// Open KB panel when sources are loaded from saved content
useEffect(() => {
if (chat.sourceNoteIds.length > 0) {
setKbPanelOpen(true);
}
}, [chat.sourceNoteIds.length]);

// Resolve note titles for source note chips
useEffect(() => {
const ids = chat.sourceNoteIds.filter(id => !sourceTitles[id]);
if (ids.length === 0) return;

Promise.all(ids.map(id => froca.getNote(id, true))).then(notes => {
const newTitles: Record<string, string> = {};
for (let i = 0; i < ids.length; i++) {
newTitles[ids[i]] = notes[i]?.title ?? ids[i];
}
setSourceTitles(prev => ({ ...prev, ...newTitles }));
});
}, [chat.sourceNoteIds]);

const handleAddSourceNote = useCallback((noteId: string) => {
if (noteId && !chat.sourceNoteIds.includes(noteId)) {
chat.addSourceNote(noteId);
}
}, [chat.sourceNoteIds, chat.addSourceNote]);

const currentModel = chat.availableModels.find(m => m.id === chat.selectedModel);
const currentModels = chat.availableModels.filter(m => !m.isLegacy);
const legacyModels = chat.availableModels.filter(m => m.isLegacy);
Expand Down Expand Up @@ -139,6 +171,41 @@ export default function ChatInputBar({
onKeyDown={handleKeyDown}
rows={rows}
/>
{kbPanelOpen && (
<div className="llm-chat-kb-sources">
<div className="llm-chat-kb-header">
<span className="bx bx-book-open" />
<span>{t("llm_chat.knowledge_base_sources")}</span>
</div>
<div className="llm-chat-kb-chips">
{chat.sourceNoteIds.map(noteId => (
<span key={noteId} className="llm-chat-kb-chip">
<span className="llm-chat-kb-chip-title">{sourceTitles[noteId] ?? noteId}</span>
<button
type="button"
className="llm-chat-kb-chip-remove"
onClick={() => {
chat.removeSourceNote(noteId);
setSourceTitles(prev => {
const next = { ...prev };
delete next[noteId];
return next;
});
}}
disabled={chat.isStreaming}
title={t("llm_chat.knowledge_base_remove")}
>
<span className="bx bx-x" />
</button>
</span>
))}
</div>
<NoteAutocomplete
placeholder={t("llm_chat.knowledge_base_add")}
noteIdChanged={handleAddSourceNote}
/>
</div>
)}
<div className="llm-chat-options">
<div className="llm-chat-model-selector">
<span className="bx bx-chip" />
Expand All @@ -153,7 +220,7 @@ export default function ChatInputBar({
onClick={() => handleModelSelect(model.id)}
checked={chat.selectedModel === model.id}
>
{model.name} <small>({model.costDescription})</small>
{model.name}{model.costDescription && <> <small>({model.costDescription})</small></>}
</FormListItem>
))}
{legacyModels.length > 0 && (
Expand All @@ -169,7 +236,7 @@ export default function ChatInputBar({
onClick={() => handleModelSelect(model.id)}
checked={chat.selectedModel === model.id}
>
{model.name} <small>({model.costDescription})</small>
{model.name}{model.costDescription && <> <small>({model.costDescription})</small></>}
</FormListItem>
))}
</FormDropdownSubmenu>
Expand Down Expand Up @@ -197,6 +264,20 @@ export default function ChatInputBar({
onChange={handleExtendedThinkingToggle}
disabled={chat.isStreaming}
/>
<FormDropdownDivider />
<FormListToggleableItem
icon="bx bx-book-open"
title={t("llm_chat.knowledge_base")}
currentValue={kbPanelOpen}
onChange={(newValue) => {
setKbPanelOpen(newValue);
if (!newValue) {
chat.setSourceNoteIds([]);
setSourceTitles({});
}
}}
disabled={chat.isStreaming}
/>
</Dropdown>
{activeNoteId && activeNoteTitle && (
<Button
Expand Down Expand Up @@ -228,11 +309,11 @@ export default function ChatInputBar({
)}
</div>
<ActionButton
icon={chat.isStreaming ? "bx bx-loader-alt bx-spin" : "bx bx-send"}
text={chat.isStreaming ? t("llm_chat.sending") : t("llm_chat.send")}
onClick={handleSubmit}
disabled={chat.isStreaming || !chat.input.trim()}
className="llm-chat-send-btn"
icon={chat.isStreaming ? "bx bx-stop" : "bx bx-send"}
text={chat.isStreaming ? t("llm_chat.stop") : t("llm_chat.send")}
onClick={chat.isStreaming ? chat.stopStreaming : handleSubmit}
disabled={!chat.isStreaming && !chat.input.trim()}
className={`llm-chat-send-btn ${chat.isStreaming ? "llm-chat-stop-btn" : ""}`}
/>
</div>
</form>
Expand Down
Loading
Loading