Enhance LLM chat with provider config and conversation list

Adds configurable provider settings, conversation browsing, and
enhanced command interface for LLM chat functionality.

**New Features:**
- Provider dropdown in config panel (ready for multi-provider)
- Base URL field for custom endpoints (OpenRouter, local servers, proxies)
- `llm list` command - Browse all conversations with search/sort
- `llm open <id>` - Open specific conversation by ID
- Enhanced command parser supporting list and open subcommands

**Components:**
- LLMConversationListViewer: Searchable, sortable conversation list
  - Search by title or message content
  - Sort by recent, oldest, or token usage
  - Shows message count, tokens, cost, last update
  - Click to open, hover to delete
  - Empty state with helpful instructions

**Config Panel Enhancements:**
- Provider dropdown (currently shows OpenAI, ready for more)
- Base URL input (optional, for custom endpoints)
- Supports OpenAI-compatible APIs with custom URLs

**Command Improvements:**
- `llm` - Start new conversation (existing)
- `llm list` - Open conversation browser
- `llm open <id>` - Open by UUID
- `llm <id>` - Shorthand for open
- Updated man page with all options and examples

**Type System:**
- Added `llm-list` appId for conversation browser
- Extended LLMCommandResult with `showList` flag
- Updated command parser to route to correct appId

**Window System:**
- Lazy-loaded LLMConversationListViewer
- Proper routing based on command subcommand
- WindowRenderer case for llm-list

This sets the stage for custom provider management while making
the current single-provider setup much more flexible with base URL
overrides for OpenAI-compatible APIs.
This commit is contained in:
Claude
2026-01-15 22:38:14 +00:00
parent 444eedfdc2
commit 54fe1aafe8
7 changed files with 306 additions and 12 deletions

View File

@@ -0,0 +1,202 @@
/**
 * LLM Conversation List Viewer
 *
 * Displays all LLM conversations stored in the Dexie `llmConversations`
 * table, with client-side search (title and message content) and sorting
 * (most recent, oldest, or total token usage). The list is live: it
 * re-renders automatically whenever the underlying table changes.
 *
 * Clicking a row opens the conversation in an `llm-chat` window; the
 * hover-revealed trash button deletes it after a confirm() prompt.
 */
import { useState, useMemo } from "react";
import { useLiveQuery } from "dexie-react-hooks";
import { MessageSquare, Trash2 } from "lucide-react";
import db from "@/services/db";
import type { LLMConversation } from "@/types/llm";
import { Button } from "./ui/button";
import { Input } from "./ui/input";
import { useGrimoire } from "@/core/state";
import Timestamp from "./Timestamp";

/** Supported sort orders for the conversation list. */
type SortOrder = "recent" | "oldest" | "tokens";

/** Maximum characters of the last message shown as a row preview. */
const PREVIEW_LENGTH = 100;

export function LLMConversationListViewer() {
  const [searchQuery, setSearchQuery] = useState("");
  const [sortBy, setSortBy] = useState<SortOrder>("recent");
  const { addWindow } = useGrimoire();

  // Live-load conversations from Dexie, newest first. `undefined` while
  // the initial query is still running (drives the loading state below).
  const conversations = useLiveQuery(() =>
    db.llmConversations.orderBy("updatedAt").reverse().toArray(),
  );

  // Filter by search query, then sort. Sorting works on a copy so the
  // live-query result array is never mutated in place.
  const filteredConversations = useMemo(() => {
    if (!conversations) return [];

    let filtered = conversations;
    if (searchQuery.trim()) {
      const query = searchQuery.toLowerCase();
      filtered = filtered.filter(
        (conv) =>
          conv.title.toLowerCase().includes(query) ||
          conv.messages.some((m) => m.content.toLowerCase().includes(query)),
      );
    }

    const sorted = [...filtered];
    switch (sortBy) {
      case "recent":
        sorted.sort((a, b) => b.updatedAt - a.updatedAt);
        break;
      case "oldest":
        sorted.sort((a, b) => a.updatedAt - b.updatedAt);
        break;
      case "tokens":
        sorted.sort((a, b) => b.totalTokens.total - a.totalTokens.total);
        break;
    }
    return sorted;
  }, [conversations, searchQuery, sortBy]);

  // Open the clicked conversation in an llm-chat window.
  const handleOpen = (conversation: LLMConversation) => {
    addWindow("llm-chat", {
      conversationId: conversation.id,
    });
  };

  // Permanently delete a conversation after user confirmation.
  const handleDelete = async (conversation: LLMConversation) => {
    if (
      confirm(
        `Delete conversation "${conversation.title}"? This cannot be undone.`,
      )
    ) {
      await db.llmConversations.delete(conversation.id);
    }
  };

  // Initial query still in flight.
  if (!conversations) {
    return (
      <div className="flex h-full items-center justify-center text-muted-foreground">
        Loading conversations...
      </div>
    );
  }

  return (
    <div className="flex h-full flex-col">
      {/* Header with search and sort */}
      <div className="border-b p-4">
        <div className="flex items-center gap-2">
          <Input
            type="text"
            placeholder="Search conversations..."
            value={searchQuery}
            onChange={(e) => setSearchQuery(e.target.value)}
            className="flex-1"
          />
          <select
            value={sortBy}
            onChange={(e) => setSortBy(e.target.value as SortOrder)}
            className="flex h-9 rounded-md border border-input bg-transparent px-3 py-1 text-sm shadow-sm transition-colors focus-visible:outline-none focus-visible:ring-1 focus-visible:ring-ring"
          >
            <option value="recent">Most Recent</option>
            <option value="oldest">Oldest First</option>
            <option value="tokens">Most Tokens</option>
          </select>
        </div>
        <div className="mt-2 text-xs text-muted-foreground">
          {filteredConversations.length} conversation
          {filteredConversations.length !== 1 ? "s" : ""}
        </div>
      </div>

      {/* Conversation list */}
      <div className="flex-1 overflow-y-auto">
        {filteredConversations.length === 0 ? (
          <div className="flex h-full flex-col items-center justify-center gap-2 text-center text-sm text-muted-foreground p-4">
            {searchQuery ? (
              <>
                <p>No conversations found matching "{searchQuery}"</p>
                <Button
                  onClick={() => setSearchQuery("")}
                  variant="outline"
                  size="sm"
                >
                  Clear Search
                </Button>
              </>
            ) : (
              <>
                <MessageSquare className="size-12 opacity-20" />
                <p>No conversations yet</p>
                <p className="text-xs">
                  Use <code className="rounded bg-muted px-1 py-0.5">llm</code>{" "}
                  command to start
                </p>
              </>
            )}
          </div>
        ) : (
          <div className="divide-y">
            {filteredConversations.map((conversation) => {
              const lastMessage =
                conversation.messages[conversation.messages.length - 1];
              return (
                <div
                  key={conversation.id}
                  className="group flex items-start gap-3 p-4 hover:bg-muted/50 cursor-pointer transition-colors"
                  onClick={() => handleOpen(conversation)}
                >
                  <div className="flex-1 min-w-0">
                    <div className="flex items-start justify-between gap-2">
                      <h3 className="font-semibold truncate">
                        {conversation.title}
                      </h3>
                      <div className="flex items-center gap-1 opacity-0 group-hover:opacity-100 transition-opacity">
                        <Button
                          onClick={(e) => {
                            e.stopPropagation();
                            handleDelete(conversation);
                          }}
                          variant="ghost"
                          size="sm"
                          className="h-7 w-7 p-0"
                          title="Delete conversation"
                        >
                          <Trash2 className="size-3" />
                        </Button>
                      </div>
                    </div>
                    <div className="mt-1 text-xs text-muted-foreground">
                      {conversation.messages.length} message
                      {conversation.messages.length !== 1 ? "s" : ""} {" "}
                      {conversation.totalTokens.total.toLocaleString()} tokens
                      {conversation.totalCost > 0 && (
                        <> ${conversation.totalCost.toFixed(4)}</>
                      )}
                    </div>
                    <div className="mt-1 text-xs text-muted-foreground">
                      Updated <Timestamp timestamp={conversation.updatedAt} />
                    </div>
                    {/* Preview of the last message; ellipsis only when the
                        content was actually truncated (original appended
                        "..." unconditionally) */}
                    {lastMessage && (
                      <div className="mt-2 text-sm text-muted-foreground truncate">
                        {lastMessage.content.length > PREVIEW_LENGTH
                          ? `${lastMessage.content.slice(0, PREVIEW_LENGTH)}...`
                          : lastMessage.content}
                      </div>
                    )}
                  </div>
                </div>
              );
            })}
          </div>
        )}
      </div>
    </div>
  );
}

View File

@@ -33,6 +33,11 @@ const ChatViewer = lazy(() =>
const LLMChatViewer = lazy(() =>
import("./LLMChatViewer").then((m) => ({ default: m.LLMChatViewer })),
);
const LLMConversationListViewer = lazy(() =>
import("./LLMConversationListViewer").then((m) => ({
default: m.LLMConversationListViewer,
})),
);
const SpellsViewer = lazy(() =>
import("./SpellsViewer").then((m) => ({ default: m.SpellsViewer })),
);
@@ -192,6 +197,9 @@ export function WindowRenderer({ window, onClose }: WindowRendererProps) {
/>
);
break;
case "llm-list":
content = <LLMConversationListViewer />;
break;
case "spells":
content = <SpellsViewer />;
break;

View File

@@ -4,7 +4,7 @@
*/
import { useState, useEffect } from "react";
import { getProvider } from "@/lib/llm/providers/registry";
import { getProvider, getAllProviders } from "@/lib/llm/providers/registry";
import type { LLMConfig, ModelInfo } from "@/types/llm";
import { Button } from "@/components/ui/button";
import { Input } from "@/components/ui/input";
@@ -68,6 +68,7 @@ export function ConfigPanel({ config, onChange, onClear }: ConfigPanelProps) {
}
const currentModel = availableModels.find((m) => m.id === config.model);
const availableProviders = getAllProviders();
return (
<div className="flex h-full flex-col gap-4 overflow-y-auto p-4">
@@ -75,9 +76,34 @@ export function ConfigPanel({ config, onChange, onClear }: ConfigPanelProps) {
<h3 className="mb-3 font-semibold">Configuration</h3>
</div>
{/* Provider Selection */}
<div>
<Label>Provider</Label>
<select
value={config.provider.provider}
onChange={(e) => {
const newProvider = e.target.value as any;
onChange({
...config,
provider: {
...config.provider,
provider: newProvider,
},
});
}}
className="mt-1 flex h-9 w-full rounded-md border border-input bg-transparent px-3 py-1 text-sm shadow-sm transition-colors focus-visible:outline-none focus-visible:ring-1 focus-visible:ring-ring"
>
{availableProviders.map((p) => (
<option key={p.provider} value={p.provider}>
{p.name}
</option>
))}
</select>
</div>
{/* API Key */}
<div>
<Label>OpenAI API Key</Label>
<Label>API Key</Label>
<div className="mt-1 flex gap-2">
<Input
type={showApiKey ? "text" : "password"}
@@ -118,6 +144,25 @@ export function ConfigPanel({ config, onChange, onClear }: ConfigPanelProps) {
</p>
</div>
{/* Base URL (Optional) */}
<div>
<Label>Base URL (Optional)</Label>
<Input
value={config.provider.baseUrl || ""}
onChange={(e) =>
onChange({
...config,
provider: { ...config.provider, baseUrl: e.target.value },
})
}
placeholder="https://api.openai.com/v1"
className="mt-1 font-mono text-xs"
/>
<p className="mt-1 text-xs text-muted-foreground">
For custom endpoints (OpenRouter, local servers, proxies)
</p>
</div>
{/* Model Selection */}
<div>
<Label>Model</Label>

View File

@@ -1,6 +1,6 @@
/**
* Parser for the llm command
* Syntax: llm [conversation-id]
* Supports: llm, llm list, llm open <id>, llm <id>
*/
import type { LLMCommandResult } from "@/types/llm";
@@ -11,7 +11,27 @@ export function parseLLMCommand(args: string[]): LLMCommandResult {
return {};
}
// If first arg looks like a UUID, treat as conversation ID
// Handle subcommands
const subcommand = args[0].toLowerCase();
if (subcommand === "list") {
// Open conversation list viewer
return { showList: true };
}
if (subcommand === "open") {
// llm open <conversation-id>
if (args.length < 2) {
throw new Error("Usage: llm open <conversation-id>");
}
const conversationId = args[1];
if (!conversationId.match(/^[a-f0-9-]{36}$/i)) {
throw new Error(`Invalid conversation ID: ${conversationId}`);
}
return { conversationId };
}
// If first arg looks like a UUID, treat as conversation ID (shorthand for "open")
if (args[0].match(/^[a-f0-9-]{36}$/i)) {
return { conversationId: args[0] };
}
@@ -19,6 +39,8 @@ export function parseLLMCommand(args: string[]): LLMCommandResult {
throw new Error(
`Invalid LLM command. Usage:
llm # Start new conversation
llm <conversation-id> # Resume existing conversation`,
llm list # Browse all conversations
llm open <id> # Open specific conversation
llm <id> # Open specific conversation (shorthand)`,
);
}

View File

@@ -18,6 +18,7 @@ export type AppId =
| "conn"
| "chat"
| "llm-chat"
| "llm-list"
| "spells"
| "spellbooks"
| "win";

View File

@@ -69,4 +69,5 @@ export interface LLMCommandResult {
conversationId?: string;
provider?: LLMProvider;
model?: string;
showList?: boolean; // Show conversation list viewer
}

View File

@@ -378,24 +378,39 @@ export const manPages: Record<string, ManPageEntry> = {
llm: {
name: "llm",
section: "1",
synopsis: "llm [conversation-id]",
synopsis: "llm [list|open <id>|<id>]",
description:
"Chat with AI language models (OpenAI GPT, etc.). Start a new conversation or resume an existing one. Configure your API key and model settings via the configuration panel. Messages are streamed in real-time with token usage and cost tracking.",
"Chat with AI language models (OpenAI GPT, etc.). Start a new conversation, browse existing conversations, or resume a specific one. Configure your API key, model, and other settings via the configuration panel. Messages are streamed in real-time with token usage and cost tracking.",
options: [
{
flag: "[conversation-id]",
description: "Optional conversation ID to resume (UUID format)",
flag: "list",
description: "Browse all conversations with search and sort",
},
{
flag: "open <id>",
description: "Open specific conversation by ID",
},
{
flag: "<id>",
description: "Open conversation by ID (shorthand for 'open')",
},
],
examples: [
"llm Start a new conversation",
"llm abc123-def456-... Resume existing conversation",
"llm list Browse all conversations",
"llm open abc123-... Open specific conversation",
"llm abc123-def456-... Open conversation (shorthand)",
],
seeAlso: ["chat"],
appId: "llm-chat",
appId: "llm-chat", // Default, overridden by argParser
category: "System",
argParser: async (args: string[]) => {
return parseLLMCommand(args);
const result = parseLLMCommand(args);
// Override appId if showing list
if (result.showList) {
return { appId: "llm-list" as const };
}
return result;
},
},
profile: {