Add functional thread modification endpoints (#1668)

Persists the LLM selected within a given ChatSession, so the choice is retained if you reload the page or return to that ChatSession later.
This commit is contained in:
pablodanswer
2024-06-21 18:10:30 -07:00
committed by GitHub
parent 5cafc96cae
commit 8178d536b4
12 changed files with 185 additions and 41 deletions

View File

@@ -34,6 +34,7 @@ import {
removeMessage,
sendMessage,
setMessageAsLatest,
updateModelOverrideForChatSession,
updateParentChildren,
uploadFilesForChat,
} from "./lib";
@@ -59,7 +60,12 @@ import { AnswerPiecePacket, DanswerDocument } from "@/lib/search/interfaces";
import { buildFilters } from "@/lib/search/utils";
import { SettingsContext } from "@/components/settings/SettingsProvider";
import Dropzone from "react-dropzone";
import { checkLLMSupportsImageInput, getFinalLLM } from "@/lib/llm/utils";
import {
checkLLMSupportsImageInput,
destructureValue,
getFinalLLM,
structureValue,
} from "@/lib/llm/utils";
import { ChatInputBar } from "./input/ChatInputBar";
import { ConfigurationModal } from "./modal/configuration/ConfigurationModal";
import { useChatContext } from "@/components/context/ChatContext";
@@ -92,6 +98,7 @@ export function ChatPage({
folders,
openedFolders,
} = useChatContext();
const filteredAssistants = orderAssistantsForUser(availablePersonas, user);
const router = useRouter();
@@ -104,6 +111,9 @@ export function ChatPage({
const selectedChatSession = chatSessions.find(
(chatSession) => chatSession.id === existingChatSessionId
);
const llmOverrideManager = useLlmOverride(selectedChatSession);
const existingChatSessionPersonaId = selectedChatSession?.persona_id;
// used to track whether or not the initial "submit on load" has been performed
@@ -124,25 +134,37 @@ export function ChatPage({
// this is triggered every time the user switches which chat
// session they are using
useEffect(() => {
if (
chatSessionId &&
!urlChatSessionId.current &&
llmOverrideManager.llmOverride
) {
updateModelOverrideForChatSession(
chatSessionId,
structureValue(
llmOverrideManager.llmOverride.name,
llmOverrideManager.llmOverride.provider,
llmOverrideManager.llmOverride.modelName
) as string
);
}
urlChatSessionId.current = existingChatSessionId;
textAreaRef.current?.focus();
// only clear things if we're going from one chat session to another
if (chatSessionId !== null && existingChatSessionId !== chatSessionId) {
// de-select documents
clearSelectedDocuments();
// reset all filters
filterManager.setSelectedDocumentSets([]);
filterManager.setSelectedSources([]);
filterManager.setSelectedTags([]);
filterManager.setTimeRange(null);
// reset LLM overrides
llmOverrideManager.setLlmOverride({
name: "",
provider: "",
modelName: "",
});
// reset LLM overrides (based on chat session!)
llmOverrideManager.updateModelOverrideForChatSession(selectedChatSession);
llmOverrideManager.setTemperature(null);
// remove uploaded files
setCurrentMessageFiles([]);
@@ -177,7 +199,6 @@ export function ChatPage({
submitOnLoadPerformed.current = true;
await onSubmit();
}
return;
}
@@ -186,6 +207,7 @@ export function ChatPage({
`/api/chat/get-chat-session/${existingChatSessionId}`
);
const chatSession = (await response.json()) as BackendChatSession;
setSelectedPersona(
filteredAssistants.find(
(persona) => persona.id === chatSession.persona_id
@@ -386,8 +408,6 @@ export function ChatPage({
availableDocumentSets,
});
const llmOverrideManager = useLlmOverride();
// state for cancelling streaming
const [isCancelled, setIsCancelled] = useState(false);
const isCancelledRef = useRef(isCancelled);
@@ -595,6 +615,7 @@ export function ChatPage({
.map((document) => document.db_doc_id as number),
queryOverride,
forceSearch,
modelProvider: llmOverrideManager.llmOverride.name || undefined,
modelVersion:
llmOverrideManager.llmOverride.modelName ||
@@ -893,6 +914,7 @@ export function ChatPage({
)}
<ConfigurationModal
chatSessionId={chatSessionId!}
activeTab={configModalActiveTab}
setActiveTab={setConfigModalActiveTab}
onClose={() => setConfigModalActiveTab(null)}
@@ -1044,7 +1066,9 @@ export function ChatPage({
citedDocuments={getCitedDocumentsFromMessage(
message
)}
toolCall={message.toolCalls[0]}
toolCall={
message.toolCalls && message.toolCalls[0]
}
isComplete={
i !== messageHistory.length - 1 ||
!isStreaming
@@ -1212,7 +1236,6 @@ export function ChatPage({
)}
</div>
)}
<div ref={endDivRef} />
</div>
</div>

View File

@@ -163,6 +163,7 @@ export function ChatInputBar({
icon={FaBrain}
onClick={() => setConfigModalActiveTab("assistants")}
/>
<ChatInputOption
name={
llmOverrideManager.llmOverride.modelName ||

View File

@@ -53,6 +53,7 @@ export interface ChatSession {
time_created: string;
shared_status: ChatSessionSharedStatus;
folder_id: number | null;
current_alternate_model: string;
}
export interface Message {
@@ -79,6 +80,7 @@ export interface BackendChatSession {
messages: BackendMessage[];
time_created: string;
shared_status: ChatSessionSharedStatus;
current_alternate_model?: string;
}
export interface BackendMessage {

View File

@@ -21,6 +21,23 @@ import { Persona } from "../admin/assistants/interfaces";
import { ReadonlyURLSearchParams } from "next/navigation";
import { SEARCH_PARAM_NAMES } from "./searchParams";
/**
 * Persist the user's chosen alternate LLM for a chat session on the backend,
 * so the override survives page reloads and session switches.
 *
 * @param chatSessionId - id of the chat session being updated
 * @param newAlternateModel - structured model value (see `structureValue`)
 * @returns the raw fetch Response; callers are responsible for status checks
 */
export async function updateModelOverrideForChatSession(
  chatSessionId: number,
  newAlternateModel: string
) {
  const payload = {
    chat_session_id: chatSessionId,
    new_alternate_model: newAlternateModel,
  };
  return fetch("/api/chat/update-chat-session-model", {
    method: "PUT",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify(payload),
  });
}
export async function createChatSession(
personaId: number,
description: string | null

View File

@@ -55,6 +55,7 @@ export function ConfigurationModal({
filterManager,
llmProviders,
llmOverrideManager,
chatSessionId,
}: {
activeTab: string | null;
setActiveTab: (tab: string | null) => void;
@@ -65,6 +66,7 @@ export function ConfigurationModal({
filterManager: FilterManager;
llmProviders: LLMProviderDescriptor[];
llmOverrideManager: LlmOverrideManager;
chatSessionId?: number;
}) {
useEffect(() => {
const handleKeyDown = (event: KeyboardEvent) => {
@@ -149,6 +151,7 @@ export function ConfigurationModal({
{activeTab === "llms" && (
<LlmTab
chatSessionId={chatSessionId}
llmOverrideManager={llmOverrideManager}
currentAssistant={selectedAssistant}
/>

View File

@@ -5,14 +5,18 @@ import { debounce } from "lodash";
import { DefaultDropdown } from "@/components/Dropdown";
import { Text } from "@tremor/react";
import { Persona } from "@/app/admin/assistants/interfaces";
import { getFinalLLM } from "@/lib/llm/utils";
import { destructureValue, getFinalLLM, structureValue } from "@/lib/llm/utils";
import { updateModelOverrideForChatSession } from "../../lib";
import { PopupSpec } from "@/components/admin/connectors/Popup";
export function LlmTab({
llmOverrideManager,
currentAssistant,
chatSessionId,
}: {
llmOverrideManager: LlmOverrideManager;
currentAssistant: Persona;
chatSessionId?: number;
}) {
const { llmProviders } = useChatContext();
const { llmOverride, setLlmOverride, temperature, setTemperature } =
@@ -37,21 +41,6 @@ export function LlmTab({
const [_, defaultLlmName] = getFinalLLM(llmProviders, currentAssistant, null);
const llmOptions: { name: string; value: string }[] = [];
const structureValue = (
name: string,
provider: string,
modelName: string
) => {
return `${name}__${provider}__${modelName}`;
};
const destructureValue = (value: string): LlmOverride => {
const [displayName, provider, modelName] = value.split("__");
return {
name: displayName,
provider,
modelName,
};
};
llmProviders.forEach((llmProvider) => {
llmProvider.model_names.forEach((modelName) => {
llmOptions.push({
@@ -76,6 +65,7 @@ export function LlmTab({
<Text className="mb-3">
Default Model: <i className="font-medium">{defaultLlmName}</i>.
</Text>
<div className="w-96">
<DefaultDropdown
options={llmOptions}
@@ -84,9 +74,12 @@ export function LlmTab({
llmOverride.provider,
llmOverride.modelName
)}
onSelect={(value) =>
setLlmOverride(destructureValue(value as string))
}
onSelect={(value) => {
setLlmOverride(destructureValue(value as string));
if (chatSessionId) {
updateModelOverrideForChatSession(chatSessionId, value as string);
}
}}
/>
</div>

View File

@@ -12,6 +12,8 @@ import { useState } from "react";
import { DateRangePickerValue } from "@tremor/react";
import { SourceMetadata } from "./search/interfaces";
import { EE_ENABLED } from "./constants";
import { destructureValue } from "./llm/utils";
import { ChatSession } from "@/app/chat/interfaces";
const CREDENTIAL_URL = "/api/manage/admin/credential";
@@ -136,17 +138,38 @@ export interface LlmOverrideManager {
setLlmOverride: React.Dispatch<React.SetStateAction<LlmOverride>>;
temperature: number | null;
setTemperature: React.Dispatch<React.SetStateAction<number | null>>;
updateModelOverrideForChatSession: (chatSession?: ChatSession) => void;
}
export function useLlmOverride(): LlmOverrideManager {
const [llmOverride, setLlmOverride] = useState<LlmOverride>({
name: "",
provider: "",
modelName: "",
});
export function useLlmOverride(
currentChatSession?: ChatSession
): LlmOverrideManager {
const [llmOverride, setLlmOverride] = useState<LlmOverride>(
currentChatSession && currentChatSession.current_alternate_model
? destructureValue(currentChatSession.current_alternate_model)
: {
name: "",
provider: "",
modelName: "",
}
);
const updateModelOverrideForChatSession = (chatSession?: ChatSession) => {
setLlmOverride(
chatSession && chatSession.current_alternate_model
? destructureValue(chatSession.current_alternate_model)
: {
name: "",
provider: "",
modelName: "",
}
);
};
const [temperature, setTemperature] = useState<number | null>(null);
return {
updateModelOverrideForChatSession,
llmOverride,
setLlmOverride,
temperature,

View File

@@ -43,3 +43,20 @@ export function checkLLMSupportsImageInput(provider: string, model: string) {
([p, m]) => p === provider && m === model
);
}
/**
 * Encode a provider display name, provider id, and model name into the single
 * `"name__provider__modelName"` string used for dropdown values and for
 * persisting the override on the backend. Inverse of `destructureValue`.
 */
export const structureValue = (
  name: string,
  provider: string,
  modelName: string
) => [name, provider, modelName].join("__");
/**
 * Decode a `"name__provider__modelName"` string (as produced by
 * `structureValue`) back into an LlmOverride.
 *
 * Any `"__"` occurring inside the model name itself is preserved: everything
 * after the second separator is rejoined into `modelName` instead of being
 * silently dropped (the previous `split` discarded trailing segments).
 */
export const destructureValue = (value: string): LlmOverride => {
  const [displayName, provider, ...modelNameParts] = value.split("__");
  return {
    name: displayName,
    provider,
    modelName: modelNameParts.join("__"),
  };
};