Fix LLM selection (#4078)
@@ -47,6 +47,7 @@ import {
   removeMessage,
   sendMessage,
   setMessageAsLatest,
+  updateLlmOverrideForChatSession,
   updateParentChildren,
   uploadFilesForChat,
   useScrollonStream,
@@ -65,7 +66,7 @@ import {
 import { usePopup } from "@/components/admin/connectors/Popup";
 import { SEARCH_PARAM_NAMES, shouldSubmitOnLoad } from "./searchParams";
 import { useDocumentSelection } from "./useDocumentSelection";
-import { LlmOverride, useFilters, useLlmOverride } from "@/lib/hooks";
+import { LlmDescriptor, useFilters, useLlmManager } from "@/lib/hooks";
 import { ChatState, FeedbackType, RegenerationState } from "./types";
 import { DocumentResults } from "./documentSidebar/DocumentResults";
 import { OnyxInitializingLoader } from "@/components/OnyxInitializingLoader";
@@ -89,7 +90,11 @@ import {
 import { buildFilters } from "@/lib/search/utils";
 import { SettingsContext } from "@/components/settings/SettingsProvider";
 import Dropzone from "react-dropzone";
-import { checkLLMSupportsImageInput, getFinalLLM } from "@/lib/llm/utils";
+import {
+  checkLLMSupportsImageInput,
+  getFinalLLM,
+  structureValue,
+} from "@/lib/llm/utils";
 import { ChatInputBar } from "./input/ChatInputBar";
 import { useChatContext } from "@/components/context/ChatContext";
 import { v4 as uuidv4 } from "uuid";
@@ -356,7 +361,7 @@ export function ChatPage({
     ]
   );

-  const llmOverrideManager = useLlmOverride(
+  const llmManager = useLlmManager(
     llmProviders,
     selectedChatSession,
     liveAssistant
@@ -1138,7 +1143,7 @@ export function ChatPage({
     forceSearch,
     isSeededChat,
     alternativeAssistantOverride = null,
-    modelOverRide,
+    modelOverride,
     regenerationRequest,
     overrideFileDescriptors,
   }: {
@@ -1148,7 +1153,7 @@ export function ChatPage({
     forceSearch?: boolean;
     isSeededChat?: boolean;
     alternativeAssistantOverride?: Persona | null;
-    modelOverRide?: LlmOverride;
+    modelOverride?: LlmDescriptor;
     regenerationRequest?: RegenerationRequest | null;
     overrideFileDescriptors?: FileDescriptor[];
   } = {}) => {
@@ -1191,6 +1196,22 @@ export function ChatPage({
       currChatSessionId = chatSessionIdRef.current as string;
     }
     frozenSessionId = currChatSessionId;
+    // update the selected model for the chat session if one is specified so that
+    // it persists across page reloads. Do not `await` here so that the message
+    // request can continue and this will just happen in the background.
+    // NOTE: only set the model override for the chat session once we send a
+    // message with it. If the user switches models and then starts a new
+    // chat session, it is unexpected for that model to be used when they
+    // return to this session the next day.
+    let finalLLM = modelOverride || llmManager.currentLlm;
+    updateLlmOverrideForChatSession(
+      currChatSessionId,
+      structureValue(
+        finalLLM.name || "",
+        finalLLM.provider || "",
+        finalLLM.modelName || ""
+      )
+    );

     updateStatesWithNewSessionId(currChatSessionId);

@@ -1250,11 +1271,14 @@ export function ChatPage({
         : null) ||
       (messageMap.size === 1 ? Array.from(messageMap.values())[0] : null);

-    const currentAssistantId = alternativeAssistantOverride
-      ? alternativeAssistantOverride.id
-      : alternativeAssistant
-        ? alternativeAssistant.id
-        : liveAssistant.id;
+    let currentAssistantId;
+    if (alternativeAssistantOverride) {
+      currentAssistantId = alternativeAssistantOverride.id;
+    } else if (alternativeAssistant) {
+      currentAssistantId = alternativeAssistant.id;
+    } else {
+      currentAssistantId = liveAssistant.id;
+    }

     resetInputBar();
     let messageUpdates: Message[] | null = null;
@@ -1326,15 +1350,13 @@ export function ChatPage({
           forceSearch,
           regenerate: regenerationRequest !== undefined,
           modelProvider:
-            modelOverRide?.name ||
-            llmOverrideManager.llmOverride.name ||
-            undefined,
+            modelOverride?.name || llmManager.currentLlm.name || undefined,
           modelVersion:
-            modelOverRide?.modelName ||
-            llmOverrideManager.llmOverride.modelName ||
+            modelOverride?.modelName ||
+            llmManager.currentLlm.modelName ||
             searchParams.get(SEARCH_PARAM_NAMES.MODEL_VERSION) ||
             undefined,
-          temperature: llmOverrideManager.temperature || undefined,
+          temperature: llmManager.temperature || undefined,
           systemPromptOverride:
             searchParams.get(SEARCH_PARAM_NAMES.SYSTEM_PROMPT) || undefined,
           useExistingUserMessage: isSeededChat,
@@ -1802,7 +1824,7 @@ export function ChatPage({
   const [_, llmModel] = getFinalLLM(
     llmProviders,
     liveAssistant,
-    llmOverrideManager.llmOverride
+    llmManager.currentLlm
   );
   const llmAcceptsImages = checkLLMSupportsImageInput(llmModel);

@@ -2121,7 +2143,7 @@ export function ChatPage({
   }, [searchParams, router]);

   useEffect(() => {
-    llmOverrideManager.updateImageFilesPresent(imageFileInMessageHistory);
+    llmManager.updateImageFilesPresent(imageFileInMessageHistory);
   }, [imageFileInMessageHistory]);

   const pathname = usePathname();
@@ -2175,9 +2197,9 @@ export function ChatPage({

   function createRegenerator(regenerationRequest: RegenerationRequest) {
     // Returns new function that only needs `modelOverRide` to be specified when called
-    return async function (modelOverRide: LlmOverride) {
+    return async function (modelOverride: LlmDescriptor) {
       return await onSubmit({
-        modelOverRide,
+        modelOverride,
         messageIdToResend: regenerationRequest.parentMessage.messageId,
         regenerationRequest,
         forceSearch: regenerationRequest.forceSearch,
@@ -2258,9 +2280,7 @@ export function ChatPage({
         {(settingsToggled || userSettingsToggled) && (
           <UserSettingsModal
             setPopup={setPopup}
-            setLlmOverride={(newOverride) =>
-              llmOverrideManager.updateLLMOverride(newOverride)
-            }
+            setCurrentLlm={(newLlm) => llmManager.updateCurrentLlm(newLlm)}
             defaultModel={user?.preferences.default_model!}
             llmProviders={llmProviders}
             onClose={() => {
@@ -2324,7 +2344,7 @@ export function ChatPage({
             <ShareChatSessionModal
               assistantId={liveAssistant?.id}
               message={message}
-              modelOverride={llmOverrideManager.llmOverride}
+              modelOverride={llmManager.currentLlm}
               chatSessionId={sharedChatSession.id}
               existingSharedStatus={sharedChatSession.shared_status}
               onClose={() => setSharedChatSession(null)}
@@ -2342,7 +2362,7 @@ export function ChatPage({
             <ShareChatSessionModal
               message={message}
               assistantId={liveAssistant?.id}
-              modelOverride={llmOverrideManager.llmOverride}
+              modelOverride={llmManager.currentLlm}
               chatSessionId={chatSessionIdRef.current}
               existingSharedStatus={chatSessionSharedStatus}
               onClose={() => setSharingModalVisible(false)}
@@ -3058,7 +3078,7 @@ export function ChatPage({
                         messageId: message.messageId,
                         parentMessage: parentMessage!,
                         forceSearch: true,
-                      })(llmOverrideManager.llmOverride);
+                      })(llmManager.currentLlm);
                     } else {
                       setPopup({
                         type: "error",
@@ -3203,7 +3223,7 @@ export function ChatPage({
               availableDocumentSets={documentSets}
               availableTags={tags}
               filterManager={filterManager}
-              llmOverrideManager={llmOverrideManager}
+              llmManager={llmManager}
               removeDocs={() => {
                 clearSelectedDocuments();
               }}
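For reference, a minimal sketch of how the renamed pieces in this commit fit together when a message is sent: the explicit per-message override (if any) wins over the manager's current LLM, and the structured value is persisted for the session without awaiting. `LlmDescriptor`, `structureValue`, and `updateLlmOverrideForChatSession` are taken from the diff above; the declared signature of `updateLlmOverrideForChatSession` and the surrounding wiring are assumptions for illustration, not part of the commit.

// Illustrative sketch only; names and signatures not shown in the diff are assumed.
import { LlmDescriptor } from "@/lib/hooks";
import { structureValue } from "@/lib/llm/utils";

// Assumed signature, mirroring its use in the diff: persists the structured
// model value for a chat session; the caller intentionally does not await it.
declare function updateLlmOverrideForChatSession(
  chatSessionId: string,
  structuredModel: string
): Promise<void>;

function persistModelChoice(
  chatSessionId: string,
  modelOverride: LlmDescriptor | undefined,
  currentLlm: LlmDescriptor
): void {
  // Prefer an explicit per-message override, otherwise the manager's current LLM.
  const finalLLM = modelOverride || currentLlm;

  // Fire-and-forget, as in the diff: the message request proceeds while the
  // session's model preference is updated in the background.
  updateLlmOverrideForChatSession(
    chatSessionId,
    structureValue(
      finalLLM.name || "",
      finalLLM.provider || "",
      finalLLM.modelName || ""
    )
  );
}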