@@ -199,7 +197,7 @@ export default function LLMPopover({
void;
overriddenModel?: string;
- regenerate?: (modelOverRide: LlmOverride) => Promise<void>;
+ regenerate?: (modelOverRide: LlmDescriptor) => Promise<void>;
setPresentingDocument?: (document: OnyxDocument) => void;
toggleDocDisplay?: (agentic: boolean) => void;
error?: string | null;
diff --git a/web/src/app/chat/message/Messages.tsx b/web/src/app/chat/message/Messages.tsx
index d64d20a32..981c29a7a 100644
--- a/web/src/app/chat/message/Messages.tsx
+++ b/web/src/app/chat/message/Messages.tsx
@@ -58,7 +58,7 @@ import { useMouseTracking } from "./hooks";
import { SettingsContext } from "@/components/settings/SettingsProvider";
import GeneratingImageDisplay from "../tools/GeneratingImageDisplay";
import RegenerateOption from "../RegenerateOption";
-import { LlmOverride } from "@/lib/hooks";
+import { LlmDescriptor } from "@/lib/hooks";
import { ContinueGenerating } from "./ContinueMessage";
import { MemoizedAnchor, MemoizedParagraph } from "./MemoizedTextComponents";
import { extractCodeText, preprocessLaTeX } from "./codeUtils";
@@ -213,7 +213,7 @@ export const AIMessage = ({
handleForceSearch?: () => void;
retrievalDisabled?: boolean;
overriddenModel?: string;
- regenerate?: (modelOverRide: LlmOverride) => Promise<void>;
+ regenerate?: (modelOverRide: LlmDescriptor) => Promise<void>;
setPresentingDocument: (document: OnyxDocument) => void;
removePadding?: boolean;
}) => {
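// A minimal sketch of a caller against the renamed `regenerate` prop; the
// descriptor values below are illustrative, not taken from this diff.
import { LlmDescriptor } from "@/lib/hooks";

async function regenerateWith(
  regenerate: (modelOverRide: LlmDescriptor) => Promise<void>,
  descriptor: LlmDescriptor
) {
  // resolves once the regeneration request has been dispatched
  await regenerate(descriptor);
}

// usage (hypothetical model identifiers):
// regenerateWith(props.regenerate!, {
//   name: "Anthropic",
//   provider: "anthropic",
//   modelName: "claude-3-5-sonnet-20241022",
// });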
diff --git a/web/src/app/chat/modal/ShareChatSessionModal.tsx b/web/src/app/chat/modal/ShareChatSessionModal.tsx
index 52acaf4d4..5b2bd8235 100644
--- a/web/src/app/chat/modal/ShareChatSessionModal.tsx
+++ b/web/src/app/chat/modal/ShareChatSessionModal.tsx
@@ -11,7 +11,7 @@ import { CopyButton } from "@/components/CopyButton";
import { SEARCH_PARAM_NAMES } from "../searchParams";
import { usePopup } from "@/components/admin/connectors/Popup";
import { structureValue } from "@/lib/llm/utils";
-import { LlmOverride } from "@/lib/hooks";
+import { LlmDescriptor } from "@/lib/hooks";
import { Separator } from "@/components/ui/separator";
import { AdvancedOptionsToggle } from "@/components/AdvancedOptionsToggle";
@@ -38,7 +38,7 @@ async function generateShareLink(chatSessionId: string) {
async function generateSeedLink(
message?: string,
assistantId?: number,
- modelOverride?: LlmOverride
+ modelOverride?: LlmDescriptor
) {
const baseUrl = `${window.location.protocol}//${window.location.host}`;
const model = modelOverride
@@ -92,7 +92,7 @@ export function ShareChatSessionModal({
onClose: () => void;
message?: string;
assistantId?: number;
- modelOverride?: LlmOverride;
+ modelOverride?: LlmDescriptor;
}) {
const [shareLink, setShareLink] = useState(
existingSharedStatus === ChatSessionSharedStatus.Public
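// Sketch of how generateSeedLink likely encodes the descriptor into the URL;
// the query-param key below is an assumption — the real one comes from
// SEARCH_PARAM_NAMES, which this hunk does not show.
import { structureValue } from "@/lib/llm/utils";
import { LlmDescriptor } from "@/lib/hooks";

function buildSeedUrl(baseUrl: string, modelOverride?: LlmDescriptor): string {
  const params = new URLSearchParams();
  if (modelOverride) {
    // structureValue flattens the descriptor to "name__provider__modelName"
    params.set(
      "llm-model", // assumed key
      structureValue(
        modelOverride.name,
        modelOverride.provider,
        modelOverride.modelName
      )
    );
  }
  return `${baseUrl}/chat?${params.toString()}`;
}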
diff --git a/web/src/app/chat/modal/UserSettingsModal.tsx b/web/src/app/chat/modal/UserSettingsModal.tsx
index e69c34a17..61d7e7d54 100644
--- a/web/src/app/chat/modal/UserSettingsModal.tsx
+++ b/web/src/app/chat/modal/UserSettingsModal.tsx
@@ -1,6 +1,6 @@
import { useContext, useEffect, useRef, useState } from "react";
import { Modal } from "@/components/Modal";
-import { getDisplayNameForModel, LlmOverride } from "@/lib/hooks";
+import { getDisplayNameForModel, LlmDescriptor } from "@/lib/hooks";
import { LLMProviderDescriptor } from "@/app/admin/configuration/llm/interfaces";
import { destructureValue, structureValue } from "@/lib/llm/utils";
@@ -31,12 +31,12 @@ export function UserSettingsModal({
setPopup,
llmProviders,
onClose,
- setLlmOverride,
+ setCurrentLlm,
defaultModel,
}: {
setPopup: (popupSpec: PopupSpec | null) => void;
llmProviders: LLMProviderDescriptor[];
- setLlmOverride?: (newOverride: LlmOverride) => void;
+ setCurrentLlm?: (newLlm: LlmDescriptor) => void;
onClose: () => void;
defaultModel: string | null;
}) {
@@ -127,18 +127,14 @@ export function UserSettingsModal({
);
});
- const llmOptions = Object.entries(llmOptionsByProvider).flatMap(
- ([provider, options]) => [...options]
- );
-
const router = useRouter();
const handleChangedefaultModel = async (defaultModel: string | null) => {
try {
const response = await setUserDefaultModel(defaultModel);
if (response.ok) {
- if (defaultModel && setLlmOverride) {
- setLlmOverride(destructureValue(defaultModel));
+ if (defaultModel && setCurrentLlm) {
+ setCurrentLlm(destructureValue(defaultModel));
}
setPopup({
message: "Default model updated successfully",
diff --git a/web/src/components/settings/lib.ts b/web/src/components/settings/lib.ts
index 72c731265..7288a6490 100644
--- a/web/src/components/settings/lib.ts
+++ b/web/src/components/settings/lib.ts
@@ -95,7 +95,7 @@ export async function fetchSettingsSS(): Promise<CombinedSettings | null> {
}
}
- if (enterpriseSettings && settings.pro_search_enabled == null) {
+ if (settings.pro_search_enabled == null) {
settings.pro_search_enabled = true;
}
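// Dropping the enterpriseSettings guard means the default now applies to all
// deployments; since `== null` matches both null and undefined, the check is
// equivalent to a nullish-coalescing assignment:
settings.pro_search_enabled = settings.pro_search_enabled ?? true;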
diff --git a/web/src/lib/hooks.ts b/web/src/lib/hooks.ts
index dc187c454..ae750c052 100644
--- a/web/src/lib/hooks.ts
+++ b/web/src/lib/hooks.ts
@@ -360,18 +360,18 @@ export const useUsers = ({ includeApiKeys }: UseUsersParams) => {
};
};
-export interface LlmOverride {
+export interface LlmDescriptor {
name: string;
provider: string;
modelName: string;
}
-export interface LlmOverrideManager {
- llmOverride: LlmOverride;
- updateLLMOverride: (newOverride: LlmOverride) => void;
+export interface LlmManager {
+ currentLlm: LlmDescriptor;
+ updateCurrentLlm: (newOverride: LlmDescriptor) => void;
temperature: number;
updateTemperature: (temperature: number) => void;
- updateModelOverrideForChatSession: (chatSession?: ChatSession) => void;
+ updateModelOverrideBasedOnChatSession: (chatSession?: ChatSession) => void;
imageFilesPresent: boolean;
updateImageFilesPresent: (present: boolean) => void;
liveAssistant: Persona | null;
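// Sketch of a helper typed against the renamed manager interface, showing
// the two renamed members side by side; values are illustrative.
import { LlmManager, LlmDescriptor } from "@/lib/hooks";

function switchModel(manager: LlmManager, next: LlmDescriptor) {
  manager.updateCurrentLlm(next); // formerly updateLLMOverride
  manager.updateTemperature(0.7); // temperature API is unchanged
}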
@@ -400,7 +400,7 @@ Thus, the input should be
Changes take place as
- liveAssistant or currentChatSession changes (and the associated model override is set)
-- (uploadLLMOverride) User explicitly setting a model override (and we explicitly override and set the userSpecifiedOverride which we'll use in place of the user preferences unless overridden by an assistant)
+- (updateCurrentLlm) User explicitly setting a model override (and we explicitly override and set the userSpecifiedOverride which we'll use in place of the user preferences unless overridden by an assistant)
If we have a live assistant, we should use that model override
@@ -419,55 +419,78 @@ This approach ensures that user preferences are maintained for existing chats wh
providing appropriate defaults for new conversations based on the available tools.
*/
-export function useLlmOverride(
+export function useLlmManager(
llmProviders: LLMProviderDescriptor[],
currentChatSession?: ChatSession,
liveAssistant?: Persona
-): LlmOverrideManager {
+): LlmManager {
const { user } = useUser();
+ const [userHasManuallyOverriddenLLM, setUserHasManuallyOverriddenLLM] =
+ useState(false);
const [chatSession, setChatSession] = useState<ChatSession | null>(null);
+ const [currentLlm, setCurrentLlm] = useState<LlmDescriptor>({
+ name: "",
+ provider: "",
+ modelName: "",
+ });
- const llmOverrideUpdate = () => {
- if (liveAssistant?.llm_model_version_override) {
- setLlmOverride(
- getValidLlmOverride(liveAssistant.llm_model_version_override)
- );
- } else if (currentChatSession?.current_alternate_model) {
- setLlmOverride(
- getValidLlmOverride(currentChatSession.current_alternate_model)
- );
- } else if (user?.preferences?.default_model) {
- setLlmOverride(getValidLlmOverride(user.preferences.default_model));
- return;
- } else {
- const defaultProvider = llmProviders.find(
- (provider) => provider.is_default_provider
- );
+ const llmUpdate = () => {
+ /* Should be called when the live assistant or current chat session changes */
- if (defaultProvider) {
- setLlmOverride({
- name: defaultProvider.name,
- provider: defaultProvider.provider,
- modelName: defaultProvider.default_model_name,
- });
+ // separate function so we can `return` to break out
+ const _llmUpdate = () => {
+ // if the user has overridden in this session and just switched to a brand
+ // new session, use their manually specified model
+ if (userHasManuallyOverriddenLLM && !currentChatSession) {
+ return;
}
- }
+
+ if (currentChatSession?.current_alternate_model) {
+ setCurrentLlm(
+ getValidLlmDescriptor(currentChatSession.current_alternate_model)
+ );
+ } else if (liveAssistant?.llm_model_version_override) {
+ setCurrentLlm(
+ getValidLlmDescriptor(liveAssistant.llm_model_version_override)
+ );
+ } else if (userHasManuallyOverriddenLLM) {
+ // if the user has an override and there's nothing special about the
+ // current chat session, use the override
+ return;
+ } else if (user?.preferences?.default_model) {
+ setCurrentLlm(getValidLlmDescriptor(user.preferences.default_model));
+ } else {
+ const defaultProvider = llmProviders.find(
+ (provider) => provider.is_default_provider
+ );
+
+ if (defaultProvider) {
+ setCurrentLlm({
+ name: defaultProvider.name,
+ provider: defaultProvider.provider,
+ modelName: defaultProvider.default_model_name,
+ });
+ }
+ }
+ };
+
+ _llmUpdate();
setChatSession(currentChatSession || null);
};
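// The precedence that _llmUpdate implements, restated as a pure function for
// readability; the types are simplified stand-ins, not exports of this diff.
type LlmSource =
  | { kind: "manual" } // keep whatever the user picked this session
  | { kind: "session-alternate"; model: string }
  | { kind: "assistant-override"; model: string }
  | { kind: "user-default"; model: string }
  | { kind: "provider-default" };

function resolveLlmSource(opts: {
  isBrandNewSession: boolean;
  sessionAlternate?: string;
  assistantOverride?: string;
  userHasManuallyOverridden: boolean;
  userDefault?: string;
}): LlmSource {
  // a manual pick carries over into a brand-new session
  if (opts.userHasManuallyOverridden && opts.isBrandNewSession)
    return { kind: "manual" };
  if (opts.sessionAlternate)
    return { kind: "session-alternate", model: opts.sessionAlternate };
  if (opts.assistantOverride)
    return { kind: "assistant-override", model: opts.assistantOverride };
  if (opts.userHasManuallyOverridden) return { kind: "manual" };
  if (opts.userDefault) return { kind: "user-default", model: opts.userDefault };
  return { kind: "provider-default" };
}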
- const getValidLlmOverride = (
- overrideModel: string | null | undefined
- ): LlmOverride => {
- if (overrideModel) {
- const model = destructureValue(overrideModel);
+ const getValidLlmDescriptor = (
+ modelName: string | null | undefined
+ ): LlmDescriptor => {
+ if (modelName) {
+ const model = destructureValue(modelName);
if (!(model.modelName && model.modelName.length > 0)) {
const provider = llmProviders.find((p) =>
- p.model_names.includes(overrideModel)
+ p.model_names.includes(modelName)
);
if (provider) {
return {
- modelName: overrideModel,
+ modelName: modelName,
name: provider.name,
provider: provider.provider,
};
@@ -491,38 +514,32 @@ export function useLlmOverride(
setImageFilesPresent(present);
};
- const [llmOverride, setLlmOverride] = useState<LlmOverride>({
- name: "",
- provider: "",
- modelName: "",
- });
-
- // Manually set the override
- const updateLLMOverride = (newOverride: LlmOverride) => {
+ // Manually set the LLM
+ const updateCurrentLlm = (newLlm: LlmDescriptor) => {
const provider =
- newOverride.provider ||
- findProviderForModel(llmProviders, newOverride.modelName);
+ newLlm.provider || findProviderForModel(llmProviders, newLlm.modelName);
const structuredValue = structureValue(
- newOverride.name,
+ newLlm.name,
provider,
- newOverride.modelName
+ newLlm.modelName
);
- setLlmOverride(getValidLlmOverride(structuredValue));
+ setCurrentLlm(getValidLlmDescriptor(structuredValue));
+ setUserHasManuallyOverriddenLLM(true);
};
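// Example call: updateCurrentLlm accepts a descriptor with a blank provider
// and infers one from the model name before storing; it also flips the
// manual-override flag so llmUpdate respects the user's pick.
updateCurrentLlm({
  name: "GPT-4o",
  provider: "", // inferred via findProviderForModel
  modelName: "gpt-4o", // illustrative model
});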
- const updateModelOverrideForChatSession = (chatSession?: ChatSession) => {
+ const updateModelOverrideBasedOnChatSession = (chatSession?: ChatSession) => {
if (chatSession && chatSession.current_alternate_model?.length > 0) {
- setLlmOverride(getValidLlmOverride(chatSession.current_alternate_model));
+ setCurrentLlm(getValidLlmDescriptor(chatSession.current_alternate_model));
}
};
const [temperature, setTemperature] = useState(() => {
- llmOverrideUpdate();
+ llmUpdate();
if (currentChatSession?.current_temperature_override != null) {
return Math.min(
currentChatSession.current_temperature_override,
- isAnthropic(llmOverride.provider, llmOverride.modelName) ? 1.0 : 2.0
+ isAnthropic(currentLlm.provider, currentLlm.modelName) ? 1.0 : 2.0
);
} else if (
liveAssistant?.tools.some((tool) => tool.name === SEARCH_TOOL_ID)
@@ -533,22 +550,23 @@ export function useLlmOverride(
});
const maxTemperature = useMemo(() => {
- return isAnthropic(llmOverride.provider, llmOverride.modelName) ? 1.0 : 2.0;
- }, [llmOverride]);
+ return isAnthropic(currentLlm.provider, currentLlm.modelName) ? 1.0 : 2.0;
+ }, [currentLlm]);
useEffect(() => {
- if (isAnthropic(llmOverride.provider, llmOverride.modelName)) {
+ if (isAnthropic(currentLlm.provider, currentLlm.modelName)) {
const newTemperature = Math.min(temperature, 1.0);
setTemperature(newTemperature);
if (chatSession?.id) {
updateTemperatureOverrideForChatSession(chatSession.id, newTemperature);
}
}
- }, [llmOverride]);
+ }, [currentLlm]);
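// The effect above re-clamps temperature whenever the current LLM changes;
// the same rule as a standalone helper.
function clampTemperature(temp: number, isAnthropicModel: boolean): number {
  const max = isAnthropicModel ? 1.0 : 2.0;
  return Math.min(temp, max);
}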
useEffect(() => {
+ llmUpdate();
+
if (!chatSession && currentChatSession) {
- setChatSession(currentChatSession || null);
if (temperature) {
updateTemperatureOverrideForChatSession(
currentChatSession.id,
@@ -570,7 +588,7 @@ export function useLlmOverride(
}, [liveAssistant, currentChatSession]);
const updateTemperature = (temperature: number) => {
- if (isAnthropic(llmOverride.provider, llmOverride.modelName)) {
+ if (isAnthropic(currentLlm.provider, currentLlm.modelName)) {
setTemperature((prevTemp) => Math.min(temperature, 1.0));
} else {
setTemperature(temperature);
@@ -581,9 +599,9 @@ export function useLlmOverride(
};
return {
- updateModelOverrideForChatSession,
- llmOverride,
- updateLLMOverride,
+ updateModelOverrideBasedOnChatSession,
+ currentLlm,
+ updateCurrentLlm,
temperature,
updateTemperature,
imageFilesPresent,
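// Sketch of a consumer after the rename; llmProviders, currentChatSession,
// and liveAssistant are assumed in scope as in the chat page.
const llmManager = useLlmManager(llmProviders, currentChatSession, liveAssistant);
const { currentLlm, updateCurrentLlm, temperature } = llmManager;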
diff --git a/web/src/lib/llm/utils.ts b/web/src/lib/llm/utils.ts
index 1880385e0..90ef39859 100644
--- a/web/src/lib/llm/utils.ts
+++ b/web/src/lib/llm/utils.ts
@@ -1,11 +1,11 @@
import { Persona } from "@/app/admin/assistants/interfaces";
import { LLMProviderDescriptor } from "@/app/admin/configuration/llm/interfaces";
-import { LlmOverride } from "@/lib/hooks";
+import { LlmDescriptor } from "@/lib/hooks";
export function getFinalLLM(
llmProviders: LLMProviderDescriptor[],
persona: Persona | null,
- llmOverride: LlmOverride | null
+ currentLlm: LlmDescriptor | null
): [string, string] {
const defaultProvider = llmProviders.find(
(llmProvider) => llmProvider.is_default_provider
@@ -26,9 +26,9 @@ export function getFinalLLM(
model = persona.llm_model_version_override || model;
}
- if (llmOverride) {
- provider = llmOverride.provider || provider;
- model = llmOverride.modelName || model;
+ if (currentLlm) {
+ provider = currentLlm.provider || provider;
+ model = currentLlm.modelName || model;
}
return [provider, model];
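// Sketch call against the renamed parameter; llmProviders and persona are
// assumed in scope. The passed descriptor wins over persona and defaults.
const [provider, model] = getFinalLLM(llmProviders, persona, {
  name: "OpenAI",
  provider: "openai",
  modelName: "gpt-4o-mini", // illustrative model
});
// provider === "openai", model === "gpt-4o-mini"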
@@ -37,7 +37,7 @@ export function getFinalLLM(
export function getLLMProviderOverrideForPersona(
liveAssistant: Persona,
llmProviders: LLMProviderDescriptor[]
-): LlmOverride | null {
+): LlmDescriptor | null {
const overrideProvider = liveAssistant.llm_model_provider_override;
const overrideModel = liveAssistant.llm_model_version_override;
@@ -135,7 +135,7 @@ export const structureValue = (
return `${name}__${provider}__${modelName}`;
};
-export const destructureValue = (value: string): LlmOverride => {
+export const destructureValue = (value: string): LlmDescriptor => {
const [displayName, provider, modelName] = value.split("__");
return {
name: displayName,
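// Round-trip through the "__" encoding used by structureValue and
// destructureValue; note the format breaks if any field itself contains
// "__", which neither helper guards against.
const value = structureValue("My Provider", "openai", "gpt-4o");
// "My Provider__openai__gpt-4o"
const { name, provider, modelName } = destructureValue(value);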
diff --git a/web/src/lib/users/UserSettings.tsx b/web/src/lib/users/UserSettings.tsx
index c99a23917..9ce55a27c 100644
--- a/web/src/lib/users/UserSettings.tsx
+++ b/web/src/lib/users/UserSettings.tsx
@@ -1,5 +1,3 @@
-import { LlmOverride } from "../hooks";
-
export async function setUserDefaultModel(
model: string | null
): Promise<Response> {