Fix LLM selection (#4078)

Chris Weaver 2025-02-21 11:32:57 -08:00 committed by GitHub
parent ba21bacbbf
commit e1ff9086a4
12 changed files with 172 additions and 142 deletions

View File

@@ -47,6 +47,7 @@ import {
removeMessage,
sendMessage,
setMessageAsLatest,
updateLlmOverrideForChatSession,
updateParentChildren,
uploadFilesForChat,
useScrollonStream,
@@ -65,7 +66,7 @@ import {
import { usePopup } from "@/components/admin/connectors/Popup";
import { SEARCH_PARAM_NAMES, shouldSubmitOnLoad } from "./searchParams";
import { useDocumentSelection } from "./useDocumentSelection";
import { LlmOverride, useFilters, useLlmOverride } from "@/lib/hooks";
import { LlmDescriptor, useFilters, useLlmManager } from "@/lib/hooks";
import { ChatState, FeedbackType, RegenerationState } from "./types";
import { DocumentResults } from "./documentSidebar/DocumentResults";
import { OnyxInitializingLoader } from "@/components/OnyxInitializingLoader";
@@ -89,7 +90,11 @@ import {
import { buildFilters } from "@/lib/search/utils";
import { SettingsContext } from "@/components/settings/SettingsProvider";
import Dropzone from "react-dropzone";
import { checkLLMSupportsImageInput, getFinalLLM } from "@/lib/llm/utils";
import {
checkLLMSupportsImageInput,
getFinalLLM,
structureValue,
} from "@/lib/llm/utils";
import { ChatInputBar } from "./input/ChatInputBar";
import { useChatContext } from "@/components/context/ChatContext";
import { v4 as uuidv4 } from "uuid";
@@ -356,7 +361,7 @@ export function ChatPage({
]
);
const llmOverrideManager = useLlmOverride(
const llmManager = useLlmManager(
llmProviders,
selectedChatSession,
liveAssistant
@@ -1138,7 +1143,7 @@ export function ChatPage({
forceSearch,
isSeededChat,
alternativeAssistantOverride = null,
modelOverRide,
modelOverride,
regenerationRequest,
overrideFileDescriptors,
}: {
@@ -1148,7 +1153,7 @@ export function ChatPage({
forceSearch?: boolean;
isSeededChat?: boolean;
alternativeAssistantOverride?: Persona | null;
modelOverRide?: LlmOverride;
modelOverride?: LlmDescriptor;
regenerationRequest?: RegenerationRequest | null;
overrideFileDescriptors?: FileDescriptor[];
} = {}) => {
@@ -1191,6 +1196,22 @@ export function ChatPage({
currChatSessionId = chatSessionIdRef.current as string;
}
frozenSessionId = currChatSessionId;
// update the selected model for the chat session if one is specified so that
// it persists across page reloads. Do not `await` here so that the message
// request can continue and this will just happen in the background.
// NOTE: only set the model override for the chat session once we send a
// message with it. If the user switches models and then starts a new
// chat session, it is unexpected for that model to be used when they
// return to this session the next day.
let finalLLM = modelOverride || llmManager.currentLlm;
updateLlmOverrideForChatSession(
currChatSessionId,
structureValue(
finalLLM.name || "",
finalLLM.provider || "",
finalLLM.modelName || ""
)
);
updateStatesWithNewSessionId(currChatSessionId);
@@ -1250,11 +1271,14 @@
: null) ||
(messageMap.size === 1 ? Array.from(messageMap.values())[0] : null);
const currentAssistantId = alternativeAssistantOverride
? alternativeAssistantOverride.id
: alternativeAssistant
? alternativeAssistant.id
: liveAssistant.id;
let currentAssistantId;
if (alternativeAssistantOverride) {
currentAssistantId = alternativeAssistantOverride.id;
} else if (alternativeAssistant) {
currentAssistantId = alternativeAssistant.id;
} else {
currentAssistantId = liveAssistant.id;
}
resetInputBar();
let messageUpdates: Message[] | null = null;
@@ -1326,15 +1350,13 @@
forceSearch,
regenerate: regenerationRequest !== undefined,
modelProvider:
modelOverRide?.name ||
llmOverrideManager.llmOverride.name ||
undefined,
modelOverride?.name || llmManager.currentLlm.name || undefined,
modelVersion:
modelOverRide?.modelName ||
llmOverrideManager.llmOverride.modelName ||
modelOverride?.modelName ||
llmManager.currentLlm.modelName ||
searchParams.get(SEARCH_PARAM_NAMES.MODEL_VERSION) ||
undefined,
temperature: llmOverrideManager.temperature || undefined,
temperature: llmManager.temperature || undefined,
systemPromptOverride:
searchParams.get(SEARCH_PARAM_NAMES.SYSTEM_PROMPT) || undefined,
useExistingUserMessage: isSeededChat,
@@ -1802,7 +1824,7 @@ export function ChatPage({
const [_, llmModel] = getFinalLLM(
llmProviders,
liveAssistant,
llmOverrideManager.llmOverride
llmManager.currentLlm
);
const llmAcceptsImages = checkLLMSupportsImageInput(llmModel);
@@ -2121,7 +2143,7 @@ export function ChatPage({
}, [searchParams, router]);
useEffect(() => {
llmOverrideManager.updateImageFilesPresent(imageFileInMessageHistory);
llmManager.updateImageFilesPresent(imageFileInMessageHistory);
}, [imageFileInMessageHistory]);
const pathname = usePathname();
@@ -2175,9 +2197,9 @@ export function ChatPage({
function createRegenerator(regenerationRequest: RegenerationRequest) {
// Returns new function that only needs `modelOverRide` to be specified when called
return async function (modelOverRide: LlmOverride) {
return async function (modelOverride: LlmDescriptor) {
return await onSubmit({
modelOverRide,
modelOverride,
messageIdToResend: regenerationRequest.parentMessage.messageId,
regenerationRequest,
forceSearch: regenerationRequest.forceSearch,
@@ -2258,9 +2280,7 @@ export function ChatPage({
{(settingsToggled || userSettingsToggled) && (
<UserSettingsModal
setPopup={setPopup}
setLlmOverride={(newOverride) =>
llmOverrideManager.updateLLMOverride(newOverride)
}
setCurrentLlm={(newLlm) => llmManager.updateCurrentLlm(newLlm)}
defaultModel={user?.preferences.default_model!}
llmProviders={llmProviders}
onClose={() => {
@@ -2324,7 +2344,7 @@ export function ChatPage({
<ShareChatSessionModal
assistantId={liveAssistant?.id}
message={message}
modelOverride={llmOverrideManager.llmOverride}
modelOverride={llmManager.currentLlm}
chatSessionId={sharedChatSession.id}
existingSharedStatus={sharedChatSession.shared_status}
onClose={() => setSharedChatSession(null)}
@@ -2342,7 +2362,7 @@ export function ChatPage({
<ShareChatSessionModal
message={message}
assistantId={liveAssistant?.id}
modelOverride={llmOverrideManager.llmOverride}
modelOverride={llmManager.currentLlm}
chatSessionId={chatSessionIdRef.current}
existingSharedStatus={chatSessionSharedStatus}
onClose={() => setSharingModalVisible(false)}
@@ -3058,7 +3078,7 @@ export function ChatPage({
messageId: message.messageId,
parentMessage: parentMessage!,
forceSearch: true,
})(llmOverrideManager.llmOverride);
})(llmManager.currentLlm);
} else {
setPopup({
type: "error",
@@ -3203,7 +3223,7 @@ export function ChatPage({
availableDocumentSets={documentSets}
availableTags={tags}
filterManager={filterManager}
llmOverrideManager={llmOverrideManager}
llmManager={llmManager}
removeDocs={() => {
clearSelectedDocuments();
}}
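The comment block in the `onSubmit` hunk above describes the key behavioral change in this file: the chosen model is persisted to the chat session only once a message is actually sent with it, and the persistence call is deliberately not awaited. A minimal sketch of that fire-and-forget pattern in TypeScript — the body of `updateLlmOverrideForChatSession` is not shown in this diff, so the route and payload here are hypothetical:

import { structureValue } from "@/lib/llm/utils";
import type { LlmDescriptor } from "@/lib/hooks";

// Hypothetical helper mirroring the call site above; the real
// updateLlmOverrideForChatSession may use a different route/payload.
async function persistModelForSession(
  chatSessionId: string,
  llm: LlmDescriptor
): Promise<void> {
  await fetch("/api/chat/update-chat-session-model", {
    method: "PUT",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({
      chat_session_id: chatSessionId,
      new_alternate_model: structureValue(
        llm.name || "",
        llm.provider || "",
        llm.modelName || ""
      ),
    }),
  });
}

// Usage at the call site (names from the diff above), intentionally not
// awaited so the message request proceeds while the choice is saved:
// void persistModelForSession(currChatSessionId, modelOverride ?? llmManager.currentLlm);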

View File

@@ -1,8 +1,8 @@
import { useChatContext } from "@/components/context/ChatContext";
import {
getDisplayNameForModel,
LlmOverride,
useLlmOverride,
LlmDescriptor,
useLlmManager,
} from "@/lib/hooks";
import { StringOrNumberOption } from "@/components/Dropdown";
@@ -106,13 +106,13 @@ export default function RegenerateOption({
onDropdownVisibleChange,
}: {
selectedAssistant: Persona;
regenerate: (modelOverRide: LlmOverride) => Promise<void>;
regenerate: (modelOverRide: LlmDescriptor) => Promise<void>;
overriddenModel?: string;
onHoverChange: (isHovered: boolean) => void;
onDropdownVisibleChange: (isVisible: boolean) => void;
}) {
const { llmProviders } = useChatContext();
const llmOverrideManager = useLlmOverride(llmProviders);
const llmManager = useLlmManager(llmProviders);
const [_, llmName] = getFinalLLM(llmProviders, selectedAssistant, null);
@@ -148,7 +148,7 @@ export default function RegenerateOption({
);
const currentModelName =
llmOverrideManager?.llmOverride.modelName ||
llmManager?.currentLlm.modelName ||
(selectedAssistant
? selectedAssistant.llm_model_version_override || llmName
: llmName);
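The display-name fallback above resolves in a fixed order: the manager's current LLM, then the assistant's `llm_model_version_override`, then the `llmName` computed by `getFinalLLM`. A worked example with illustrative values (none of these model names come from the diff):

const managerModelName = ""; // nothing picked via llmManager yet
const assistantOverride = "gpt-4o-mini"; // selectedAssistant.llm_model_version_override
const llmName = "gpt-4o"; // from getFinalLLM

// Mirrors the expression above: manager choice wins, then the
// assistant override, then the default.
const currentModelName = managerModelName || assistantOverride || llmName;
// -> "gpt-4o-mini"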

View File

@@ -6,7 +6,7 @@ import { Persona } from "@/app/admin/assistants/interfaces";
import LLMPopover from "./LLMPopover";
import { InputPrompt } from "@/app/chat/interfaces";
import { FilterManager, LlmOverrideManager } from "@/lib/hooks";
import { FilterManager, LlmManager } from "@/lib/hooks";
import { useChatContext } from "@/components/context/ChatContext";
import { ChatFileType, FileDescriptor } from "../interfaces";
import {
@@ -180,7 +180,7 @@ interface ChatInputBarProps {
setMessage: (message: string) => void;
stopGenerating: () => void;
onSubmit: () => void;
llmOverrideManager: LlmOverrideManager;
llmManager: LlmManager;
chatState: ChatState;
alternativeAssistant: Persona | null;
// assistants
@@ -225,7 +225,7 @@ export function ChatInputBar({
availableSources,
availableDocumentSets,
availableTags,
llmOverrideManager,
llmManager,
proSearchEnabled,
setProSearchEnabled,
}: ChatInputBarProps) {
@@ -781,7 +781,7 @@ export function ChatInputBar({
<LLMPopover
llmProviders={llmProviders}
llmOverrideManager={llmOverrideManager}
llmManager={llmManager}
requiresImageGeneration={false}
currentAssistant={selectedAssistant}
/>

View File

@@ -16,7 +16,7 @@ import {
LLMProviderDescriptor,
} from "@/app/admin/configuration/llm/interfaces";
import { Persona } from "@/app/admin/assistants/interfaces";
import { LlmOverrideManager } from "@/lib/hooks";
import { LlmManager } from "@/lib/hooks";
import {
Tooltip,
@@ -31,21 +31,19 @@ import { useUser } from "@/components/user/UserProvider";
interface LLMPopoverProps {
llmProviders: LLMProviderDescriptor[];
llmOverrideManager: LlmOverrideManager;
llmManager: LlmManager;
requiresImageGeneration?: boolean;
currentAssistant?: Persona;
}
export default function LLMPopover({
llmProviders,
llmOverrideManager,
llmManager,
requiresImageGeneration,
currentAssistant,
}: LLMPopoverProps) {
const [isOpen, setIsOpen] = useState(false);
const { user } = useUser();
const { llmOverride, updateLLMOverride } = llmOverrideManager;
const currentLlm = llmOverride.modelName;
const llmOptionsByProvider: {
[provider: string]: {
@@ -93,19 +91,19 @@ export default function LLMPopover({
: null;
const [localTemperature, setLocalTemperature] = useState(
llmOverrideManager.temperature ?? 0.5
llmManager.temperature ?? 0.5
);
useEffect(() => {
setLocalTemperature(llmOverrideManager.temperature ?? 0.5);
}, [llmOverrideManager.temperature]);
setLocalTemperature(llmManager.temperature ?? 0.5);
}, [llmManager.temperature]);
const handleTemperatureChange = (value: number[]) => {
setLocalTemperature(value[0]);
};
const handleTemperatureChangeComplete = (value: number[]) => {
llmOverrideManager.updateTemperature(value[0]);
llmManager.updateTemperature(value[0]);
};
return (
@@ -120,15 +118,15 @@ export default function LLMPopover({
toggle
flexPriority="stiff"
name={getDisplayNameForModel(
llmOverrideManager?.llmOverride.modelName ||
llmManager?.currentLlm.modelName ||
defaultModelDisplayName ||
"Models"
)}
Icon={getProviderIcon(
llmOverrideManager?.llmOverride.provider ||
llmManager?.currentLlm.provider ||
defaultProvider?.provider ||
"anthropic",
llmOverrideManager?.llmOverride.modelName ||
llmManager?.currentLlm.modelName ||
defaultProvider?.default_model_name ||
"claude-3-5-sonnet-20240620"
)}
@@ -147,12 +145,12 @@ export default function LLMPopover({
<button
key={index}
className={`w-full flex items-center gap-x-2 px-3 py-2 text-sm text-left hover:bg-background-100 dark:hover:bg-neutral-800 transition-colors duration-150 ${
currentLlm === name
llmManager.currentLlm.modelName === name
? "bg-background-100 dark:bg-neutral-900 text-text"
: "text-text-darker"
}`}
onClick={() => {
updateLLMOverride(destructureValue(value));
llmManager.updateCurrentLlm(destructureValue(value));
setIsOpen(false);
}}
>
@@ -172,7 +170,7 @@ export default function LLMPopover({
);
}
})()}
{llmOverrideManager.imageFilesPresent &&
{llmManager.imageFilesPresent &&
!checkLLMSupportsImageInput(name) && (
<TooltipProvider>
<Tooltip delayDuration={0}>
@@ -199,7 +197,7 @@ export default function LLMPopover({
<div className="w-full px-3 py-2">
<Slider
value={[localTemperature]}
max={llmOverrideManager.maxTemperature}
max={llmManager.maxTemperature}
min={0}
step={0.01}
onValueChange={handleTemperatureChange}
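The slider above keeps a local value (updated on every drag via `handleTemperatureChange`) separate from the committed value (written through `llmManager.updateTemperature` on release), which avoids churning shared state mid-drag. Its `max` comes from `llmManager.maxTemperature`, which the hooks change below derives from the provider: 1.0 for Anthropic models, 2.0 otherwise. A condensed sketch of that cap — the real `isAnthropic` helper is defined outside this diff, so the check here is an assumption:

// Assumption: a provider/model check roughly like this backs isAnthropic.
const isAnthropicLlm = (provider: string, modelName: string): boolean =>
  provider === "anthropic" || modelName.toLowerCase().includes("claude");

const maxTemperatureFor = (provider: string, modelName: string): number =>
  isAnthropicLlm(provider, modelName) ? 1.0 : 2.0;

maxTemperatureFor("anthropic", "claude-3-5-sonnet-20240620"); // -> 1.0
maxTemperatureFor("openai", "gpt-4o"); // -> 2.0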

View File

@@ -65,7 +65,7 @@ export function getChatRetentionInfo(
};
}
export async function updateModelOverrideForChatSession(
export async function updateLlmOverrideForChatSession(
chatSessionId: string,
newAlternateModel: string
) {

View File

@@ -44,7 +44,7 @@ import { ValidSources } from "@/lib/types";
import { useMouseTracking } from "./hooks";
import { SettingsContext } from "@/components/settings/SettingsProvider";
import RegenerateOption from "../RegenerateOption";
import { LlmOverride } from "@/lib/hooks";
import { LlmDescriptor } from "@/lib/hooks";
import { ContinueGenerating } from "./ContinueMessage";
import { MemoizedAnchor, MemoizedParagraph } from "./MemoizedTextComponents";
import { extractCodeText, preprocessLaTeX } from "./codeUtils";
@@ -117,7 +117,7 @@ export const AgenticMessage = ({
isComplete?: boolean;
handleFeedback?: (feedbackType: FeedbackType) => void;
overriddenModel?: string;
regenerate?: (modelOverRide: LlmOverride) => Promise<void>;
regenerate?: (modelOverRide: LlmDescriptor) => Promise<void>;
setPresentingDocument?: (document: OnyxDocument) => void;
toggleDocDisplay?: (agentic: boolean) => void;
error?: string | null;

View File

@@ -58,7 +58,7 @@ import { useMouseTracking } from "./hooks";
import { SettingsContext } from "@/components/settings/SettingsProvider";
import GeneratingImageDisplay from "../tools/GeneratingImageDisplay";
import RegenerateOption from "../RegenerateOption";
import { LlmOverride } from "@/lib/hooks";
import { LlmDescriptor } from "@/lib/hooks";
import { ContinueGenerating } from "./ContinueMessage";
import { MemoizedAnchor, MemoizedParagraph } from "./MemoizedTextComponents";
import { extractCodeText, preprocessLaTeX } from "./codeUtils";
@@ -213,7 +213,7 @@ export const AIMessage = ({
handleForceSearch?: () => void;
retrievalDisabled?: boolean;
overriddenModel?: string;
regenerate?: (modelOverRide: LlmOverride) => Promise<void>;
regenerate?: (modelOverRide: LlmDescriptor) => Promise<void>;
setPresentingDocument: (document: OnyxDocument) => void;
removePadding?: boolean;
}) => {

View File

@@ -11,7 +11,7 @@ import { CopyButton } from "@/components/CopyButton";
import { SEARCH_PARAM_NAMES } from "../searchParams";
import { usePopup } from "@/components/admin/connectors/Popup";
import { structureValue } from "@/lib/llm/utils";
import { LlmOverride } from "@/lib/hooks";
import { LlmDescriptor } from "@/lib/hooks";
import { Separator } from "@/components/ui/separator";
import { AdvancedOptionsToggle } from "@/components/AdvancedOptionsToggle";
@@ -38,7 +38,7 @@ async function generateShareLink(chatSessionId: string) {
async function generateSeedLink(
message?: string,
assistantId?: number,
modelOverride?: LlmOverride
modelOverride?: LlmDescriptor
) {
const baseUrl = `${window.location.protocol}//${window.location.host}`;
const model = modelOverride
@@ -92,7 +92,7 @@ export function ShareChatSessionModal({
onClose: () => void;
message?: string;
assistantId?: number;
modelOverride?: LlmOverride;
modelOverride?: LlmDescriptor;
}) {
const [shareLink, setShareLink] = useState<string>(
existingSharedStatus === ChatSessionSharedStatus.Public

View File

@@ -1,6 +1,6 @@
import { useContext, useEffect, useRef, useState } from "react";
import { Modal } from "@/components/Modal";
import { getDisplayNameForModel, LlmOverride } from "@/lib/hooks";
import { getDisplayNameForModel, LlmDescriptor } from "@/lib/hooks";
import { LLMProviderDescriptor } from "@/app/admin/configuration/llm/interfaces";
import { destructureValue, structureValue } from "@/lib/llm/utils";
@@ -31,12 +31,12 @@ export function UserSettingsModal({
setPopup,
llmProviders,
onClose,
setLlmOverride,
setCurrentLlm,
defaultModel,
}: {
setPopup: (popupSpec: PopupSpec | null) => void;
llmProviders: LLMProviderDescriptor[];
setLlmOverride?: (newOverride: LlmOverride) => void;
setCurrentLlm?: (newLlm: LlmDescriptor) => void;
onClose: () => void;
defaultModel: string | null;
}) {
@@ -127,18 +127,14 @@ export function UserSettingsModal({
);
});
const llmOptions = Object.entries(llmOptionsByProvider).flatMap(
([provider, options]) => [...options]
);
const router = useRouter();
const handleChangedefaultModel = async (defaultModel: string | null) => {
try {
const response = await setUserDefaultModel(defaultModel);
if (response.ok) {
if (defaultModel && setLlmOverride) {
setLlmOverride(destructureValue(defaultModel));
if (defaultModel && setCurrentLlm) {
setCurrentLlm(destructureValue(defaultModel));
}
setPopup({
message: "Default model updated successfully",

View File

@@ -360,18 +360,18 @@ export const useUsers = ({ includeApiKeys }: UseUsersParams) => {
};
};
export interface LlmOverride {
export interface LlmDescriptor {
name: string;
provider: string;
modelName: string;
}
export interface LlmOverrideManager {
llmOverride: LlmOverride;
updateLLMOverride: (newOverride: LlmOverride) => void;
export interface LlmManager {
currentLlm: LlmDescriptor;
updateCurrentLlm: (newOverride: LlmDescriptor) => void;
temperature: number;
updateTemperature: (temperature: number) => void;
updateModelOverrideForChatSession: (chatSession?: ChatSession) => void;
updateModelOverrideBasedOnChatSession: (chatSession?: ChatSession) => void;
imageFilesPresent: boolean;
updateImageFilesPresent: (present: boolean) => void;
liveAssistant: Persona | null;
@@ -400,7 +400,7 @@ Thus, the input should be
Changes take place as
- liveAssistant or currentChatSession changes (and the associated model override is set)
- (uploadLLMOverride) User explicitly setting a model override (and we explicitly override and set the userSpecifiedOverride which we'll use in place of the user preferences unless overridden by an assistant)
- (updateCurrentLlm) User explicitly setting a model override (and we explicitly override and set the userSpecifiedOverride which we'll use in place of the user preferences unless overridden by an assistant)
If we have a live assistant, we should use that model override
@@ -419,55 +419,78 @@ This approach ensures that user preferences are maintained for existing chats while
providing appropriate defaults for new conversations based on the available tools.
*/
export function useLlmOverride(
export function useLlmManager(
llmProviders: LLMProviderDescriptor[],
currentChatSession?: ChatSession,
liveAssistant?: Persona
): LlmOverrideManager {
): LlmManager {
const { user } = useUser();
const [userHasManuallyOverriddenLLM, setUserHasManuallyOverriddenLLM] =
useState(false);
const [chatSession, setChatSession] = useState<ChatSession | null>(null);
const [currentLlm, setCurrentLlm] = useState<LlmDescriptor>({
name: "",
provider: "",
modelName: "",
});
const llmOverrideUpdate = () => {
if (liveAssistant?.llm_model_version_override) {
setLlmOverride(
getValidLlmOverride(liveAssistant.llm_model_version_override)
);
} else if (currentChatSession?.current_alternate_model) {
setLlmOverride(
getValidLlmOverride(currentChatSession.current_alternate_model)
);
} else if (user?.preferences?.default_model) {
setLlmOverride(getValidLlmOverride(user.preferences.default_model));
return;
} else {
const defaultProvider = llmProviders.find(
(provider) => provider.is_default_provider
);
const llmUpdate = () => {
/* Should be called when the live assistant or current chat session changes */
if (defaultProvider) {
setLlmOverride({
name: defaultProvider.name,
provider: defaultProvider.provider,
modelName: defaultProvider.default_model_name,
});
// separate function so we can `return` to break out
const _llmUpdate = () => {
// if the user has overridden in this session and just switched to a brand
// new session, use their manually specified model
if (userHasManuallyOverriddenLLM && !currentChatSession) {
return;
}
}
if (currentChatSession?.current_alternate_model) {
setCurrentLlm(
getValidLlmDescriptor(currentChatSession.current_alternate_model)
);
} else if (liveAssistant?.llm_model_version_override) {
setCurrentLlm(
getValidLlmDescriptor(liveAssistant.llm_model_version_override)
);
} else if (userHasManuallyOverriddenLLM) {
// if the user has an override and there's nothing special about the
// current chat session, use the override
return;
} else if (user?.preferences?.default_model) {
setCurrentLlm(getValidLlmDescriptor(user.preferences.default_model));
} else {
const defaultProvider = llmProviders.find(
(provider) => provider.is_default_provider
);
if (defaultProvider) {
setCurrentLlm({
name: defaultProvider.name,
provider: defaultProvider.provider,
modelName: defaultProvider.default_model_name,
});
}
}
};
_llmUpdate();
setChatSession(currentChatSession || null);
};
const getValidLlmOverride = (
overrideModel: string | null | undefined
): LlmOverride => {
if (overrideModel) {
const model = destructureValue(overrideModel);
const getValidLlmDescriptor = (
modelName: string | null | undefined
): LlmDescriptor => {
if (modelName) {
const model = destructureValue(modelName);
if (!(model.modelName && model.modelName.length > 0)) {
const provider = llmProviders.find((p) =>
p.model_names.includes(overrideModel)
p.model_names.includes(modelName)
);
if (provider) {
return {
modelName: overrideModel,
modelName: modelName,
name: provider.name,
provider: provider.provider,
};
@@ -491,38 +514,32 @@ export function useLlmOverride(
setImageFilesPresent(present);
};
const [llmOverride, setLlmOverride] = useState<LlmOverride>({
name: "",
provider: "",
modelName: "",
});
// Manually set the override
const updateLLMOverride = (newOverride: LlmOverride) => {
// Manually set the LLM
const updateCurrentLlm = (newLlm: LlmDescriptor) => {
const provider =
newOverride.provider ||
findProviderForModel(llmProviders, newOverride.modelName);
newLlm.provider || findProviderForModel(llmProviders, newLlm.modelName);
const structuredValue = structureValue(
newOverride.name,
newLlm.name,
provider,
newOverride.modelName
newLlm.modelName
);
setLlmOverride(getValidLlmOverride(structuredValue));
setCurrentLlm(getValidLlmDescriptor(structuredValue));
setUserHasManuallyOverriddenLLM(true);
};
const updateModelOverrideForChatSession = (chatSession?: ChatSession) => {
const updateModelOverrideBasedOnChatSession = (chatSession?: ChatSession) => {
if (chatSession && chatSession.current_alternate_model?.length > 0) {
setLlmOverride(getValidLlmOverride(chatSession.current_alternate_model));
setCurrentLlm(getValidLlmDescriptor(chatSession.current_alternate_model));
}
};
const [temperature, setTemperature] = useState<number>(() => {
llmOverrideUpdate();
llmUpdate();
if (currentChatSession?.current_temperature_override != null) {
return Math.min(
currentChatSession.current_temperature_override,
isAnthropic(llmOverride.provider, llmOverride.modelName) ? 1.0 : 2.0
isAnthropic(currentLlm.provider, currentLlm.modelName) ? 1.0 : 2.0
);
} else if (
liveAssistant?.tools.some((tool) => tool.name === SEARCH_TOOL_ID)
@@ -533,22 +550,23 @@ export function useLlmOverride(
});
const maxTemperature = useMemo(() => {
return isAnthropic(llmOverride.provider, llmOverride.modelName) ? 1.0 : 2.0;
}, [llmOverride]);
return isAnthropic(currentLlm.provider, currentLlm.modelName) ? 1.0 : 2.0;
}, [currentLlm]);
useEffect(() => {
if (isAnthropic(llmOverride.provider, llmOverride.modelName)) {
if (isAnthropic(currentLlm.provider, currentLlm.modelName)) {
const newTemperature = Math.min(temperature, 1.0);
setTemperature(newTemperature);
if (chatSession?.id) {
updateTemperatureOverrideForChatSession(chatSession.id, newTemperature);
}
}
}, [llmOverride]);
}, [currentLlm]);
useEffect(() => {
llmUpdate();
if (!chatSession && currentChatSession) {
setChatSession(currentChatSession || null);
if (temperature) {
updateTemperatureOverrideForChatSession(
currentChatSession.id,
@@ -570,7 +588,7 @@ export function useLlmOverride(
}, [liveAssistant, currentChatSession]);
const updateTemperature = (temperature: number) => {
if (isAnthropic(llmOverride.provider, llmOverride.modelName)) {
if (isAnthropic(currentLlm.provider, currentLlm.modelName)) {
setTemperature((prevTemp) => Math.min(temperature, 1.0));
} else {
setTemperature(temperature);
@@ -581,9 +599,9 @@ export function useLlmOverride(
};
return {
updateModelOverrideForChatSession,
llmOverride,
updateLLMOverride,
updateModelOverrideBasedOnChatSession,
currentLlm,
updateCurrentLlm,
temperature,
updateTemperature,
imageFilesPresent,
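The rewritten `_llmUpdate` above is the heart of this fix. The precedence is now: keep a manual choice when entering a brand-new session; otherwise prefer the session's persisted alternate model, then the assistant's override, then the manual choice, then the user's default model, and finally the default provider. A condensed restatement of that decision outside React state — the types here are simplified stand-ins for the real `ChatSession`, `Persona`, and user-preference shapes:

import type { LlmDescriptor } from "@/lib/hooks";

interface LlmSelectionInputs {
  manualChoice: LlmDescriptor | null; // set via updateCurrentLlm
  sessionAlternateModel?: string; // chatSession.current_alternate_model
  assistantOverride?: string; // liveAssistant.llm_model_version_override
  userDefaultModel?: string; // user.preferences.default_model
  defaultProvider?: LlmDescriptor; // the is_default_provider entry
  resolve: (modelName: string) => LlmDescriptor; // getValidLlmDescriptor
}

// Where the hook simply `return`s to keep the user's manual choice,
// this returns that choice explicitly; null means no selection applies.
function pickLlm(
  i: LlmSelectionInputs,
  isNewSession: boolean
): LlmDescriptor | null {
  if (i.manualChoice && isNewSession) return i.manualChoice;
  if (i.sessionAlternateModel) return i.resolve(i.sessionAlternateModel);
  if (i.assistantOverride) return i.resolve(i.assistantOverride);
  if (i.manualChoice) return i.manualChoice;
  if (i.userDefaultModel) return i.resolve(i.userDefaultModel);
  return i.defaultProvider ?? null;
}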

View File

@@ -1,11 +1,11 @@
import { Persona } from "@/app/admin/assistants/interfaces";
import { LLMProviderDescriptor } from "@/app/admin/configuration/llm/interfaces";
import { LlmOverride } from "@/lib/hooks";
import { LlmDescriptor } from "@/lib/hooks";
export function getFinalLLM(
llmProviders: LLMProviderDescriptor[],
persona: Persona | null,
llmOverride: LlmOverride | null
currentLlm: LlmDescriptor | null
): [string, string] {
const defaultProvider = llmProviders.find(
(llmProvider) => llmProvider.is_default_provider
@@ -26,9 +26,9 @@ export function getFinalLLM(
model = persona.llm_model_version_override || model;
}
if (llmOverride) {
provider = llmOverride.provider || provider;
model = llmOverride.modelName || model;
if (currentLlm) {
provider = currentLlm.provider || provider;
model = currentLlm.modelName || model;
}
return [provider, model];
@@ -37,7 +37,7 @@ export function getFinalLLM(
export function getLLMProviderOverrideForPersona(
liveAssistant: Persona,
llmProviders: LLMProviderDescriptor[]
): LlmOverride | null {
): LlmDescriptor | null {
const overrideProvider = liveAssistant.llm_model_provider_override;
const overrideModel = liveAssistant.llm_model_version_override;
@@ -135,7 +135,7 @@ export const structureValue = (
return `${name}__${provider}__${modelName}`;
};
export const destructureValue = (value: string): LlmOverride => {
export const destructureValue = (value: string): LlmDescriptor => {
const [displayName, provider, modelName] = value.split("__");
return {
name: displayName,
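`structureValue` and `destructureValue` define the serialized form the renamed `LlmDescriptor` travels in — for session persistence and seeded share links alike: a `name__provider__modelName` string. A quick round trip (the model values are illustrative, not taken from this diff):

import { destructureValue, structureValue } from "@/lib/llm/utils";

const value = structureValue("OpenAI", "openai", "gpt-4o");
// -> "OpenAI__openai__gpt-4o"

const llm = destructureValue(value);
// -> { name: "OpenAI", provider: "openai", modelName: "gpt-4o" }

// Caveat implied by the implementation above: none of the three parts
// may itself contain "__", since destructureValue splits on it.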

View File

@@ -1,5 +1,3 @@
import { LlmOverride } from "../hooks";
export async function setUserDefaultModel(
model: string | null
): Promise<Response> {