Add proper default temperature + overrides (#2059)

* add proper default temperature + overrides

* remove unclear comment

* amend defaults + include internet search
pablodanswer 2024-08-06 12:57:14 -07:00 committed by GitHub
parent 3bc2cf9946
commit 6350219143
2 changed files with 31 additions and 10 deletions
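In short, this change computes a default temperature up front, before constructing the LLM override manager: an explicit temperature query parameter wins, assistants wired to a search tool fall back to 0, and all other assistants fall back to 0.7. A minimal sketch of that selection logic, using simplified stand-in types (`Tool`, `Persona`) and a hypothetical `resolveDefaultTemperature` helper rather than the app's real code:

```typescript
// Simplified stand-ins for the app's persona/tool types.
interface Tool {
  in_code_tool_id: string | null;
}
interface Persona {
  tools: Tool[];
}

// Mirrors the selection in the diff below: a non-empty ?temperature= value is
// parsed and used as-is; otherwise search-style assistants default to 0 and
// everything else defaults to 0.7.
function resolveDefaultTemperature(
  searchParamTemperature: string | null,
  selectedAssistant?: Persona
): number {
  if (searchParamTemperature) {
    return parseFloat(searchParamTemperature);
  }
  const usesSearch = selectedAssistant?.tools.some(
    (tool) =>
      tool.in_code_tool_id === "SearchTool" ||
      tool.in_code_tool_id === "InternetSearchTool"
  );
  return usesSearch ? 0 : 0.7;
}
```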


@@ -111,10 +111,8 @@ export function ChatPage({
const selectedChatSession = chatSessions.find(
(chatSession) => chatSession.id === existingChatSessionId
);
const chatSessionIdRef = useRef<number | null>(existingChatSessionId);
// LLM
const llmOverrideManager = useLlmOverride(selectedChatSession);
const chatSessionIdRef = useRef<number | null>(existingChatSessionId);
// Assistants
const filteredAssistants = orderAssistantsForUser(availableAssistants, user);
@@ -136,6 +134,25 @@ export function ChatPage({
)
: undefined
);
// Gather default temperature settings
const search_param_temperature = searchParams.get(
SEARCH_PARAM_NAMES.TEMPERATURE
);
const defaultTemperature = search_param_temperature
? parseFloat(search_param_temperature)
: selectedAssistant?.tools.some(
(tool) =>
tool.in_code_tool_id === "SearchTool" ||
tool.in_code_tool_id === "InternetSearchTool"
)
? 0
: 0.7;
const llmOverrideManager = useLlmOverride(
selectedChatSession,
defaultTemperature
);
const setSelectedAssistantFromId = (assistantId: number) => {
// NOTE: also intentionally look through available assistants here, so that
// even if the user has hidden an assistant they can still go back to it
@@ -764,10 +781,7 @@ export function ChatPage({
llmOverrideManager.llmOverride.modelName ||
searchParams.get(SEARCH_PARAM_NAMES.MODEL_VERSION) ||
undefined,
temperature:
llmOverrideManager.temperature ||
parseFloat(searchParams.get(SEARCH_PARAM_NAMES.TEMPERATURE) || "") ||
undefined,
temperature: llmOverrideManager.temperature || undefined,
systemPromptOverride:
searchParams.get(SEARCH_PARAM_NAMES.SYSTEM_PROMPT) || undefined,
useExistingUserMessage: isSeededChat,
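Because the query parameter is now folded into the manager's default, the send path no longer re-parses it and simply forwards whatever the manager holds. A rough illustration of the resulting resolution order, using a hypothetical `buildSendPayload` helper rather than the app's real send function:

```typescript
// Hypothetical helper illustrating the new resolution order: the manager's
// temperature already reflects either the URL override or the assistant-based
// default, so the send path just forwards it. Note that with `||`, falsy
// values (including an explicit 0) are sent as undefined.
function buildSendPayload(llmOverrideManager: { temperature: number | null }) {
  return {
    temperature: llmOverrideManager.temperature || undefined,
  };
}
```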


@@ -6,7 +6,7 @@ import {
} from "@/lib/types";
import useSWR, { mutate, useSWRConfig } from "swr";
import { errorHandlingFetcher } from "./fetcher";
import { useState } from "react";
import { useEffect, useState } from "react";
import { DateRangePickerValue } from "@tremor/react";
import { SourceMetadata } from "./search/interfaces";
import { destructureValue } from "./llm/utils";
@@ -143,7 +143,8 @@ export interface LlmOverrideManager {
}
export function useLlmOverride(
currentChatSession?: ChatSession
currentChatSession?: ChatSession,
defaultTemperature?: number
): LlmOverrideManager {
const [llmOverride, setLlmOverride] = useState<LlmOverride>(
currentChatSession && currentChatSession.current_alternate_model
@@ -167,7 +168,13 @@
);
};
const [temperature, setTemperature] = useState<number | null>(null);
const [temperature, setTemperature] = useState<number | null>(
defaultTemperature != undefined ? defaultTemperature : 0
);
useEffect(() => {
setTemperature(defaultTemperature !== undefined ? defaultTemperature : 0);
}, [defaultTemperature]);
return {
updateModelOverrideForChatSession,
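On the hook side, the new second parameter seeds the temperature state, and a `useEffect` re-syncs it whenever the computed default changes (for example, when the user switches to or from a search assistant). A condensed sketch of just the temperature handling, as a hypothetical cut-down hook rather than the full `useLlmOverride`:

```typescript
import { useEffect, useState } from "react";

// Hypothetical cut-down version of the temperature portion of useLlmOverride.
function useTemperatureOverride(defaultTemperature?: number) {
  // Seed state from the caller-supplied default, falling back to 0.
  const [temperature, setTemperature] = useState<number | null>(
    defaultTemperature !== undefined ? defaultTemperature : 0
  );

  // Keep the state in sync when the default itself changes.
  useEffect(() => {
    setTemperature(defaultTemperature !== undefined ? defaultTemperature : 0);
  }, [defaultTemperature]);

  return { temperature, setTemperature };
}
```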