From 4d0794f4f5c7af4464921ff8c34529a19e25b217 Mon Sep 17 00:00:00 2001 From: hagen-danswer Date: Sun, 9 Jun 2024 17:05:41 -0700 Subject: [PATCH] chatpage now checks for llm override for image uploads --- backend/danswer/llm/llm_provider_options.py | 16 ++++++++++++---- web/src/app/chat/ChatPage.tsx | 2 +- web/src/lib/llm/utils.ts | 9 ++++++++- 3 files changed, 21 insertions(+), 6 deletions(-) diff --git a/backend/danswer/llm/llm_provider_options.py b/backend/danswer/llm/llm_provider_options.py index 3b2c62c6c..bb4bd13dd 100644 --- a/backend/danswer/llm/llm_provider_options.py +++ b/backend/danswer/llm/llm_provider_options.py @@ -30,9 +30,9 @@ OPEN_AI_MODEL_NAMES = [ "gpt-4-turbo-preview", "gpt-4-1106-preview", "gpt-4-vision-preview", - "gpt-4-32k", + # "gpt-4-32k", # not EOL but still doesn't work "gpt-4-0613", - "gpt-4-32k-0613", + # "gpt-4-32k-0613", # not EOL but still doesn't work "gpt-4-0314", "gpt-4-32k-0314", "gpt-3.5-turbo", @@ -51,8 +51,16 @@ BEDROCK_MODEL_NAMES = [model for model in litellm.bedrock_models if "/" not in m ::-1 ] +IGNORABLE_ANTHROPIC_MODELS = [ + "claude-2", + "claude-instant-1", +] ANTHROPIC_PROVIDER_NAME = "anthropic" -ANTHROPIC_MODEL_NAMES = [model for model in litellm.anthropic_models][::-1] +ANTHROPIC_MODEL_NAMES = [ + model + for model in litellm.anthropic_models + if model not in IGNORABLE_ANTHROPIC_MODELS +][::-1] AZURE_PROVIDER_NAME = "azure" @@ -73,7 +81,7 @@ def fetch_available_well_known_llms() -> list[WellKnownLLMProviderDescriptor]: api_base_required=False, api_version_required=False, custom_config_keys=[], - llm_names=fetch_models_for_provider("openai"), + llm_names=fetch_models_for_provider(OPENAI_PROVIDER_NAME), default_model="gpt-4", default_fast_model="gpt-3.5-turbo", ), diff --git a/web/src/app/chat/ChatPage.tsx b/web/src/app/chat/ChatPage.tsx index 7ae625820..047b115c2 100644 --- a/web/src/app/chat/ChatPage.tsx +++ b/web/src/app/chat/ChatPage.tsx @@ -773,7 +773,7 @@ export function ChatPage({ const handleImageUpload =
(acceptedFiles: File[]) => { const llmAcceptsImages = checkLLMSupportsImageInput( - ...getFinalLLM(llmProviders, livePersona) + ...getFinalLLM(llmProviders, livePersona, llmOverrideManager.llmOverride) ); const imageFiles = acceptedFiles.filter((file) => file.type.startsWith("image/") diff --git a/web/src/lib/llm/utils.ts b/web/src/lib/llm/utils.ts index b9d5d5925..28fffa8d4 100644 --- a/web/src/lib/llm/utils.ts +++ b/web/src/lib/llm/utils.ts @@ -1,9 +1,11 @@ import { Persona } from "@/app/admin/assistants/interfaces"; import { LLMProviderDescriptor } from "@/app/admin/models/llm/interfaces"; +import { LlmOverride } from "@/lib/hooks"; export function getFinalLLM( llmProviders: LLMProviderDescriptor[], - persona: Persona | null + persona: Persona | null, + llmOverride: LlmOverride | null ): [string, string] { const defaultProvider = llmProviders.find( (llmProvider) => llmProvider.is_default_provider @@ -17,6 +19,11 @@ export function getFinalLLM( model = persona.llm_model_version_override || model; } + if (llmOverride) { + provider = llmOverride.provider || provider; + model = llmOverride.modelName || model; + } + return [provider, model]; }