Fix LLM API key (#4623)

* Fix LLM API key

* Remove unused import

* Update web/src/app/admin/configuration/llm/LLMProviderUpdateForm.tsx

Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>

---------

Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
Author: Chris Weaver
Date: 2025-04-27 16:10:36 -07:00 (committed by GitHub)
Parent: bbd0874200
Commit: ea0664e203
4 changed files with 33 additions and 26 deletions


@@ -70,7 +70,7 @@ def test_llm_configuration(
         name=test_llm_request.name, db_session=db_session
     )
     # if an API key is not provided, use the existing provider's API key
-    if existing_provider and test_api_key is None:
+    if existing_provider and not test_llm_request.api_key_changed:
         test_api_key = existing_provider.api_key

     # For this "testing" workflow, we do *not* need the actual `max_input_tokens`.

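For context, a minimal, hypothetical sketch of the fallback rule this hunk changes: the submitted key is only used when the form reports that the user actually edited it; otherwise the stored provider key is reused. The StoredProvider and TestRequest dataclasses and the resolve_test_api_key helper below are illustrative stand-ins, not the backend's real types.

# Illustrative only: simplified stand-ins for the backend's request/provider types.
from dataclasses import dataclass


@dataclass
class StoredProvider:
    api_key: str


@dataclass
class TestRequest:
    api_key: str | None
    api_key_changed: bool


def resolve_test_api_key(
    request: TestRequest, existing_provider: StoredProvider | None
) -> str | None:
    # Reuse the stored key unless the user explicitly changed it in the form.
    if existing_provider and not request.api_key_changed:
        return existing_provider.api_key
    return request.api_key


# e.g. editing an existing provider without touching the key field:
assert resolve_test_api_key(
    TestRequest(api_key=None, api_key_changed=False),
    StoredProvider(api_key="sk-stored"),
) == "sk-stored"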

@@ -28,7 +28,10 @@ class TestLLMRequest(BaseModel):
     fast_default_model_name: str | None = None
     deployment_name: str | None = None
-    model_configurations: list["ModelConfigurationUpsertRequest"] = []
+    model_configurations: list["ModelConfigurationUpsertRequest"]
+
+    # if False, try and use the existing API key
+    api_key_changed: bool


 class LLMProviderDescriptor(BaseModel):

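A hedged sketch of what this model change amounts to on the wire. Field names below come from this diff where possible; the api_key field, the defaults, and the example names are assumptions.

# Sketch only: trimmed to fields visible in this diff; extra fields and defaults are assumed.
from pydantic import BaseModel


class ModelConfigurationUpsertRequest(BaseModel):
    name: str
    is_visible: bool = True
    max_input_tokens: int | None = None


class TestLLMRequest(BaseModel):
    name: str
    api_key: str | None = None  # assumed field; the real model may differ
    fast_default_model_name: str | None = None
    deployment_name: str | None = None
    model_configurations: list[ModelConfigurationUpsertRequest]
    # if False, try and use the existing API key
    api_key_changed: bool


# A test request where the key field was never edited: the flag, not the key's
# presence, now tells the backend to fall back to the stored key.
req = TestLLMRequest(
    name="my-provider",  # placeholder provider name
    model_configurations=[ModelConfigurationUpsertRequest(name="gpt-4o")],  # placeholder model
    api_key_changed=False,
)
print(req)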

@@ -69,17 +69,16 @@ export function CustomLLMProviderUpdateForm({
     model_configurations: existingLlmProvider?.model_configurations.map(
       (modelConfiguration) => ({
         ...modelConfiguration,
-        max_input_tokens:
-          modelConfiguration.max_input_tokens ??
-          ("" as string | number | null | undefined),
+        max_input_tokens: modelConfiguration.max_input_tokens ?? null,
       })
-    ) ?? [{ name: "", is_visible: true, max_input_tokens: "" }],
+    ) ?? [{ name: "", is_visible: true, max_input_tokens: null }],
     custom_config_list: existingLlmProvider?.custom_config
       ? Object.entries(existingLlmProvider.custom_config)
       : [],
     is_public: existingLlmProvider?.is_public ?? true,
     groups: existingLlmProvider?.groups ?? [],
     deployment_name: existingLlmProvider?.deployment_name ?? null,
+    api_key_changed: false,
   };

   // Setup validation schema if required
@@ -114,15 +113,20 @@ export function CustomLLMProviderUpdateForm({
       onSubmit={async (values, { setSubmitting }) => {
         setSubmitting(true);

-        values.model_configurations.forEach((modelConfiguration) => {
-          if (
-            modelConfiguration.max_input_tokens === "" ||
-            modelConfiguration.max_input_tokens === null ||
-            modelConfiguration.max_input_tokens === undefined
-          ) {
-            modelConfiguration.max_input_tokens = null;
-          }
-        });
+        // build final payload
+        const finalValues = { ...values };
+        finalValues.model_configurations = finalValues.model_configurations.map(
+          (modelConfiguration) => ({
+            ...modelConfiguration,
+            max_input_tokens:
+              modelConfiguration.max_input_tokens === null ||
+              modelConfiguration.max_input_tokens === undefined
+                ? null
+                : modelConfiguration.max_input_tokens,
+            supports_image_input: false, // doesn't matter, not used
+          })
+        );
+        finalValues.api_key_changed = values.api_key !== initialValues.api_key;

         if (values.model_configurations.length === 0) {
           const fullErrorMsg = "At least one model name is required";


@@ -75,7 +75,7 @@ export function LLMProviderUpdateForm({
     ),
     is_public: existingLlmProvider?.is_public ?? true,
     groups: existingLlmProvider?.groups ?? [],
-    model_configurations: [] as ModelConfiguration[],
+    model_configurations: existingLlmProvider?.model_configurations ?? [],
     deployment_name: existingLlmProvider?.deployment_name,
     api_key_changed: false,
@@ -136,7 +136,6 @@ export function LLMProviderUpdateForm({
         max_input_tokens: Yup.number().nullable().optional(),
       })
     ),
-    api_key_changed: Yup.boolean(),
   });

   return (
@@ -146,10 +145,10 @@ export function LLMProviderUpdateForm({
       onSubmit={async (values, { setSubmitting }) => {
         setSubmitting(true);

-        values.api_key_changed = values.api_key !== initialValues.api_key;
-
+        // build final payload
         const visibleModels = new Set(values.selected_model_names);
-        values.model_configurations = llmProviderDescriptor.llm_names.map(
+        const finalValues = { ...values };
+        finalValues.model_configurations = llmProviderDescriptor.llm_names.map(
           (name) =>
             ({
               name,
@@ -157,11 +156,11 @@ export function LLMProviderUpdateForm({
               max_input_tokens: null,
             }) as ModelConfiguration
         );
-
-        delete values.selected_model_names;
+        delete finalValues.selected_model_names;
+        finalValues.api_key_changed = values.api_key !== initialValues.api_key;

         // test the configuration
-        if (!isEqual(values, initialValues)) {
+        if (!isEqual(finalValues, initialValues)) {
           setIsTesting(true);

           const response = await fetch("/api/admin/llm/test", {
@@ -171,7 +170,7 @@ export function LLMProviderUpdateForm({
           },
           body: JSON.stringify({
             provider: llmProviderDescriptor.name,
-            ...values,
+            ...finalValues,
           }),
         });
         setIsTesting(false);
@@ -194,9 +193,10 @@ export function LLMProviderUpdateForm({
           },
           body: JSON.stringify({
             provider: llmProviderDescriptor.name,
-            ...values,
+            ...finalValues,
             fast_default_model_name:
-              values.fast_default_model_name || values.default_model_name,
+              finalValues.fast_default_model_name ||
+              finalValues.default_model_name,
           }),
         }
       );
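
Taken together, the form now sends an explicit api_key_changed flag instead of relying on whether a key value happens to be present. A hedged sketch of the resulting request to the test endpoint follows; only the field names and the /api/admin/llm/test path come from this diff, while the base URL, auth, HTTP method, and concrete values are placeholders.

# Sketch only: field values, base URL, and auth are placeholders; POST is assumed.
import requests

payload = {
    "provider": "openai",            # llmProviderDescriptor.name in the form
    "name": "my-provider",           # placeholder provider name
    "api_key": "********",           # the unedited key value from the form
    "api_key_changed": False,        # values.api_key !== initialValues.api_key
    "default_model_name": "gpt-4o",  # placeholder model names
    "fast_default_model_name": "gpt-4o",
    "deployment_name": None,
    "model_configurations": [
        {"name": "gpt-4o", "is_visible": True, "max_input_tokens": None}
    ],
}

# With api_key_changed=False the backend reuses the stored key for the test,
# so an unedited key field no longer breaks the connectivity check.
resp = requests.post("http://localhost:8080/api/admin/llm/test", json=payload)
resp.raise_for_status()

Building a separate finalValues object instead of mutating Formik's values also keeps the isEqual dirty-check against initialValues honest.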