Fix LLM API key (#4623)

* Fix LLM API key

* Remove unused import

* Update web/src/app/admin/configuration/llm/LLMProviderUpdateForm.tsx

Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>

---------

Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
Chris Weaver, 2025-04-27 16:10:36 -07:00 (committed by GitHub)
parent bbd0874200
commit ea0664e203
4 changed files with 33 additions and 26 deletions


@@ -70,7 +70,7 @@ def test_llm_configuration(
             name=test_llm_request.name, db_session=db_session
         )
         # if an API key is not provided, use the existing provider's API key
-        if existing_provider and test_api_key is None:
+        if existing_provider and not test_llm_request.api_key_changed:
             test_api_key = existing_provider.api_key
 
     # For this "testing" workflow, we do *not* need the actual `max_input_tokens`.
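Note on this backend change: previously the stored key was reused only when the test request carried no API key at all; the new `api_key_changed` flag makes the client's intent explicit. A rough TypeScript sketch of the client side of that contract follows (the helper name, the POST method, and any payload fields beyond `api_key`/`api_key_changed` are assumptions; the `/api/admin/llm/test` endpoint appears in the frontend hunks below):

```typescript
// Sketch of the client side of the api_key_changed contract. The helper name,
// the POST method, and the payload shape beyond api_key / api_key_changed are
// illustrative assumptions, not the actual form code.
async function testProvider(
  values: { api_key?: string },
  initialApiKey: string | undefined
): Promise<Response> {
  return fetch("/api/admin/llm/test", {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({
      ...values,
      // false => the backend falls back to the provider's stored API key
      api_key_changed: values.api_key !== initialApiKey,
    }),
  });
}
```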


@@ -28,7 +28,10 @@ class TestLLMRequest(BaseModel):
     fast_default_model_name: str | None = None
     deployment_name: str | None = None
-    model_configurations: list["ModelConfigurationUpsertRequest"] = []
+    model_configurations: list["ModelConfigurationUpsertRequest"]
+
+    # if False, try and use the existing API key
+    api_key_changed: bool
 
 
 class LLMProviderDescriptor(BaseModel):
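For reference, a hedged TypeScript mirror of the request shape after this change; the interface names are made up, and any `TestLLMRequest` fields not visible in this hunk are omitted:

```typescript
// Assumed TypeScript mirror of the updated test-request contract; only fields
// that appear in this diff are listed, the rest of TestLLMRequest is omitted.
interface ModelConfigurationUpsertRequestBody {
  name: string;
  is_visible?: boolean;
  max_input_tokens?: number | null;
}

interface TestLLMRequestBody {
  fast_default_model_name?: string | null;
  deployment_name?: string | null;
  // no longer defaults to [] on the backend, so the client always sends it
  model_configurations: ModelConfigurationUpsertRequestBody[];
  // false => the backend reuses the provider's stored API key when testing
  api_key_changed: boolean;
}
```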


@@ -69,17 +69,16 @@ export function CustomLLMProviderUpdateForm({
     model_configurations: existingLlmProvider?.model_configurations.map(
       (modelConfiguration) => ({
         ...modelConfiguration,
-        max_input_tokens:
-          modelConfiguration.max_input_tokens ??
-          ("" as string | number | null | undefined),
+        max_input_tokens: modelConfiguration.max_input_tokens ?? null,
       })
-    ) ?? [{ name: "", is_visible: true, max_input_tokens: "" }],
+    ) ?? [{ name: "", is_visible: true, max_input_tokens: null }],
     custom_config_list: existingLlmProvider?.custom_config
       ? Object.entries(existingLlmProvider.custom_config)
       : [],
     is_public: existingLlmProvider?.is_public ?? true,
     groups: existingLlmProvider?.groups ?? [],
     deployment_name: existingLlmProvider?.deployment_name ?? null,
+    api_key_changed: false,
   };
 
   // Setup validation schema if required
@@ -114,15 +113,20 @@ export function CustomLLMProviderUpdateForm({
       onSubmit={async (values, { setSubmitting }) => {
         setSubmitting(true);
 
-        values.model_configurations.forEach((modelConfiguration) => {
-          if (
-            modelConfiguration.max_input_tokens === "" ||
+        // build final payload
+        const finalValues = { ...values };
+        finalValues.model_configurations = finalValues.model_configurations.map(
+          (modelConfiguration) => ({
+            ...modelConfiguration,
+            max_input_tokens:
+              modelConfiguration.max_input_tokens === null ||
               modelConfiguration.max_input_tokens === undefined
-          ) {
-            modelConfiguration.max_input_tokens = null;
-          }
-        });
+                ? null
+                : modelConfiguration.max_input_tokens,
+            supports_image_input: false, // doesn't matter, not used
+          })
+        );
+        finalValues.api_key_changed = values.api_key !== initialValues.api_key;
 
         if (values.model_configurations.length === 0) {
           const fullErrorMsg = "At least one model name is required";
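The pattern in both hunks above is to stop mutating Formik's `values` and instead derive a normalized `finalValues` copy right before submission. A standalone sketch of that pattern (helper and type names are illustrative; only the logic mirrors the diff):

```typescript
// Standalone sketch of the payload-building pattern above. Names are
// illustrative; only the normalization and flag logic mirror the diff.
interface FormModelConfiguration {
  name: string;
  is_visible: boolean;
  max_input_tokens: number | null | undefined;
}

interface ProviderFormValues {
  api_key?: string;
  model_configurations: FormModelConfiguration[];
  [key: string]: unknown;
}

function buildFinalValues(
  values: ProviderFormValues,
  initialValues: ProviderFormValues
) {
  return {
    ...values,
    model_configurations: values.model_configurations.map((m) => ({
      ...m,
      // unset token limits become null so the backend sees "no limit"
      max_input_tokens: m.max_input_tokens ?? null,
      supports_image_input: false, // not used by the test endpoint
    })),
    // only claim the key changed if the admin actually edited the field
    api_key_changed: values.api_key !== initialValues.api_key,
  };
}
```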


@@ -75,7 +75,7 @@ export function LLMProviderUpdateForm({
     ),
     is_public: existingLlmProvider?.is_public ?? true,
     groups: existingLlmProvider?.groups ?? [],
-    model_configurations: [] as ModelConfiguration[],
+    model_configurations: existingLlmProvider?.model_configurations ?? [],
     deployment_name: existingLlmProvider?.deployment_name,
     api_key_changed: false,
@@ -136,7 +136,6 @@ export function LLMProviderUpdateForm({
         max_input_tokens: Yup.number().nullable().optional(),
       })
     ),
-    api_key_changed: Yup.boolean(),
   });
 
   return (
@@ -146,10 +145,10 @@ export function LLMProviderUpdateForm({
       onSubmit={async (values, { setSubmitting }) => {
         setSubmitting(true);
 
-        values.api_key_changed = values.api_key !== initialValues.api_key;
         // build final payload
         const visibleModels = new Set(values.selected_model_names);
-        values.model_configurations = llmProviderDescriptor.llm_names.map(
+        const finalValues = { ...values };
+        finalValues.model_configurations = llmProviderDescriptor.llm_names.map(
           (name) =>
             ({
               name,
@@ -157,11 +156,11 @@ export function LLMProviderUpdateForm({
               max_input_tokens: null,
             }) as ModelConfiguration
         );
-        delete values.selected_model_names;
+        delete finalValues.selected_model_names;
+        finalValues.api_key_changed = values.api_key !== initialValues.api_key;
 
         // test the configuration
-        if (!isEqual(values, initialValues)) {
+        if (!isEqual(finalValues, initialValues)) {
           setIsTesting(true);
 
           const response = await fetch("/api/admin/llm/test", {
@@ -171,7 +170,7 @@ export function LLMProviderUpdateForm({
             },
             body: JSON.stringify({
               provider: llmProviderDescriptor.name,
-              ...values,
+              ...finalValues,
             }),
           });
           setIsTesting(false);
@@ -194,9 +193,10 @@ export function LLMProviderUpdateForm({
           },
           body: JSON.stringify({
             provider: llmProviderDescriptor.name,
-            ...values,
+            ...finalValues,
             fast_default_model_name:
-              values.fast_default_model_name || values.default_model_name,
+              finalValues.fast_default_model_name ||
+              finalValues.default_model_name,
           }),
         }
       );
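In this form, `finalValues.model_configurations` is rebuilt from the provider descriptor plus the admin's selection before `selected_model_names` is deleted. A sketch of that derivation (the `is_visible` assignment is inferred from the `visibleModels` set above, and `ModelConfiguration` is reduced to the fields shown in the diff):

```typescript
// Sketch of how model_configurations is rebuilt before submission. The
// is_visible assignment is inferred from the visibleModels Set in the diff,
// and ModelConfiguration is reduced to the fields shown there.
interface ModelConfiguration {
  name: string;
  is_visible: boolean;
  max_input_tokens: number | null;
}

function toModelConfigurations(
  llmNames: string[],
  selectedModelNames: string[]
): ModelConfiguration[] {
  const visibleModels = new Set(selectedModelNames);
  return llmNames.map((name) => ({
    name,
    is_visible: visibleModels.has(name),
    max_input_tokens: null, // token limits are not set from this form
  }));
}
```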