Temperature (#3310)

* fix temperatures for default llm

* ensure anthropic models don't overflow

* minor cleanup

* k

* k

* k

* fix typing
Author: pablodanswer
Date: 2024-12-03 09:22:22 -08:00
Committed by: GitHub
Parent: 6c2269e565
Commit: cd5f2293ad
8 changed files with 50 additions and 59 deletions
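The "ensure anthropic models don't overflow" bullet presumably refers to Anthropic's narrower temperature range: its API accepts values in 0.0–1.0, whereas OpenAI-compatible APIs allow up to 2.0, so a requested temperature has to be capped before the call goes out. A minimal sketch of that kind of cap, using a hypothetical helper name that does not appear in this diff:

```python
def cap_temperature_for_provider(provider: str, temperature: float) -> float:
    """Cap the requested temperature to the range the provider supports.

    Hypothetical helper, for illustration only; it is not the code changed
    in this commit. Anthropic rejects temperatures above 1.0, while
    OpenAI-style APIs accept values up to 2.0.
    """
    if provider.lower() == "anthropic":
        # Cap rather than error so an over-range request still succeeds.
        return min(temperature, 1.0)
    return temperature
```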


@@ -71,6 +71,7 @@ def get_llms_for_persona(
         api_base=llm_provider.api_base,
         api_version=llm_provider.api_version,
         custom_config=llm_provider.custom_config,
+        temperature=temperature_override,
         additional_headers=additional_headers,
         long_term_logger=long_term_logger,
     )
@@ -128,11 +129,13 @@ def get_llm(
     api_base: str | None = None,
     api_version: str | None = None,
     custom_config: dict[str, str] | None = None,
-    temperature: float = GEN_AI_TEMPERATURE,
+    temperature: float | None = None,
     timeout: int = QA_TIMEOUT,
     additional_headers: dict[str, str] | None = None,
     long_term_logger: LongTermLogger | None = None,
 ) -> LLM:
+    if temperature is None:
+        temperature = GEN_AI_TEMPERATURE
     return DefaultMultiLLM(
         model_provider=provider,
         model_name=model,
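Taken together, the two hunks move the default-temperature decision inside get_llm: the parameter is now Optional, and None falls back to GEN_AI_TEMPERATURE, so a caller such as get_llms_for_persona can pass a possibly-unset temperature_override straight through without branching. A minimal sketch of that fallback pattern, assuming a placeholder default of 0.0 (the real GEN_AI_TEMPERATURE comes from the app's config):

```python
GEN_AI_TEMPERATURE = 0.0  # placeholder; the real default is read from config


def resolve_temperature(temperature: float | None = None) -> float:
    # None means "no explicit override", so fall back to the configured default.
    return GEN_AI_TEMPERATURE if temperature is None else temperature


assert resolve_temperature() == 0.0      # no override -> configured default
assert resolve_temperature(0.7) == 0.7   # explicit override wins
```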