Allow removal of max_output_tokens by setting GEN_AI_MAX_OUTPUT_TOKENS=0 (#1958)

Co-authored-by: Emerson Gomes <emerson.gomes@thalesgroup.com>
Authored by Emerson Gomes on 2024-07-27 11:07:29 -05:00; committed by GitHub.
parent d839595330
commit 6c32821ad4
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194

View File

@ -266,7 +266,7 @@ class DefaultMultiLLM(LLM):
stream=stream,
# model params
temperature=self._temperature,
max_tokens=self._max_output_tokens,
max_tokens=self._max_output_tokens if self._max_output_tokens > 0 else None,
timeout=self._timeout,
**self._model_kwargs,
)