Implemented LLM disabling for api call (#1905)

This commit is contained in:
hagen-danswer 2024-07-23 16:12:51 -07:00 committed by GitHub
parent 9c6084bd0d
commit 866bc803b1
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194

View File

@ -256,6 +256,9 @@ def stream_answer_objects(
            )
        yield initial_response
elif packet.id == SEARCH_DOC_CONTENT_ID:
yield packet.response
        elif packet.id == SECTION_RELEVANCE_LIST_ID:
            chunk_indices = packet.response
@ -267,9 +270,12 @@ def stream_answer_objects(
            )
            yield LLMRelevanceFilterResponse(relevant_chunk_indices=packet.response)
            if query_req.skip_gen_ai_answer_generation:
                # Exit early if only source docs + contexts are requested
                # Putting exit here assumes that a packet with the ID
                # SECTION_RELEVANCE_LIST_ID is the last one yielded before
                # calling the LLM
                return
(removed lines, moved earlier in the diff:)
        elif packet.id == SEARCH_DOC_CONTENT_ID:
            yield packet.response
        elif packet.id == SEARCH_EVALUATION_ID:
            evaluation_response = LLMRelevanceSummaryResponse(