Mirror of https://github.com/danswer-ai/danswer.git, synced 2025-09-18 11:34:12 +02:00.
Commit d53dd1e356 — "cited_docs -> cited_documents", committed by Evan Lohn.
Parent commit: 1a2760edee.
@@ -0,0 +1,45 @@
|
||||
from datetime import datetime
|
||||
|
||||
from onyx.agents.agent_search.deep_search_a.initial.generate_individual_sub_answer.states import (
|
||||
AnswerQuestionOutput,
|
||||
)
|
||||
from onyx.agents.agent_search.deep_search_a.main.operations import logger
|
||||
from onyx.agents.agent_search.deep_search_a.main.states import (
|
||||
DecompAnswersUpdate,
|
||||
)
|
||||
from onyx.agents.agent_search.shared_graph_utils.operators import (
|
||||
dedup_inference_sections,
|
||||
)
|
||||
|
||||
|
||||
def format_initial_sub_answers(
    state: AnswerQuestionOutput,
) -> DecompAnswersUpdate:
    """Ingest the results of the initial sub-answer runs.

    Gathers the documents, context documents, and cited documents from every
    sub-answer result on *state*, deduplicates each collection, and packages
    them — together with the raw sub-question results and a timing log line —
    into a DecompAnswersUpdate for the main graph.
    """
    now_start = datetime.now()

    logger.info(f"--------{now_start}--------INGEST ANSWERS---")

    # State may not carry answer_results at all; treat that as "no results".
    answer_results = getattr(state, "answer_results", [])

    documents = []
    context_documents = []
    cited_docs = []
    for result in answer_results:
        documents.extend(result.documents)
        context_documents.extend(result.context_documents)
        cited_docs.extend(result.cited_docs)

    now_end = datetime.now()

    logger.debug(
        f"--------{now_end}--{now_end - now_start}--------INGEST ANSWERS END---"
    )

    return DecompAnswersUpdate(
        # Deduping is done by the documents operator for the main graph
        # so we might not need to dedup here
        documents=dedup_inference_sections(documents, []),
        context_documents=dedup_inference_sections(context_documents, []),
        cited_documents=dedup_inference_sections(cited_docs, []),
        sub_question_results=answer_results,
        log_messages=[
            f"{now_start} -- Main - Ingest initial processed sub questions, Time taken: {now_end - now_start}"
        ],
    )
|
@@ -67,7 +67,7 @@ def generate_initial_answer(
|
||||
question = agent_a_config.search_request.query
|
||||
prompt_enrichment_components = get_prompt_enrichment_components(agent_a_config)
|
||||
|
||||
sub_questions_cited_docs = state.cited_docs
|
||||
sub_questions_cited_docs = state.cited_documents
|
||||
all_original_question_documents = state.all_original_question_documents
|
||||
|
||||
consolidated_context_docs: list[InferenceSection] = sub_questions_cited_docs
|
||||
|
@@ -33,7 +33,7 @@ def format_initial_sub_answers(
|
||||
# so we might not need to dedup here
|
||||
documents=dedup_inference_sections(documents, []),
|
||||
context_documents=dedup_inference_sections(context_documents, []),
|
||||
cited_docs=dedup_inference_sections(cited_docs, []),
|
||||
cited_documents=dedup_inference_sections(cited_docs, []),
|
||||
sub_question_results=answer_results,
|
||||
log_messages=[
|
||||
get_langgraph_node_log_string(
|
||||
|
@@ -71,7 +71,7 @@ def generate_refined_answer(
|
||||
|
||||
initial_documents = state.documents
|
||||
refined_documents = state.refined_documents
|
||||
sub_questions_cited_docs = state.cited_docs
|
||||
sub_questions_cited_docs = state.cited_documents
|
||||
all_original_question_documents = state.all_original_question_documents
|
||||
|
||||
consolidated_context_docs: list[InferenceSection] = sub_questions_cited_docs
|
||||
|
@@ -104,7 +104,7 @@ class RequireRefinedAnswerUpdate(LoggerUpdate):
|
||||
class DecompAnswersUpdate(LoggerUpdate):
|
||||
documents: Annotated[list[InferenceSection], dedup_inference_sections] = []
|
||||
context_documents: Annotated[list[InferenceSection], dedup_inference_sections] = []
|
||||
cited_docs: Annotated[
|
||||
cited_documents: Annotated[
|
||||
list[InferenceSection], dedup_inference_sections
|
||||
] = [] # cited docs from sub-answers are used for answer context
|
||||
sub_question_results: Annotated[
|
||||
|
Reference in New Issue
Block a user