stop infos when done streaming answers

This commit is contained in:
Evan Lohn
2025-01-24 15:03:10 -08:00
parent eea6f2749a
commit 2d8486bac4
4 changed files with 22 additions and 1 deletions

View File

@@ -43,6 +43,9 @@ from onyx.agents.agent_search.shared_graph_utils.prompts import (
SUB_QUESTION_ANSWER_TEMPLATE,
)
from onyx.agents.agent_search.shared_graph_utils.prompts import UNKNOWN_ANSWER
from onyx.agents.agent_search.shared_graph_utils.utils import (
dispatch_main_answer_stop_info,
)
from onyx.agents.agent_search.shared_graph_utils.utils import format_docs
from onyx.agents.agent_search.shared_graph_utils.utils import get_persona_prompt
from onyx.agents.agent_search.shared_graph_utils.utils import get_today_prompt
@@ -84,6 +87,7 @@ def generate_initial_answer(
answer_type="agent_level_answer",
),
)
dispatch_main_answer_stop_info(0)
answer = UNKNOWN_ANSWER
initial_agent_stats = InitialAgentResultStats(
@@ -209,6 +213,7 @@ def generate_initial_answer(
)
streamed_tokens.append(content)
dispatch_main_answer_stop_info(0)
response = merge_content(*streamed_tokens)
answer = cast(str, response)

View File

@@ -40,6 +40,9 @@ from onyx.agents.agent_search.shared_graph_utils.prompts import (
SUB_QUESTION_ANSWER_TEMPLATE,
)
from onyx.agents.agent_search.shared_graph_utils.prompts import UNKNOWN_ANSWER
from onyx.agents.agent_search.shared_graph_utils.utils import (
dispatch_main_answer_stop_info,
)
from onyx.agents.agent_search.shared_graph_utils.utils import format_docs
from onyx.agents.agent_search.shared_graph_utils.utils import get_persona_prompt
from onyx.agents.agent_search.shared_graph_utils.utils import get_today_prompt
@@ -216,6 +219,7 @@ def generate_refined_answer(
)
streamed_tokens.append(content)
dispatch_main_answer_stop_info(1)
response = merge_content(*streamed_tokens)
answer = cast(str, response)

View File

@@ -10,6 +10,7 @@ from typing import Any
from typing import cast
from uuid import UUID
from langchain_core.callbacks.manager import dispatch_custom_event
from langchain_core.messages import BaseMessage
from langchain_core.messages import HumanMessage
from sqlalchemy.orm import Session
@@ -23,6 +24,8 @@ from onyx.chat.models import AnswerStyleConfig
from onyx.chat.models import CitationConfig
from onyx.chat.models import DocumentPruningConfig
from onyx.chat.models import PromptConfig
from onyx.chat.models import StreamStopInfo
from onyx.chat.models import StreamStopReason
from onyx.chat.prompt_builder.answer_prompt_builder import AnswerPromptBuilder
from onyx.configs.chat_configs import CHAT_TARGET_CHUNK_PERCENTAGE
from onyx.configs.chat_configs import MAX_CHUNKS_FED_TO_CHAT
@@ -285,5 +288,14 @@ def dispatch_separated(
return streamed_tokens
def dispatch_main_answer_stop_info(level: int) -> None:
    """Signal that the main answer stream for the given agent level is done.

    Emits a "stream_finished" custom event carrying a StreamStopInfo payload
    (stream_type "main_answer", reason FINISHED) so downstream consumers can
    stop rendering tokens for that level.
    """
    dispatch_custom_event(
        "stream_finished",
        StreamStopInfo(
            stop_reason=StreamStopReason.FINISHED,
            stream_type="main_answer",
            level=level,
        ),
    )
def get_today_prompt() -> str:
    """Return DATE_PROMPT filled with today's date, e.g. 'Friday, January 24, 2025'."""
    today = datetime.now().strftime("%A, %B %d, %Y")
    return DATE_PROMPT.format(date=today)

View File

@@ -70,7 +70,7 @@ class StreamStopReason(Enum):
class StreamStopInfo(BaseModel):
stop_reason: StreamStopReason
stream_type: Literal["", "sub_questions", "sub_answer", "main_answer"] = ""
# used to identify the stream that was stopped for agent search
level: int | None = None
level_question_nr: int | None = None