From 470c4d15dd89937f1a604963d41938515f463e13 Mon Sep 17 00:00:00 2001
From: Evan Lohn
Date: Wed, 22 Jan 2025 16:44:47 -0800
Subject: [PATCH] reworked history messages in agent config

---
 .../main/nodes/agent_path_decision.py         |  2 +-
 .../main/nodes/generate_initial_answer.py     |  2 +-
 .../main/nodes/generate_refined_answer.py     |  2 +-
 .../nodes/initial_sub_question_creation.py    |  2 +-
 .../nodes/refined_sub_question_creation.py    |  2 +-
 .../shared_graph_utils/agent_prompt_ops.py    | 39 ++++++++++---------
 6 files changed, 25 insertions(+), 24 deletions(-)

diff --git a/backend/onyx/agents/agent_search/deep_search_a/main/nodes/agent_path_decision.py b/backend/onyx/agents/agent_search/deep_search_a/main/nodes/agent_path_decision.py
index 639464e83c99..fd487f8c414f 100644
--- a/backend/onyx/agents/agent_search/deep_search_a/main/nodes/agent_path_decision.py
+++ b/backend/onyx/agents/agent_search/deep_search_a/main/nodes/agent_path_decision.py
@@ -33,7 +33,7 @@ def agent_path_decision(state: MainState, config: RunnableConfig) -> RoutingDeci
         agent_a_config.perform_initial_search_path_decision
     )
 
-    history = build_history_prompt(config["metadata"]["config"].message_history)
+    history = build_history_prompt(agent_a_config.prompt_builder)
 
     logger.debug(f"--------{now_start}--------DECIDING TO SEARCH OR GO TO LLM---")
 
diff --git a/backend/onyx/agents/agent_search/deep_search_a/main/nodes/generate_initial_answer.py b/backend/onyx/agents/agent_search/deep_search_a/main/nodes/generate_initial_answer.py
index 5020f456f853..b77f2ecafa21 100644
--- a/backend/onyx/agents/agent_search/deep_search_a/main/nodes/generate_initial_answer.py
+++ b/backend/onyx/agents/agent_search/deep_search_a/main/nodes/generate_initial_answer.py
@@ -62,7 +62,7 @@ def generate_initial_answer(
     question = agent_a_config.search_request.query
     persona_prompt = get_persona_prompt(agent_a_config.search_request.persona)
 
-    history = build_history_prompt(agent_a_config.message_history)
+    history = build_history_prompt(agent_a_config.prompt_builder)
 
     sub_question_docs = state.documents
     all_original_question_documents = state.all_original_question_documents
diff --git a/backend/onyx/agents/agent_search/deep_search_a/main/nodes/generate_refined_answer.py b/backend/onyx/agents/agent_search/deep_search_a/main/nodes/generate_refined_answer.py
index b7dc5cd73c75..f071716dffa9 100644
--- a/backend/onyx/agents/agent_search/deep_search_a/main/nodes/generate_refined_answer.py
+++ b/backend/onyx/agents/agent_search/deep_search_a/main/nodes/generate_refined_answer.py
@@ -59,7 +59,7 @@ def generate_refined_answer(
     question = agent_a_config.search_request.query
     persona_prompt = get_persona_prompt(agent_a_config.search_request.persona)
 
-    history = build_history_prompt(agent_a_config.message_history)
+    history = build_history_prompt(agent_a_config.prompt_builder)
 
     initial_documents = state.documents
     revised_documents = state.refined_documents
diff --git a/backend/onyx/agents/agent_search/deep_search_a/main/nodes/initial_sub_question_creation.py b/backend/onyx/agents/agent_search/deep_search_a/main/nodes/initial_sub_question_creation.py
index 10069425cf0a..5402f97d4769 100644
--- a/backend/onyx/agents/agent_search/deep_search_a/main/nodes/initial_sub_question_creation.py
+++ b/backend/onyx/agents/agent_search/deep_search_a/main/nodes/initial_sub_question_creation.py
@@ -50,7 +50,7 @@ def initial_sub_question_creation(
     perform_initial_search_path_decision = (
         agent_a_config.perform_initial_search_path_decision
     )
-    history = build_history_prompt(agent_a_config.message_history)
+    history = build_history_prompt(agent_a_config.prompt_builder)
 
     # Use the initial search results to inform the decomposition
     sample_doc_str = state.sample_doc_str if hasattr(state, "sample_doc_str") else ""
diff --git a/backend/onyx/agents/agent_search/deep_search_a/main/nodes/refined_sub_question_creation.py b/backend/onyx/agents/agent_search/deep_search_a/main/nodes/refined_sub_question_creation.py
index 5b3cde984974..5f80d1cd56cd 100644
--- a/backend/onyx/agents/agent_search/deep_search_a/main/nodes/refined_sub_question_creation.py
+++ b/backend/onyx/agents/agent_search/deep_search_a/main/nodes/refined_sub_question_creation.py
@@ -50,7 +50,7 @@ def refined_sub_question_creation(
     question = agent_a_config.search_request.query
     base_answer = state.initial_answer
 
-    history = build_history_prompt(agent_a_config.message_history)
+    history = build_history_prompt(agent_a_config.prompt_builder)
 
     # get the entity term extraction dict and properly format it
     entity_retlation_term_extractions = state.entity_retlation_term_extractions
diff --git a/backend/onyx/agents/agent_search/shared_graph_utils/agent_prompt_ops.py b/backend/onyx/agents/agent_search/shared_graph_utils/agent_prompt_ops.py
index b1741a167199..b97af9246678 100644
--- a/backend/onyx/agents/agent_search/shared_graph_utils/agent_prompt_ops.py
+++ b/backend/onyx/agents/agent_search/shared_graph_utils/agent_prompt_ops.py
@@ -5,9 +5,9 @@ from langchain_core.messages.tool import ToolMessage
 
 from onyx.agents.agent_search.shared_graph_utils.prompts import BASE_RAG_PROMPT_v2
 from onyx.agents.agent_search.shared_graph_utils.prompts import HISTORY_PROMPT
+from onyx.chat.prompt_builder.answer_prompt_builder import AnswerPromptBuilder
 from onyx.context.search.models import InferenceSection
 from onyx.llm.interfaces import LLMConfig
-from onyx.llm.models import PreviousMessage
 from onyx.llm.utils import get_max_input_tokens
 from onyx.natural_language_processing.utils import get_tokenizer
 from onyx.natural_language_processing.utils import tokenizer_trim_content
@@ -67,23 +67,24 @@ def trim_prompt_piece(config: LLMConfig, prompt_piece: str, reserved_str: str) -
     )
 
 
-def build_history_prompt(message_history: list[PreviousMessage] | None) -> str:
-    if message_history is None:
+def build_history_prompt(prompt_builder: AnswerPromptBuilder | None) -> str:
+    if prompt_builder is None:
         return ""
-    history = ""
-    previous_message_type = None
-    for message in message_history:
-        if "user" in message.message_type:
-            history += f"User: {message.message}\n"
-            previous_message_type = "user"
-        elif "assistant" in message.message_type:
-            # only use the initial agent answer for the history
-            if previous_message_type != "assistant":
-                history += f"You/Agent: {message.message}\n"
-                previous_message_type = "assistant"
-        else:
-            continue
-    if len(history) > 0:
-        return HISTORY_PROMPT.format(history=history)
+
+    if prompt_builder.single_message_history is not None:
+        history = prompt_builder.single_message_history
     else:
-        return ""
+        history = ""
+        previous_message_type = None
+        for message in prompt_builder.raw_message_history:
+            if "user" in message.message_type:
+                history += f"User: {message.message}\n"
+                previous_message_type = "user"
+            elif "assistant" in message.message_type:
+                # only use the initial agent answer for the history
+                if previous_message_type != "assistant":
+                    history += f"You/Agent: {message.message}\n"
+                    previous_message_type = "assistant"
+            else:
+                continue
+    return HISTORY_PROMPT.format(history=history) if history else ""
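
A self-contained sketch of the behavior the reworked helper implements, for reference. The
single_message_history and raw_message_history attribute names come from the patch above;
HISTORY_PROMPT, FakeMessage, and FakePromptBuilder below are illustrative stand-ins, not the
real Onyx constants or the AnswerPromptBuilder class.

# Illustrative stand-ins only; this sketch just demonstrates the fallback order of the helper:
# prefer a pre-condensed history string, otherwise replay user turns and the first assistant turn.
from dataclasses import dataclass, field

HISTORY_PROMPT = "Previous conversation:\n{history}"  # stand-in prompt template


@dataclass
class FakeMessage:
    message_type: str
    message: str


@dataclass
class FakePromptBuilder:
    # mirrors the two attributes build_history_prompt reads from AnswerPromptBuilder
    single_message_history: str | None = None
    raw_message_history: list[FakeMessage] = field(default_factory=list)


def build_history_prompt(prompt_builder: FakePromptBuilder | None) -> str:
    if prompt_builder is None:
        return ""

    if prompt_builder.single_message_history is not None:
        # a pre-condensed history string takes precedence over replaying raw messages
        history = prompt_builder.single_message_history
    else:
        history = ""
        previous_message_type = None
        for message in prompt_builder.raw_message_history:
            if "user" in message.message_type:
                history += f"User: {message.message}\n"
                previous_message_type = "user"
            elif "assistant" in message.message_type:
                # only use the initial agent answer for the history
                if previous_message_type != "assistant":
                    history += f"You/Agent: {message.message}\n"
                    previous_message_type = "assistant"
            else:
                continue
    return HISTORY_PROMPT.format(history=history) if history else ""


if __name__ == "__main__":
    builder = FakePromptBuilder(
        raw_message_history=[
            FakeMessage("user", "What changed in the agent config?"),
            FakeMessage("assistant", "History now comes from the prompt builder."),
            FakeMessage("assistant", "A follow-up assistant message that is skipped."),
        ]
    )
    print(build_history_prompt(builder))   # only the first assistant turn is kept
    print(repr(build_history_prompt(None)))  # -> ''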