Agent search history displayed answer (#4052)

evan-danswer 2025-02-19 15:52:16 -08:00 committed by GitHub
parent 1690dc45ba
commit e304ec4ab6
7 changed files with 61 additions and 6 deletions

View File

@@ -190,7 +190,8 @@ def create_chat_chain(
             and previous_message.message_type == MessageType.ASSISTANT
             and mainline_messages
         ):
-            mainline_messages[-1] = current_message
+            if current_message.refined_answer_improvement:
+                mainline_messages[-1] = current_message
         else:
             mainline_messages.append(current_message)
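The chain builder previously always collapsed consecutive assistant messages by overwriting the last mainline entry. Now the refined agent answer replaces the initial one only when it was flagged as an improvement; otherwise the initial answer, which is what the user actually saw, stays at the tip of the chain. A standalone toy sketch of that rule, where Message and the sample strings are assumptions rather than the real ORM model:

from dataclasses import dataclass


@dataclass
class Message:
    text: str
    refined_answer_improvement: bool | None = None


def merge_assistant(mainline: list[Message], current: Message) -> None:
    # Mirrors the branch above: replace only on improvement; otherwise keep
    # the initially displayed answer and drop the refinement from the mainline.
    if current.refined_answer_improvement:
        mainline[-1] = current


chain = [Message("initial agent answer")]
merge_assistant(chain, Message("refined answer", refined_answer_improvement=False))
assert [m.text for m in chain] == ["initial agent answer"]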

View File

@@ -142,6 +142,15 @@ class MessageResponseIDInfo(BaseModel):
     reserved_assistant_message_id: int


+class AgentMessageIDInfo(BaseModel):
+    level: int
+    message_id: int
+
+
+class AgenticMessageResponseIDInfo(BaseModel):
+    agentic_message_ids: list[AgentMessageIDInfo]
+
+
 class StreamingError(BaseModel):
     error: str
     stack_trace: str | None = None
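These two models describe the new packet that carries the database IDs of the per-level agentic answers. A quick illustration (not part of the diff, IDs invented) of the wire shape they serialize to; the top-level agentic_message_ids key is what the frontend later sniffs for:

from pydantic import BaseModel


class AgentMessageIDInfo(BaseModel):
    level: int
    message_id: int


class AgenticMessageResponseIDInfo(BaseModel):
    agentic_message_ids: list[AgentMessageIDInfo]


packet = AgenticMessageResponseIDInfo(
    agentic_message_ids=[AgentMessageIDInfo(level=1, message_id=43)]
)
print(packet.model_dump_json())
# {"agentic_message_ids":[{"level":1,"message_id":43}]}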

View File

@@ -11,6 +11,8 @@ from onyx.agents.agent_search.orchestration.nodes.call_tool import ToolCallExcep
 from onyx.chat.answer import Answer
 from onyx.chat.chat_utils import create_chat_chain
 from onyx.chat.chat_utils import create_temporary_persona
+from onyx.chat.models import AgenticMessageResponseIDInfo
+from onyx.chat.models import AgentMessageIDInfo
 from onyx.chat.models import AgentSearchPacket
 from onyx.chat.models import AllCitations
 from onyx.chat.models import AnswerPostInfo
@@ -308,6 +310,7 @@ ChatPacket = (
     | CustomToolResponse
     | MessageSpecificCitations
     | MessageResponseIDInfo
+    | AgenticMessageResponseIDInfo
     | StreamStopInfo
     | AgentSearchPacket
 )
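With AgenticMessageResponseIDInfo added to the ChatPacket union, stream consumers can narrow on it by type. A small sketch of that dispatch, assuming the two models are importable from onyx.chat.models; the one-packet list stands in for a real ChatPacket stream:

from onyx.chat.models import AgenticMessageResponseIDInfo
from onyx.chat.models import AgentMessageIDInfo

packets = [
    AgenticMessageResponseIDInfo(
        agentic_message_ids=[AgentMessageIDInfo(level=1, message_id=43)]
    )
]
ids_by_level: dict[int, int] = {}
for packet in packets:
    if isinstance(packet, AgenticMessageResponseIDInfo):
        ids_by_level = {
            info.level: info.message_id for info in packet.agentic_message_ids
        }
assert ids_by_level == {1: 43}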
@@ -1035,6 +1038,7 @@ def stream_chat_message_objects(
             next_level = 1
             prev_message = gen_ai_response_message
             agent_answers = answer.llm_answer_by_level()
+            agentic_message_ids = []
             while next_level in agent_answers:
                 next_answer = agent_answers[next_level]
                 info = info_by_subq[
@@ -1059,17 +1063,18 @@ def stream_chat_message_objects(
                     refined_answer_improvement=refined_answer_improvement,
                     is_agentic=True,
                 )
+                agentic_message_ids.append(
+                    AgentMessageIDInfo(level=next_level, message_id=next_answer_message.id)
+                )

                 next_level += 1
                 prev_message = next_answer_message

         logger.debug("Committing messages")
         db_session.commit()  # actually save user / assistant message

-        msg_detail_response = translate_db_message_to_chat_message_detail(
-            gen_ai_response_message
-        )
-        yield msg_detail_response
+        yield AgenticMessageResponseIDInfo(agentic_message_ids=agentic_message_ids)
+        yield translate_db_message_to_chat_message_detail(gen_ai_response_message)
     except Exception as e:
         error_msg = str(e)
         logger.exception(error_msg)
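Two changes land at the tail of the generator: the loop now records the persisted message ID of each refinement level, and after the commit those IDs are emitted as a single packet before the final ChatMessageDetail (the msg_detail_response temporary is inlined along the way). A self-contained toy of that ordering contract; FakeDetail is an assumption standing in for ChatMessageDetail, and the IDs are invented:

from typing import Iterator

from pydantic import BaseModel


class AgentMessageIDInfo(BaseModel):
    level: int
    message_id: int


class AgenticMessageResponseIDInfo(BaseModel):
    agentic_message_ids: list[AgentMessageIDInfo]


class FakeDetail(BaseModel):  # stand-in for ChatMessageDetail
    message: str


def tail() -> Iterator[BaseModel]:
    ids = [AgentMessageIDInfo(level=1, message_id=43)]
    yield AgenticMessageResponseIDInfo(agentic_message_ids=ids)  # IDs first
    yield FakeDetail(message="refined answer")  # then the final detail


packets = list(tail())
assert isinstance(packets[0], AgenticMessageResponseIDInfo)

The ordering matters: the frontend captures secondLevelMessageId from the ID packet and only then assembles the final message list.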

View File

@@ -23,6 +23,7 @@ class PreviousMessage(BaseModel):
     message_type: MessageType
     files: list[InMemoryChatFile]
     tool_call: ToolCallFinalResult | None
+    refined_answer_improvement: bool | None

     @classmethod
     def from_chat_message(
@@ -47,6 +48,7 @@
             )
             if chat_message.tool_call
             else None,
+            refined_answer_improvement=chat_message.refined_answer_improvement,
         )

     def to_langchain_msg(self) -> BaseMessage:
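PreviousMessage is the lightweight view of the ORM ChatMessage handed to downstream consumers, so the new flag has to be copied across when one is built from a chat message. A minimal sketch of that round-trip, with FakeChatMessage as an assumed stand-in for the SQLAlchemy model:

from pydantic import BaseModel


class FakeChatMessage(BaseModel):  # stand-in for the ORM ChatMessage
    refined_answer_improvement: bool | None = None


class PreviousMessageSketch(BaseModel):
    refined_answer_improvement: bool | None

    @classmethod
    def from_chat_message(cls, msg: FakeChatMessage) -> "PreviousMessageSketch":
        return cls(refined_answer_improvement=msg.refined_answer_improvement)


prev = PreviousMessageSketch.from_chat_message(
    FakeChatMessage(refined_answer_improvement=False)
)
assert prev.refined_answer_improvement is False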

View File

@@ -23,6 +23,7 @@ import {
   SubQuestionDetail,
   constructSubQuestions,
   DocumentsResponse,
+  AgenticMessageResponseIDInfo,
 } from "./interfaces";
 import Prism from "prismjs";
@@ -1280,6 +1281,8 @@ export function ChatPage({
     let toolCall: ToolCallMetadata | null = null;
     let isImprovement: boolean | undefined = undefined;
     let isStreamingQuestions = true;
+    let includeAgentic = false;
+    let secondLevelMessageId: number | null = null;

     let initialFetchDetails: null | {
       user_message_id: number;
@@ -1417,6 +1420,17 @@ export function ChatPage({
             resetRegenerationState();
           } else {
             const { user_message_id, frozenMessageMap } = initialFetchDetails;
+            if (Object.hasOwn(packet, "agentic_message_ids")) {
+              const agenticMessageIds = (packet as AgenticMessageResponseIDInfo)
+                .agentic_message_ids;
+              const level1MessageId = agenticMessageIds.find(
+                (item) => item.level === 1
+              )?.message_id;
+              if (level1MessageId) {
+                secondLevelMessageId = level1MessageId;
+                includeAgentic = true;
+              }
+            }

             setChatState((prevState) => {
               if (prevState.get(chatSessionIdRef.current!) === "loading") {
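Unlike the backend's type-based dispatch, the frontend identifies this packet structurally, by the presence of the agentic_message_ids key, and then pulls out the level-1 message ID. The same check rendered in Python over the decoded JSON, with an invented payload:

import json

raw = '{"agentic_message_ids": [{"level": 1, "message_id": 43}]}'
packet = json.loads(raw)
if "agentic_message_ids" in packet:
    level_1_id = next(
        (m["message_id"] for m in packet["agentic_message_ids"] if m["level"] == 1),
        None,
    )
    # level_1_id == 43; in the hunk above it becomes secondLevelMessageId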
@@ -1667,6 +1681,19 @@ export function ChatPage({
                 second_level_generating: second_level_generating,
                 agentic_docs: agenticDocs,
               },
+              ...(includeAgentic
+                ? [
+                    {
+                      messageId: secondLevelMessageId!,
+                      message: second_level_answer,
+                      type: "assistant" as const,
+                      files: [],
+                      toolCall: null,
+                      parentMessageId:
+                        initialFetchDetails.assistant_message_id!,
+                    },
+                  ]
+                : []),
             ]);
           }
         }

View File

@@ -155,6 +155,15 @@ export interface MessageResponseIDInfo {
   reserved_assistant_message_id: number;
 }

+export interface AgentMessageIDInfo {
+  level: number;
+  message_id: number;
+}
+
+export interface AgenticMessageResponseIDInfo {
+  agentic_message_ids: AgentMessageIDInfo[];
+}
+
 export interface DocumentsResponse {
   top_documents: OnyxDocument[];
   rephrased_query: string | null;

View File

@@ -25,6 +25,7 @@ import {
   RetrievalType,
   StreamingError,
   ToolCallMetadata,
+  AgenticMessageResponseIDInfo,
 } from "./interfaces";
 import { Persona } from "../admin/assistants/interfaces";
 import { ReadonlyURLSearchParams } from "next/navigation";
@@ -154,7 +155,8 @@ export type PacketType =
   | AgentAnswerPiece
   | SubQuestionPiece
   | ExtendedToolResponse
-  | RefinedAnswerImprovement;
+  | RefinedAnswerImprovement
+  | AgenticMessageResponseIDInfo;

 export async function* sendMessage({
   regenerate,