Mirror of https://github.com/danswer-ai/danswer.git
Agent search history displayed answer (#4052)
commit e304ec4ab6 (parent 1690dc45ba)
@@ -190,6 +190,7 @@ def create_chat_chain(
             and previous_message.message_type == MessageType.ASSISTANT
             and mainline_messages
         ):
+            if current_message.refined_answer_improvement:
                 mainline_messages[-1] = current_message
             else:
                 mainline_messages.append(current_message)
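In plain terms, the added check changes how consecutive assistant messages are folded into the mainline chain: a refined (improved) agent answer replaces the previous assistant message, while anything else is appended. A minimal sketch of just that branch, with a hypothetical `Msg` standing in for onyx's ChatMessage:

from dataclasses import dataclass

@dataclass
class Msg:
    # Hypothetical stand-in for onyx's ChatMessage; only the field used here.
    refined_answer_improvement: bool | None = None

def fold_consecutive_assistant(mainline_messages: list[Msg], current_message: Msg) -> None:
    # Branch from the hunk above: current and previous messages are both ASSISTANT.
    if current_message.refined_answer_improvement:
        # The refined answer improved on the initial one: show it instead.
        mainline_messages[-1] = current_message
    else:
        # No improvement flag: keep the earlier answer and append this one.
        mainline_messages.append(current_message)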
@@ -142,6 +142,15 @@ class MessageResponseIDInfo(BaseModel):
     reserved_assistant_message_id: int
 
 
+class AgentMessageIDInfo(BaseModel):
+    level: int
+    message_id: int
+
+
+class AgenticMessageResponseIDInfo(BaseModel):
+    agentic_message_ids: list[AgentMessageIDInfo]
+
+
 class StreamingError(BaseModel):
     error: str
     stack_trace: str | None = None
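For reference when reading the frontend hunks below, a minimal sketch of the new packet's wire shape, assuming Pydantic v2 (the models mirror the hunk above; the id value is made up):

from pydantic import BaseModel

class AgentMessageIDInfo(BaseModel):
    level: int
    message_id: int

class AgenticMessageResponseIDInfo(BaseModel):
    agentic_message_ids: list[AgentMessageIDInfo]

packet = AgenticMessageResponseIDInfo(
    agentic_message_ids=[AgentMessageIDInfo(level=1, message_id=4242)]  # example id
)
print(packet.model_dump_json())
# -> {"agentic_message_ids":[{"level":1,"message_id":4242}]}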
@@ -11,6 +11,8 @@ from onyx.agents.agent_search.orchestration.nodes.call_tool import ToolCallExcep
 from onyx.chat.answer import Answer
 from onyx.chat.chat_utils import create_chat_chain
 from onyx.chat.chat_utils import create_temporary_persona
+from onyx.chat.models import AgenticMessageResponseIDInfo
+from onyx.chat.models import AgentMessageIDInfo
 from onyx.chat.models import AgentSearchPacket
 from onyx.chat.models import AllCitations
 from onyx.chat.models import AnswerPostInfo
@@ -308,6 +310,7 @@ ChatPacket = (
     | CustomToolResponse
     | MessageSpecificCitations
     | MessageResponseIDInfo
+    | AgenticMessageResponseIDInfo
     | StreamStopInfo
     | AgentSearchPacket
 )
@@ -1035,6 +1038,7 @@ def stream_chat_message_objects(
             next_level = 1
             prev_message = gen_ai_response_message
             agent_answers = answer.llm_answer_by_level()
+            agentic_message_ids = []
             while next_level in agent_answers:
                 next_answer = agent_answers[next_level]
                 info = info_by_subq[
@@ -1059,17 +1063,18 @@ def stream_chat_message_objects(
                     refined_answer_improvement=refined_answer_improvement,
                     is_agentic=True,
                 )
+                agentic_message_ids.append(
+                    AgentMessageIDInfo(level=next_level, message_id=next_answer_message.id)
+                )
                 next_level += 1
                 prev_message = next_answer_message
 
         logger.debug("Committing messages")
         db_session.commit()  # actually save user / assistant message
 
-        msg_detail_response = translate_db_message_to_chat_message_detail(
-            gen_ai_response_message
-        )
-
-        yield msg_detail_response
+        yield AgenticMessageResponseIDInfo(agentic_message_ids=agentic_message_ids)
+
+        yield translate_db_message_to_chat_message_detail(gen_ai_response_message)
     except Exception as e:
         error_msg = str(e)
         logger.exception(error_msg)
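Net effect of this hunk: the per-level refined-answer message ids collected in the loop are streamed to the client as one AgenticMessageResponseIDInfo packet after the commit, just before the final chat message detail. A rough sketch of the yield order, reusing the Pydantic models from the earlier sketch; the DB write and the final detail payload are placeholders, not onyx functions:

from collections.abc import Iterator

def stream_tail(agent_answers: dict[int, str]) -> Iterator[object]:
    def persist_answer(level: int, answer: str) -> int:
        # Placeholder for the real DB write that produces next_answer_message.id.
        return 1000 + level

    agentic_message_ids: list[AgentMessageIDInfo] = []
    next_level = 1
    while next_level in agent_answers:
        agentic_message_ids.append(
            AgentMessageIDInfo(
                level=next_level,
                message_id=persist_answer(next_level, agent_answers[next_level]),
            )
        )
        next_level += 1

    # New in this commit: surface every refined answer's id in one packet...
    yield AgenticMessageResponseIDInfo(agentic_message_ids=agentic_message_ids)
    # ...then the final chat message detail, as before (a plain dict stands in here).
    yield {"message": "final answer detail"}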
@@ -23,6 +23,7 @@ class PreviousMessage(BaseModel):
     message_type: MessageType
     files: list[InMemoryChatFile]
     tool_call: ToolCallFinalResult | None
+    refined_answer_improvement: bool | None
 
     @classmethod
     def from_chat_message(
@@ -47,6 +48,7 @@ class PreviousMessage(BaseModel):
             )
             if chat_message.tool_call
             else None,
+            refined_answer_improvement=chat_message.refined_answer_improvement,
         )
 
     def to_langchain_msg(self) -> BaseMessage:
@@ -23,6 +23,7 @@ import {
   SubQuestionDetail,
   constructSubQuestions,
   DocumentsResponse,
+  AgenticMessageResponseIDInfo,
 } from "./interfaces";
 
 import Prism from "prismjs";
@@ -1280,6 +1281,8 @@ export function ChatPage({
       let toolCall: ToolCallMetadata | null = null;
       let isImprovement: boolean | undefined = undefined;
       let isStreamingQuestions = true;
+      let includeAgentic = false;
+      let secondLevelMessageId: number | null = null;
 
       let initialFetchDetails: null | {
         user_message_id: number;
@@ -1417,6 +1420,17 @@ export function ChatPage({
             resetRegenerationState();
           } else {
             const { user_message_id, frozenMessageMap } = initialFetchDetails;
+            if (Object.hasOwn(packet, "agentic_message_ids")) {
+              const agenticMessageIds = (packet as AgenticMessageResponseIDInfo)
+                .agentic_message_ids;
+              const level1MessageId = agenticMessageIds.find(
+                (item) => item.level === 1
+              )?.message_id;
+              if (level1MessageId) {
+                secondLevelMessageId = level1MessageId;
+                includeAgentic = true;
+              }
+            }
 
             setChatState((prevState) => {
               if (prevState.get(chatSessionIdRef.current!) === "loading") {
@@ -1667,6 +1681,19 @@ export function ChatPage({
                   second_level_generating: second_level_generating,
                   agentic_docs: agenticDocs,
                 },
+                ...(includeAgentic
+                  ? [
+                      {
+                        messageId: secondLevelMessageId!,
+                        message: second_level_answer,
+                        type: "assistant" as const,
+                        files: [],
+                        toolCall: null,
+                        parentMessageId:
+                          initialFetchDetails.assistant_message_id!,
+                      },
+                    ]
+                  : []),
               ]);
             }
           }
@@ -155,6 +155,15 @@ export interface MessageResponseIDInfo {
   reserved_assistant_message_id: number;
 }
 
+export interface AgentMessageIDInfo {
+  level: number;
+  message_id: number;
+}
+
+export interface AgenticMessageResponseIDInfo {
+  agentic_message_ids: AgentMessageIDInfo[];
+}
+
 export interface DocumentsResponse {
   top_documents: OnyxDocument[];
   rephrased_query: string | null;
@@ -25,6 +25,7 @@ import {
   RetrievalType,
   StreamingError,
   ToolCallMetadata,
+  AgenticMessageResponseIDInfo,
 } from "./interfaces";
 import { Persona } from "../admin/assistants/interfaces";
 import { ReadonlyURLSearchParams } from "next/navigation";
@@ -154,7 +155,8 @@ export type PacketType =
   | AgentAnswerPiece
   | SubQuestionPiece
   | ExtendedToolResponse
-  | RefinedAnswerImprovement;
+  | RefinedAnswerImprovement
+  | AgenticMessageResponseIDInfo;
 
 export async function* sendMessage({
   regenerate,
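On the client side, the hunks above read this packet, take the level-1 entry as the second-level (refined) assistant message id, and render an extra assistant message for it. The same selection, sketched in Python against an example payload (id values are made up):

import json

raw = '{"agentic_message_ids": [{"level": 1, "message_id": 4242}]}'  # example payload
packet = json.loads(raw)

# Mirrors the TypeScript: agenticMessageIds.find((item) => item.level === 1)?.message_id
second_level_message_id = next(
    (m["message_id"] for m in packet["agentic_message_ids"] if m["level"] == 1),
    None,
)
include_agentic = second_level_message_id is not None
print(second_level_message_id, include_agentic)  # 4242 True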