Mirror of https://github.com/danswer-ai/danswer.git (synced 2025-09-19 20:24:32 +02:00)

Add stop generating functionality (#2100)
* functional types + sidebar
* remove commits
* remove logs
* functional rework of temporary user/assistant ID
* robustify switching
* remove logs
* typing
* robustify frontend handling
* cleaner loop + data persistence
* migrate to streaming response
* formatting
* add new loading state to prevent collisions
* add `ChatState` for more robust handling
* remove logs
* robustify typing
* unnecessary list removed
* robustify
* remove log
* remove false comment
* slightly more robust chat state
* update utility + copy
* improve clarity + new SSE handling utility function
* remove comments
* clearer
* add back stack trace detail
* cleaner messages
* clean final message handling
* tiny formatting (remove newline)
* add synchronous wrapper to avoid hampering main event loop
* update typing
* include logs
* slightly more specific logs
* add `critical` error just in case
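In short: submitting a message now walks a `ChatState` machine ("input" → "loading" → "streaming"/"toolBuilding" → back to "input") and keeps an `AbortController` around so the in-flight stream can be cancelled. A minimal sketch of that pattern, with simplified names rather than the actual component code:

// Minimal sketch of the stop-generation pattern (simplified; not the component code).
type ChatState = "input" | "loading" | "streaming" | "toolBuilding";

let chatState: ChatState = "input";
let abortController: AbortController | null = null;

async function submit(stream: (signal: AbortSignal) => AsyncGenerator<unknown>) {
  if (chatState !== "input") {
    return; // a response is already in flight
  }
  chatState = "loading";
  abortController = new AbortController();
  try {
    for await (const packet of stream(abortController.signal)) {
      chatState = "streaming";
      // ...render the packet...
    }
  } catch (error) {
    // an aborted stream rejects; treat it like a normal stop
  } finally {
    chatState = "input";
  }
}

function stopGeneration() {
  abortController?.abort(); // cancels the fetch backing the stream
}
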
@@ -12,6 +12,7 @@ import {
FileDescriptor,
ImageGenerationDisplay,
Message,
MessageResponseIDInfo,
RetrievalType,
StreamingError,
ToolCallMetadata,
@@ -50,7 +51,7 @@ import { SEARCH_PARAM_NAMES, shouldSubmitOnLoad } from "./searchParams";
import { useDocumentSelection } from "./useDocumentSelection";
import { LlmOverride, useFilters, useLlmOverride } from "@/lib/hooks";
import { computeAvailableFilters } from "@/lib/filters";
import { FeedbackType } from "./types";
import { ChatState, FeedbackType } from "./types";
import { DocumentSidebar } from "./documentSidebar/DocumentSidebar";
import { DanswerInitializingLoader } from "@/components/DanswerInitializingLoader";
import { FeedbackModal } from "./modal/FeedbackModal";
@@ -211,6 +212,27 @@ export function ChatPage({
}
}, [liveAssistant]);

const stopGeneration = () => {
if (abortController) {
abortController.abort();
}
const lastMessage = messageHistory[messageHistory.length - 1];
if (
lastMessage &&
lastMessage.type === "assistant" &&
lastMessage.toolCalls[0] &&
lastMessage.toolCalls[0].tool_result === undefined
) {
const newCompleteMessageMap = new Map(completeMessageDetail.messageMap);
const updatedMessage = { ...lastMessage, toolCalls: [] };
newCompleteMessageMap.set(lastMessage.messageId, updatedMessage);
setCompleteMessageDetail({
sessionId: completeMessageDetail.sessionId,
messageMap: newCompleteMessageMap,
});
}
};

// this is for "@"ing assistants

// this is used to track which assistant is being used to generate the current message
@@ -413,6 +435,7 @@ export function ChatPage({
);
messages[0].parentMessageId = systemMessageId;
}

messages.forEach((message) => {
const idToReplace = replacementsMap?.get(message.messageId);
if (idToReplace) {
@@ -428,7 +451,6 @@ export function ChatPage({
}
newCompleteMessageMap.set(message.messageId, message);
});

// if specified, make these new message the latest of the current message chain
if (makeLatestChildMessage) {
const currentMessageChain = buildLatestMessageChain(
@@ -452,7 +474,8 @@ export function ChatPage({
const messageHistory = buildLatestMessageChain(
completeMessageDetail.messageMap
);
const [isStreaming, setIsStreaming] = useState(false);
const [submittedMessage, setSubmittedMessage] = useState("");
const [chatState, setChatState] = useState<ChatState>("input");
const [abortController, setAbortController] =
useState<AbortController | null>(null);

@@ -663,13 +686,11 @@ export function ChatPage({
params: any
) {
try {
for await (const packetBunch of sendMessage(params)) {
for await (const packet of sendMessage(params)) {
if (params.signal?.aborted) {
throw new Error("AbortError");
}
for (const packet of packetBunch) {
stack.push(packet);
}
stack.push(packet);
}
} catch (error: unknown) {
if (error instanceof Error) {
@@ -709,7 +730,7 @@ export function ChatPage({
isSeededChat?: boolean;
alternativeAssistantOverride?: Persona | null;
} = {}) => {
if (isStreaming) {
if (chatState != "input") {
setPopup({
message: "Please wait for the response to complete",
type: "error",
@@ -718,6 +739,7 @@ export function ChatPage({
return;
}

setChatState("loading");
const controller = new AbortController();
setAbortController(controller);

@@ -757,13 +779,15 @@ export function ChatPage({
"Failed to re-send message - please refresh the page and try again.",
type: "error",
});
setChatState("input");
return;
}

let currMessage = messageToResend ? messageToResend.message : message;
if (messageOverride) {
currMessage = messageOverride;
}

setSubmittedMessage(currMessage);
const currMessageHistory =
messageToResendIndex !== null
? messageHistory.slice(0, messageToResendIndex)
@@ -775,39 +799,6 @@ export function ChatPage({
: null) ||
(messageMap.size === 1 ? Array.from(messageMap.values())[0] : null);

// if we're resending, set the parent's child to null
// we will use tempMessages until the regenerated message is complete
const messageUpdates: Message[] = [
{
messageId: TEMP_USER_MESSAGE_ID,
message: currMessage,
type: "user",
files: currentMessageFiles,
toolCalls: [],
parentMessageId: parentMessage?.messageId || null,
},
];
if (parentMessage) {
messageUpdates.push({
...parentMessage,
childrenMessageIds: (parentMessage.childrenMessageIds || []).concat([
TEMP_USER_MESSAGE_ID,
]),
latestChildMessageId: TEMP_USER_MESSAGE_ID,
});
}
const { messageMap: frozenMessageMap, sessionId: frozenSessionId } =
upsertToCompleteMessageMap({
messages: messageUpdates,
chatSessionId: currChatSessionId,
});

// on initial message send, we insert a dummy system message
// set this as the parent here if no parent is set
if (!parentMessage && frozenMessageMap.size === 2) {
parentMessage = frozenMessageMap.get(SYSTEM_MESSAGE_ID) || null;
}

const currentAssistantId = alternativeAssistantOverride
? alternativeAssistantOverride.id
: alternativeAssistant
@@ -815,8 +806,8 @@ export function ChatPage({
: liveAssistant.id;

resetInputBar();
let messageUpdates: Message[] | null = null;

setIsStreaming(true);
let answer = "";
let query: string | null = null;
let retrievalType: RetrievalType =
@@ -831,6 +822,13 @@ export function ChatPage({
let finalMessage: BackendMessage | null = null;
let toolCalls: ToolCallMetadata[] = [];

let initialFetchDetails: null | {
user_message_id: number;
assistant_message_id: number;
frozenMessageMap: Map<number, Message>;
frozenSessionId: number | null;
} = null;

try {
const lastSuccessfulMessageId =
getLastSuccessfulMessageId(currMessageHistory);
@@ -838,7 +836,6 @@ export function ChatPage({
const stack = new CurrentMessageFIFO();
updateCurrentMessageFIFO(stack, {
signal: controller.signal, // Add this line

message: currMessage,
alternateAssistantId: currentAssistantId,
fileDescriptors: currentMessageFiles,
@@ -875,20 +872,6 @@ export function ChatPage({
useExistingUserMessage: isSeededChat,
});

const updateFn = (messages: Message[]) => {
const replacementsMap = finalMessage
? new Map([
[messages[0].messageId, TEMP_USER_MESSAGE_ID],
[messages[1].messageId, TEMP_ASSISTANT_MESSAGE_ID],
] as [number, number][])
: null;
upsertToCompleteMessageMap({
messages: messages,
replacementsMap: replacementsMap,
completeMessageMapOverride: frozenMessageMap,
chatSessionId: frozenSessionId!,
});
};
const delay = (ms: number) => {
return new Promise((resolve) => setTimeout(resolve, ms));
};
@@ -899,8 +882,71 @@ export function ChatPage({

if (!stack.isEmpty()) {
const packet = stack.nextPacket();
console.log(packet);
if (packet) {
if (!packet) {
continue;
}

if (!initialFetchDetails) {
if (!Object.hasOwn(packet, "user_message_id")) {
console.error(
"First packet should contain message response info "
);
continue;
}

const messageResponseIDInfo = packet as MessageResponseIDInfo;

const user_message_id = messageResponseIDInfo.user_message_id!;
const assistant_message_id =
messageResponseIDInfo.reserved_assistant_message_id;

// we will use tempMessages until the regenerated message is complete
messageUpdates = [
{
messageId: user_message_id,
message: currMessage,
type: "user",
files: currentMessageFiles,
toolCalls: [],
parentMessageId: parentMessage?.messageId || null,
},
];
if (parentMessage) {
messageUpdates.push({
...parentMessage,
childrenMessageIds: (
parentMessage.childrenMessageIds || []
).concat([user_message_id]),
latestChildMessageId: user_message_id,
});
}

const {
messageMap: currentFrozenMessageMap,
sessionId: currentFrozenSessionId,
} = upsertToCompleteMessageMap({
messages: messageUpdates,
chatSessionId: currChatSessionId,
});

const frozenMessageMap = currentFrozenMessageMap;
const frozenSessionId = currentFrozenSessionId;
initialFetchDetails = {
frozenMessageMap,
frozenSessionId,
assistant_message_id,
user_message_id,
};
} else {
const { user_message_id, frozenMessageMap, frozenSessionId } =
initialFetchDetails;
setChatState((chatState) => {
if (chatState == "loading") {
return "streaming";
}
return chatState;
});

if (Object.hasOwn(packet, "answer_piece")) {
answer += (packet as AnswerPiecePacket).answer_piece;
} else if (Object.hasOwn(packet, "top_documents")) {
@@ -910,7 +956,7 @@ export function ChatPage({
if (documents && documents.length > 0) {
// point to the latest message (we don't know the messageId yet, which is why
// we have to use -1)
setSelectedMessageForDocDisplay(TEMP_USER_MESSAGE_ID);
setSelectedMessageForDocDisplay(user_message_id);
}
} else if (Object.hasOwn(packet, "tool_name")) {
toolCalls = [
@@ -920,6 +966,14 @@ export function ChatPage({
tool_result: (packet as ToolCallMetadata).tool_result,
},
];
if (
!toolCalls[0].tool_result ||
toolCalls[0].tool_result == undefined
) {
setChatState("toolBuilding");
} else {
setChatState("streaming");
}
} else if (Object.hasOwn(packet, "file_ids")) {
aiMessageImages = (packet as ImageGenerationDisplay).file_ids.map(
(fileId) => {
@@ -936,23 +990,34 @@ export function ChatPage({
finalMessage = packet as BackendMessage;
}

const newUserMessageId =
finalMessage?.parent_message || TEMP_USER_MESSAGE_ID;
const newAssistantMessageId =
finalMessage?.message_id || TEMP_ASSISTANT_MESSAGE_ID;
// on initial message send, we insert a dummy system message
// set this as the parent here if no parent is set
parentMessage =
parentMessage || frozenMessageMap?.get(SYSTEM_MESSAGE_ID)!;

const updateFn = (messages: Message[]) => {
const replacementsMap = null;
upsertToCompleteMessageMap({
messages: messages,
replacementsMap: replacementsMap,
completeMessageMapOverride: frozenMessageMap,
chatSessionId: frozenSessionId!,
});
};

updateFn([
{
messageId: newUserMessageId,
messageId: initialFetchDetails.user_message_id!,
message: currMessage,
type: "user",
files: currentMessageFiles,
toolCalls: [],
parentMessageId: parentMessage?.messageId || null,
childrenMessageIds: [newAssistantMessageId],
latestChildMessageId: newAssistantMessageId,
parentMessageId: error ? null : lastSuccessfulMessageId,
childrenMessageIds: [initialFetchDetails.assistant_message_id!],
latestChildMessageId: initialFetchDetails.assistant_message_id,
},
{
messageId: newAssistantMessageId,
messageId: initialFetchDetails.assistant_message_id!,
message: error || answer,
type: error ? "error" : "assistant",
retrievalType,
@@ -962,7 +1027,7 @@ export function ChatPage({
citations: finalMessage?.citations || {},
files: finalMessage?.files || aiMessageImages || [],
toolCalls: finalMessage?.tool_calls || toolCalls,
parentMessageId: newUserMessageId,
parentMessageId: initialFetchDetails.user_message_id,
alternateAssistantID: alternativeAssistant?.id,
stackTrace: stackTrace,
},
@@ -975,7 +1040,8 @@ export function ChatPage({
upsertToCompleteMessageMap({
messages: [
{
messageId: TEMP_USER_MESSAGE_ID,
messageId:
initialFetchDetails?.user_message_id || TEMP_USER_MESSAGE_ID,
message: currMessage,
type: "user",
files: currentMessageFiles,
@@ -983,24 +1049,28 @@ export function ChatPage({
parentMessageId: parentMessage?.messageId || SYSTEM_MESSAGE_ID,
},
{
messageId: TEMP_ASSISTANT_MESSAGE_ID,
messageId:
initialFetchDetails?.assistant_message_id ||
TEMP_ASSISTANT_MESSAGE_ID,
message: errorMsg,
type: "error",
files: aiMessageImages || [],
toolCalls: [],
parentMessageId: TEMP_USER_MESSAGE_ID,
parentMessageId:
initialFetchDetails?.user_message_id || TEMP_USER_MESSAGE_ID,
},
],
completeMessageMapOverride: frozenMessageMap,
completeMessageMapOverride: completeMessageDetail.messageMap,
});
}

setIsStreaming(false);
setChatState("input");
if (isNewSession) {
if (finalMessage) {
setSelectedMessageForDocDisplay(finalMessage.message_id);
}

if (!searchParamBasedChatSessionName) {
await new Promise((resolve) => setTimeout(resolve, 200));
await nameChatSession(currChatSessionId, currMessage);
}

@@ -1060,8 +1130,8 @@ export function ChatPage({
const onAssistantChange = (assistant: Persona | null) => {
if (assistant && assistant.id !== liveAssistant.id) {
// Abort the ongoing stream if it exists
if (abortController && isStreaming) {
abortController.abort();
if (chatState != "input") {
stopGeneration();
resetInputBar();
}

@@ -1163,7 +1233,7 @@ export function ChatPage({
});

useScrollonStream({
isStreaming,
chatState,
scrollableDivRef,
scrollDist,
endDivRef,
@@ -1334,6 +1404,7 @@ export function ChatPage({
>
<div className="w-full relative">
<HistorySidebar
stopGenerating={stopGeneration}
reset={() => setMessage("")}
page="chat"
ref={innerSidebarElementRef}
@@ -1407,7 +1478,7 @@ export function ChatPage({

{messageHistory.length === 0 &&
!isFetchingChatMessages &&
!isStreaming && (
chatState == "input" && (
<ChatIntro
availableSources={finalAvailableSources}
selectedPersona={liveAssistant}
@@ -1431,6 +1502,7 @@ export function ChatPage({
return (
<div key={messageReactComponentKey}>
<HumanMessage
stopGenerating={stopGeneration}
content={message.message}
files={message.files}
messageId={message.messageId}
@@ -1483,9 +1555,7 @@ export function ChatPage({
(selectedMessageForDocDisplay !== null &&
selectedMessageForDocDisplay ===
message.messageId) ||
(selectedMessageForDocDisplay ===
TEMP_USER_MESSAGE_ID &&
i === messageHistory.length - 1);
i === messageHistory.length - 1;
const previousMessage =
i !== 0 ? messageHistory[i - 1] : null;

@@ -1534,7 +1604,8 @@ export function ChatPage({
}
isComplete={
i !== messageHistory.length - 1 ||
!isStreaming
(chatState != "streaming" &&
chatState != "toolBuilding")
}
hasDocs={
(message.documents &&
@@ -1542,7 +1613,7 @@ export function ChatPage({
}
handleFeedback={
i === messageHistory.length - 1 &&
isStreaming
chatState != "input"
? undefined
: (feedbackType) =>
setCurrentFeedback([
@@ -1552,7 +1623,7 @@ export function ChatPage({
}
handleSearchQueryEdit={
i === messageHistory.length - 1 &&
!isStreaming
chatState == "input"
? (newQuery) => {
if (!previousMessage) {
setPopup({
@@ -1659,34 +1730,39 @@ export function ChatPage({
);
}
})}
{isStreaming &&
messageHistory.length > 0 &&
messageHistory[messageHistory.length - 1].type ===
{chatState == "loading" &&
messageHistory[messageHistory.length - 1]?.type !=
"user" && (
<div
key={`${messageHistory.length}-${chatSessionIdRef.current}`}
>
<AIMessage
currentPersona={liveAssistant}
alternativeAssistant={
alternativeGeneratingAssistant ??
alternativeAssistant
}
messageId={null}
personaName={liveAssistant.name}
content={
<div
key={"Generating"}
className="mr-auto relative inline-block"
>
<span className="text-sm loading-text">
Thinking...
</span>
</div>
}
/>
</div>
<HumanMessage
messageId={-1}
content={submittedMessage}
/>
)}
{chatState == "loading" && (
<div
key={`${messageHistory.length}-${chatSessionIdRef.current}`}
>
<AIMessage
currentPersona={liveAssistant}
alternativeAssistant={
alternativeGeneratingAssistant ??
alternativeAssistant
}
messageId={null}
personaName={liveAssistant.name}
content={
<div
key={"Generating"}
className="mr-auto relative inline-block"
>
<span className="text-sm loading-text">
Thinking...
</span>
</div>
}
/>
</div>
)}

{currentPersona &&
currentPersona.starter_messages &&
@@ -1748,6 +1824,8 @@ export function ChatPage({
)}

<ChatInputBar
chatState={chatState}
stopGenerating={stopGeneration}
openModelSettings={() => setSettingsToggled(true)}
inputPrompts={userInputPrompts}
showDocs={() => setDocumentSelection(true)}
@@ -1762,7 +1840,6 @@ export function ChatPage({
message={message}
setMessage={setMessage}
onSubmit={onSubmit}
isStreaming={isStreaming}
filterManager={filterManager}
llmOverrideManager={llmOverrideManager}
files={currentMessageFiles}

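The send path above pushes packets into a `CurrentMessageFIFO` from a fire-and-forget task and drains it on a small timer, so parsing the stream never blocks rendering. A rough sketch of that producer/consumer shape; the class name comes from the diff, but the body here is illustrative, not the real implementation:

// Illustrative queue with the same shape as the one used above (not the real class body).
class PacketFIFO<T> {
  private buffer: T[] = [];
  isComplete = false;
  push(packet: T) {
    this.buffer.push(packet);
  }
  isEmpty() {
    return this.buffer.length === 0;
  }
  nextPacket(): T | undefined {
    return this.buffer.shift();
  }
}

// Producer: drain the async generator into the FIFO without awaiting the consumer.
async function fillFIFO<T>(stack: PacketFIFO<T>, source: AsyncGenerator<T>, signal?: AbortSignal) {
  for await (const packet of source) {
    if (signal?.aborted) {
      throw new Error("AbortError");
    }
    stack.push(packet);
  }
  stack.isComplete = true;
}

// Consumer: poll with a tiny delay so the main event loop stays responsive.
async function drainFIFO<T>(stack: PacketFIFO<T>, onPacket: (packet: T) => void) {
  const delay = (ms: number) => new Promise((resolve) => setTimeout(resolve, ms));
  while (!stack.isComplete || !stack.isEmpty()) {
    if (!stack.isEmpty()) {
      const packet = stack.nextPacket();
      if (packet) {
        onPacket(packet);
      }
    } else {
      await delay(2);
    }
  }
}
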
@@ -21,6 +21,7 @@ import {
CpuIconSkeleton,
FileIcon,
SendIcon,
StopGeneratingIcon,
} from "@/components/icons/icons";
import { IconType } from "react-icons";
import Popup from "../../../components/popup/Popup";
@@ -31,6 +32,9 @@ import { AssistantIcon } from "@/components/assistants/AssistantIcon";
import { Tooltip } from "@/components/tooltip/Tooltip";
import { Hoverable } from "@/components/Hoverable";
import { SettingsContext } from "@/components/settings/SettingsProvider";
import { StopCircle } from "@phosphor-icons/react/dist/ssr";
import { Square } from "@phosphor-icons/react";
import { ChatState } from "../types";
const MAX_INPUT_HEIGHT = 200;

export function ChatInputBar({
@@ -39,10 +43,11 @@ export function ChatInputBar({
selectedDocuments,
message,
setMessage,
stopGenerating,
onSubmit,
isStreaming,
filterManager,
llmOverrideManager,
chatState,

// assistants
selectedAssistant,
@@ -59,6 +64,8 @@ export function ChatInputBar({
inputPrompts,
}: {
openModelSettings: () => void;
chatState: ChatState;
stopGenerating: () => void;
showDocs: () => void;
selectedDocuments: DanswerDocument[];
assistantOptions: Persona[];
@@ -68,7 +75,6 @@ export function ChatInputBar({
message: string;
setMessage: (message: string) => void;
onSubmit: () => void;
isStreaming: boolean;
filterManager: FilterManager;
llmOverrideManager: LlmOverrideManager;
selectedAssistant: Persona;
@@ -597,24 +603,38 @@ export function ChatInputBar({
}}
/>
</div>

<div className="absolute bottom-2.5 mobile:right-4 desktop:right-10">
<div
className="cursor-pointer"
onClick={() => {
if (message) {
onSubmit();
}
}}
>
<SendIcon
size={28}
className={`text-emphasis text-white p-1 rounded-full ${
message && !isStreaming
? "bg-background-800"
: "bg-[#D7D7D7]"
}`}
/>
</div>
{chatState == "streaming" ||
chatState == "toolBuilding" ||
chatState == "loading" ? (
<button
className={`cursor-pointer ${chatState != "streaming" ? "bg-background-400" : "bg-background-800"} h-[28px] w-[28px] rounded-full`}
onClick={stopGenerating}
disabled={chatState != "streaming"}
>
<StopGeneratingIcon
size={10}
className={`text-emphasis m-auto text-white flex-none
}`}
/>
</button>
) : (
<button
className="cursor-pointer"
onClick={() => {
if (message) {
onSubmit();
}
}}
disabled={chatState != "input"}
>
<SendIcon
size={28}
className={`text-emphasis text-white p-1 rounded-full ${chatState == "input" && message ? "bg-background-800" : "bg-background-400"} `}
/>
</button>
)}
</div>
</div>
</div>

@@ -118,6 +118,11 @@ export interface BackendMessage {
alternate_assistant_id?: number | null;
}

export interface MessageResponseIDInfo {
user_message_id: number | null;
reserved_assistant_message_id: number;
}

export interface DocumentsResponse {
top_documents: DanswerDocument[];
rephrased_query: string | null;

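The new `MessageResponseIDInfo` interface describes the first packet of the stream: it carries the reserved message IDs before any answer tokens arrive. A hypothetical first packet, plus the key-based check the client uses to recognize it (mirroring the `Object.hasOwn(packet, "user_message_id")` check in ChatPage above):

// Hypothetical first packet of a stream; the numeric IDs are made up.
const firstPacket: MessageResponseIDInfo = {
  user_message_id: 101,
  reserved_assistant_message_id: 102,
};

// Key-based narrowing, as used by the client to tell this packet apart from later ones.
function isMessageResponseIDInfo(packet: object): packet is MessageResponseIDInfo {
  return Object.hasOwn(packet, "user_message_id");
}
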
@@ -3,8 +3,8 @@ import {
DanswerDocument,
Filters,
} from "@/lib/search/interfaces";
import { handleStream } from "@/lib/search/streamingUtils";
import { FeedbackType } from "./types";
import { handleSSEStream, handleStream } from "@/lib/search/streamingUtils";
import { ChatState, FeedbackType } from "./types";
import {
Dispatch,
MutableRefObject,
@@ -20,6 +20,7 @@ import {
FileDescriptor,
ImageGenerationDisplay,
Message,
MessageResponseIDInfo,
RetrievalType,
StreamingError,
ToolCallMetadata,
@@ -109,7 +110,8 @@ export type PacketType =
| AnswerPiecePacket
| DocumentsResponse
| ImageGenerationDisplay
| StreamingError;
| StreamingError
| MessageResponseIDInfo;

export async function* sendMessage({
message,
@@ -127,6 +129,7 @@ export async function* sendMessage({
systemPromptOverride,
useExistingUserMessage,
alternateAssistantId,
signal,
}: {
message: string;
fileDescriptors: FileDescriptor[];
@@ -137,70 +140,69 @@ export async function* sendMessage({
selectedDocumentIds: number[] | null;
queryOverride?: string;
forceSearch?: boolean;
// LLM overrides
modelProvider?: string;
modelVersion?: string;
temperature?: number;
// prompt overrides
systemPromptOverride?: string;
// if specified, will use the existing latest user message
// and will ignore the specified `message`
useExistingUserMessage?: boolean;
alternateAssistantId?: number;
}) {
signal?: AbortSignal;
}): AsyncGenerator<PacketType, void, unknown> {
const documentsAreSelected =
selectedDocumentIds && selectedDocumentIds.length > 0;

const sendMessageResponse = await fetch("/api/chat/send-message", {
const body = JSON.stringify({
alternate_assistant_id: alternateAssistantId,
chat_session_id: chatSessionId,
parent_message_id: parentMessageId,
message: message,
prompt_id: promptId,
search_doc_ids: documentsAreSelected ? selectedDocumentIds : null,
file_descriptors: fileDescriptors,
retrieval_options: !documentsAreSelected
? {
run_search:
promptId === null ||
promptId === undefined ||
queryOverride ||
forceSearch
? "always"
: "auto",
real_time: true,
filters: filters,
}
: null,
query_override: queryOverride,
prompt_override: systemPromptOverride
? {
system_prompt: systemPromptOverride,
}
: null,
llm_override:
temperature || modelVersion
? {
temperature,
model_provider: modelProvider,
model_version: modelVersion,
}
: null,
use_existing_user_message: useExistingUserMessage,
});

const response = await fetch(`/api/chat/send-message`, {
method: "POST",
headers: {
"Content-Type": "application/json",
},
body: JSON.stringify({
alternate_assistant_id: alternateAssistantId,
chat_session_id: chatSessionId,
parent_message_id: parentMessageId,
message: message,
prompt_id: promptId,
search_doc_ids: documentsAreSelected ? selectedDocumentIds : null,
file_descriptors: fileDescriptors,
retrieval_options: !documentsAreSelected
? {
run_search:
promptId === null ||
promptId === undefined ||
queryOverride ||
forceSearch
? "always"
: "auto",
real_time: true,
filters: filters,
}
: null,
query_override: queryOverride,
prompt_override: systemPromptOverride
? {
system_prompt: systemPromptOverride,
}
: null,
llm_override:
temperature || modelVersion
? {
temperature,
model_provider: modelProvider,
model_version: modelVersion,
}
: null,
use_existing_user_message: useExistingUserMessage,
}),
body,
signal,
});
if (!sendMessageResponse.ok) {
const errorJson = await sendMessageResponse.json();
const errorMsg = errorJson.message || errorJson.detail || "";
throw Error(`Failed to send message - ${errorMsg}`);

if (!response.ok) {
throw new Error(`HTTP error! status: ${response.status}`);
}

yield* handleStream<PacketType>(sendMessageResponse);
yield* handleSSEStream<PacketType>(response);
}

export async function nameChatSession(chatSessionId: number, message: string) {
@@ -635,14 +637,14 @@ export async function uploadFilesForChat(
}

export async function useScrollonStream({
isStreaming,
chatState,
scrollableDivRef,
scrollDist,
endDivRef,
distance,
debounce,
}: {
isStreaming: boolean;
chatState: ChatState;
scrollableDivRef: RefObject<HTMLDivElement>;
scrollDist: MutableRefObject<number>;
endDivRef: RefObject<HTMLDivElement>;
@@ -656,7 +658,7 @@ export async function useScrollonStream({
const previousScroll = useRef<number>(0);

useEffect(() => {
if (isStreaming && scrollableDivRef && scrollableDivRef.current) {
if (chatState != "input" && scrollableDivRef && scrollableDivRef.current) {
let newHeight: number = scrollableDivRef.current?.scrollTop!;
const heightDifference = newHeight - previousScroll.current;
previousScroll.current = newHeight;
@@ -712,7 +714,7 @@ export async function useScrollonStream({

// scroll on end of stream if within distance
useEffect(() => {
if (scrollableDivRef?.current && !isStreaming) {
if (scrollableDivRef?.current && chatState == "input") {
if (scrollDist.current < distance - 50) {
scrollableDivRef?.current?.scrollBy({
left: 0,
@@ -721,5 +723,5 @@ export async function useScrollonStream({
});
}
}
}, [isStreaming]);
}, [chatState]);
}

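Because `sendMessage` now accepts a `signal` and is typed as an `AsyncGenerator<PacketType>`, a caller can iterate packets and cancel the underlying fetch at any time. A minimal consumer sketch; the session/prompt values are placeholders and several optional fields are omitted, so the exact required parameters may differ:

// Minimal consumer sketch (placeholder ids; optional fields omitted).
const controller = new AbortController();

async function ask(question: string) {
  let answer = "";
  for await (const packet of sendMessage({
    message: question,
    fileDescriptors: [],
    parentMessageId: null,
    chatSessionId: 1, // placeholder session id
    promptId: null,
    filters: null,
    selectedDocumentIds: null,
    signal: controller.signal, // aborting the controller ends the stream
  })) {
    if (Object.hasOwn(packet, "answer_piece")) {
      answer += (packet as AnswerPiecePacket).answer_piece;
    }
  }
  return answer;
}

// e.g. wired to a stop button elsewhere:
// controller.abort();
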
@@ -255,7 +255,6 @@ export const AIMessage = ({
size="small"
assistant={alternativeAssistant || currentPersona}
/>

<div className="w-full">
<div className="max-w-message-max break-words">
{(!toolCall || toolCall.tool_name === SEARCH_TOOL_NAME) &&
@@ -623,6 +622,7 @@ export const HumanMessage = ({
onEdit,
onMessageSelection,
shared,
stopGenerating = () => null,
}: {
shared?: boolean;
content: string;
@@ -631,6 +631,7 @@ export const HumanMessage = ({
otherMessagesCanSwitchTo?: number[];
onEdit?: (editedContent: string) => void;
onMessageSelection?: (messageId: number) => void;
stopGenerating?: () => void;
}) => {
const textareaRef = useRef<HTMLTextAreaElement>(null);

@@ -677,7 +678,6 @@ export const HumanMessage = ({
<div className="xl:ml-8">
<div className="flex flex-col mr-4">
<FileDisplay alignBubble files={files || []} />

<div className="flex justify-end">
<div className="w-full ml-8 flex w-full max-w-message-max break-words">
{isEditing ? (
@@ -857,16 +857,18 @@ export const HumanMessage = ({
<MessageSwitcher
currentPage={currentMessageInd + 1}
totalPages={otherMessagesCanSwitchTo.length}
handlePrevious={() =>
handlePrevious={() => {
stopGenerating();
onMessageSelection(
otherMessagesCanSwitchTo[currentMessageInd - 1]
)
}
handleNext={() =>
);
}}
handleNext={() => {
stopGenerating();
onMessageSelection(
otherMessagesCanSwitchTo[currentMessageInd + 1]
)
}
);
}}
/>
</div>
)}

@@ -33,6 +33,7 @@ export function ChatSessionDisplay({
isSelected,
skipGradient,
closeSidebar,
stopGenerating = () => null,
showShareModal,
showDeleteModal,
}: {
@@ -43,6 +44,7 @@ export function ChatSessionDisplay({
// if not set, the gradient will still be applied and cause weirdness
skipGradient?: boolean;
closeSidebar?: () => void;
stopGenerating?: () => void;
showShareModal?: (chatSession: ChatSession) => void;
showDeleteModal?: (chatSession: ChatSession) => void;
}) {
@@ -99,6 +101,7 @@ export function ChatSessionDisplay({
className="flex my-1 group relative"
key={chatSession.id}
onClick={() => {
stopGenerating();
if (settings?.isMobile && closeSidebar) {
closeSidebar();
}

@@ -40,6 +40,7 @@ interface HistorySidebarProps {
reset?: () => void;
showShareModal?: (chatSession: ChatSession) => void;
showDeleteModal?: (chatSession: ChatSession) => void;
stopGenerating?: () => void;
}

export const HistorySidebar = forwardRef<HTMLDivElement, HistorySidebarProps>(
@@ -54,6 +55,7 @@ export const HistorySidebar = forwardRef<HTMLDivElement, HistorySidebarProps>(
openedFolders,
toggleSidebar,
removeToggle,
stopGenerating = () => null,
showShareModal,
showDeleteModal,
},
@@ -179,6 +181,7 @@ export const HistorySidebar = forwardRef<HTMLDivElement, HistorySidebarProps>(
)}
<div className="border-b border-border pb-4 mx-3" />
<PagesTab
stopGenerating={stopGenerating}
newFolderId={newFolderId}
showDeleteModal={showDeleteModal}
showShareModal={showShareModal}

@@ -17,10 +17,12 @@ export function PagesTab({
folders,
openedFolders,
closeSidebar,
stopGenerating,
newFolderId,
showShareModal,
showDeleteModal,
}: {
stopGenerating: () => void;
page: pageType;
existingChats?: ChatSession[];
currentChatId?: number;
@@ -124,6 +126,7 @@ export function PagesTab({
return (
<div key={`${chat.id}-${chat.name}`}>
<ChatSessionDisplay
stopGenerating={stopGenerating}
showDeleteModal={showDeleteModal}
showShareModal={showShareModal}
closeSidebar={closeSidebar}

@@ -1 +1,2 @@
export type FeedbackType = "like" | "dislike";
export type ChatState = "input" | "loading" | "streaming" | "toolBuilding";

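The four states map roughly to: idle and ready for input, waiting for the first packet, receiving answer tokens, and waiting on a tool call. Helpers in this style could gate the UI; they are illustrative and not part of the commit:

// Illustrative helpers over the ChatState union above (not part of the commit).
const GENERATING_STATES: ChatState[] = ["loading", "streaming", "toolBuilding"];

export function isGenerating(state: ChatState): boolean {
  return GENERATING_STATES.includes(state);
}

export function canSubmit(state: ChatState, message: string): boolean {
  // mirrors the ChatInputBar behaviour: only submit from the idle "input" state
  return state === "input" && message.length > 0;
}
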
@@ -1763,6 +1763,29 @@ export const FilledLikeIcon = ({
);
};

export const StopGeneratingIcon = ({
size = 16,
className = defaultTailwindCSS,
}: IconProps) => {
return (
<svg
style={{ width: `${size}px`, height: `${size}px` }}
className={`w-[${size}px] h-[${size}px] ` + className}
xmlns="http://www.w3.org/2000/svg"
width="200"
height="200"
viewBox="0 0 14 14"
>
<path
fill="currentColor"
fill-rule="evenodd"
d="M1.5 0A1.5 1.5 0 0 0 0 1.5v11A1.5 1.5 0 0 0 1.5 14h11a1.5 1.5 0 0 0 1.5-1.5v-11A1.5 1.5 0 0 0 12.5 0z"
clip-rule="evenodd"
/>
</svg>
);
};

export const LikeFeedbackIcon = ({
size = 16,
className = defaultTailwindCSS,

@@ -1,3 +1,5 @@
import { PacketType } from "@/app/chat/lib";

type NonEmptyObject = { [k: string]: any };

const processSingleChunk = <T extends NonEmptyObject>(
@@ -75,3 +77,33 @@ export async function* handleStream<T extends NonEmptyObject>(
yield await Promise.resolve(completedChunks);
}
}

export async function* handleSSEStream<T extends PacketType>(
streamingResponse: Response
): AsyncGenerator<T, void, unknown> {
const reader = streamingResponse.body?.getReader();
const decoder = new TextDecoder();

while (true) {
const rawChunk = await reader?.read();
if (!rawChunk) {
throw new Error("Unable to process chunk");
}
const { done, value } = rawChunk;
if (done) {
break;
}

const chunk = decoder.decode(value);
const lines = chunk.split("\n").filter((line) => line.trim() !== "");

for (const line of lines) {
try {
const data = JSON.parse(line) as T;
yield data;
} catch (error) {
console.error("Error parsing SSE data:", error);
}
}
}
}

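`handleSSEStream` reads the response body, splits each chunk on newlines, and yields one parsed JSON object per line, so callers simply iterate the generator. A minimal usage sketch with a simplified request payload:

// Minimal sketch of consuming handleSSEStream (simplified request payload).
async function streamChat(): Promise<void> {
  const response = await fetch("/api/chat/send-message", {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ message: "hello" }), // simplified; the real payload has more fields
  });
  if (!response.ok) {
    throw new Error(`HTTP error! status: ${response.status}`);
  }

  for await (const packet of handleSSEStream<PacketType>(response)) {
    // each packet is one newline-delimited JSON object from the stream
    console.log(packet);
  }
}
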
@@ -90,6 +90,7 @@ module.exports = {
"background-200": "#e5e5e5", // neutral-200
"background-300": "#d4d4d4", // neutral-300
"background-400": "#a3a3a3", // neutral-400
"background-600": "#525252", // neutral-800
"background-500": "#737373", // neutral-400
"background-600": "#525252", // neutral-400
"background-700": "#404040", // neutral-400