Continue Generating (#2286)

* add stop reason

* add initial propagation

* add continue generating full functionality

* proper continue across chat session

* add new look

* propagate proper types

* fix typing

* cleaner continue generating functionality

* update types

* remove unused imports

* proper infodump

* temp

* add standardized stream handling

* validating chosen tool args

* properly handle tools

* proper ports

* remove logs + build

* minor typing fix

* fix more minor typing issues

* add stashed reversion for tool call chunks

* ignore model dump types

* remove stop stream

* fix typing
Authored by pablodanswer on 2024-09-02 15:49:56 -07:00, committed by GitHub
parent 812ca69949
commit 6afcaafe54
12 changed files with 214 additions and 46 deletions

View File

@@ -65,7 +65,12 @@ import { FiArrowDown } from "react-icons/fi";
import { ChatIntro } from "./ChatIntro";
import { AIMessage, HumanMessage } from "./message/Messages";
import { StarterMessage } from "./StarterMessage";
-import { AnswerPiecePacket, DanswerDocument } from "@/lib/search/interfaces";
+import {
+AnswerPiecePacket,
+DanswerDocument,
+StreamStopInfo,
+StreamStopReason,
+} from "@/lib/search/interfaces";
import { buildFilters } from "@/lib/search/utils";
import { SettingsContext } from "@/components/settings/SettingsProvider";
import Dropzone from "react-dropzone";
@@ -94,6 +99,7 @@ import ExceptionTraceModal from "@/components/modals/ExceptionTraceModal";
import { SEARCH_TOOL_NAME } from "./tools/constants";
import { useUser } from "@/components/user/UserProvider";
import { Stop } from "@phosphor-icons/react";
const TEMP_USER_MESSAGE_ID = -1;
const TEMP_ASSISTANT_MESSAGE_ID = -2;
@@ -338,6 +344,7 @@ export function ChatPage({
}
return;
}
clearSelectedDocuments();
setIsFetchingChatMessages(true);
const response = await fetch(
@@ -624,6 +631,24 @@ export function ChatPage({
const currentRegenerationState = (): RegenerationState | null => {
return regenerationState.get(currentSessionId()) || null;
};
const [canContinue, setCanContinue] = useState<Map<number | null, boolean>>(
new Map([[null, false]])
);
const updateCanContinue = (newState: boolean, sessionId?: number | null) => {
setCanContinue((prevState) => {
const newCanContinueState = new Map(prevState);
newCanContinueState.set(
sessionId !== undefined ? sessionId : currentSessionId(),
newState
);
return newCanContinueState;
});
};
const currentCanContinue = (): boolean => {
return canContinue.get(currentSessionId()) || false;
};
const currentSessionChatState = currentChatState();
const currentSessionRegenerationState = currentRegenerationState();
@@ -864,6 +889,13 @@ export function ChatPage({
}
};
const continueGenerating = () => {
onSubmit({
messageOverride:
"Continue Generating (pick up exactly where you left off)",
});
};
const onSubmit = async ({
messageIdToResend,
messageOverride,
@@ -884,6 +916,7 @@ export function ChatPage({
regenerationRequest?: RegenerationRequest | null;
} = {}) => {
let frozenSessionId = currentSessionId();
updateCanContinue(false, frozenSessionId);
if (currentChatState() != "input") {
setPopup({
@@ -978,6 +1011,8 @@ export function ChatPage({
let messageUpdates: Message[] | null = null;
let answer = "";
let stopReason: StreamStopReason | null = null;
let query: string | null = null;
let retrievalType: RetrievalType =
selectedDocuments.length > 0
@@ -1174,6 +1209,11 @@ export function ChatPage({
stackTrace = (packet as StreamingError).stack_trace;
} else if (Object.hasOwn(packet, "message_id")) {
finalMessage = packet as BackendMessage;
} else if (Object.hasOwn(packet, "stop_reason")) {
const stop_reason = (packet as StreamStopInfo).stop_reason;
if (stop_reason === StreamStopReason.CONTEXT_LENGTH) {
updateCanContinue(true, frozenSessionId);
}
}
// on initial message send, we insert a dummy system message
@@ -1237,6 +1277,7 @@ export function ChatPage({
alternateAssistantID: alternativeAssistant?.id,
stackTrace: stackTrace,
overridden_model: finalMessage?.overridden_model,
stopReason: stopReason,
},
]);
}
@@ -1835,6 +1876,12 @@ export function ChatPage({
}
>
<AIMessage
continueGenerating={
i == messageHistory.length - 1 &&
currentCanContinue()
? continueGenerating
: undefined
}
overriddenModel={message.overridden_model}
regenerate={createRegenerator({
messageId: message.messageId,
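
Aside: the `canContinue` state added above keeps one flag per chat session by storing a `Map` in React state and copying it on every update. A minimal sketch of that pattern in isolation (the hook name `useSessionFlag` and its parameter are illustrative, not part of this commit):

```ts
import { useState } from "react";

// Illustrative hook (not part of the commit) distilling the canContinue
// pattern above: one boolean per chat session id, so switching sessions
// never leaks the "continue generating" flag between conversations.
export function useSessionFlag(currentSessionId: () => number | null) {
  const [flags, setFlags] = useState<Map<number | null, boolean>>(
    new Map([[null, false]])
  );

  const updateFlag = (value: boolean, sessionId?: number | null) => {
    setFlags((prev) => {
      // Copy the Map so React sees a new reference and re-renders.
      const next = new Map(prev);
      next.set(sessionId !== undefined ? sessionId : currentSessionId(), value);
      return next;
    });
  };

  const currentFlag = (): boolean => flags.get(currentSessionId()) || false;

  return { updateFlag, currentFlag };
}
```

`updateCanContinue` and `currentCanContinue` in the hunk above follow the same copy-then-set approach, keyed by `currentSessionId()`.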

View File

@@ -2,6 +2,7 @@ import {
DanswerDocument,
Filters,
SearchDanswerDocument,
StreamStopReason,
} from "@/lib/search/interfaces";
export enum RetrievalType {
@@ -89,6 +90,7 @@ export interface Message {
alternateAssistantID?: number | null;
stackTrace?: string | null;
overridden_model?: string;
stopReason?: StreamStopReason | null;
}
export interface BackendChatSession {

View File

@@ -2,6 +2,7 @@ import {
AnswerPiecePacket,
DanswerDocument,
Filters,
StreamStopInfo,
} from "@/lib/search/interfaces";
import { handleSSEStream, handleStream } from "@/lib/search/streamingUtils";
import { ChatState, FeedbackType } from "./types";
@@ -111,7 +112,8 @@ export type PacketType =
| DocumentsResponse
| ImageGenerationDisplay
| StreamingError
-| MessageResponseIDInfo;
+| MessageResponseIDInfo
+| StreamStopInfo;
export async function* sendMessage({
regenerate,
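
Aside: with `StreamStopInfo` added to the `PacketType` union, a stream consumer tells it apart from other packets by checking for the `stop_reason` key, as the ChatPage handler above does with `Object.hasOwn`. A hedged sketch of that check factored into a type guard (the helper names here are illustrative, not part of the commit):

```ts
import { StreamStopInfo, StreamStopReason } from "@/lib/search/interfaces";

// Illustrative type guard: a stream packet is a StreamStopInfo exactly when
// it carries a stop_reason field (mirrors the Object.hasOwn check above).
export function isStreamStopInfo(packet: object): packet is StreamStopInfo {
  return Object.hasOwn(packet, "stop_reason");
}

// Example consumer: surface "Continue Generating" only when the model
// stopped because it ran out of context window.
export function handleStopPacket(packet: object, onContextLimit: () => void) {
  if (
    isStreamStopInfo(packet) &&
    packet.stop_reason === StreamStopReason.CONTEXT_LENGTH
  ) {
    onContextLimit();
  }
}
```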

View File

@@ -0,0 +1,37 @@
import { EmphasizedClickable } from "@/components/BasicClickable";
import { useEffect, useState } from "react";
import { FiBook, FiPlayCircle } from "react-icons/fi";
export function ContinueGenerating({
handleContinueGenerating,
}: {
handleContinueGenerating: () => void;
}) {
const [showExplanation, setShowExplanation] = useState(false);
useEffect(() => {
const timer = setTimeout(() => {
setShowExplanation(true);
}, 1000);
return () => clearTimeout(timer);
}, []);
return (
<div className="flex justify-center w-full">
<div className="relative group">
<EmphasizedClickable onClick={handleContinueGenerating}>
<>
<FiPlayCircle className="mr-2" />
Continue Generation
</>
</EmphasizedClickable>
{showExplanation && (
<div className="absolute bottom-full left-1/2 transform -translate-x-1/2 mb-2 px-3 py-1 bg-gray-800 text-white text-xs rounded-lg opacity-0 group-hover:opacity-100 transition-opacity duration-300 whitespace-nowrap">
LLM reached its token limit. Click to continue.
</div>
)}
</div>
</div>
);
}

View File

@@ -65,6 +65,8 @@ import GeneratingImageDisplay from "../tools/GeneratingImageDisplay";
import RegenerateOption from "../RegenerateOption";
import { LlmOverride } from "@/lib/hooks";
import ExceptionTraceModal from "@/components/modals/ExceptionTraceModal";
import { EmphasizedClickable } from "@/components/BasicClickable";
import { ContinueGenerating } from "./ContinueMessage";
const TOOLS_WITH_CUSTOM_HANDLING = [
SEARCH_TOOL_NAME,
@@ -123,6 +125,7 @@ function FileDisplay({
export const AIMessage = ({
regenerate,
overriddenModel,
continueGenerating,
shared,
isActive,
toggleDocumentSelection,
@@ -150,6 +153,7 @@ export const AIMessage = ({
}: {
shared?: boolean;
isActive?: boolean;
continueGenerating?: () => void;
otherMessagesCanSwitchTo?: number[];
onMessageSelection?: (messageId: number) => void;
selectedDocuments?: DanswerDocument[] | null;
@@ -283,11 +287,12 @@ export const AIMessage = ({
size="small"
assistant={alternativeAssistant || currentPersona}
/>
<div className="w-full">
<div className="max-w-message-max break-words">
<div className="w-full ml-4">
<div className="max-w-message-max break-words">
{(!toolCall || toolCall.tool_name === SEARCH_TOOL_NAME) && (
{!toolCall || toolCall.tool_name === SEARCH_TOOL_NAME ? (
<>
{query !== undefined &&
handleShowRetrieved !== undefined &&
@@ -315,7 +320,8 @@ export const AIMessage = ({
</div>
)}
</>
-)}
+) : null}
{toolCall &&
!TOOLS_WITH_CUSTOM_HANDLING.includes(
toolCall.tool_name
@@ -633,6 +639,11 @@ export const AIMessage = ({
</div>
</div>
</div>
{(!toolCall || toolCall.tool_name === SEARCH_TOOL_NAME) &&
!query &&
continueGenerating && (
<ContinueGenerating handleContinueGenerating={continueGenerating} />
)}
</div>
</div>
);

View File

@@ -27,7 +27,7 @@ export function SkippedSearch({
handleForceSearch: () => void;
}) {
return (
<div className="flex text-sm !pt-0 p-1">
<div className="flex text-sm !pt-0 p-1">
<div className="flex mb-auto">
<FiBook className="my-auto flex-none mr-2" size={14} />
<div className="my-auto cursor-default">

View File

@@ -19,6 +19,15 @@ export interface AnswerPiecePacket {
answer_piece: string;
}
export enum StreamStopReason {
CONTEXT_LENGTH = "CONTEXT_LENGTH",
CANCELLED = "CANCELLED",
}
export interface StreamStopInfo {
stop_reason: StreamStopReason;
}
export interface ErrorMessagePacket {
error: string;
}
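
Aside: because `StreamStopReason` is a string enum, a stop packet on the wire would look like `{"stop_reason": "CONTEXT_LENGTH"}`. The backend side is not included in the hunks above, so the following client-side parser is only a sketch under that assumed wire format:

```ts
import { StreamStopInfo, StreamStopReason } from "@/lib/search/interfaces";

// Assumed wire format: {"stop_reason": "CONTEXT_LENGTH"} or
// {"stop_reason": "CANCELLED"}. Anything else yields null.
export function parseStreamStopInfo(raw: string): StreamStopInfo | null {
  let parsed: unknown;
  try {
    parsed = JSON.parse(raw);
  } catch {
    return null;
  }
  if (
    typeof parsed === "object" &&
    parsed !== null &&
    "stop_reason" in parsed &&
    Object.values(StreamStopReason).includes(
      (parsed as { stop_reason: StreamStopReason }).stop_reason
    )
  ) {
    return parsed as StreamStopInfo;
  }
  return null;
}
```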