Assistant Prompt length + client side (#4433)

pablonyx 2025-04-03 11:26:53 -07:00 committed by GitHub
parent 8c3a953b7a
commit 93886f0e2c
16 changed files with 211 additions and 360 deletions

View File

@@ -0,0 +1,50 @@
"""update prompt length

Revision ID: 4794bc13e484
Revises: f7505c5b0284
Create Date: 2025-04-02 11:26:36.180328

"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = "4794bc13e484"
down_revision = "f7505c5b0284"
branch_labels = None
depends_on = None


def upgrade() -> None:
    op.alter_column(
        "prompt",
        "system_prompt",
        existing_type=sa.TEXT(),
        type_=sa.String(length=5000000),
        existing_nullable=False,
    )
    op.alter_column(
        "prompt",
        "task_prompt",
        existing_type=sa.TEXT(),
        type_=sa.String(length=5000000),
        existing_nullable=False,
    )


def downgrade() -> None:
    op.alter_column(
        "prompt",
        "system_prompt",
        existing_type=sa.String(length=5000000),
        type_=sa.TEXT(),
        existing_nullable=False,
    )
    op.alter_column(
        "prompt",
        "task_prompt",
        existing_type=sa.String(length=5000000),
        type_=sa.TEXT(),
        existing_nullable=False,
    )
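For reference, a minimal sketch of driving this revision from Python via Alembic's command API rather than the CLI (the alembic.ini path is an assumption about the local setup, not something this commit adds; running `alembic upgrade head` from the backend directory achieves the same thing):

from alembic import command
from alembic.config import Config

# Assumed config location; adjust to wherever the project's alembic.ini lives.
cfg = Config("alembic.ini")
command.upgrade(cfg, "4794bc13e484")  # prompt.system_prompt / task_prompt: TEXT -> VARCHAR(5000000)
# command.downgrade(cfg, "f7505c5b0284")  # roll back to the prior revision if needed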

View File

@@ -5,8 +5,6 @@ Revises: 6a804aeb4830
 Create Date: 2025-04-01 15:07:14.977435

 """
-from alembic import op
-import sqlalchemy as sa


 # revision identifiers, used by Alembic.
@@ -17,34 +15,36 @@ depends_on = None


 def upgrade() -> None:
-    op.alter_column(
-        "prompt",
-        "system_prompt",
-        existing_type=sa.TEXT(),
-        type_=sa.String(length=8000),
-        existing_nullable=False,
-    )
-    op.alter_column(
-        "prompt",
-        "task_prompt",
-        existing_type=sa.TEXT(),
-        type_=sa.String(length=8000),
-        existing_nullable=False,
-    )
+    # op.alter_column(
+    #     "prompt",
+    #     "system_prompt",
+    #     existing_type=sa.TEXT(),
+    #     type_=sa.String(length=8000),
+    #     existing_nullable=False,
+    # )
+    # op.alter_column(
+    #     "prompt",
+    #     "task_prompt",
+    #     existing_type=sa.TEXT(),
+    #     type_=sa.String(length=8000),
+    #     existing_nullable=False,
+    # )
+    pass


 def downgrade() -> None:
-    op.alter_column(
-        "prompt",
-        "system_prompt",
-        existing_type=sa.String(length=8000),
-        type_=sa.TEXT(),
-        existing_nullable=False,
-    )
-    op.alter_column(
-        "prompt",
-        "task_prompt",
-        existing_type=sa.String(length=8000),
-        type_=sa.TEXT(),
-        existing_nullable=False,
-    )
+    # op.alter_column(
+    #     "prompt",
+    #     "system_prompt",
+    #     existing_type=sa.String(length=8000),
+    #     type_=sa.TEXT(),
+    #     existing_nullable=False,
+    # )
+    # op.alter_column(
+    #     "prompt",
+    #     "task_prompt",
+    #     existing_type=sa.String(length=8000),
+    #     type_=sa.TEXT(),
+    #     existing_nullable=False,
+    # )
+    pass

View File

@@ -37,8 +37,8 @@ from onyx.db.models import UserFile
 from onyx.db.models import UserFolder
 from onyx.db.models import UserGroup
 from onyx.db.notification import create_notification
+from onyx.server.features.persona.models import FullPersonaSnapshot
 from onyx.server.features.persona.models import PersonaSharedNotificationData
-from onyx.server.features.persona.models import PersonaSnapshot
 from onyx.server.features.persona.models import PersonaUpsertRequest
 from onyx.utils.logger import setup_logger
 from onyx.utils.variable_functionality import fetch_versioned_implementation
@@ -201,7 +201,7 @@ def create_update_persona(
     create_persona_request: PersonaUpsertRequest,
     user: User | None,
     db_session: Session,
-) -> PersonaSnapshot:
+) -> FullPersonaSnapshot:
     """Higher level function than upsert_persona, although either is valid to use."""

     # Permission to actually use these is checked later
@@ -271,7 +271,7 @@ def create_update_persona(
         logger.exception("Failed to create persona")
         raise HTTPException(status_code=400, detail=str(e))

-    return PersonaSnapshot.from_model(persona)
+    return FullPersonaSnapshot.from_model(persona)


 def update_persona_shared_users(

View File

@@ -43,6 +43,7 @@ from onyx.file_store.models import ChatFileType
 from onyx.secondary_llm_flows.starter_message_creation import (
     generate_starter_messages,
 )
+from onyx.server.features.persona.models import FullPersonaSnapshot
 from onyx.server.features.persona.models import GenerateStarterMessageRequest
 from onyx.server.features.persona.models import ImageGenerationToolStatus
 from onyx.server.features.persona.models import PersonaLabelCreate
@@ -424,8 +425,8 @@ def get_persona(
     persona_id: int,
     user: User | None = Depends(current_limited_user),
     db_session: Session = Depends(get_session),
-) -> PersonaSnapshot:
-    return PersonaSnapshot.from_model(
+) -> FullPersonaSnapshot:
+    return FullPersonaSnapshot.from_model(
         get_persona_by_id(
             persona_id=persona_id,
             user=user,

View File

@@ -91,37 +91,80 @@ class PersonaUpsertRequest(BaseModel):
 class PersonaSnapshot(BaseModel):
     id: int
-    owner: MinimalUserSnapshot | None
     name: str
-    is_visible: bool
-    is_public: bool
-    display_priority: int | None
     description: str
-    num_chunks: float | None
-    llm_relevance_filter: bool
-    llm_filter_extraction: bool
-    llm_model_provider_override: str | None
-    llm_model_version_override: str | None
-    starter_messages: list[StarterMessage] | None
-    builtin_persona: bool
-    prompts: list[PromptSnapshot]
-    tools: list[ToolSnapshot]
-    document_sets: list[DocumentSet]
-    users: list[MinimalUserSnapshot]
-    groups: list[int]
-    icon_color: str | None
-    icon_shape: int | None
+    is_public: bool
+    is_visible: bool
+    icon_shape: int | None = None
+    icon_color: str | None = None
     uploaded_image_id: str | None = None
-    is_default_persona: bool
+    user_file_ids: list[int] = Field(default_factory=list)
+    user_folder_ids: list[int] = Field(default_factory=list)
+    display_priority: int | None = None
+    is_default_persona: bool = False
+    builtin_persona: bool = False
+    starter_messages: list[StarterMessage] | None = None
+    tools: list[ToolSnapshot] = Field(default_factory=list)
+    labels: list["PersonaLabelSnapshot"] = Field(default_factory=list)
+    owner: MinimalUserSnapshot | None = None
+    users: list[MinimalUserSnapshot] = Field(default_factory=list)
+    groups: list[int] = Field(default_factory=list)
+    document_sets: list[DocumentSet] = Field(default_factory=list)
+    llm_model_provider_override: str | None = None
+    llm_model_version_override: str | None = None
+    num_chunks: float | None = None
+
+    @classmethod
+    def from_model(cls, persona: Persona) -> "PersonaSnapshot":
+        return PersonaSnapshot(
+            id=persona.id,
+            name=persona.name,
+            description=persona.description,
+            is_public=persona.is_public,
+            is_visible=persona.is_visible,
+            icon_shape=persona.icon_shape,
+            icon_color=persona.icon_color,
+            uploaded_image_id=persona.uploaded_image_id,
+            user_file_ids=[file.id for file in persona.user_files],
+            user_folder_ids=[folder.id for folder in persona.user_folders],
+            display_priority=persona.display_priority,
+            is_default_persona=persona.is_default_persona,
+            builtin_persona=persona.builtin_persona,
+            starter_messages=persona.starter_messages,
+            tools=[ToolSnapshot.from_model(tool) for tool in persona.tools],
+            labels=[PersonaLabelSnapshot.from_model(label) for label in persona.labels],
+            owner=(
+                MinimalUserSnapshot(id=persona.user.id, email=persona.user.email)
+                if persona.user
+                else None
+            ),
+            users=[
+                MinimalUserSnapshot(id=user.id, email=user.email)
+                for user in persona.users
+            ],
+            groups=[user_group.id for user_group in persona.groups],
+            document_sets=[
+                DocumentSet.from_model(document_set_model)
+                for document_set_model in persona.document_sets
+            ],
+            llm_model_provider_override=persona.llm_model_provider_override,
+            llm_model_version_override=persona.llm_model_version_override,
+            num_chunks=persona.num_chunks,
+        )
+
+
+# Model with full context on perona's internal settings
+# This is used for flows which need to know all settings
+class FullPersonaSnapshot(PersonaSnapshot):
     search_start_date: datetime | None = None
-    labels: list["PersonaLabelSnapshot"] = []
-    user_file_ids: list[int] | None = None
-    user_folder_ids: list[int] | None = None
+    prompts: list[PromptSnapshot] = Field(default_factory=list)
+    llm_relevance_filter: bool = False
+    llm_filter_extraction: bool = False

     @classmethod
     def from_model(
         cls, persona: Persona, allow_deleted: bool = False
-    ) -> "PersonaSnapshot":
+    ) -> "FullPersonaSnapshot":
         if persona.deleted:
             error_msg = f"Persona with ID {persona.id} has been deleted"
             if not allow_deleted:
@@ -129,44 +172,32 @@ class PersonaSnapshot(BaseModel):
             else:
                 logger.warning(error_msg)

-        return PersonaSnapshot(
+        return FullPersonaSnapshot(
             id=persona.id,
             name=persona.name,
+            description=persona.description,
+            is_public=persona.is_public,
+            is_visible=persona.is_visible,
+            icon_shape=persona.icon_shape,
+            icon_color=persona.icon_color,
+            uploaded_image_id=persona.uploaded_image_id,
+            user_file_ids=[file.id for file in persona.user_files],
+            user_folder_ids=[folder.id for folder in persona.user_folders],
+            display_priority=persona.display_priority,
+            is_default_persona=persona.is_default_persona,
+            builtin_persona=persona.builtin_persona,
+            starter_messages=persona.starter_messages,
+            tools=[ToolSnapshot.from_model(tool) for tool in persona.tools],
+            labels=[PersonaLabelSnapshot.from_model(label) for label in persona.labels],
             owner=(
                 MinimalUserSnapshot(id=persona.user.id, email=persona.user.email)
                 if persona.user
                 else None
             ),
-            is_visible=persona.is_visible,
-            is_public=persona.is_public,
-            display_priority=persona.display_priority,
-            description=persona.description,
-            num_chunks=persona.num_chunks,
+            search_start_date=persona.search_start_date,
+            prompts=[PromptSnapshot.from_model(prompt) for prompt in persona.prompts],
             llm_relevance_filter=persona.llm_relevance_filter,
             llm_filter_extraction=persona.llm_filter_extraction,
-            llm_model_provider_override=persona.llm_model_provider_override,
-            llm_model_version_override=persona.llm_model_version_override,
-            starter_messages=persona.starter_messages,
-            builtin_persona=persona.builtin_persona,
-            is_default_persona=persona.is_default_persona,
-            prompts=[PromptSnapshot.from_model(prompt) for prompt in persona.prompts],
-            tools=[ToolSnapshot.from_model(tool) for tool in persona.tools],
-            document_sets=[
-                DocumentSet.from_model(document_set_model)
-                for document_set_model in persona.document_sets
-            ],
-            users=[
-                MinimalUserSnapshot(id=user.id, email=user.email)
-                for user in persona.users
-            ],
-            groups=[user_group.id for user_group in persona.groups],
-            icon_color=persona.icon_color,
-            icon_shape=persona.icon_shape,
-            uploaded_image_id=persona.uploaded_image_id,
-            search_start_date=persona.search_start_date,
-            labels=[PersonaLabelSnapshot.from_model(label) for label in persona.labels],
-            user_file_ids=[file.id for file in persona.user_files],
-            user_folder_ids=[folder.id for folder in persona.user_folders],
         )
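The effect of the split above: PersonaSnapshot is now the slim payload used for listings and sharing, while FullPersonaSnapshot adds prompts, search_start_date, and the LLM filter flags for flows that need the complete configuration (the persona editor endpoint, create_update_persona, and the Slack channel config below). A minimal usage sketch, assuming `persona` is an onyx.db.models.Persona row already loaded elsewhere (e.g. via get_persona_by_id):

slim = PersonaSnapshot.from_model(persona)
full = FullPersonaSnapshot.from_model(persona, allow_deleted=False)

# The slim model intentionally carries no prompt details...
assert "prompts" not in PersonaSnapshot.model_fields
# ...while the full model includes them and still works wherever the slim one is expected.
assert full.prompts == [PromptSnapshot.from_model(p) for p in persona.prompts]
assert isinstance(full, PersonaSnapshot)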

View File

@@ -19,6 +19,7 @@ from onyx.db.models import SlackBot as SlackAppModel
 from onyx.db.models import SlackChannelConfig as SlackChannelConfigModel
 from onyx.db.models import User
 from onyx.onyxbot.slack.config import VALID_SLACK_FILTERS
+from onyx.server.features.persona.models import FullPersonaSnapshot
 from onyx.server.features.persona.models import PersonaSnapshot
 from onyx.server.models import FullUserSnapshot
 from onyx.server.models import InvitedUserSnapshot
@@ -245,7 +246,7 @@ class SlackChannelConfig(BaseModel):
             id=slack_channel_config_model.id,
             slack_bot_id=slack_channel_config_model.slack_bot_id,
             persona=(
-                PersonaSnapshot.from_model(
+                FullPersonaSnapshot.from_model(
                     slack_channel_config_model.persona, allow_deleted=True
                 )
                 if slack_channel_config_model.persona

View File

@@ -4,7 +4,7 @@ from uuid import uuid4
 import requests

 from onyx.context.search.enums import RecencyBiasSetting
-from onyx.server.features.persona.models import PersonaSnapshot
+from onyx.server.features.persona.models import FullPersonaSnapshot
 from onyx.server.features.persona.models import PersonaUpsertRequest
 from tests.integration.common_utils.constants import API_SERVER_URL
 from tests.integration.common_utils.constants import GENERAL_HEADERS
@@ -181,7 +181,7 @@ class PersonaManager:
     @staticmethod
     def get_all(
         user_performing_action: DATestUser | None = None,
-    ) -> list[PersonaSnapshot]:
+    ) -> list[FullPersonaSnapshot]:
         response = requests.get(
             f"{API_SERVER_URL}/admin/persona",
             headers=user_performing_action.headers
@@ -189,13 +189,13 @@
             else GENERAL_HEADERS,
         )
         response.raise_for_status()
-        return [PersonaSnapshot(**persona) for persona in response.json()]
+        return [FullPersonaSnapshot(**persona) for persona in response.json()]

     @staticmethod
     def get_one(
         persona_id: int,
         user_performing_action: DATestUser | None = None,
-    ) -> list[PersonaSnapshot]:
+    ) -> list[FullPersonaSnapshot]:
         response = requests.get(
             f"{API_SERVER_URL}/persona/{persona_id}",
             headers=user_performing_action.headers
@@ -203,7 +203,7 @@
             else GENERAL_HEADERS,
         )
         response.raise_for_status()
-        return [PersonaSnapshot(**response.json())]
+        return [FullPersonaSnapshot(**response.json())]

     @staticmethod
     def verify(

View File

@ -42,9 +42,7 @@ import Link from "next/link";
import { useRouter, useSearchParams } from "next/navigation"; import { useRouter, useSearchParams } from "next/navigation";
import { useEffect, useMemo, useState } from "react"; import { useEffect, useMemo, useState } from "react";
import * as Yup from "yup"; import * as Yup from "yup";
import CollapsibleSection from "./CollapsibleSection"; import { FullPersona, PersonaLabel, StarterMessage } from "./interfaces";
import { SuccessfulPersonaUpdateRedirectType } from "./enums";
import { Persona, PersonaLabel, StarterMessage } from "./interfaces";
import { import {
PersonaUpsertParameters, PersonaUpsertParameters,
createPersona, createPersona,
@ -101,6 +99,7 @@ import { SEARCH_TOOL_ID } from "@/app/chat/tools/constants";
import TextView from "@/components/chat/TextView"; import TextView from "@/components/chat/TextView";
import { MinimalOnyxDocument } from "@/lib/search/interfaces"; import { MinimalOnyxDocument } from "@/lib/search/interfaces";
import { TabToggle } from "@/components/ui/TabToggle"; import { TabToggle } from "@/components/ui/TabToggle";
import { MAX_CHARACTERS_PERSONA_DESCRIPTION } from "@/lib/constants";
function findSearchTool(tools: ToolSnapshot[]) { function findSearchTool(tools: ToolSnapshot[]) {
return tools.find((tool) => tool.in_code_tool_id === SEARCH_TOOL_ID); return tools.find((tool) => tool.in_code_tool_id === SEARCH_TOOL_ID);
@ -136,7 +135,7 @@ export function AssistantEditor({
shouldAddAssistantToUserPreferences, shouldAddAssistantToUserPreferences,
admin, admin,
}: { }: {
existingPersona?: Persona | null; existingPersona?: FullPersona | null;
ccPairs: CCPairBasicInfo[]; ccPairs: CCPairBasicInfo[];
documentSets: DocumentSet[]; documentSets: DocumentSet[];
user: User | null; user: User | null;
@ -184,8 +183,6 @@ export function AssistantEditor({
} }
}, [defaultIconShape]); }, [defaultIconShape]);
const [isIconDropdownOpen, setIsIconDropdownOpen] = useState(false);
const [removePersonaImage, setRemovePersonaImage] = useState(false); const [removePersonaImage, setRemovePersonaImage] = useState(false);
const autoStarterMessageEnabled = useMemo( const autoStarterMessageEnabled = useMemo(
@ -462,12 +459,12 @@ export function AssistantEditor({
"Must provide a description for the Assistant" "Must provide a description for the Assistant"
), ),
system_prompt: Yup.string().max( system_prompt: Yup.string().max(
8000, MAX_CHARACTERS_PERSONA_DESCRIPTION,
"Instructions must be less than 8000 characters" "Instructions must be less than 5000000 characters"
), ),
task_prompt: Yup.string().max( task_prompt: Yup.string().max(
8000, MAX_CHARACTERS_PERSONA_DESCRIPTION,
"Reminders must be less than 8000 characters" "Reminders must be less than 5000000 characters"
), ),
is_public: Yup.boolean().required(), is_public: Yup.boolean().required(),
document_set_ids: Yup.array().of(Yup.number()), document_set_ids: Yup.array().of(Yup.number()),

View File

@@ -18,35 +18,37 @@ export interface Prompt {
   datetime_aware: boolean;
   default_prompt: boolean;
 }

 export interface Persona {
   id: number;
   name: string;
-  search_start_date: Date | null;
-  owner: MinimalUserSnapshot | null;
-  is_visible: boolean;
-  is_public: boolean;
-  display_priority: number | null;
   description: string;
-  document_sets: DocumentSet[];
-  prompts: Prompt[];
-  tools: ToolSnapshot[];
-  num_chunks?: number;
-  llm_relevance_filter?: boolean;
-  llm_filter_extraction?: boolean;
-  llm_model_provider_override?: string;
-  llm_model_version_override?: string;
-  starter_messages: StarterMessage[] | null;
-  builtin_persona: boolean;
-  is_default_persona: boolean;
-  users: MinimalUserSnapshot[];
-  groups: number[];
+  is_public: boolean;
+  is_visible: boolean;
   icon_shape?: number;
   icon_color?: string;
   uploaded_image_id?: string;
-  labels?: PersonaLabel[];
   user_file_ids: number[];
   user_folder_ids: number[];
+  display_priority: number | null;
+  is_default_persona: boolean;
+  builtin_persona: boolean;
+  starter_messages: StarterMessage[] | null;
+  tools: ToolSnapshot[];
+  labels?: PersonaLabel[];
+  owner: MinimalUserSnapshot | null;
+  users: MinimalUserSnapshot[];
+  groups: number[];
+  document_sets: DocumentSet[];
+  llm_model_provider_override?: string;
+  llm_model_version_override?: string;
+  num_chunks?: number;
+}
+
+export interface FullPersona extends Persona {
+  search_start_date: Date | null;
+  prompts: Prompt[];
+  llm_relevance_filter?: boolean;
+  llm_filter_extraction?: boolean;
 }

 export interface PersonaLabel {

View File

@@ -331,28 +331,3 @@ export function providersContainImageGeneratingSupport(
 ) {
   return providers.some((provider) => provider.provider === "openai");
 }
-
-// Default fallback persona for when we must display a persona
-// but assistant has access to none
-export const defaultPersona: Persona = {
-  id: 0,
-  name: "Default Assistant",
-  description: "A default assistant",
-  is_visible: true,
-  is_public: true,
-  builtin_persona: false,
-  is_default_persona: true,
-  users: [],
-  groups: [],
-  document_sets: [],
-  prompts: [],
-  tools: [],
-  starter_messages: null,
-  display_priority: null,
-  search_start_date: null,
-  owner: null,
-  icon_shape: 50910,
-  icon_color: "#FF6F6F",
-  user_file_ids: [],
-  user_folder_ids: [],
-};

View File

@@ -1383,7 +1383,7 @@ export function ChatPage({
           regenerationRequest?.parentMessage.messageId ||
           lastSuccessfulMessageId,
         chatSessionId: currChatSessionId,
-        promptId: liveAssistant?.prompts[0]?.id || 0,
+        promptId: null,
         filters: buildFilters(
           filterManager.selectedSources,
           filterManager.selectedDocumentSets,

View File

@ -9,11 +9,6 @@ import { redirect } from "next/navigation";
import { BackendChatSession } from "../../interfaces"; import { BackendChatSession } from "../../interfaces";
import { SharedChatDisplay } from "./SharedChatDisplay"; import { SharedChatDisplay } from "./SharedChatDisplay";
import { Persona } from "@/app/admin/assistants/interfaces"; import { Persona } from "@/app/admin/assistants/interfaces";
import {
FetchAssistantsResponse,
fetchAssistantsSS,
} from "@/lib/assistants/fetchAssistantsSS";
import { defaultPersona } from "@/app/admin/assistants/lib";
import { constructMiniFiedPersona } from "@/lib/assistantIconUtils"; import { constructMiniFiedPersona } from "@/lib/assistantIconUtils";
async function getSharedChat(chatId: string) { async function getSharedChat(chatId: string) {

View File

@@ -167,9 +167,7 @@ export const constructMiniFiedPersona = (
     display_priority: 0,
     description: "",
     document_sets: [],
-    prompts: [],
     tools: [],
-    search_start_date: null,
     owner: null,
     starter_messages: null,
     builtin_persona: false,

View File

@@ -1,4 +1,4 @@
-import { Persona } from "@/app/admin/assistants/interfaces";
+import { FullPersona, Persona } from "@/app/admin/assistants/interfaces";
 import { CCPairBasicInfo, DocumentSet, User } from "../types";
 import { getCurrentUserSS } from "../userSS";
 import { fetchSS } from "../utilsSS";
@@ -18,7 +18,7 @@ export async function fetchAssistantEditorInfoSS(
       documentSets: DocumentSet[];
       llmProviders: LLMProviderView[];
       user: User | null;
-      existingPersona: Persona | null;
+      existingPersona: FullPersona | null;
       tools: ToolSnapshot[];
     },
     null,
@@ -94,7 +94,7 @@ export async function fetchAssistantEditorInfoSS(
   }

   const existingPersona = personaResponse
-    ? ((await personaResponse.json()) as Persona)
+    ? ((await personaResponse.json()) as FullPersona)
     : null;

   let error: string | null = null;

View File

@@ -105,3 +105,5 @@ export const ALLOWED_URL_PROTOCOLS = [
   "spotify:",
   "zoommtg:",
 ];
+
+export const MAX_CHARACTERS_PERSONA_DESCRIPTION = 5000000;

View File

@@ -1,201 +0,0 @@
import {
  BackendMessage,
  LLMRelevanceFilterPacket,
} from "@/app/chat/interfaces";
import {
  AnswerPiecePacket,
  OnyxDocument,
  ErrorMessagePacket,
  DocumentInfoPacket,
  Quote,
  QuotesInfoPacket,
  RelevanceChunk,
  SearchRequestArgs,
} from "./interfaces";
import { processRawChunkString } from "./streamingUtils";
import { buildFilters, endsWithLetterOrNumber } from "./utils";

export const searchRequestStreamed = async ({
  query,
  sources,
  documentSets,
  timeRange,
  tags,
  persona,
  agentic,
  updateCurrentAnswer,
  updateQuotes,
  updateDocs,
  updateSuggestedSearchType,
  updateSuggestedFlowType,
  updateSelectedDocIndices,
  updateError,
  updateMessageAndThreadId,
  finishedSearching,
  updateDocumentRelevance,
  updateComments,
}: SearchRequestArgs) => {
  let answer = "";
  let quotes: Quote[] | null = null;
  let relevantDocuments: OnyxDocument[] | null = null;
  try {
    const filters = buildFilters(sources, documentSets, timeRange, tags);

    const threadMessage = {
      message: query,
      sender: null,
      role: "user",
    };

    const response = await fetch("/api/query/stream-answer-with-quote", {
      method: "POST",
      body: JSON.stringify({
        messages: [threadMessage],
        persona_id: persona.id,
        agentic,
        prompt_id: persona.id === 0 ? null : persona.prompts[0]?.id,
        retrieval_options: {
          run_search: "always",
          real_time: true,
          filters: filters,
          enable_auto_detect_filters: false,
        },
        evaluation_type: agentic ? "agentic" : "basic",
      }),
      headers: {
        "Content-Type": "application/json",
      },
    });

    const reader = response.body?.getReader();
    const decoder = new TextDecoder("utf-8");

    let previousPartialChunk: string | null = null;
    while (true) {
      const rawChunk = await reader?.read();
      if (!rawChunk) {
        throw new Error("Unable to process chunk");
      }
      const { done, value } = rawChunk;
      if (done) {
        break;
      }

      // Process each chunk as it arrives
      const [completedChunks, partialChunk] = processRawChunkString<
        | AnswerPiecePacket
        | ErrorMessagePacket
        | QuotesInfoPacket
        | DocumentInfoPacket
        | LLMRelevanceFilterPacket
        | BackendMessage
        | DocumentInfoPacket
        | RelevanceChunk
      >(decoder.decode(value, { stream: true }), previousPartialChunk);
      if (!completedChunks.length && !partialChunk) {
        break;
      }
      previousPartialChunk = partialChunk as string | null;

      completedChunks.forEach((chunk) => {
        // check for answer piece / end of answer
        if (Object.hasOwn(chunk, "relevance_summaries")) {
          const relevanceChunk = chunk as RelevanceChunk;
          updateDocumentRelevance(relevanceChunk.relevance_summaries);
        }

        if (Object.hasOwn(chunk, "answer_piece")) {
          const answerPiece = (chunk as AnswerPiecePacket).answer_piece;
          if (answerPiece !== null) {
            answer += (chunk as AnswerPiecePacket).answer_piece;
            updateCurrentAnswer(answer);
          } else {
            // set quotes as non-null to signify that the answer is finished and
            // we're now looking for quotes
            updateQuotes([]);
            if (
              answer &&
              !answer.endsWith(".") &&
              !answer.endsWith("?") &&
              !answer.endsWith("!") &&
              endsWithLetterOrNumber(answer)
            ) {
              answer += ".";
              updateCurrentAnswer(answer);
            }
          }
          return;
        }

        if (Object.hasOwn(chunk, "error")) {
          updateError((chunk as ErrorMessagePacket).error);
          return;
        }

        // These all come together
        if (Object.hasOwn(chunk, "top_documents")) {
          chunk = chunk as DocumentInfoPacket;
          const topDocuments = chunk.top_documents as OnyxDocument[] | null;
          if (topDocuments) {
            relevantDocuments = topDocuments;
            updateDocs(relevantDocuments);
          }

          if (chunk.predicted_flow) {
            updateSuggestedFlowType(chunk.predicted_flow);
          }
          if (chunk.predicted_search) {
            updateSuggestedSearchType(chunk.predicted_search);
          }
          return;
        }

        if (Object.hasOwn(chunk, "relevant_chunk_indices")) {
          const relevantChunkIndices = (chunk as LLMRelevanceFilterPacket)
            .relevant_chunk_indices;
          if (relevantChunkIndices) {
            updateSelectedDocIndices(relevantChunkIndices);
          }
          return;
        }

        // Check for quote section
        if (Object.hasOwn(chunk, "quotes")) {
          quotes = (chunk as QuotesInfoPacket).quotes;
          updateQuotes(quotes);
          return;
        }

        // Check for the final chunk
        if (Object.hasOwn(chunk, "message_id")) {
          const backendChunk = chunk as BackendMessage;
          updateComments(backendChunk.comments);
          updateMessageAndThreadId(
            backendChunk.message_id,
            backendChunk.chat_session_id
          );
        }
      });
    }
  } catch (err) {
    console.error("Fetch error:", err);
    let errorMessage = "An error occurred while fetching the answer.";
    if (err instanceof Error) {
      if (err.message.includes("rate_limit_error")) {
        errorMessage =
          "Rate limit exceeded. Please try again later or reduce the length of your query.";
      } else {
        errorMessage = err.message;
      }
    }
    updateError(errorMessage);
  }
  return { answer, quotes, relevantDocuments };
};