Mirror of https://github.com/danswer-ai/danswer.git (synced 2025-06-23 06:21:00 +02:00)
Assistant Prompt length + client side (#4433)

commit 93886f0e2c
parent 8c3a953b7a
@@ -0,0 +1,50 @@
"""update prompt length

Revision ID: 4794bc13e484
Revises: f7505c5b0284
Create Date: 2025-04-02 11:26:36.180328

"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = "4794bc13e484"
down_revision = "f7505c5b0284"
branch_labels = None
depends_on = None


def upgrade() -> None:
    op.alter_column(
        "prompt",
        "system_prompt",
        existing_type=sa.TEXT(),
        type_=sa.String(length=5000000),
        existing_nullable=False,
    )
    op.alter_column(
        "prompt",
        "task_prompt",
        existing_type=sa.TEXT(),
        type_=sa.String(length=5000000),
        existing_nullable=False,
    )


def downgrade() -> None:
    op.alter_column(
        "prompt",
        "system_prompt",
        existing_type=sa.String(length=5000000),
        type_=sa.TEXT(),
        existing_nullable=False,
    )
    op.alter_column(
        "prompt",
        "task_prompt",
        existing_type=sa.String(length=5000000),
        type_=sa.TEXT(),
        existing_nullable=False,
    )
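For reference, a quick standalone sketch (not part of this diff) of what the two column types used above compile to, assuming the PostgreSQL dialect:

import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

# The migration swaps unbounded TEXT for a bounded VARCHAR; on PostgreSQL these render as:
print(sa.String(length=5000000).compile(dialect=postgresql.dialect()))  # VARCHAR(5000000)
print(sa.TEXT().compile(dialect=postgresql.dialect()))                  # TEXT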
@@ -5,8 +5,6 @@ Revises: 6a804aeb4830
Create Date: 2025-04-01 15:07:14.977435

"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
@@ -17,34 +15,36 @@ depends_on = None


def upgrade() -> None:
    op.alter_column(
        "prompt",
        "system_prompt",
        existing_type=sa.TEXT(),
        type_=sa.String(length=8000),
        existing_nullable=False,
    )
    op.alter_column(
        "prompt",
        "task_prompt",
        existing_type=sa.TEXT(),
        type_=sa.String(length=8000),
        existing_nullable=False,
    )
    # op.alter_column(
    #     "prompt",
    #     "system_prompt",
    #     existing_type=sa.TEXT(),
    #     type_=sa.String(length=8000),
    #     existing_nullable=False,
    # )
    # op.alter_column(
    #     "prompt",
    #     "task_prompt",
    #     existing_type=sa.TEXT(),
    #     type_=sa.String(length=8000),
    #     existing_nullable=False,
    # )
    pass


def downgrade() -> None:
    op.alter_column(
        "prompt",
        "system_prompt",
        existing_type=sa.String(length=8000),
        type_=sa.TEXT(),
        existing_nullable=False,
    )
    op.alter_column(
        "prompt",
        "task_prompt",
        existing_type=sa.String(length=8000),
        type_=sa.TEXT(),
        existing_nullable=False,
    )
    # op.alter_column(
    #     "prompt",
    #     "system_prompt",
    #     existing_type=sa.String(length=8000),
    #     type_=sa.TEXT(),
    #     existing_nullable=False,
    # )
    # op.alter_column(
    #     "prompt",
    #     "task_prompt",
    #     existing_type=sa.String(length=8000),
    #     type_=sa.TEXT(),
    #     existing_nullable=False,
    # )
    pass
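A small usage sketch, assuming a standard Alembic setup (the config path below is a placeholder, not taken from this diff), for applying the chain through the new revision programmatically:

from alembic import command
from alembic.config import Config

cfg = Config("alembic.ini")  # placeholder: path to the project's Alembic config
command.upgrade(cfg, "4794bc13e484")  # run migrations up to and including the new revision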
@@ -37,8 +37,8 @@ from onyx.db.models import UserFile
from onyx.db.models import UserFolder
from onyx.db.models import UserGroup
from onyx.db.notification import create_notification
from onyx.server.features.persona.models import FullPersonaSnapshot
from onyx.server.features.persona.models import PersonaSharedNotificationData
from onyx.server.features.persona.models import PersonaSnapshot
from onyx.server.features.persona.models import PersonaUpsertRequest
from onyx.utils.logger import setup_logger
from onyx.utils.variable_functionality import fetch_versioned_implementation
@@ -201,7 +201,7 @@ def create_update_persona(
    create_persona_request: PersonaUpsertRequest,
    user: User | None,
    db_session: Session,
) -> PersonaSnapshot:
) -> FullPersonaSnapshot:
    """Higher level function than upsert_persona, although either is valid to use."""
    # Permission to actually use these is checked later

@@ -271,7 +271,7 @@ def create_update_persona(
        logger.exception("Failed to create persona")
        raise HTTPException(status_code=400, detail=str(e))

    return PersonaSnapshot.from_model(persona)
    return FullPersonaSnapshot.from_model(persona)


def update_persona_shared_users(
@@ -43,6 +43,7 @@ from onyx.file_store.models import ChatFileType
from onyx.secondary_llm_flows.starter_message_creation import (
    generate_starter_messages,
)
from onyx.server.features.persona.models import FullPersonaSnapshot
from onyx.server.features.persona.models import GenerateStarterMessageRequest
from onyx.server.features.persona.models import ImageGenerationToolStatus
from onyx.server.features.persona.models import PersonaLabelCreate
@@ -424,8 +425,8 @@ def get_persona(
    persona_id: int,
    user: User | None = Depends(current_limited_user),
    db_session: Session = Depends(get_session),
) -> PersonaSnapshot:
    return PersonaSnapshot.from_model(
) -> FullPersonaSnapshot:
    return FullPersonaSnapshot.from_model(
        get_persona_by_id(
            persona_id=persona_id,
            user=user,
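A self-contained sketch (simplified stand-ins, not the project's real models or routes) of why the return annotation change above matters: with FastAPI, the declared response model decides which fields get serialized, so annotating the endpoint with the fuller snapshot is what exposes prompt data to the client.

from fastapi import FastAPI
from pydantic import BaseModel

class SlimSnapshot(BaseModel):
    id: int
    name: str

class FullSnapshot(SlimSnapshot):
    prompts: list[str] = []  # extra detail only present on the full model

app = FastAPI()

@app.get("/persona/{persona_id}")
def get_persona(persona_id: int) -> FullSnapshot:
    # The return annotation acts as the response model, so "prompts" is included
    # in the JSON; with SlimSnapshot it would be filtered out.
    return FullSnapshot(id=persona_id, name="demo", prompts=["system prompt"])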
@@ -91,37 +91,80 @@ class PersonaUpsertRequest(BaseModel):

class PersonaSnapshot(BaseModel):
    id: int
    owner: MinimalUserSnapshot | None
    name: str
    is_visible: bool
    is_public: bool
    display_priority: int | None
    description: str
    num_chunks: float | None
    llm_relevance_filter: bool
    llm_filter_extraction: bool
    llm_model_provider_override: str | None
    llm_model_version_override: str | None
    starter_messages: list[StarterMessage] | None
    builtin_persona: bool
    prompts: list[PromptSnapshot]
    tools: list[ToolSnapshot]
    document_sets: list[DocumentSet]
    users: list[MinimalUserSnapshot]
    groups: list[int]
    icon_color: str | None
    icon_shape: int | None
    is_public: bool
    is_visible: bool
    icon_shape: int | None = None
    icon_color: str | None = None
    uploaded_image_id: str | None = None
    is_default_persona: bool
    user_file_ids: list[int] = Field(default_factory=list)
    user_folder_ids: list[int] = Field(default_factory=list)
    display_priority: int | None = None
    is_default_persona: bool = False
    builtin_persona: bool = False
    starter_messages: list[StarterMessage] | None = None
    tools: list[ToolSnapshot] = Field(default_factory=list)
    labels: list["PersonaLabelSnapshot"] = Field(default_factory=list)
    owner: MinimalUserSnapshot | None = None
    users: list[MinimalUserSnapshot] = Field(default_factory=list)
    groups: list[int] = Field(default_factory=list)
    document_sets: list[DocumentSet] = Field(default_factory=list)
    llm_model_provider_override: str | None = None
    llm_model_version_override: str | None = None
    num_chunks: float | None = None

    @classmethod
    def from_model(cls, persona: Persona) -> "PersonaSnapshot":
        return PersonaSnapshot(
            id=persona.id,
            name=persona.name,
            description=persona.description,
            is_public=persona.is_public,
            is_visible=persona.is_visible,
            icon_shape=persona.icon_shape,
            icon_color=persona.icon_color,
            uploaded_image_id=persona.uploaded_image_id,
            user_file_ids=[file.id for file in persona.user_files],
            user_folder_ids=[folder.id for folder in persona.user_folders],
            display_priority=persona.display_priority,
            is_default_persona=persona.is_default_persona,
            builtin_persona=persona.builtin_persona,
            starter_messages=persona.starter_messages,
            tools=[ToolSnapshot.from_model(tool) for tool in persona.tools],
            labels=[PersonaLabelSnapshot.from_model(label) for label in persona.labels],
            owner=(
                MinimalUserSnapshot(id=persona.user.id, email=persona.user.email)
                if persona.user
                else None
            ),
            users=[
                MinimalUserSnapshot(id=user.id, email=user.email)
                for user in persona.users
            ],
            groups=[user_group.id for user_group in persona.groups],
            document_sets=[
                DocumentSet.from_model(document_set_model)
                for document_set_model in persona.document_sets
            ],
            llm_model_provider_override=persona.llm_model_provider_override,
            llm_model_version_override=persona.llm_model_version_override,
            num_chunks=persona.num_chunks,
        )


# Model with full context on perona's internal settings
# This is used for flows which need to know all settings
class FullPersonaSnapshot(PersonaSnapshot):
    search_start_date: datetime | None = None
    labels: list["PersonaLabelSnapshot"] = []
    user_file_ids: list[int] | None = None
    user_folder_ids: list[int] | None = None
    prompts: list[PromptSnapshot] = Field(default_factory=list)
    llm_relevance_filter: bool = False
    llm_filter_extraction: bool = False

    @classmethod
    def from_model(
        cls, persona: Persona, allow_deleted: bool = False
    ) -> "PersonaSnapshot":
    ) -> "FullPersonaSnapshot":
        if persona.deleted:
            error_msg = f"Persona with ID {persona.id} has been deleted"
            if not allow_deleted:
@@ -129,44 +172,32 @@ class PersonaSnapshot(BaseModel):
            else:
                logger.warning(error_msg)

        return PersonaSnapshot(
        return FullPersonaSnapshot(
            id=persona.id,
            name=persona.name,
            description=persona.description,
            is_public=persona.is_public,
            is_visible=persona.is_visible,
            icon_shape=persona.icon_shape,
            icon_color=persona.icon_color,
            uploaded_image_id=persona.uploaded_image_id,
            user_file_ids=[file.id for file in persona.user_files],
            user_folder_ids=[folder.id for folder in persona.user_folders],
            display_priority=persona.display_priority,
            is_default_persona=persona.is_default_persona,
            builtin_persona=persona.builtin_persona,
            starter_messages=persona.starter_messages,
            tools=[ToolSnapshot.from_model(tool) for tool in persona.tools],
            labels=[PersonaLabelSnapshot.from_model(label) for label in persona.labels],
            owner=(
                MinimalUserSnapshot(id=persona.user.id, email=persona.user.email)
                if persona.user
                else None
            ),
            is_visible=persona.is_visible,
            is_public=persona.is_public,
            display_priority=persona.display_priority,
            description=persona.description,
            num_chunks=persona.num_chunks,
            search_start_date=persona.search_start_date,
            prompts=[PromptSnapshot.from_model(prompt) for prompt in persona.prompts],
            llm_relevance_filter=persona.llm_relevance_filter,
            llm_filter_extraction=persona.llm_filter_extraction,
            llm_model_provider_override=persona.llm_model_provider_override,
            llm_model_version_override=persona.llm_model_version_override,
            starter_messages=persona.starter_messages,
            builtin_persona=persona.builtin_persona,
            is_default_persona=persona.is_default_persona,
            prompts=[PromptSnapshot.from_model(prompt) for prompt in persona.prompts],
            tools=[ToolSnapshot.from_model(tool) for tool in persona.tools],
            document_sets=[
                DocumentSet.from_model(document_set_model)
                for document_set_model in persona.document_sets
            ],
            users=[
                MinimalUserSnapshot(id=user.id, email=user.email)
                for user in persona.users
            ],
            groups=[user_group.id for user_group in persona.groups],
            icon_color=persona.icon_color,
            icon_shape=persona.icon_shape,
            uploaded_image_id=persona.uploaded_image_id,
            search_start_date=persona.search_start_date,
            labels=[PersonaLabelSnapshot.from_model(label) for label in persona.labels],
            user_file_ids=[file.id for file in persona.user_files],
            user_folder_ids=[folder.id for folder in persona.user_folders],
        )
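The restructuring above gives every PersonaSnapshot field a default and moves the heavier fields (prompts, LLM filter flags, search_start_date) onto the FullPersonaSnapshot subclass. A minimal, self-contained illustration of that pattern with simplified stand-in classes (not the repo's real ones):

from pydantic import BaseModel, Field

class SlimPersona(BaseModel):
    id: int
    name: str
    tools: list[str] = Field(default_factory=list)  # defaults keep the slim model cheap to build

class FullPersona(SlimPersona):
    prompts: list[str] = Field(default_factory=list)  # heavier data lives on the subclass
    llm_relevance_filter: bool = False

print(SlimPersona(id=1, name="Default Assistant"))
print(FullPersona(id=1, name="Default Assistant", prompts=["You are a helpful assistant."]))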
@@ -19,6 +19,7 @@ from onyx.db.models import SlackBot as SlackAppModel
from onyx.db.models import SlackChannelConfig as SlackChannelConfigModel
from onyx.db.models import User
from onyx.onyxbot.slack.config import VALID_SLACK_FILTERS
from onyx.server.features.persona.models import FullPersonaSnapshot
from onyx.server.features.persona.models import PersonaSnapshot
from onyx.server.models import FullUserSnapshot
from onyx.server.models import InvitedUserSnapshot
@@ -245,7 +246,7 @@ class SlackChannelConfig(BaseModel):
            id=slack_channel_config_model.id,
            slack_bot_id=slack_channel_config_model.slack_bot_id,
            persona=(
                PersonaSnapshot.from_model(
                FullPersonaSnapshot.from_model(
                    slack_channel_config_model.persona, allow_deleted=True
                )
                if slack_channel_config_model.persona
@@ -4,7 +4,7 @@ from uuid import uuid4
import requests

from onyx.context.search.enums import RecencyBiasSetting
from onyx.server.features.persona.models import PersonaSnapshot
from onyx.server.features.persona.models import FullPersonaSnapshot
from onyx.server.features.persona.models import PersonaUpsertRequest
from tests.integration.common_utils.constants import API_SERVER_URL
from tests.integration.common_utils.constants import GENERAL_HEADERS
@@ -181,7 +181,7 @@ class PersonaManager:
    @staticmethod
    def get_all(
        user_performing_action: DATestUser | None = None,
    ) -> list[PersonaSnapshot]:
    ) -> list[FullPersonaSnapshot]:
        response = requests.get(
            f"{API_SERVER_URL}/admin/persona",
            headers=user_performing_action.headers
@@ -189,13 +189,13 @@
            else GENERAL_HEADERS,
        )
        response.raise_for_status()
        return [PersonaSnapshot(**persona) for persona in response.json()]
        return [FullPersonaSnapshot(**persona) for persona in response.json()]

    @staticmethod
    def get_one(
        persona_id: int,
        user_performing_action: DATestUser | None = None,
    ) -> list[PersonaSnapshot]:
    ) -> list[FullPersonaSnapshot]:
        response = requests.get(
            f"{API_SERVER_URL}/persona/{persona_id}",
            headers=user_performing_action.headers
@@ -203,7 +203,7 @@
            else GENERAL_HEADERS,
        )
        response.raise_for_status()
        return [PersonaSnapshot(**response.json())]
        return [FullPersonaSnapshot(**response.json())]

    @staticmethod
    def verify(
@@ -42,9 +42,7 @@ import Link from "next/link";
import { useRouter, useSearchParams } from "next/navigation";
import { useEffect, useMemo, useState } from "react";
import * as Yup from "yup";
import CollapsibleSection from "./CollapsibleSection";
import { SuccessfulPersonaUpdateRedirectType } from "./enums";
import { Persona, PersonaLabel, StarterMessage } from "./interfaces";
import { FullPersona, PersonaLabel, StarterMessage } from "./interfaces";
import {
  PersonaUpsertParameters,
  createPersona,
@@ -101,6 +99,7 @@ import { SEARCH_TOOL_ID } from "@/app/chat/tools/constants";
import TextView from "@/components/chat/TextView";
import { MinimalOnyxDocument } from "@/lib/search/interfaces";
import { TabToggle } from "@/components/ui/TabToggle";
import { MAX_CHARACTERS_PERSONA_DESCRIPTION } from "@/lib/constants";

function findSearchTool(tools: ToolSnapshot[]) {
  return tools.find((tool) => tool.in_code_tool_id === SEARCH_TOOL_ID);
@@ -136,7 +135,7 @@ export function AssistantEditor({
  shouldAddAssistantToUserPreferences,
  admin,
}: {
  existingPersona?: Persona | null;
  existingPersona?: FullPersona | null;
  ccPairs: CCPairBasicInfo[];
  documentSets: DocumentSet[];
  user: User | null;
@@ -184,8 +183,6 @@
    }
  }, [defaultIconShape]);

  const [isIconDropdownOpen, setIsIconDropdownOpen] = useState(false);

  const [removePersonaImage, setRemovePersonaImage] = useState(false);

  const autoStarterMessageEnabled = useMemo(
@@ -462,12 +459,12 @@
      "Must provide a description for the Assistant"
    ),
    system_prompt: Yup.string().max(
      8000,
      "Instructions must be less than 8000 characters"
      MAX_CHARACTERS_PERSONA_DESCRIPTION,
      "Instructions must be less than 5000000 characters"
    ),
    task_prompt: Yup.string().max(
      8000,
      "Reminders must be less than 8000 characters"
      MAX_CHARACTERS_PERSONA_DESCRIPTION,
      "Reminders must be less than 5000000 characters"
    ),
    is_public: Yup.boolean().required(),
    document_set_ids: Yup.array().of(Yup.number()),
@@ -18,35 +18,37 @@ export interface Prompt {
  datetime_aware: boolean;
  default_prompt: boolean;
}

export interface Persona {
  id: number;
  name: string;
  search_start_date: Date | null;
  owner: MinimalUserSnapshot | null;
  is_visible: boolean;
  is_public: boolean;
  display_priority: number | null;
  description: string;
  document_sets: DocumentSet[];
  prompts: Prompt[];
  tools: ToolSnapshot[];
  num_chunks?: number;
  llm_relevance_filter?: boolean;
  llm_filter_extraction?: boolean;
  llm_model_provider_override?: string;
  llm_model_version_override?: string;
  starter_messages: StarterMessage[] | null;
  builtin_persona: boolean;
  is_default_persona: boolean;
  users: MinimalUserSnapshot[];
  groups: number[];
  is_public: boolean;
  is_visible: boolean;
  icon_shape?: number;
  icon_color?: string;
  uploaded_image_id?: string;
  labels?: PersonaLabel[];
  user_file_ids: number[];
  user_folder_ids: number[];
  display_priority: number | null;
  is_default_persona: boolean;
  builtin_persona: boolean;
  starter_messages: StarterMessage[] | null;
  tools: ToolSnapshot[];
  labels?: PersonaLabel[];
  owner: MinimalUserSnapshot | null;
  users: MinimalUserSnapshot[];
  groups: number[];
  document_sets: DocumentSet[];
  llm_model_provider_override?: string;
  llm_model_version_override?: string;
  num_chunks?: number;
}

export interface FullPersona extends Persona {
  search_start_date: Date | null;
  prompts: Prompt[];
  llm_relevance_filter?: boolean;
  llm_filter_extraction?: boolean;
}

export interface PersonaLabel {
@@ -331,28 +331,3 @@ export function providersContainImageGeneratingSupport(
) {
  return providers.some((provider) => provider.provider === "openai");
}

// Default fallback persona for when we must display a persona
// but assistant has access to none
export const defaultPersona: Persona = {
  id: 0,
  name: "Default Assistant",
  description: "A default assistant",
  is_visible: true,
  is_public: true,
  builtin_persona: false,
  is_default_persona: true,
  users: [],
  groups: [],
  document_sets: [],
  prompts: [],
  tools: [],
  starter_messages: null,
  display_priority: null,
  search_start_date: null,
  owner: null,
  icon_shape: 50910,
  icon_color: "#FF6F6F",
  user_file_ids: [],
  user_folder_ids: [],
};
@@ -1383,7 +1383,7 @@ export function ChatPage({
          regenerationRequest?.parentMessage.messageId ||
          lastSuccessfulMessageId,
        chatSessionId: currChatSessionId,
        promptId: liveAssistant?.prompts[0]?.id || 0,
        promptId: null,
        filters: buildFilters(
          filterManager.selectedSources,
          filterManager.selectedDocumentSets,
@@ -9,11 +9,6 @@ import { redirect } from "next/navigation";
import { BackendChatSession } from "../../interfaces";
import { SharedChatDisplay } from "./SharedChatDisplay";
import { Persona } from "@/app/admin/assistants/interfaces";
import {
  FetchAssistantsResponse,
  fetchAssistantsSS,
} from "@/lib/assistants/fetchAssistantsSS";
import { defaultPersona } from "@/app/admin/assistants/lib";
import { constructMiniFiedPersona } from "@/lib/assistantIconUtils";

async function getSharedChat(chatId: string) {
@@ -167,9 +167,7 @@ export const constructMiniFiedPersona = (
    display_priority: 0,
    description: "",
    document_sets: [],
    prompts: [],
    tools: [],
    search_start_date: null,
    owner: null,
    starter_messages: null,
    builtin_persona: false,
@@ -1,4 +1,4 @@
import { Persona } from "@/app/admin/assistants/interfaces";
import { FullPersona, Persona } from "@/app/admin/assistants/interfaces";
import { CCPairBasicInfo, DocumentSet, User } from "../types";
import { getCurrentUserSS } from "../userSS";
import { fetchSS } from "../utilsSS";
@@ -18,7 +18,7 @@ export async function fetchAssistantEditorInfoSS(
    documentSets: DocumentSet[];
    llmProviders: LLMProviderView[];
    user: User | null;
    existingPersona: Persona | null;
    existingPersona: FullPersona | null;
    tools: ToolSnapshot[];
  },
  null,
@@ -94,7 +94,7 @@ export async function fetchAssistantEditorInfoSS(
  }

  const existingPersona = personaResponse
    ? ((await personaResponse.json()) as Persona)
    ? ((await personaResponse.json()) as FullPersona)
    : null;

  let error: string | null = null;
@@ -105,3 +105,5 @@ export const ALLOWED_URL_PROTOCOLS = [
  "spotify:",
  "zoommtg:",
];

export const MAX_CHARACTERS_PERSONA_DESCRIPTION = 5000000;
@@ -1,201 +0,0 @@
import {
  BackendMessage,
  LLMRelevanceFilterPacket,
} from "@/app/chat/interfaces";
import {
  AnswerPiecePacket,
  OnyxDocument,
  ErrorMessagePacket,
  DocumentInfoPacket,
  Quote,
  QuotesInfoPacket,
  RelevanceChunk,
  SearchRequestArgs,
} from "./interfaces";
import { processRawChunkString } from "./streamingUtils";
import { buildFilters, endsWithLetterOrNumber } from "./utils";

export const searchRequestStreamed = async ({
  query,
  sources,
  documentSets,
  timeRange,
  tags,
  persona,
  agentic,
  updateCurrentAnswer,
  updateQuotes,
  updateDocs,
  updateSuggestedSearchType,
  updateSuggestedFlowType,
  updateSelectedDocIndices,
  updateError,
  updateMessageAndThreadId,
  finishedSearching,
  updateDocumentRelevance,
  updateComments,
}: SearchRequestArgs) => {
  let answer = "";
  let quotes: Quote[] | null = null;
  let relevantDocuments: OnyxDocument[] | null = null;

  try {
    const filters = buildFilters(sources, documentSets, timeRange, tags);

    const threadMessage = {
      message: query,
      sender: null,
      role: "user",
    };

    const response = await fetch("/api/query/stream-answer-with-quote", {
      method: "POST",
      body: JSON.stringify({
        messages: [threadMessage],
        persona_id: persona.id,
        agentic,
        prompt_id: persona.id === 0 ? null : persona.prompts[0]?.id,
        retrieval_options: {
          run_search: "always",
          real_time: true,
          filters: filters,
          enable_auto_detect_filters: false,
        },
        evaluation_type: agentic ? "agentic" : "basic",
      }),
      headers: {
        "Content-Type": "application/json",
      },
    });

    const reader = response.body?.getReader();
    const decoder = new TextDecoder("utf-8");

    let previousPartialChunk: string | null = null;
    while (true) {
      const rawChunk = await reader?.read();

      if (!rawChunk) {
        throw new Error("Unable to process chunk");
      }
      const { done, value } = rawChunk;
      if (done) {
        break;
      }

      // Process each chunk as it arrives
      const [completedChunks, partialChunk] = processRawChunkString<
        | AnswerPiecePacket
        | ErrorMessagePacket
        | QuotesInfoPacket
        | DocumentInfoPacket
        | LLMRelevanceFilterPacket
        | BackendMessage
        | DocumentInfoPacket
        | RelevanceChunk
      >(decoder.decode(value, { stream: true }), previousPartialChunk);
      if (!completedChunks.length && !partialChunk) {
        break;
      }
      previousPartialChunk = partialChunk as string | null;
      completedChunks.forEach((chunk) => {
        // check for answer piece / end of answer

        if (Object.hasOwn(chunk, "relevance_summaries")) {
          const relevanceChunk = chunk as RelevanceChunk;
          updateDocumentRelevance(relevanceChunk.relevance_summaries);
        }

        if (Object.hasOwn(chunk, "answer_piece")) {
          const answerPiece = (chunk as AnswerPiecePacket).answer_piece;
          if (answerPiece !== null) {
            answer += (chunk as AnswerPiecePacket).answer_piece;
            updateCurrentAnswer(answer);
          } else {
            // set quotes as non-null to signify that the answer is finished and
            // we're now looking for quotes
            updateQuotes([]);
            if (
              answer &&
              !answer.endsWith(".") &&
              !answer.endsWith("?") &&
              !answer.endsWith("!") &&
              endsWithLetterOrNumber(answer)
            ) {
              answer += ".";
              updateCurrentAnswer(answer);
            }
          }
          return;
        }

        if (Object.hasOwn(chunk, "error")) {
          updateError((chunk as ErrorMessagePacket).error);
          return;
        }

        // These all come together
        if (Object.hasOwn(chunk, "top_documents")) {
          chunk = chunk as DocumentInfoPacket;
          const topDocuments = chunk.top_documents as OnyxDocument[] | null;
          if (topDocuments) {
            relevantDocuments = topDocuments;
            updateDocs(relevantDocuments);
          }

          if (chunk.predicted_flow) {
            updateSuggestedFlowType(chunk.predicted_flow);
          }

          if (chunk.predicted_search) {
            updateSuggestedSearchType(chunk.predicted_search);
          }

          return;
        }

        if (Object.hasOwn(chunk, "relevant_chunk_indices")) {
          const relevantChunkIndices = (chunk as LLMRelevanceFilterPacket)
            .relevant_chunk_indices;
          if (relevantChunkIndices) {
            updateSelectedDocIndices(relevantChunkIndices);
          }
          return;
        }

        // Check for quote section
        if (Object.hasOwn(chunk, "quotes")) {
          quotes = (chunk as QuotesInfoPacket).quotes;
          updateQuotes(quotes);
          return;
        }

        // Check for the final chunk
        if (Object.hasOwn(chunk, "message_id")) {
          const backendChunk = chunk as BackendMessage;
          updateComments(backendChunk.comments);
          updateMessageAndThreadId(
            backendChunk.message_id,
            backendChunk.chat_session_id
          );
        }
      });
    }
  } catch (err) {
    console.error("Fetch error:", err);
    let errorMessage = "An error occurred while fetching the answer.";

    if (err instanceof Error) {
      if (err.message.includes("rate_limit_error")) {
        errorMessage =
          "Rate limit exceeded. Please try again later or reduce the length of your query.";
      } else {
        errorMessage = err.message;
      }
    }

    updateError(errorMessage);
  }

  return { answer, quotes, relevantDocuments };
};