From f137fc78a67da2dbe3a4b029fd5443a124bec7d3 Mon Sep 17 00:00:00 2001 From: pablodanswer Date: Wed, 5 Feb 2025 21:12:55 -0800 Subject: [PATCH] various UX improvements --- backend/ee/onyx/db/persona.py | 11 + .../onyx/server/middleware/tenant_tracking.py | 5 +- .../nodes/decompose_orig_question.py | 7 +- .../nodes/create_refined_sub_questions.py | 7 +- .../deep_search/main/operations.py | 19 ++ .../agent_search/shared_graph_utils/utils.py | 3 + backend/onyx/db/persona.py | 12 + .../server/query_and_chat/chat_backend.py | 17 +- backend/tests/integration/Dockerfile | 1 + .../app/admin/configuration/llm/interfaces.ts | 50 +++-- web/src/app/admin/settings/SettingsForm.tsx | 2 +- web/src/app/admin/settings/interfaces.ts | 2 +- web/src/app/chat/ChatPage.tsx | 82 ++++--- web/src/app/chat/input/ChatInputBar.tsx | 18 +- web/src/app/chat/input/LLMPopover.tsx | 33 +-- web/src/app/chat/interfaces.ts | 4 +- web/src/app/chat/message/Messages.tsx | 6 +- web/src/app/chat/message/SourcesDisplay.tsx | 9 +- web/src/app/chat/message/StreamingMessages.ts | 36 ++- .../app/chat/message/SubQuestionsDisplay.tsx | 210 +++++++++--------- web/src/components/SearchResultIcon.tsx | 4 + web/src/components/UserDropdown.tsx | 2 +- web/src/components/chat/Notifications.tsx | 7 +- .../components/chat/sources/SourceCard.tsx | 137 ++++++++---- .../search/filtering/FilterPopup.tsx | 2 +- web/src/components/settings/lib.ts | 5 +- web/src/components/table/DragHandle.tsx | 19 +- web/src/components/table/DraggableRow.tsx | 24 +- web/src/components/table/DraggableTable.tsx | 50 +++-- 29 files changed, 467 insertions(+), 317 deletions(-) diff --git a/backend/ee/onyx/db/persona.py b/backend/ee/onyx/db/persona.py index 910877fd0..b5773588b 100644 --- a/backend/ee/onyx/db/persona.py +++ b/backend/ee/onyx/db/persona.py @@ -2,8 +2,11 @@ from uuid import UUID from sqlalchemy.orm import Session +from onyx.configs.constants import NotificationType from onyx.db.models import Persona__User from onyx.db.models import Persona__UserGroup +from onyx.db.notification import create_notification +from onyx.server.features.persona.models import PersonaSharedNotificationData def make_persona_private( @@ -23,6 +26,14 @@ def make_persona_private( for user_uuid in user_ids: db_session.add(Persona__User(persona_id=persona_id, user_id=user_uuid)) + create_notification( + user_id=user_uuid, + notif_type=NotificationType.PERSONA_SHARED, + db_session=db_session, + additional_data=PersonaSharedNotificationData( + persona_id=persona_id, + ).model_dump(), + ) if group_ids: for group_id in group_ids: db_session.add( diff --git a/backend/ee/onyx/server/middleware/tenant_tracking.py b/backend/ee/onyx/server/middleware/tenant_tracking.py index 3fdcfda58..12a1c4c34 100644 --- a/backend/ee/onyx/server/middleware/tenant_tracking.py +++ b/backend/ee/onyx/server/middleware/tenant_tracking.py @@ -87,13 +87,14 @@ async def _get_tenant_id_from_request( if not is_valid_schema_name(tenant_id): raise HTTPException(status_code=400, detail="Invalid tenant ID format") - return tenant_id - except Exception as e: logger.error(f"Unexpected error in _get_tenant_id_from_request: {str(e)}") raise HTTPException(status_code=500, detail="Internal server error") finally: + if tenant_id: + return tenant_id + # As a final step, check for explicit tenant_id cookie tenant_id_cookie = request.cookies.get(TENANT_ID_COOKIE_NAME) if tenant_id_cookie and is_valid_schema_name(tenant_id_cookie): diff --git 
a/backend/onyx/agents/agent_search/deep_search/initial/generate_sub_answers/nodes/decompose_orig_question.py b/backend/onyx/agents/agent_search/deep_search/initial/generate_sub_answers/nodes/decompose_orig_question.py index 7c6bc8cde..5489a8683 100644 --- a/backend/onyx/agents/agent_search/deep_search/initial/generate_sub_answers/nodes/decompose_orig_question.py +++ b/backend/onyx/agents/agent_search/deep_search/initial/generate_sub_answers/nodes/decompose_orig_question.py @@ -12,8 +12,9 @@ from onyx.agents.agent_search.deep_search.initial.generate_initial_answer.states from onyx.agents.agent_search.deep_search.main.models import ( AgentRefinedMetrics, ) +from onyx.agents.agent_search.deep_search.main.operations import dispatch_subquestion from onyx.agents.agent_search.deep_search.main.operations import ( - dispatch_subquestion, + dispatch_subquestion_sep, ) from onyx.agents.agent_search.deep_search.main.states import ( InitialQuestionDecompositionUpdate, @@ -111,7 +112,9 @@ def decompose_orig_question( ) # dispatches custom events for subquestion tokens, adding in subquestion ids. streamed_tokens = dispatch_separated( - model.stream(msg), dispatch_subquestion(0, writer) + model.stream(msg), + dispatch_subquestion(0, writer), + sep_callback=dispatch_subquestion_sep(0, writer), ) stop_event = StreamStopInfo( diff --git a/backend/onyx/agents/agent_search/deep_search/main/nodes/create_refined_sub_questions.py b/backend/onyx/agents/agent_search/deep_search/main/nodes/create_refined_sub_questions.py index 83c741542..cd7f774bf 100644 --- a/backend/onyx/agents/agent_search/deep_search/main/nodes/create_refined_sub_questions.py +++ b/backend/onyx/agents/agent_search/deep_search/main/nodes/create_refined_sub_questions.py @@ -9,8 +9,9 @@ from langgraph.types import StreamWriter from onyx.agents.agent_search.deep_search.main.models import ( RefinementSubQuestion, ) +from onyx.agents.agent_search.deep_search.main.operations import dispatch_subquestion from onyx.agents.agent_search.deep_search.main.operations import ( - dispatch_subquestion, + dispatch_subquestion_sep, ) from onyx.agents.agent_search.deep_search.main.states import MainState from onyx.agents.agent_search.deep_search.main.states import ( @@ -96,7 +97,9 @@ def create_refined_sub_questions( model = graph_config.tooling.fast_llm streamed_tokens = dispatch_separated( - model.stream(msg), dispatch_subquestion(1, writer) + model.stream(msg), + dispatch_subquestion(1, writer), + sep_callback=dispatch_subquestion_sep(1, writer), ) response = merge_content(*streamed_tokens) diff --git a/backend/onyx/agents/agent_search/deep_search/main/operations.py b/backend/onyx/agents/agent_search/deep_search/main/operations.py index bd7f40910..152581e10 100644 --- a/backend/onyx/agents/agent_search/deep_search/main/operations.py +++ b/backend/onyx/agents/agent_search/deep_search/main/operations.py @@ -9,6 +9,9 @@ from onyx.agents.agent_search.shared_graph_utils.models import ( SubQuestionAnswerResults, ) from onyx.agents.agent_search.shared_graph_utils.utils import write_custom_event +from onyx.chat.models import StreamStopInfo +from onyx.chat.models import StreamStopReason +from onyx.chat.models import StreamType from onyx.chat.models import SubQuestionPiece from onyx.context.search.models import IndexFilters from onyx.tools.models import SearchQueryInfo @@ -34,6 +37,22 @@ def dispatch_subquestion( return _helper +def dispatch_subquestion_sep(level: int, writer: StreamWriter) -> Callable[[int], None]: + def _helper(sep_num: int) -> None: + 
write_custom_event( + "stream_finished", + StreamStopInfo( + stop_reason=StreamStopReason.FINISHED, + stream_type=StreamType.SUB_QUESTIONS, + level=level, + level_question_num=sep_num, + ), + writer, + ) + + return _helper + + def calculate_initial_agent_stats( decomp_answer_results: list[SubQuestionAnswerResults], original_question_stats: AgentChunkRetrievalStats, diff --git a/backend/onyx/agents/agent_search/shared_graph_utils/utils.py b/backend/onyx/agents/agent_search/shared_graph_utils/utils.py index 72aea783c..56aea3e29 100644 --- a/backend/onyx/agents/agent_search/shared_graph_utils/utils.py +++ b/backend/onyx/agents/agent_search/shared_graph_utils/utils.py @@ -295,6 +295,7 @@ def _dispatch_nonempty( def dispatch_separated( tokens: Iterator[BaseMessage], dispatch_event: Callable[[str, int], None], + sep_callback: Callable[[int], None] | None = None, sep: str = DISPATCH_SEP_CHAR, ) -> list[BaseMessage_Content]: num = 1 @@ -304,6 +305,8 @@ def dispatch_separated( if sep in content: sub_question_parts = content.split(sep) _dispatch_nonempty(sub_question_parts[0], dispatch_event, num) + if sep_callback: + sep_callback(num) num += 1 _dispatch_nonempty( "".join(sub_question_parts[1:]).strip(), dispatch_event, num diff --git a/backend/onyx/db/persona.py b/backend/onyx/db/persona.py index 638ee74a6..40667e02d 100644 --- a/backend/onyx/db/persona.py +++ b/backend/onyx/db/persona.py @@ -20,6 +20,7 @@ from onyx.configs.app_configs import DISABLE_AUTH from onyx.configs.chat_configs import BING_API_KEY from onyx.configs.chat_configs import CONTEXT_CHUNKS_ABOVE from onyx.configs.chat_configs import CONTEXT_CHUNKS_BELOW +from onyx.configs.constants import NotificationType from onyx.context.search.enums import RecencyBiasSetting from onyx.db.constants import SLACK_BOT_PERSONA_PREFIX from onyx.db.models import DocumentSet @@ -33,6 +34,8 @@ from onyx.db.models import Tool from onyx.db.models import User from onyx.db.models import User__UserGroup from onyx.db.models import UserGroup +from onyx.db.notification import create_notification +from onyx.server.features.persona.models import PersonaSharedNotificationData from onyx.server.features.persona.models import PersonaSnapshot from onyx.server.features.persona.models import PersonaUpsertRequest from onyx.utils.logger import setup_logger @@ -170,6 +173,15 @@ def make_persona_private( for user_uuid in user_ids: db_session.add(Persona__User(persona_id=persona_id, user_id=user_uuid)) + create_notification( + user_id=user_uuid, + notif_type=NotificationType.PERSONA_SHARED, + db_session=db_session, + additional_data=PersonaSharedNotificationData( + persona_id=persona_id, + ).model_dump(), + ) + db_session.commit() # May cause error if someone switches down to MIT from EE diff --git a/backend/onyx/server/query_and_chat/chat_backend.py b/backend/onyx/server/query_and_chat/chat_backend.py index 839c79ac2..05cbd0485 100644 --- a/backend/onyx/server/query_and_chat/chat_backend.py +++ b/backend/onyx/server/query_and_chat/chat_backend.py @@ -717,15 +717,14 @@ def upload_files_for_chat( else ChatFileType.PLAIN_TEXT ) - if file_type == ChatFileType.IMAGE: - file_content_io = file.file - # NOTE: Image conversion to JPEG used to be enforced here. - # This was removed to: - # 1. Preserve original file content for downloads - # 2. Maintain transparency in formats like PNG - # 3. 
Ameliorate issue with file conversion - else: - file_content_io = io.BytesIO(file.file.read()) + file_content = file.file.read() # Read the file content + + # NOTE: Image conversion to JPEG used to be enforced here. + # This was removed to: + # 1. Preserve original file content for downloads + # 2. Maintain transparency in formats like PNG + # 3. Ameliorate issue with file conversion + file_content_io = io.BytesIO(file_content) new_content_type = file.content_type diff --git a/backend/tests/integration/Dockerfile b/backend/tests/integration/Dockerfile index 12e63a9b9..98d021a15 100644 --- a/backend/tests/integration/Dockerfile +++ b/backend/tests/integration/Dockerfile @@ -71,6 +71,7 @@ COPY supervisord.conf /etc/supervisor/conf.d/supervisord.conf COPY ./onyx /app/onyx COPY ./shared_configs /app/shared_configs COPY ./alembic /app/alembic +COPY ./alembic_tenants /app/alembic_tenants COPY ./alembic.ini /app/alembic.ini COPY ./pytest.ini /app/pytest.ini COPY supervisord.conf /usr/etc/supervisord.conf diff --git a/web/src/app/admin/configuration/llm/interfaces.ts b/web/src/app/admin/configuration/llm/interfaces.ts index aa094a029..5ad5704d5 100644 --- a/web/src/app/admin/configuration/llm/interfaces.ts +++ b/web/src/app/admin/configuration/llm/interfaces.ts @@ -11,6 +11,7 @@ import { GeminiIcon, OpenSourceIcon, AnthropicSVG, + IconProps, } from "@/components/icons/icons"; import { FaRobot } from "react-icons/fa"; @@ -74,29 +75,36 @@ export interface LLMProviderDescriptor { } export const getProviderIcon = (providerName: string, modelName?: string) => { + const modelNameToIcon = ( + modelName: string, + fallbackIcon: ({ size, className }: IconProps) => JSX.Element + ): (({ size, className }: IconProps) => JSX.Element) => { + if (modelName?.toLowerCase().includes("amazon")) { + return AmazonIcon; + } + if (modelName?.toLowerCase().includes("phi")) { + return MicrosoftIconSVG; + } + if (modelName?.toLowerCase().includes("mistral")) { + return MistralIcon; + } + if (modelName?.toLowerCase().includes("llama")) { + return MetaIcon; + } + if (modelName?.toLowerCase().includes("gemini")) { + return GeminiIcon; + } + if (modelName?.toLowerCase().includes("claude")) { + return AnthropicIcon; + } else { + return fallbackIcon; + } + }; + switch (providerName) { case "openai": // Special cases for openai based on modelName - if (modelName?.toLowerCase().includes("amazon")) { - return AmazonIcon; - } - if (modelName?.toLowerCase().includes("phi")) { - return MicrosoftIconSVG; - } - if (modelName?.toLowerCase().includes("mistral")) { - return MistralIcon; - } - if (modelName?.toLowerCase().includes("llama")) { - return MetaIcon; - } - if (modelName?.toLowerCase().includes("gemini")) { - return GeminiIcon; - } - if (modelName?.toLowerCase().includes("claude")) { - return AnthropicIcon; - } - - return OpenAIIcon; // Default for openai + return modelNameToIcon(modelName || "", OpenAIIcon); case "anthropic": return AnthropicSVG; case "bedrock": @@ -104,7 +112,7 @@ export const getProviderIcon = (providerName: string, modelName?: string) => { case "azure": return AzureIcon; default: - return CPUIcon; + return modelNameToIcon(modelName || "", CPUIcon); } }; diff --git a/web/src/app/admin/settings/SettingsForm.tsx b/web/src/app/admin/settings/SettingsForm.tsx index d563d6c11..9e06b8ca3 100644 --- a/web/src/app/admin/settings/SettingsForm.tsx +++ b/web/src/app/admin/settings/SettingsForm.tsx @@ -231,7 +231,7 @@ export function SettingsForm() { handleToggleSettingsField("pro_search_disabled", e.target.checked) } diff 
--git a/web/src/app/admin/settings/interfaces.ts b/web/src/app/admin/settings/interfaces.ts index 0c833002c..4574ad8a0 100644 --- a/web/src/app/admin/settings/interfaces.ts +++ b/web/src/app/admin/settings/interfaces.ts @@ -10,7 +10,7 @@ export interface Settings { notifications: Notification[]; needs_reindexing: boolean; gpu_enabled: boolean; - pro_search_disabled: boolean; + pro_search_disabled: boolean | null; product_gating: GatingType; auto_scroll: boolean; } diff --git a/web/src/app/chat/ChatPage.tsx b/web/src/app/chat/ChatPage.tsx index d7ed3dd17..adb591185 100644 --- a/web/src/app/chat/ChatPage.tsx +++ b/web/src/app/chat/ChatPage.tsx @@ -471,9 +471,6 @@ export function ChatPage({ } return; } - const shouldScrollToBottom = - visibleRange.get(existingChatSessionId) === undefined || - visibleRange.get(existingChatSessionId)?.end == 0; clearSelectedDocuments(); setIsFetchingChatMessages(true); @@ -511,16 +508,13 @@ export function ChatPage({ // go to bottom. If initial load, then do a scroll, // otherwise just appear at the bottom - if (shouldScrollToBottom) { - scrollInitialized.current = false; - } - if (shouldScrollToBottom) { - if (!hasPerformedInitialScroll && autoScrollEnabled) { - clientScrollToBottom(); - } else if (isChatSessionSwitch && autoScrollEnabled) { - clientScrollToBottom(true); - } + scrollInitialized.current = false; + + if (!hasPerformedInitialScroll) { + clientScrollToBottom(); + } else if (isChatSessionSwitch) { + clientScrollToBottom(true); } setIsFetchingChatMessages(false); @@ -1034,6 +1028,7 @@ export function ChatPage({ ) { setDocumentSidebarToggled(false); } + clientScrollToBottom(); }, [chatSessionIdRef.current]); const loadNewPageLogic = (event: MessageEvent) => { @@ -1068,7 +1063,6 @@ export function ChatPage({ if (!documentSidebarInitialWidth && maxDocumentSidebarWidth) { documentSidebarInitialWidth = Math.min(700, maxDocumentSidebarWidth); } - class CurrentMessageFIFO { private stack: PacketType[] = []; isComplete: boolean = false; @@ -1332,7 +1326,9 @@ export function ChatPage({ searchParams.get(SEARCH_PARAM_NAMES.SYSTEM_PROMPT) || undefined, useExistingUserMessage: isSeededChat, useLanggraph: - !settings?.settings.pro_search_disabled && proSearchEnabled, + !settings?.settings.pro_search_disabled && + proSearchEnabled && + retrievalEnabled, }); const delay = (ms: number) => { @@ -1440,21 +1436,22 @@ export function ChatPage({ } } - // Continuously refine the sub_questions based on the packets that we receive + // // Continuously refine the sub_questions based on the packets that we receive if ( Object.hasOwn(packet, "stop_reason") && Object.hasOwn(packet, "level_question_num") ) { - // sub_questions = constructSubQuestions( - // sub_questions, - // packet as StreamStopInfo - // ); + sub_questions = constructSubQuestions( + sub_questions, + packet as StreamStopInfo + ); } else if (Object.hasOwn(packet, "sub_question")) { is_generating = true; sub_questions = constructSubQuestions( sub_questions, packet as SubQuestionPiece ); + setAgenticGenerating(true); } else if (Object.hasOwn(packet, "sub_query")) { sub_questions = constructSubQuestions( sub_questions, @@ -1663,6 +1660,7 @@ export function ChatPage({ completeMessageMapOverride: currentMessageMap(completeMessageDetail), }); } + setAgenticGenerating(false); resetRegenerationState(currentSessionId()); updateChatState("input"); @@ -1790,6 +1788,7 @@ export function ChatPage({ // Used to maintain a "time out" for history sidebar so our existing refs can have time to process change const [untoggled, 
setUntoggled] = useState(false); const [loadingError, setLoadingError] = useState(null); + const [agenticGenerating, setAgenticGenerating] = useState(false); const explicitlyUntoggle = () => { setShowHistorySidebar(false); @@ -1834,17 +1833,17 @@ export function ChatPage({ const autoScrollEnabled = user?.preferences?.auto_scroll == null ? settings?.enterpriseSettings?.auto_scroll || false - : user?.preferences?.auto_scroll!; + : user?.preferences?.auto_scroll! && !agenticGenerating; - // useScrollonStream({ - // chatState: currentSessionChatState, - // scrollableDivRef, - // scrollDist, - // endDivRef, - // debounceNumber, - // mobile: settings?.isMobile, - // enableAutoScroll: autoScrollEnabled, - // }); + useScrollonStream({ + chatState: currentSessionChatState, + scrollableDivRef, + scrollDist, + endDivRef, + debounceNumber, + mobile: settings?.isMobile, + enableAutoScroll: autoScrollEnabled, + }); // Virtualization + Scrolling related effects and functions const scrollInitialized = useRef(false); @@ -3058,20 +3057,19 @@ export function ChatPage({
-
- {aboveHorizon && ( -
- -
- )} - + {aboveHorizon && ( +
+ +
+ )} +
toggleProSearch()} diff --git a/web/src/app/chat/input/ChatInputBar.tsx b/web/src/app/chat/input/ChatInputBar.tsx index 8ffb43c15..06c996eaa 100644 --- a/web/src/app/chat/input/ChatInputBar.tsx +++ b/web/src/app/chat/input/ChatInputBar.tsx @@ -1,5 +1,6 @@ import React, { useContext, useEffect, useRef, useState } from "react"; import { FiPlusCircle, FiPlus, FiInfo, FiX, FiFilter } from "react-icons/fi"; +import { FiLoader } from "react-icons/fi"; import { ChatInputOption } from "./ChatInputOption"; import { Persona } from "@/app/admin/assistants/interfaces"; import LLMPopover from "./LLMPopover"; @@ -36,6 +37,9 @@ import { buildImgUrl } from "../files/images/utils"; import { useUser } from "@/components/user/UserProvider"; import { AgenticToggle } from "./AgenticToggle"; import { SettingsContext } from "@/components/settings/SettingsProvider"; +import { LoadingIndicator } from "react-select/dist/declarations/src/components/indicators"; +import { FidgetSpinner } from "react-loader-spinner"; +import { LoadingAnimation } from "@/components/Loading"; const MAX_INPUT_HEIGHT = 200; export const SourceChip2 = ({ @@ -709,12 +713,16 @@ export function ChatInputBar({ + file.isUploading ? ( + + ) : ( + + ) } - title={file.name || "File"} + title={file.name || "File" + file.id} onRemove={() => { setFiles( files.filter( diff --git a/web/src/app/chat/input/LLMPopover.tsx b/web/src/app/chat/input/LLMPopover.tsx index 9cdab8b57..bdf1112ca 100644 --- a/web/src/app/chat/input/LLMPopover.tsx +++ b/web/src/app/chat/input/LLMPopover.tsx @@ -5,7 +5,7 @@ import { PopoverTrigger, } from "@/components/ui/popover"; import { ChatInputOption } from "./ChatInputOption"; -import { getDisplayNameForModel } from "@/lib/hooks"; +import { defaultModelsByProvider, getDisplayNameForModel } from "@/lib/hooks"; import { checkLLMSupportsImageInput, destructureValue, @@ -61,22 +61,23 @@ export default function LLMPopover({ llmOptionsByProvider[llmProvider.provider] = []; } - (llmProvider.display_model_names || llmProvider.model_names).forEach( - (modelName) => { - if (!uniqueModelNames.has(modelName)) { - uniqueModelNames.add(modelName); - llmOptionsByProvider[llmProvider.provider].push({ - name: modelName, - value: structureValue( - llmProvider.name, - llmProvider.provider, - modelName - ), - icon: getProviderIcon(llmProvider.provider, modelName), - }); - } + ( + llmProvider.display_model_names || + defaultModelsByProvider[llmProvider.provider] + ).forEach((modelName) => { + if (!uniqueModelNames.has(modelName)) { + uniqueModelNames.add(modelName); + llmOptionsByProvider[llmProvider.provider].push({ + name: modelName, + value: structureValue( + llmProvider.name, + llmProvider.provider, + modelName + ), + icon: getProviderIcon(llmProvider.provider, modelName), + }); } - ); + }); }); const llmOptions = Object.entries(llmOptionsByProvider).flatMap( diff --git a/web/src/app/chat/interfaces.ts b/web/src/app/chat/interfaces.ts index b0bb36447..c04ec15d2 100644 --- a/web/src/app/chat/interfaces.ts +++ b/web/src/app/chat/interfaces.ts @@ -249,14 +249,12 @@ export const constructSubQuestions = ( // ); if ("stop_reason" in newDetail) { - console.log("STOP REASON"); - console.log(newDetail); const { level, level_question_num } = newDetail; let subQuestion = updatedSubQuestions.find( (sq) => sq.level === level && sq.level_question_num === level_question_num ); if (subQuestion) { - // subQuestion.is_complete = true; + subQuestion.is_complete = true; } } else if ("top_documents" in newDetail) { const { level, level_question_num, top_documents 
} = newDetail; diff --git a/web/src/app/chat/message/Messages.tsx b/web/src/app/chat/message/Messages.tsx index 9e063e56c..3f4cf20fc 100644 --- a/web/src/app/chat/message/Messages.tsx +++ b/web/src/app/chat/message/Messages.tsx @@ -322,10 +322,6 @@ export const AIMessage = ({ ? otherMessagesCanSwitchTo?.indexOf(messageId) : undefined; - const uniqueSources: ValidSources[] = Array.from( - new Set((docs || []).map((doc) => doc.source_type)) - ).slice(0, 3); - const webSourceDomains: string[] = Array.from( new Set( docs @@ -506,7 +502,7 @@ export const AIMessage = ({
diff --git a/web/src/app/chat/message/SourcesDisplay.tsx b/web/src/app/chat/message/SourcesDisplay.tsx index 8e3d6f78f..d3c2b1220 100644 --- a/web/src/app/chat/message/SourcesDisplay.tsx +++ b/web/src/app/chat/message/SourcesDisplay.tsx @@ -53,7 +53,7 @@ const SourceCard: React.FC<{
- +
{truncatedIdentifier}
@@ -105,13 +105,10 @@ export const SourcesDisplay: React.FC = ({ {hasMoreDocuments && ( doc.source_type)) - ) as ValidSources[] - } + docs={documents} webSourceDomains={documents.map((doc) => doc.link)} /> )} diff --git a/web/src/app/chat/message/StreamingMessages.ts b/web/src/app/chat/message/StreamingMessages.ts index 96f425046..66c72b9c5 100644 --- a/web/src/app/chat/message/StreamingMessages.ts +++ b/web/src/app/chat/message/StreamingMessages.ts @@ -55,7 +55,8 @@ const DOC_DELAY_MS = 100; export const useStreamingMessages = ( subQuestions: SubQuestionDetail[], - allowStreaming: () => void + allowStreaming: () => void, + onComplete: () => void ) => { const [dynamicSubQuestions, setDynamicSubQuestions] = useState< SubQuestionDetail[] @@ -117,24 +118,39 @@ export const useStreamingMessages = ( return; } - // 1) Stream high-level questions in parallel + // Stream high-level questions sequentially let didStreamQuestion = false; + let allQuestionsComplete = true; for (let i = 0; i < actualSubQs.length; i++) { const sq = actualSubQs[i]; const p = progressRef.current[i]; const dynSQ = dynamicSubQuestionsRef.current[i]; - if (sq.question) { - const nextIndex = p.questionCharIndex + 1; - if (nextIndex <= sq.question.length) { - dynSQ.question = sq.question.slice(0, nextIndex); - p.questionCharIndex = nextIndex; - if (nextIndex >= sq.question.length) { - p.questionDone = true; + // Always stream the first subquestion (index 0) + // For others, only stream if the previous question is complete + if (i === 0 || (i > 0 && progressRef.current[i - 1].questionDone)) { + if (sq.question) { + const nextIndex = p.questionCharIndex + 1; + if (nextIndex <= sq.question.length) { + dynSQ.question = sq.question.slice(0, nextIndex); + p.questionCharIndex = nextIndex; + if (nextIndex >= sq.question.length) { + p.questionDone = true; + } + didStreamQuestion = true; + // Break after streaming one question to ensure sequential behavior + break; } - didStreamQuestion = true; } } + + if (!p.questionDone) { + allQuestionsComplete = false; + } + } + + if (allQuestionsComplete && !didStreamQuestion) { + onComplete(); } if (didStreamQuestion) { diff --git a/web/src/app/chat/message/SubQuestionsDisplay.tsx b/web/src/app/chat/message/SubQuestionsDisplay.tsx index bdc26443f..da7389f49 100644 --- a/web/src/app/chat/message/SubQuestionsDisplay.tsx +++ b/web/src/app/chat/message/SubQuestionsDisplay.tsx @@ -317,7 +317,7 @@ const SubQuestionDisplay: React.FC<{
@@ -331,7 +331,7 @@ const SubQuestionDisplay: React.FC<{
setToggled(!toggled)} >
@@ -344,102 +344,108 @@ const SubQuestionDisplay: React.FC<{ size={20} />
-
- {isVisible && subQuestion && ( -
-
-
-
- Searching -
-
- {subQuestion?.sub_queries?.map((query, queryIndex) => ( - } - title={query.query} - includeTooltip - /> - ))} -
-
- {(subQuestion?.is_complete || memoizedDocs?.length > 0) && ( + {!temporaryDisplay && ( +
+ {isVisible && subQuestion && ( +
+
- Reading + Searching
- {memoizedDocs.length > 0 ? ( - memoizedDocs.slice(0, 10).map((doc, docIndex) => { - const truncatedIdentifier = - doc.semantic_identifier?.slice(0, 20) || ""; - return ( - - openDocument(doc, setPresentingDocument) - } - key={docIndex} - icon={} - title={`${truncatedIdentifier}${ - truncatedIdentifier.length === 20 ? "..." : "" - }`} - /> - ); - }) - ) : ( -
- No sources found + {subQuestion?.sub_queries?.map((query, queryIndex) => ( + } + title={query.query} + includeTooltip + /> + ))} +
+
+ + {(subQuestion?.is_complete || memoizedDocs?.length > 0) && ( +
+
+ Reading +
+
+ {memoizedDocs.length > 0 ? ( + memoizedDocs.slice(0, 10).map((doc, docIndex) => { + const truncatedIdentifier = + doc.semantic_identifier?.slice(0, 20) || ""; + return ( + + openDocument(doc, setPresentingDocument) + } + key={docIndex} + icon={} + title={`${truncatedIdentifier}${ + truncatedIdentifier.length === 20 + ? "..." + : "" + }`} + /> + ); + }) + ) : ( +
+ No sources found +
+ )} +
+
+ )} + + {(subQuestion?.is_complete || + subQuestion?.answer?.length > 0) && ( +
+
setAnalysisToggled(!analysisToggled)} + > + Analyzing + +
+ {analysisToggled && ( +
+ {renderedMarkdown}
)}
-
- )} - - {(subQuestion?.is_complete || - subQuestion?.answer?.length > 0) && ( -
-
setAnalysisToggled(!analysisToggled)} - > - Analyzing - -
- {analysisToggled && ( -
- {renderedMarkdown} -
- )} -
- )} + )} +
-
- )} -
- + )} +
+ )} {temporaryDisplay && - (status === ToggleState.InProgress || toggled) && ( + ((status === ToggleState.InProgress && + forcedStatus !== ToggleState.Done) || + toggled) && (
-
+
{temporaryDisplay?.tinyQuestion} @@ -468,9 +474,24 @@ const SubQuestionsDisplay: React.FC = ({ overallAnswerGenerating, allowDocuments, }) => { - const { dynamicSubQuestions } = useStreamingMessages(subQuestions, () => {}); + const [showSummarizing, setShowSummarizing] = useState( + finishedGenerating && !overallAnswerGenerating + ); + const { dynamicSubQuestions } = useStreamingMessages( + subQuestions, + () => {}, + () => { + setTimeout(() => { + setShowSummarizing(true); + }, PHASE_MIN_MS * 3); + } + ); const { dynamicSubQuestions: dynamicSecondLevelQuestions } = - useStreamingMessages(secondLevelQuestions || [], () => {}); + useStreamingMessages( + secondLevelQuestions || [], + () => {}, + () => {} + ); const memoizedSubQuestions = useMemo(() => { return finishedGenerating ? subQuestions : dynamicSubQuestions; }, [finishedGenerating, dynamicSubQuestions, subQuestions]); @@ -497,10 +518,7 @@ const SubQuestionsDisplay: React.FC = ({ ).length == memoizedSubQuestions.length; const [streamedText, setStreamedText] = useState( - !overallAnswerGenerating ? "Summarize findings" : "" - ); - const [showSummarizing, setShowSummarizing] = useState( - finishedGenerating && !overallAnswerGenerating + finishedGenerating ? "Summarize findings" : "" ); const [canShowSummarizing, setCanShowSummarizing] = useState(finishedGenerating); @@ -520,7 +538,7 @@ const SubQuestionsDisplay: React.FC = ({ memoizedSubQuestions.length > 0 && memoizedSubQuestions.filter( (subQuestion) => subQuestion?.answer.length > 2 - ).length == memoizedSubQuestions.length + ).length == subQuestions.length ) { setTimeout(() => { setCanShowSummarizing(true); @@ -560,7 +578,7 @@ const SubQuestionsDisplay: React.FC = ({ } else { clearInterval(streamInterval); } - }, 8); + }, 10); } }, [showSummarizing]); @@ -704,12 +722,6 @@ const SubQuestionsDisplay: React.FC = ({ (subQuestion?.sub_queries?.length > 0 && (subQuestion.answer == undefined || subQuestion.answer.length > 3)) - // subQuestion == undefined && - // subQuestion.answer != undefined && - // !( - // dynamicSubQuestions[index + 1] != undefined || - // dynamicSubQuestions[index + 1]?.sub_queries?.length! > 0 - // ) } /> ))} diff --git a/web/src/components/SearchResultIcon.tsx b/web/src/components/SearchResultIcon.tsx index 30571390b..c5902fcfd 100644 --- a/web/src/components/SearchResultIcon.tsx +++ b/web/src/components/SearchResultIcon.tsx @@ -2,6 +2,7 @@ import { useState, useEffect } from "react"; import faviconFetch from "favicon-fetch"; import { SourceIcon } from "./SourceIcon"; import { ValidSources } from "@/lib/types"; +import { OnyxIcon } from "./icons/icons"; const CACHE_DURATION = 24 * 60 * 60 * 1000; @@ -48,6 +49,9 @@ export function SearchResultIcon({ url }: { url: string }) { if (!faviconUrl) { return ; } + if (url.includes("docs.onyx.app")) { + return ; + } return (
diff --git a/web/src/components/UserDropdown.tsx b/web/src/components/UserDropdown.tsx index 24e729a18..256986c1e 100644 --- a/web/src/components/UserDropdown.tsx +++ b/web/src/components/UserDropdown.tsx @@ -169,7 +169,7 @@ export function UserDropdown({
void; }) => { const [showDropdown, setShowDropdown] = useState(false); - + const router = useRouter(); const { refreshAssistants } = useAssistants(); const { refreshUser } = useUser(); @@ -90,10 +91,10 @@ export const Notifications = ({ notification: Notification, persona: Persona ) => { - addAssistantToList(persona.id); await dismissNotification(notification.id); await refreshUser(); await refreshAssistants(); + router.push(`/chat?assistantId=${persona.id}`); }; const sortedNotifications = notifications @@ -204,7 +205,7 @@ export const Notifications = ({ } className="px-3 py-1 text-sm font-medium text-blue-600 hover:text-blue-800 transition duration-150 ease-in-out" > - Accept + Chat
-
    +
      {availableSources.map((source) => ( { } else { settings = await results[0].json(); } - console.log(JSON.stringify(settings)); let enterpriseSettings: EnterpriseSettings | null = null; if (tasks.length > 1) { @@ -95,6 +94,10 @@ export async function fetchSettingsSS(): Promise { } } + if (enterpriseSettings && settings.pro_search_disabled == null) { + settings.pro_search_disabled = true; + } + const webVersion = getWebVersion(); const combinedSettings: CombinedSettings = { diff --git a/web/src/components/table/DragHandle.tsx b/web/src/components/table/DragHandle.tsx index a288d58b4..626ca3496 100644 --- a/web/src/components/table/DragHandle.tsx +++ b/web/src/components/table/DragHandle.tsx @@ -1,15 +1,24 @@ import React from "react"; import { MdDragIndicator } from "react-icons/md"; -export const DragHandle = (props: any) => { +interface DragHandleProps extends React.HTMLAttributes { + isDragging?: boolean; + size?: number; +} + +export const DragHandle: React.FC = ({ + isDragging, + size = 16, + ...props +}) => { return (
      - +
      ); }; diff --git a/web/src/components/table/DraggableRow.tsx b/web/src/components/table/DraggableRow.tsx index 07b7db951..7a689f9b0 100644 --- a/web/src/components/table/DraggableRow.tsx +++ b/web/src/components/table/DraggableRow.tsx @@ -6,12 +6,12 @@ import { Row } from "./interfaces"; export function DraggableRow({ row, - forceDragging, isAdmin = true, + isDragOverlay = false, }: { row: Row; - forceDragging?: boolean; isAdmin?: boolean; + isDragOverlay?: boolean; }) { const { attributes, @@ -22,29 +22,25 @@ export function DraggableRow({ isDragging, } = useSortable({ id: row.id, + disabled: isDragOverlay, }); + const style = { transform: CSS.Transform.toString(transform), - transition: transition, + transition, }; return ( - {isAdmin && ( - - )} + {isAdmin && } - {row.cells.map((column, ind) => ( - {column} + {row.cells.map((cell, index) => ( + {cell} ))} ); diff --git a/web/src/components/table/DraggableTable.tsx b/web/src/components/table/DraggableTable.tsx index 650265ad3..1d6f606e9 100644 --- a/web/src/components/table/DraggableTable.tsx +++ b/web/src/components/table/DraggableTable.tsx @@ -27,7 +27,6 @@ import { } from "@dnd-kit/sortable"; import { DraggableRow } from "./DraggableRow"; import { Row } from "./interfaces"; -import { StaticRow } from "./StaticRow"; export function DraggableTable({ headers, @@ -43,8 +42,17 @@ export function DraggableTable({ const [activeId, setActiveId] = useState(); const items = useMemo(() => rows?.map(({ id }) => id), [rows]); const sensors = useSensors( - useSensor(MouseSensor, {}), - useSensor(TouchSensor, {}), + useSensor(MouseSensor, { + activationConstraint: { + distance: 5, + }, + }), + useSensor(TouchSensor, { + activationConstraint: { + delay: 250, + tolerance: 5, + }, + }), useSensor(KeyboardSensor, {}) ); @@ -87,7 +95,7 @@ export function DraggableTable({ collisionDetection={closestCenter} modifiers={[restrictToVerticalAxis]} > - +
      @@ -99,24 +107,28 @@ export function DraggableTable({ - {rows.map((row) => { - return ; - })} + {rows.map((row) => ( + + ))} - - {isAdmin && ( - - {selectedRow && ( -
      - - - -
      - )} - - )} + + {isAdmin && ( + + {selectedRow && ( + + + + +
      + )} +
      + )} ); }