various UX improvements

pablodanswer 2025-02-05 21:12:55 -08:00
parent 396f096dda
commit f137fc78a6
29 changed files with 467 additions and 317 deletions

View File

@ -2,8 +2,11 @@ from uuid import UUID
from sqlalchemy.orm import Session
from onyx.configs.constants import NotificationType
from onyx.db.models import Persona__User
from onyx.db.models import Persona__UserGroup
from onyx.db.notification import create_notification
from onyx.server.features.persona.models import PersonaSharedNotificationData
def make_persona_private(
@ -23,6 +26,14 @@ def make_persona_private(
for user_uuid in user_ids:
db_session.add(Persona__User(persona_id=persona_id, user_id=user_uuid))
create_notification(
user_id=user_uuid,
notif_type=NotificationType.PERSONA_SHARED,
db_session=db_session,
additional_data=PersonaSharedNotificationData(
persona_id=persona_id,
).model_dump(),
)
if group_ids:
for group_id in group_ids:
db_session.add(

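For context, a minimal sketch of the sharing-notification pattern this hunk (and its EE counterpart further down) introduces. The helper name notify_persona_shared is illustrative; the imports and the create_notification call mirror the lines above.

from uuid import UUID
from sqlalchemy.orm import Session
from onyx.configs.constants import NotificationType
from onyx.db.notification import create_notification
from onyx.server.features.persona.models import PersonaSharedNotificationData

def notify_persona_shared(
    persona_id: int, user_ids: list[UUID], db_session: Session
) -> None:
    # Hypothetical helper: one PERSONA_SHARED notification per user the persona
    # is shared with; committing the session is left to the caller, as above.
    for user_id in user_ids:
        create_notification(
            user_id=user_id,
            notif_type=NotificationType.PERSONA_SHARED,
            db_session=db_session,
            additional_data=PersonaSharedNotificationData(
                persona_id=persona_id,
            ).model_dump(),
        )
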
View File

@ -87,13 +87,14 @@ async def _get_tenant_id_from_request(
if not is_valid_schema_name(tenant_id):
raise HTTPException(status_code=400, detail="Invalid tenant ID format")
return tenant_id
except Exception as e:
logger.error(f"Unexpected error in _get_tenant_id_from_request: {str(e)}")
raise HTTPException(status_code=500, detail="Internal server error")
finally:
if tenant_id:
return tenant_id
# As a final step, check for explicit tenant_id cookie
tenant_id_cookie = request.cookies.get(TENANT_ID_COOKIE_NAME)
if tenant_id_cookie and is_valid_schema_name(tenant_id_cookie):

View File

@ -12,8 +12,9 @@ from onyx.agents.agent_search.deep_search.initial.generate_initial_answer.states
from onyx.agents.agent_search.deep_search.main.models import (
AgentRefinedMetrics,
)
from onyx.agents.agent_search.deep_search.main.operations import dispatch_subquestion
from onyx.agents.agent_search.deep_search.main.operations import (
dispatch_subquestion,
dispatch_subquestion_sep,
)
from onyx.agents.agent_search.deep_search.main.states import (
InitialQuestionDecompositionUpdate,
@ -111,7 +112,9 @@ def decompose_orig_question(
)
# dispatches custom events for subquestion tokens, adding in subquestion ids.
streamed_tokens = dispatch_separated(
model.stream(msg), dispatch_subquestion(0, writer)
model.stream(msg),
dispatch_subquestion(0, writer),
sep_callback=dispatch_subquestion_sep(0, writer),
)
stop_event = StreamStopInfo(

View File

@ -9,8 +9,9 @@ from langgraph.types import StreamWriter
from onyx.agents.agent_search.deep_search.main.models import (
RefinementSubQuestion,
)
from onyx.agents.agent_search.deep_search.main.operations import dispatch_subquestion
from onyx.agents.agent_search.deep_search.main.operations import (
dispatch_subquestion,
dispatch_subquestion_sep,
)
from onyx.agents.agent_search.deep_search.main.states import MainState
from onyx.agents.agent_search.deep_search.main.states import (
@ -96,7 +97,9 @@ def create_refined_sub_questions(
model = graph_config.tooling.fast_llm
streamed_tokens = dispatch_separated(
model.stream(msg), dispatch_subquestion(1, writer)
model.stream(msg),
dispatch_subquestion(1, writer),
sep_callback=dispatch_subquestion_sep(1, writer),
)
response = merge_content(*streamed_tokens)

View File

@ -9,6 +9,9 @@ from onyx.agents.agent_search.shared_graph_utils.models import (
SubQuestionAnswerResults,
)
from onyx.agents.agent_search.shared_graph_utils.utils import write_custom_event
from onyx.chat.models import StreamStopInfo
from onyx.chat.models import StreamStopReason
from onyx.chat.models import StreamType
from onyx.chat.models import SubQuestionPiece
from onyx.context.search.models import IndexFilters
from onyx.tools.models import SearchQueryInfo
@ -34,6 +37,22 @@ def dispatch_subquestion(
return _helper
def dispatch_subquestion_sep(level: int, writer: StreamWriter) -> Callable[[int], None]:
def _helper(sep_num: int) -> None:
write_custom_event(
"stream_finished",
StreamStopInfo(
stop_reason=StreamStopReason.FINISHED,
stream_type=StreamType.SUB_QUESTIONS,
level=level,
level_question_num=sep_num,
),
writer,
)
return _helper
def calculate_initial_agent_stats(
decomp_answer_results: list[SubQuestionAnswerResults],
original_question_stats: AgentChunkRetrievalStats,

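A rough sketch of how the new dispatch_subquestion_sep closure pairs with the existing dispatch_subquestion closure at a given level; the function name and token strings below are illustrative, the closure signatures follow the hunks above.

from langgraph.types import StreamWriter
from onyx.agents.agent_search.deep_search.main.operations import (
    dispatch_subquestion,
    dispatch_subquestion_sep,
)

def emit_level_zero_subquestion(writer: StreamWriter) -> None:
    # Illustrative only: level 0 is the initial decomposition, level 1 the refinement.
    on_token = dispatch_subquestion(0, writer)  # streams per-token sub-question pieces
    on_finished = dispatch_subquestion_sep(0, writer)  # emits a "stream_finished" StreamStopInfo

    on_token("Which connectors ", 1)
    on_token("are failing?", 1)
    on_finished(1)  # sub-question #1 of level 0 has fully streamed
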
View File

@ -295,6 +295,7 @@ def _dispatch_nonempty(
def dispatch_separated(
tokens: Iterator[BaseMessage],
dispatch_event: Callable[[str, int], None],
sep_callback: Callable[[int], None] | None = None,
sep: str = DISPATCH_SEP_CHAR,
) -> list[BaseMessage_Content]:
num = 1
@ -304,6 +305,8 @@ def dispatch_separated(
if sep in content:
sub_question_parts = content.split(sep)
_dispatch_nonempty(sub_question_parts[0], dispatch_event, num)
if sep_callback:
sep_callback(num)
num += 1
_dispatch_nonempty(
"".join(sub_question_parts[1:]).strip(), dispatch_event, num

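To make the new sep_callback hook concrete, a small self-contained sketch of the split behavior: the token values and the "\n" separator are made up (the real code uses DISPATCH_SEP_CHAR and skips empty parts via _dispatch_nonempty).

def sketch_dispatch_separated(tokens: list[str], sep: str = "\n") -> None:
    # Toy model of the loop above: when a token contains the separator, the current
    # sub-question number is closed out (sep_callback fires) before the count advances.
    num = 1
    for content in tokens:
        if sep in content:
            head, *rest = content.split(sep)
            print(f"token -> #{num}: {head!r}")
            print(f"sep_callback({num})")  # new hook: sub-question num finished streaming
            num += 1
            print(f"token -> #{num}: {''.join(rest).strip()!r}")
        else:
            print(f"token -> #{num}: {content!r}")

sketch_dispatch_separated(["What is Onyx", "?\nHow is it dep", "loyed?"])
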
View File

@ -20,6 +20,7 @@ from onyx.configs.app_configs import DISABLE_AUTH
from onyx.configs.chat_configs import BING_API_KEY
from onyx.configs.chat_configs import CONTEXT_CHUNKS_ABOVE
from onyx.configs.chat_configs import CONTEXT_CHUNKS_BELOW
from onyx.configs.constants import NotificationType
from onyx.context.search.enums import RecencyBiasSetting
from onyx.db.constants import SLACK_BOT_PERSONA_PREFIX
from onyx.db.models import DocumentSet
@ -33,6 +34,8 @@ from onyx.db.models import Tool
from onyx.db.models import User
from onyx.db.models import User__UserGroup
from onyx.db.models import UserGroup
from onyx.db.notification import create_notification
from onyx.server.features.persona.models import PersonaSharedNotificationData
from onyx.server.features.persona.models import PersonaSnapshot
from onyx.server.features.persona.models import PersonaUpsertRequest
from onyx.utils.logger import setup_logger
@ -170,6 +173,15 @@ def make_persona_private(
for user_uuid in user_ids:
db_session.add(Persona__User(persona_id=persona_id, user_id=user_uuid))
create_notification(
user_id=user_uuid,
notif_type=NotificationType.PERSONA_SHARED,
db_session=db_session,
additional_data=PersonaSharedNotificationData(
persona_id=persona_id,
).model_dump(),
)
db_session.commit()
# May cause error if someone switches down to MIT from EE

View File

@ -717,15 +717,14 @@ def upload_files_for_chat(
else ChatFileType.PLAIN_TEXT
)
if file_type == ChatFileType.IMAGE:
file_content_io = file.file
# NOTE: Image conversion to JPEG used to be enforced here.
# This was removed to:
# 1. Preserve original file content for downloads
# 2. Maintain transparency in formats like PNG
# 3. Ameliorate issue with file conversion
else:
file_content_io = io.BytesIO(file.file.read())
file_content = file.file.read() # Read the file content
# NOTE: Image conversion to JPEG used to be enforced here.
# This was removed to:
# 1. Preserve original file content for downloads
# 2. Maintain transparency in formats like PNG
# 3. Ameliorate issue with file conversion
file_content_io = io.BytesIO(file_content)
new_content_type = file.content_type

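A minimal sketch of the resulting upload handling, assuming FastAPI's UploadFile (which the file.file / file.content_type access above suggests); the helper name is illustrative.

import io
from fastapi import UploadFile

def read_upload_preserving_bytes(file: UploadFile) -> tuple[io.BytesIO, str | None]:
    # Every file type, including images, keeps its original bytes and content type:
    # no forced JPEG re-encode, so downloads and PNG transparency stay intact.
    file_content = file.file.read()
    return io.BytesIO(file_content), file.content_type
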
View File

@ -71,6 +71,7 @@ COPY supervisord.conf /etc/supervisor/conf.d/supervisord.conf
COPY ./onyx /app/onyx
COPY ./shared_configs /app/shared_configs
COPY ./alembic /app/alembic
COPY ./alembic_tenants /app/alembic_tenants
COPY ./alembic.ini /app/alembic.ini
COPY ./pytest.ini /app/pytest.ini
COPY supervisord.conf /usr/etc/supervisord.conf

View File

@ -11,6 +11,7 @@ import {
GeminiIcon,
OpenSourceIcon,
AnthropicSVG,
IconProps,
} from "@/components/icons/icons";
import { FaRobot } from "react-icons/fa";
@ -74,29 +75,36 @@ export interface LLMProviderDescriptor {
}
export const getProviderIcon = (providerName: string, modelName?: string) => {
const modelNameToIcon = (
modelName: string,
fallbackIcon: ({ size, className }: IconProps) => JSX.Element
): (({ size, className }: IconProps) => JSX.Element) => {
if (modelName?.toLowerCase().includes("amazon")) {
return AmazonIcon;
}
if (modelName?.toLowerCase().includes("phi")) {
return MicrosoftIconSVG;
}
if (modelName?.toLowerCase().includes("mistral")) {
return MistralIcon;
}
if (modelName?.toLowerCase().includes("llama")) {
return MetaIcon;
}
if (modelName?.toLowerCase().includes("gemini")) {
return GeminiIcon;
}
if (modelName?.toLowerCase().includes("claude")) {
return AnthropicIcon;
} else {
return fallbackIcon;
}
};
switch (providerName) {
case "openai":
// Special cases for openai based on modelName
if (modelName?.toLowerCase().includes("amazon")) {
return AmazonIcon;
}
if (modelName?.toLowerCase().includes("phi")) {
return MicrosoftIconSVG;
}
if (modelName?.toLowerCase().includes("mistral")) {
return MistralIcon;
}
if (modelName?.toLowerCase().includes("llama")) {
return MetaIcon;
}
if (modelName?.toLowerCase().includes("gemini")) {
return GeminiIcon;
}
if (modelName?.toLowerCase().includes("claude")) {
return AnthropicIcon;
}
return OpenAIIcon; // Default for openai
return modelNameToIcon(modelName || "", OpenAIIcon);
case "anthropic":
return AnthropicSVG;
case "bedrock":
@ -104,7 +112,7 @@ export const getProviderIcon = (providerName: string, modelName?: string) => {
case "azure":
return AzureIcon;
default:
return CPUIcon;
return modelNameToIcon(modelName || "", CPUIcon);
}
};

View File

@ -231,7 +231,7 @@ export function SettingsForm() {
<Checkbox
label="Pro Search Disabled"
sublabel="If set, users will not be able to use Pro Search."
checked={settings.pro_search_disabled}
checked={settings.pro_search_disabled ?? false}
onChange={(e) =>
handleToggleSettingsField("pro_search_disabled", e.target.checked)
}

View File

@ -10,7 +10,7 @@ export interface Settings {
notifications: Notification[];
needs_reindexing: boolean;
gpu_enabled: boolean;
pro_search_disabled: boolean;
pro_search_disabled: boolean | null;
product_gating: GatingType;
auto_scroll: boolean;
}

View File

@ -471,9 +471,6 @@ export function ChatPage({
}
return;
}
const shouldScrollToBottom =
visibleRange.get(existingChatSessionId) === undefined ||
visibleRange.get(existingChatSessionId)?.end == 0;
clearSelectedDocuments();
setIsFetchingChatMessages(true);
@ -511,16 +508,13 @@ export function ChatPage({
// go to bottom. If initial load, then do a scroll,
// otherwise just appear at the bottom
if (shouldScrollToBottom) {
scrollInitialized.current = false;
}
if (shouldScrollToBottom) {
if (!hasPerformedInitialScroll && autoScrollEnabled) {
clientScrollToBottom();
} else if (isChatSessionSwitch && autoScrollEnabled) {
clientScrollToBottom(true);
}
scrollInitialized.current = false;
if (!hasPerformedInitialScroll) {
clientScrollToBottom();
} else if (isChatSessionSwitch) {
clientScrollToBottom(true);
}
setIsFetchingChatMessages(false);
@ -1034,6 +1028,7 @@ export function ChatPage({
) {
setDocumentSidebarToggled(false);
}
clientScrollToBottom();
}, [chatSessionIdRef.current]);
const loadNewPageLogic = (event: MessageEvent) => {
@ -1068,7 +1063,6 @@ export function ChatPage({
if (!documentSidebarInitialWidth && maxDocumentSidebarWidth) {
documentSidebarInitialWidth = Math.min(700, maxDocumentSidebarWidth);
}
class CurrentMessageFIFO {
private stack: PacketType[] = [];
isComplete: boolean = false;
@ -1332,7 +1326,9 @@ export function ChatPage({
searchParams.get(SEARCH_PARAM_NAMES.SYSTEM_PROMPT) || undefined,
useExistingUserMessage: isSeededChat,
useLanggraph:
!settings?.settings.pro_search_disabled && proSearchEnabled,
!settings?.settings.pro_search_disabled &&
proSearchEnabled &&
retrievalEnabled,
});
const delay = (ms: number) => {
@ -1440,21 +1436,22 @@ export function ChatPage({
}
}
// Continuously refine the sub_questions based on the packets that we receive
// // Continuously refine the sub_questions based on the packets that we receive
if (
Object.hasOwn(packet, "stop_reason") &&
Object.hasOwn(packet, "level_question_num")
) {
// sub_questions = constructSubQuestions(
// sub_questions,
// packet as StreamStopInfo
// );
sub_questions = constructSubQuestions(
sub_questions,
packet as StreamStopInfo
);
} else if (Object.hasOwn(packet, "sub_question")) {
is_generating = true;
sub_questions = constructSubQuestions(
sub_questions,
packet as SubQuestionPiece
);
setAgenticGenerating(true);
} else if (Object.hasOwn(packet, "sub_query")) {
sub_questions = constructSubQuestions(
sub_questions,
@ -1663,6 +1660,7 @@ export function ChatPage({
completeMessageMapOverride: currentMessageMap(completeMessageDetail),
});
}
setAgenticGenerating(false);
resetRegenerationState(currentSessionId());
updateChatState("input");
@ -1790,6 +1788,7 @@ export function ChatPage({
// Used to maintain a "time out" for history sidebar so our existing refs can have time to process change
const [untoggled, setUntoggled] = useState(false);
const [loadingError, setLoadingError] = useState<string | null>(null);
const [agenticGenerating, setAgenticGenerating] = useState(false);
const explicitlyUntoggle = () => {
setShowHistorySidebar(false);
@ -1834,17 +1833,17 @@ export function ChatPage({
const autoScrollEnabled =
user?.preferences?.auto_scroll == null
? settings?.enterpriseSettings?.auto_scroll || false
: user?.preferences?.auto_scroll!;
: user?.preferences?.auto_scroll! && !agenticGenerating;
// useScrollonStream({
// chatState: currentSessionChatState,
// scrollableDivRef,
// scrollDist,
// endDivRef,
// debounceNumber,
// mobile: settings?.isMobile,
// enableAutoScroll: autoScrollEnabled,
// });
useScrollonStream({
chatState: currentSessionChatState,
scrollableDivRef,
scrollDist,
endDivRef,
debounceNumber,
mobile: settings?.isMobile,
enableAutoScroll: autoScrollEnabled,
});
// Virtualization + Scrolling related effects and functions
const scrollInitialized = useRef(false);
@ -3058,20 +3057,19 @@ export function ChatPage({
</div>
<div
ref={inputRef}
className="absolute bottom-0 z-10 w-full"
className="absolute pointer-events-none bottom-0 z-10 w-full"
>
<div className="w-[95%] mx-auto relative mb-8">
{aboveHorizon && (
<div className="pointer-events-none w-full bg-transparent flex sticky justify-center">
<button
onClick={() => clientScrollToBottom()}
className="p-1 pointer-events-auto rounded-2xl bg-background-strong border border-border mb-2 mx-auto "
>
<FiArrowDown size={18} />
</button>
</div>
)}
{aboveHorizon && (
<div className="mx-auto w-fit !pointer-events-none flex sticky justify-center">
<button
onClick={() => clientScrollToBottom()}
className="p-1 pointer-events-auto rounded-2xl bg-background-strong border border-border mx-auto "
>
<FiArrowDown size={18} />
</button>
</div>
)}
<div className="pointer-events-auto w-[95%] mx-auto relative mb-8">
<ChatInputBar
proSearchEnabled={proSearchEnabled}
setProSearchEnabled={() => toggleProSearch()}

View File

@ -1,5 +1,6 @@
import React, { useContext, useEffect, useRef, useState } from "react";
import { FiPlusCircle, FiPlus, FiInfo, FiX, FiFilter } from "react-icons/fi";
import { FiLoader } from "react-icons/fi";
import { ChatInputOption } from "./ChatInputOption";
import { Persona } from "@/app/admin/assistants/interfaces";
import LLMPopover from "./LLMPopover";
@ -36,6 +37,9 @@ import { buildImgUrl } from "../files/images/utils";
import { useUser } from "@/components/user/UserProvider";
import { AgenticToggle } from "./AgenticToggle";
import { SettingsContext } from "@/components/settings/SettingsProvider";
import { LoadingIndicator } from "react-select/dist/declarations/src/components/indicators";
import { FidgetSpinner } from "react-loader-spinner";
import { LoadingAnimation } from "@/components/Loading";
const MAX_INPUT_HEIGHT = 200;
export const SourceChip2 = ({
@ -709,12 +713,16 @@ export function ChatInputBar({
<SourceChip
key={`file-${index}`}
icon={
<img
className="h-full py-.5 object-cover rounded-lg bg-background cursor-pointer"
src={buildImgUrl(file.id)}
/>
file.isUploading ? (
<FiLoader className="animate-spin" />
) : (
<img
className="h-full py-.5 object-cover rounded-lg bg-background cursor-pointer"
src={buildImgUrl(file.id)}
/>
)
}
title={file.name || "File"}
title={file.name || "File" + file.id}
onRemove={() => {
setFiles(
files.filter(

View File

@ -5,7 +5,7 @@ import {
PopoverTrigger,
} from "@/components/ui/popover";
import { ChatInputOption } from "./ChatInputOption";
import { getDisplayNameForModel } from "@/lib/hooks";
import { defaultModelsByProvider, getDisplayNameForModel } from "@/lib/hooks";
import {
checkLLMSupportsImageInput,
destructureValue,
@ -61,22 +61,23 @@ export default function LLMPopover({
llmOptionsByProvider[llmProvider.provider] = [];
}
(llmProvider.display_model_names || llmProvider.model_names).forEach(
(modelName) => {
if (!uniqueModelNames.has(modelName)) {
uniqueModelNames.add(modelName);
llmOptionsByProvider[llmProvider.provider].push({
name: modelName,
value: structureValue(
llmProvider.name,
llmProvider.provider,
modelName
),
icon: getProviderIcon(llmProvider.provider, modelName),
});
}
(
llmProvider.display_model_names ||
defaultModelsByProvider[llmProvider.provider]
).forEach((modelName) => {
if (!uniqueModelNames.has(modelName)) {
uniqueModelNames.add(modelName);
llmOptionsByProvider[llmProvider.provider].push({
name: modelName,
value: structureValue(
llmProvider.name,
llmProvider.provider,
modelName
),
icon: getProviderIcon(llmProvider.provider, modelName),
});
}
);
});
});
const llmOptions = Object.entries(llmOptionsByProvider).flatMap(

View File

@ -249,14 +249,12 @@ export const constructSubQuestions = (
// );
if ("stop_reason" in newDetail) {
console.log("STOP REASON");
console.log(newDetail);
const { level, level_question_num } = newDetail;
let subQuestion = updatedSubQuestions.find(
(sq) => sq.level === level && sq.level_question_num === level_question_num
);
if (subQuestion) {
// subQuestion.is_complete = true;
subQuestion.is_complete = true;
}
} else if ("top_documents" in newDetail) {
const { level, level_question_num, top_documents } = newDetail;

View File

@ -322,10 +322,6 @@ export const AIMessage = ({
? otherMessagesCanSwitchTo?.indexOf(messageId)
: undefined;
const uniqueSources: ValidSources[] = Array.from(
new Set((docs || []).map((doc) => doc.source_type))
).slice(0, 3);
const webSourceDomains: string[] = Array.from(
new Set(
docs
@ -506,7 +502,7 @@ export const AIMessage = ({
<SeeMoreBlock
toggled={toggledDocumentSidebar!}
toggleDocumentSelection={toggleDocumentSelection!}
uniqueSources={uniqueSources}
docs={docs}
webSourceDomains={webSourceDomains}
/>
</div>

View File

@ -53,7 +53,7 @@ const SourceCard: React.FC<{
</div>
<div className="flex items-center gap-1 mt-1">
<ResultIcon doc={document} size={14} />
<ResultIcon doc={document} size={18} />
<div className="text-[#4a4a4a] text-xs leading-tight truncate flex-1 min-w-0">
{truncatedIdentifier}
</div>
@ -105,13 +105,10 @@ export const SourcesDisplay: React.FC<SourcesDisplayProps> = ({
{hasMoreDocuments && (
<SeeMoreBlock
fullWidth
toggled={docSidebarToggled}
toggleDocumentSelection={toggleDocumentSelection}
uniqueSources={
Array.from(
new Set(documents.map((doc) => doc.source_type))
) as ValidSources[]
}
docs={documents}
webSourceDomains={documents.map((doc) => doc.link)}
/>
)}

View File

@ -55,7 +55,8 @@ const DOC_DELAY_MS = 100;
export const useStreamingMessages = (
subQuestions: SubQuestionDetail[],
allowStreaming: () => void
allowStreaming: () => void,
onComplete: () => void
) => {
const [dynamicSubQuestions, setDynamicSubQuestions] = useState<
SubQuestionDetail[]
@ -117,24 +118,39 @@ export const useStreamingMessages = (
return;
}
// 1) Stream high-level questions in parallel
// Stream high-level questions sequentially
let didStreamQuestion = false;
let allQuestionsComplete = true;
for (let i = 0; i < actualSubQs.length; i++) {
const sq = actualSubQs[i];
const p = progressRef.current[i];
const dynSQ = dynamicSubQuestionsRef.current[i];
if (sq.question) {
const nextIndex = p.questionCharIndex + 1;
if (nextIndex <= sq.question.length) {
dynSQ.question = sq.question.slice(0, nextIndex);
p.questionCharIndex = nextIndex;
if (nextIndex >= sq.question.length) {
p.questionDone = true;
// Always stream the first subquestion (index 0)
// For others, only stream if the previous question is complete
if (i === 0 || (i > 0 && progressRef.current[i - 1].questionDone)) {
if (sq.question) {
const nextIndex = p.questionCharIndex + 1;
if (nextIndex <= sq.question.length) {
dynSQ.question = sq.question.slice(0, nextIndex);
p.questionCharIndex = nextIndex;
if (nextIndex >= sq.question.length) {
p.questionDone = true;
}
didStreamQuestion = true;
// Break after streaming one question to ensure sequential behavior
break;
}
didStreamQuestion = true;
}
}
if (!p.questionDone) {
allQuestionsComplete = false;
}
}
if (allQuestionsComplete && !didStreamQuestion) {
onComplete();
}
if (didStreamQuestion) {

View File

@ -317,7 +317,7 @@ const SubQuestionDisplay: React.FC<{
<div
className={`absolute left-[5px] ${
isFirst ? "top-[15px]" : "top-0"
} bottom-0 w-[2px] bg-neutral-200
} bottom-0 w-[2px] bg-neutral-200
${isLast && !toggled ? "h-4" : "h-full"}`}
/>
@ -331,7 +331,7 @@ const SubQuestionDisplay: React.FC<{
</div>
<div className="ml-8 w-full">
<div
className="flex -mx-2 rounded-md px-2 hover:bg-[#F5F3ED] items-start py-1.5 my-.5 cursor-pointer"
className="flex -mx-2 rounded-md px-2 hover:bg-[#F5F3ED] items-start py-1.5 my-.5 cursor-pointer"
onClick={() => setToggled(!toggled)}
>
<div className="text-black text-base font-medium leading-normal flex-grow pr-2">
@ -344,102 +344,108 @@ const SubQuestionDisplay: React.FC<{
size={20}
/>
</div>
<div
className={`transition-all duration-300 ease-in-out ${
toggled ? "max-h-[1000px]" : "max-h-0"
}`}
>
{isVisible && subQuestion && (
<div
className={`transform transition-all duration-300 ease-in-out origin-top ${
toggled ? "scale-y-100 opacity-100" : "scale-y-95 opacity-0"
}`}
>
<div className="pl-0 pb-2">
<div className="mb-4 flex flex-col gap-2">
<div className="text-[#4a4a4a] text-xs font-medium leading-normal">
Searching
</div>
<div className="flex flex-wrap gap-2">
{subQuestion?.sub_queries?.map((query, queryIndex) => (
<SourceChip2
key={queryIndex}
icon={<FiSearch size={10} />}
title={query.query}
includeTooltip
/>
))}
</div>
</div>
{(subQuestion?.is_complete || memoizedDocs?.length > 0) && (
{!temporaryDisplay && (
<div
className={`transition-all duration-300 ease-in-out ${
toggled ? "max-h-[1000px]" : "max-h-0"
}`}
>
{isVisible && subQuestion && (
<div
className={`transform transition-all duration-300 ease-in-out origin-top ${
toggled ? "scale-y-100 opacity-100" : "scale-y-95 opacity-0"
}`}
>
<div className="pl-0 pb-2">
<div className="mb-4 flex flex-col gap-2">
<div className="text-[#4a4a4a] text-xs font-medium leading-normal">
Reading
Searching
</div>
<div className="flex flex-wrap gap-2">
{memoizedDocs.length > 0 ? (
memoizedDocs.slice(0, 10).map((doc, docIndex) => {
const truncatedIdentifier =
doc.semantic_identifier?.slice(0, 20) || "";
return (
<SourceChip2
includeAnimation
onClick={() =>
openDocument(doc, setPresentingDocument)
}
key={docIndex}
icon={<ResultIcon doc={doc} size={10} />}
title={`${truncatedIdentifier}${
truncatedIdentifier.length === 20 ? "..." : ""
}`}
/>
);
})
) : (
<div className="text-black text-sm font-medium">
No sources found
{subQuestion?.sub_queries?.map((query, queryIndex) => (
<SourceChip2
key={queryIndex}
icon={<FiSearch size={10} />}
title={query.query}
includeTooltip
/>
))}
</div>
</div>
{(subQuestion?.is_complete || memoizedDocs?.length > 0) && (
<div className="mb-4 flex flex-col gap-2">
<div className="text-[#4a4a4a] text-xs font-medium leading-normal">
Reading
</div>
<div className="flex flex-wrap gap-2">
{memoizedDocs.length > 0 ? (
memoizedDocs.slice(0, 10).map((doc, docIndex) => {
const truncatedIdentifier =
doc.semantic_identifier?.slice(0, 20) || "";
return (
<SourceChip2
includeAnimation
onClick={() =>
openDocument(doc, setPresentingDocument)
}
key={docIndex}
icon={<ResultIcon doc={doc} size={10} />}
title={`${truncatedIdentifier}${
truncatedIdentifier.length === 20
? "..."
: ""
}`}
/>
);
})
) : (
<div className="text-black text-sm font-medium">
No sources found
</div>
)}
</div>
</div>
)}
{(subQuestion?.is_complete ||
subQuestion?.answer?.length > 0) && (
<div className="flex flex-col gap-2">
<div
className="text-[#4a4a4a] cursor-pointer items-center text-xs flex gap-x-1 font-medium leading-normal"
onClick={() => setAnalysisToggled(!analysisToggled)}
>
Analyzing
<ChevronDown
className={`transition-transform duration-200 ${
analysisToggled ? "" : "-rotate-90"
}`}
size={8}
/>
</div>
{analysisToggled && (
<div className="flex flex-wrap gap-2">
{renderedMarkdown}
</div>
)}
</div>
</div>
)}
{(subQuestion?.is_complete ||
subQuestion?.answer?.length > 0) && (
<div className="flex flex-col gap-2">
<div
className="text-[#4a4a4a] cursor-pointer items-center text-xs flex gap-x-1 font-medium leading-normal"
onClick={() => setAnalysisToggled(!analysisToggled)}
>
Analyzing
<ChevronDown
className={`transition-transform duration-200 ${
analysisToggled ? "" : "-rotate-90"
}`}
size={8}
/>
</div>
{analysisToggled && (
<div className="flex flex-wrap gap-2">
{renderedMarkdown}
</div>
)}
</div>
)}
)}
</div>
</div>
</div>
)}
</div>
)}
</div>
)}
{temporaryDisplay &&
(status === ToggleState.InProgress || toggled) && (
((status === ToggleState.InProgress &&
forcedStatus !== ToggleState.Done) ||
toggled) && (
<div
className={`transform transition-all duration-100 ease-in-out origin-top ${
toggled ? "scale-y-100 opacity-100" : "scale-y-95 opacity-0"
className={`transform ease-in-out origin-top ${
toggled ? "scale-y-100 opacity-100" : "scale-y-100 opacity-0"
}`}
>
<div className="bg-blaack pl-0">
<div className="pl-0">
<div className="flex flex-col gap-2">
<div className="leading-none text-[#4a4a4a] text-xs font-medium">
{temporaryDisplay?.tinyQuestion}
@ -468,9 +474,24 @@ const SubQuestionsDisplay: React.FC<SubQuestionsDisplayProps> = ({
overallAnswerGenerating,
allowDocuments,
}) => {
const { dynamicSubQuestions } = useStreamingMessages(subQuestions, () => {});
const [showSummarizing, setShowSummarizing] = useState(
finishedGenerating && !overallAnswerGenerating
);
const { dynamicSubQuestions } = useStreamingMessages(
subQuestions,
() => {},
() => {
setTimeout(() => {
setShowSummarizing(true);
}, PHASE_MIN_MS * 3);
}
);
const { dynamicSubQuestions: dynamicSecondLevelQuestions } =
useStreamingMessages(secondLevelQuestions || [], () => {});
useStreamingMessages(
secondLevelQuestions || [],
() => {},
() => {}
);
const memoizedSubQuestions = useMemo(() => {
return finishedGenerating ? subQuestions : dynamicSubQuestions;
}, [finishedGenerating, dynamicSubQuestions, subQuestions]);
@ -497,10 +518,7 @@ const SubQuestionsDisplay: React.FC<SubQuestionsDisplayProps> = ({
).length == memoizedSubQuestions.length;
const [streamedText, setStreamedText] = useState(
!overallAnswerGenerating ? "Summarize findings" : ""
);
const [showSummarizing, setShowSummarizing] = useState(
finishedGenerating && !overallAnswerGenerating
finishedGenerating ? "Summarize findings" : ""
);
const [canShowSummarizing, setCanShowSummarizing] =
useState(finishedGenerating);
@ -520,7 +538,7 @@ const SubQuestionsDisplay: React.FC<SubQuestionsDisplayProps> = ({
memoizedSubQuestions.length > 0 &&
memoizedSubQuestions.filter(
(subQuestion) => subQuestion?.answer.length > 2
).length == memoizedSubQuestions.length
).length == subQuestions.length
) {
setTimeout(() => {
setCanShowSummarizing(true);
@ -560,7 +578,7 @@ const SubQuestionsDisplay: React.FC<SubQuestionsDisplayProps> = ({
} else {
clearInterval(streamInterval);
}
}, 8);
}, 10);
}
}, [showSummarizing]);
@ -704,12 +722,6 @@ const SubQuestionsDisplay: React.FC<SubQuestionsDisplayProps> = ({
(subQuestion?.sub_queries?.length > 0 &&
(subQuestion.answer == undefined ||
subQuestion.answer.length > 3))
// subQuestion == undefined &&
// subQuestion.answer != undefined &&
// !(
// dynamicSubQuestions[index + 1] != undefined ||
// dynamicSubQuestions[index + 1]?.sub_queries?.length! > 0
// )
}
/>
))}

View File

@ -2,6 +2,7 @@ import { useState, useEffect } from "react";
import faviconFetch from "favicon-fetch";
import { SourceIcon } from "./SourceIcon";
import { ValidSources } from "@/lib/types";
import { OnyxIcon } from "./icons/icons";
const CACHE_DURATION = 24 * 60 * 60 * 1000;
@ -48,6 +49,9 @@ export function SearchResultIcon({ url }: { url: string }) {
if (!faviconUrl) {
return <SourceIcon sourceType={ValidSources.Web} iconSize={18} />;
}
if (url.includes("docs.onyx.app")) {
return <OnyxIcon size={18} />;
}
return (
<div className="rounded-full w-[18px] h-[18px] overflow-hidden bg-gray-200">

View File

@ -169,7 +169,7 @@ export function UserDropdown({
<div
className={`
p-2
w-[175px]
${page != "admin" && showNotifications ? "w-72" : "w-[175px]"}
text-strong
text-sm
border

View File

@ -12,6 +12,7 @@ import { useAssistants } from "../context/AssistantsContext";
import { useUser } from "../user/UserProvider";
import { XIcon } from "../icons/icons";
import { Spinner } from "@phosphor-icons/react";
import { useRouter } from "next/navigation";
export const Notifications = ({
notifications,
@ -23,7 +24,7 @@ export const Notifications = ({
navigateToDropdown: () => void;
}) => {
const [showDropdown, setShowDropdown] = useState(false);
const router = useRouter();
const { refreshAssistants } = useAssistants();
const { refreshUser } = useUser();
@ -90,10 +91,10 @@ export const Notifications = ({
notification: Notification,
persona: Persona
) => {
addAssistantToList(persona.id);
await dismissNotification(notification.id);
await refreshUser();
await refreshAssistants();
router.push(`/chat?assistantId=${persona.id}`);
};
const sortedNotifications = notifications
@ -204,7 +205,7 @@ export const Notifications = ({
}
className="px-3 py-1 text-sm font-medium text-blue-600 hover:text-blue-800 transition duration-150 ease-in-out"
>
Accept
Chat
</button>
<button
onClick={() => dismissNotification(notification.id)}

View File

@ -4,6 +4,8 @@ import { OnyxDocument } from "@/lib/search/interfaces";
import { truncateString } from "@/lib/utils";
import { openDocument } from "@/lib/search/utils";
import { ValidSources } from "@/lib/types";
import React from "react";
import { SearchResultIcon } from "@/components/SearchResultIcon";
export const ResultIcon = ({
doc,
@ -55,70 +57,107 @@ export default function SourceCard({
interface SeeMoreBlockProps {
toggleDocumentSelection: () => void;
uniqueSources: ValidSources[];
docs: OnyxDocument[];
webSourceDomains: string[];
toggled: boolean;
fullWidth?: boolean;
}
const getDomainFromUrl = (url: string) => {
try {
const parsedUrl = new URL(url);
return parsedUrl.hostname;
} catch (error) {
return null;
}
};
export function getUniqueIcons(docs: OnyxDocument[]): JSX.Element[] {
const uniqueIcons: JSX.Element[] = [];
const seenDomains = new Set<string>();
const seenSourceTypes = new Set<ValidSources>();
for (const doc of docs) {
// If it's a web source, we check domain uniqueness
if (doc.source_type === ValidSources.Web && doc.link) {
const domain = getDomainFromUrl(doc.link);
if (domain && !seenDomains.has(domain)) {
seenDomains.add(domain);
// Use your SearchResultIcon with the doc.url
uniqueIcons.push(
<SearchResultIcon url={doc.link} key={`web-${doc.document_id}`} />
);
}
} else {
// Otherwise, use sourceType uniqueness
if (!seenSourceTypes.has(doc.source_type)) {
seenSourceTypes.add(doc.source_type);
// Use your SourceIcon with the doc.sourceType
uniqueIcons.push(
<SourceIcon
sourceType={doc.source_type}
iconSize={18}
key={doc.document_id}
/>
);
}
}
}
// If we have zero icons, we might want a fallback (optional):
if (uniqueIcons.length === 0) {
// Fallback: just use a single SourceIcon, repeated 3 times
return [
<SourceIcon
sourceType={ValidSources.Web}
iconSize={18}
key="fallback-1"
/>,
<SourceIcon
sourceType={ValidSources.Web}
iconSize={18}
key="fallback-2"
/>,
<SourceIcon
sourceType={ValidSources.Web}
iconSize={18}
key="fallback-3"
/>,
];
}
// Duplicate last icon if fewer than 3 icons
while (uniqueIcons.length < 3) {
// The last icon in the array
const lastIcon = uniqueIcons[uniqueIcons.length - 1];
// Clone it with a new key
uniqueIcons.push(
React.cloneElement(lastIcon, {
key: `${lastIcon.key}-dup-${uniqueIcons.length}`,
})
);
}
// Slice to just the first 3 if there are more than 3
return uniqueIcons.slice(0, 3);
}
export function SeeMoreBlock({
toggleDocumentSelection,
webSourceDomains,
uniqueSources,
docs,
toggled,
fullWidth = false,
}: SeeMoreBlockProps) {
// Gather total sources (unique + web).
const totalSources = uniqueSources.length + webSourceDomains.length;
// Filter out "web" from unique sources if we have any webSourceDomains
// (preserves the original logic).
const filteredUniqueSources = uniqueSources.filter(
(source) => source !== "web" && webSourceDomains.length > 0
);
// Build a list of up to three icons from the filtered unique sources and web sources.
// If we don't reach three icons but have at least one, we'll duplicate the last one.
const iconsToRender: Array<{ type: "source" | "web"; data: string }> = [];
// Push from filtered unique sources (max 3).
for (
let i = 0;
i < filteredUniqueSources.length && iconsToRender.length < 3;
i++
) {
iconsToRender.push({ type: "source", data: filteredUniqueSources[i] });
}
// Then push from web source domains (until total of 3).
for (
let i = 0;
i < webSourceDomains.length && iconsToRender.length < 3;
i++
) {
iconsToRender.push({ type: "web", data: webSourceDomains[i] });
}
// If we have fewer than 3 but at least one icon, duplicate the last until we reach 3.
while (iconsToRender.length < 3 && iconsToRender.length > 0) {
iconsToRender.push(iconsToRender[iconsToRender.length - 1]);
}
const iconsToRender = getUniqueIcons(docs);
return (
<button
onClick={toggleDocumentSelection}
className="w-full max-w-[260px] h-[80px] p-3 bg-[#f1eee8] text-left hover:bg-[#ebe7de] cursor-pointer rounded-lg flex flex-col justify-between overflow-hidden"
className={`w-full ${fullWidth ? "w-full" : "max-w-[200px]"}
h-[80px] p-3 border border-[1.5px] border-[#D9D1c0] bg-[#f1eee8] text-left hover:bg-[#ebe7de] cursor-pointer rounded-lg flex flex-col justify-between overflow-hidden`}
>
<div className="flex items-center gap-1">
{iconsToRender.map((icon, index) =>
icon.type === "source" ? (
<SourceIcon
key={index}
sourceType={icon.data as ValidSources}
iconSize={14}
/>
) : (
<WebResultIcon key={index} url={icon.data} size={14} />
)
)}
{iconsToRender.map((icon, index) => icon)}
</div>
<div className="text-text-darker text-xs font-semibold">
{toggled ? "Hide Results" : "Show All"}

View File

@ -334,7 +334,7 @@ export function FilterPopup({
/>
</div>
</div>
<ul className="space-y-1">
<ul className="space-y-1 default-scrollbar overflow-y-auto max-h-64">
{availableSources.map((source) => (
<SelectableDropdown
icon={

View File

@ -63,7 +63,6 @@ export async function fetchSettingsSS(): Promise<CombinedSettings | null> {
} else {
settings = await results[0].json();
}
console.log(JSON.stringify(settings));
let enterpriseSettings: EnterpriseSettings | null = null;
if (tasks.length > 1) {
@ -95,6 +94,10 @@ export async function fetchSettingsSS(): Promise<CombinedSettings | null> {
}
}
if (enterpriseSettings && settings.pro_search_disabled == null) {
settings.pro_search_disabled = true;
}
const webVersion = getWebVersion();
const combinedSettings: CombinedSettings = {

View File

@ -1,15 +1,24 @@
import React from "react";
import { MdDragIndicator } from "react-icons/md";
export const DragHandle = (props: any) => {
interface DragHandleProps extends React.HTMLAttributes<HTMLDivElement> {
isDragging?: boolean;
size?: number;
}
export const DragHandle: React.FC<DragHandleProps> = ({
isDragging,
size = 16,
...props
}) => {
return (
<div
className={
props.isDragging ? "hover:cursor-grabbing" : "hover:cursor-grab"
}
className={`flex items-center justify-center ${
isDragging ? "cursor-grabbing" : "cursor-grab"
}`}
{...props}
>
<MdDragIndicator />
<MdDragIndicator size={size} />
</div>
);
};

View File

@ -6,12 +6,12 @@ import { Row } from "./interfaces";
export function DraggableRow({
row,
forceDragging,
isAdmin = true,
isDragOverlay = false,
}: {
row: Row;
forceDragging?: boolean;
isAdmin?: boolean;
isDragOverlay?: boolean;
}) {
const {
attributes,
@ -22,29 +22,25 @@ export function DraggableRow({
isDragging,
} = useSortable({
id: row.id,
disabled: isDragOverlay,
});
const style = {
transform: CSS.Transform.toString(transform),
transition: transition,
transition,
};
return (
<TableRow
ref={setNodeRef}
style={style}
className={isDragging ? "invisible" : "bg-background"}
style={isDragOverlay ? undefined : style}
className={isDragging && !isDragOverlay ? "opacity-0" : ""}
>
<TableCell>
{isAdmin && (
<DragHandle
isDragging={isDragging || forceDragging}
{...attributes}
{...listeners}
/>
)}
{isAdmin && <DragHandle isDragging={isDragging} {...listeners} />}
</TableCell>
{row.cells.map((column, ind) => (
<TableCell key={ind}>{column}</TableCell>
{row.cells.map((cell, index) => (
<TableCell key={index}>{cell}</TableCell>
))}
</TableRow>
);

View File

@ -27,7 +27,6 @@ import {
} from "@dnd-kit/sortable";
import { DraggableRow } from "./DraggableRow";
import { Row } from "./interfaces";
import { StaticRow } from "./StaticRow";
export function DraggableTable({
headers,
@ -43,8 +42,17 @@ export function DraggableTable({
const [activeId, setActiveId] = useState<UniqueIdentifier | null>();
const items = useMemo(() => rows?.map(({ id }) => id), [rows]);
const sensors = useSensors(
useSensor(MouseSensor, {}),
useSensor(TouchSensor, {}),
useSensor(MouseSensor, {
activationConstraint: {
distance: 5,
},
}),
useSensor(TouchSensor, {
activationConstraint: {
delay: 250,
tolerance: 5,
},
}),
useSensor(KeyboardSensor, {})
);
@ -87,7 +95,7 @@ export function DraggableTable({
collisionDetection={closestCenter}
modifiers={[restrictToVerticalAxis]}
>
<Table className="overflow-y-visible">
<Table>
<TableHeader>
<TableRow>
<TableHead></TableHead>
@ -99,24 +107,28 @@ export function DraggableTable({
<TableBody>
<SortableContext items={items} strategy={verticalListSortingStrategy}>
{rows.map((row) => {
return <DraggableRow key={row.id} row={row} isAdmin={isAdmin} />;
})}
{rows.map((row) => (
<DraggableRow key={row.id} row={row} isAdmin={isAdmin} />
))}
</SortableContext>
{isAdmin && (
<DragOverlay>
{selectedRow && (
<Table className="overflow-y-visible">
<TableBody>
<StaticRow key={selectedRow.id} row={selectedRow} />
</TableBody>
</Table>
)}
</DragOverlay>
)}
</TableBody>
</Table>
{isAdmin && (
<DragOverlay>
{selectedRow && (
<Table>
<TableBody>
<DraggableRow
row={selectedRow}
isAdmin={isAdmin}
isDragOverlay
/>
</TableBody>
</Table>
)}
</DragOverlay>
)}
</DndContext>
);
}