wip: relay filter chunking for REQ subscriptions

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
Alejandro Gómez
2026-03-19 12:12:14 +01:00
parent 92ea6e347b
commit 9af896127e
4 changed files with 431 additions and 4 deletions

View File

@@ -83,6 +83,8 @@ import {
shouldAnimate,
} from "@/lib/req-state-machine";
import { resolveFilterAliases, getTagValues } from "@/lib/nostr-utils";
import { chunkFiltersByRelay } from "@/lib/relay-filter-chunking";
import { useStableValue } from "@/hooks/useStable";
import { FilterSummaryBadges } from "./nostr/FilterSummaryBadges";
import { useNostrEvent } from "@/hooks/useNostrEvent";
import { MemoizedCompactEventRow } from "./nostr/CompactEventRow";
@@ -136,6 +138,7 @@ interface QueryDropdownProps {
nip05PTags?: string[];
domainAuthors?: string[];
domainPTags?: string[];
relayFilterMap?: Record<string, import("nostr-tools").Filter[]>;
}
function QueryDropdown({
@@ -143,6 +146,7 @@ function QueryDropdown({
nip05Authors,
domainAuthors,
domainPTags,
relayFilterMap,
}: QueryDropdownProps) {
const { copy: handleCopy, copied } = useCopy();
@@ -687,6 +691,100 @@ function QueryDropdown({
</div>
)}
{/* Per-Relay Chunked Filters (NIP-65 Outbox) */}
{relayFilterMap && Object.keys(relayFilterMap).length > 0 && (
<Collapsible>
<CollapsibleTrigger className="flex items-center gap-2 text-xs font-medium text-muted-foreground hover:text-foreground transition-colors w-full">
<Sparkles className="size-3" />
Chunked REQ (NIP-65 Outbox) {
Object.keys(relayFilterMap).length
}{" "}
relays
<ChevronDown className="size-3 ml-auto" />
</CollapsibleTrigger>
<CollapsibleContent>
<div className="mt-2 space-y-2">
{/* Common fields shared across all relays */}
{(() => {
const commonFields: Record<string, unknown> = {};
for (const [key, value] of Object.entries(filter)) {
if (key !== "authors" && key !== "#p") {
commonFields[key] = value;
}
}
return Object.keys(commonFields).length > 0 ? (
<div className="text-xs text-muted-foreground px-2 py-1 bg-muted/30 rounded border border-border/40">
<span className="font-medium">Common:</span>{" "}
{filter.kinds && `kinds [${filter.kinds.join(", ")}]`}
{filter.since &&
`, since ${new Date(filter.since * 1000).toLocaleDateString()}`}
{filter.until &&
`, until ${new Date(filter.until * 1000).toLocaleDateString()}`}
{filter.limit && `, limit ${filter.limit}`}
{filter.search && `, search "${filter.search}"`}
{filter["#t"] && `, #t [${filter["#t"].join(", ")}]`}
</div>
) : null;
})()}
{/* Per-relay filters */}
{Object.entries(relayFilterMap).map(([relayUrl, filters]) => {
const authorCount = filters.reduce(
(sum, f) => sum + (f.authors?.length || 0),
0,
);
const isFallback =
filters.length === 1 &&
JSON.stringify(filters[0]) === JSON.stringify(filter);
return (
<Collapsible key={relayUrl}>
<CollapsibleTrigger className="flex items-center gap-2 text-xs w-full px-2 py-1.5 rounded hover:bg-muted/50 transition-colors">
<ChevronRight className="size-3 text-muted-foreground" />
<RelayLink url={relayUrl} />
<span className="text-muted-foreground ml-auto flex items-center gap-2">
{authorCount > 0 && (
<span>
{authorCount} author{authorCount !== 1 && "s"}
</span>
)}
{isFallback && (
<span className="text-[10px] bg-muted px-1 py-0.5 rounded font-medium">
FB
</span>
)}
</span>
</CollapsibleTrigger>
<CollapsibleContent>
<div className="ml-5 mt-1 space-y-1">
{filters.map((f, i) => (
<div key={i}>
{f.authors && f.authors.length > 0 && (
<div className="flex flex-wrap gap-1 items-center">
<span className="text-[10px] text-muted-foreground font-medium">
authors:
</span>
{f.authors.map((pubkey) => (
<UserName
key={pubkey}
pubkey={pubkey}
className="text-xs"
/>
))}
</div>
)}
</div>
))}
</div>
</CollapsibleContent>
</Collapsible>
);
})}
</div>
</CollapsibleContent>
</Collapsible>
)}
{/* Raw Query - Always at bottom */}
<Collapsible>
<CollapsibleTrigger className="flex items-center gap-2 text-xs font-medium text-muted-foreground hover:text-foreground transition-colors w-full">
@@ -829,6 +927,15 @@ export default function ReqViewer({
// Streaming is the default behavior, closeOnEose inverts it
const stream = !closeOnEose;
// Per-relay filter chunking: only send relevant authors/#p to each relay
const relayFilterMap = useMemo(() => {
// Only chunk when using NIP-65 selection (not explicit relays)
if (relays || !reasoning?.length) return undefined;
return chunkFiltersByRelay(resolvedFilter, reasoning);
}, [relays, reasoning, resolvedFilter]);
const stableRelayFilterMap = useStableValue(relayFilterMap);
const {
events,
loading,
@@ -840,7 +947,11 @@ export default function ReqViewer({
`req-${JSON.stringify(filter)}-${closeOnEose}`,
resolvedFilter,
normalizedRelays,
{ limit: resolvedFilter.limit || 50, stream },
{
limit: resolvedFilter.limit || 50,
stream,
relayFilterMap: stableRelayFilterMap,
},
);
const [viewMode, setViewMode] = useState(view);
@@ -1369,6 +1480,7 @@ export default function ReqViewer({
nip05PTags={nip05PTags}
domainAuthors={domainAuthors}
domainPTags={domainPTags}
relayFilterMap={stableRelayFilterMap}
/>
)}

View File

@@ -11,6 +11,8 @@ import { deriveOverallState } from "@/lib/req-state-machine";
interface UseReqTimelineEnhancedOptions {
limit?: number;
stream?: boolean;
/** Per-relay chunked filters from NIP-65 outbox splitting */
relayFilterMap?: Record<string, Filter[]>;
}
interface UseReqTimelineEnhancedReturn {
@@ -49,7 +51,8 @@ export function useReqTimelineEnhanced(
options: UseReqTimelineEnhancedOptions = { limit: 50 },
): UseReqTimelineEnhancedReturn {
const eventStore = useEventStore();
const { limit, stream = false } = options;
const { limit, stream = false, relayFilterMap } = options;
const stableRelayFilterMap = useStableValue(relayFilterMap);
// Core state (compatible with original useReqTimeline)
const [loading, setLoading] = useState(false);
@@ -191,8 +194,14 @@ export function useReqTimelineEnhanced(
const subscriptions = relays.map((url) => {
const relay = pool.relay(url);
// Use per-relay chunked filters if available, otherwise use the full filter
const relayFilters = stableRelayFilterMap?.[url];
const filtersForRelay = relayFilters
? relayFilters.map((f) => ({ ...f, limit: limit || f.limit }))
: filtersWithLimit;
return relay
.subscription(filtersWithLimit, {
.subscription(filtersForRelay, {
reconnect: 5, // v5: retries renamed to reconnect
resubscribe: true,
})
@@ -317,7 +326,15 @@ export function useReqTimelineEnhanced(
return () => {
subscriptions.forEach((sub) => sub.unsubscribe());
};
}, [id, stableFilters, stableRelays, limit, stream, eventStore]);
}, [
id,
stableFilters,
stableRelays,
stableRelayFilterMap,
limit,
stream,
eventStore,
]);
// Derive overall state from individual relay states
const overallState = useMemo(() => {

View File

@@ -0,0 +1,214 @@
import { describe, it, expect } from "vitest";
import { chunkFiltersByRelay } from "./relay-filter-chunking";
import type { RelaySelectionReasoning } from "@/types/relay-selection";
describe("chunkFiltersByRelay", () => {
  const relay1 = "wss://relay1.example.com/";
  const relay2 = "wss://relay2.example.com/";
  const relay3 = "wss://relay3.example.com/";

  const alice = "aaaa".repeat(16);
  const bob = "bbbb".repeat(16);
  const carol = "cccc".repeat(16);
  const dave = "dddd".repeat(16);

  // Shorthand builder for a reasoning entry. Defaults produce a regular
  // (non-fallback) relay with no readers, which is the common case here.
  const entry = (
    relay: string,
    writers: string[],
    readers: string[] = [],
    isFallback = false,
  ): RelaySelectionReasoning => ({ relay, writers, readers, isFallback });

  it("splits 2 authors on different relays so each only gets its author", () => {
    const result = chunkFiltersByRelay({ kinds: [1], authors: [alice, bob] }, [
      entry(relay1, [alice]),
      entry(relay2, [bob]),
    ]);

    expect(result[relay1]).toEqual([{ kinds: [1], authors: [alice] }]);
    expect(result[relay2]).toEqual([{ kinds: [1], authors: [bob] }]);
  });

  it("gives both authors to a relay when they share it", () => {
    const result = chunkFiltersByRelay({ kinds: [1], authors: [alice, bob] }, [
      entry(relay1, [alice, bob]),
    ]);

    expect(result[relay1]).toEqual([{ kinds: [1], authors: [alice, bob] }]);
  });

  it("gives fallback relays the full unmodified filter", () => {
    const filter = { kinds: [1], authors: [alice, bob], "#p": [carol] };
    const result = chunkFiltersByRelay(filter, [
      entry(relay1, [alice]),
      entry(relay3, [], [], true),
    ]);

    // The fallback relay receives the exact original filter.
    expect(result[relay3]).toEqual([filter]);

    // relay1 is chunked, but bob has no assignment, so he rides along.
    const relay1Authors = result[relay1]![0].authors;
    expect(relay1Authors).toContain(alice);
    expect(relay1Authors).toContain(bob);
  });

  it("includes unassigned authors (no kind:10002) in ALL relay filters", () => {
    const result = chunkFiltersByRelay(
      { kinds: [1], authors: [alice, bob, dave] },
      [entry(relay1, [alice]), entry(relay2, [bob])],
    );

    // dave has no relay assignment and must appear on both relays.
    const relay1Authors = result[relay1]![0].authors;
    const relay2Authors = result[relay2]![0].authors;

    expect(relay1Authors).toContain(alice);
    expect(relay1Authors).toContain(dave);
    expect(relay1Authors).not.toContain(bob);

    expect(relay2Authors).toContain(bob);
    expect(relay2Authors).toContain(dave);
    expect(relay2Authors).not.toContain(alice);
  });

  it("passes #p through unchanged to all relays", () => {
    const result = chunkFiltersByRelay(
      { kinds: [1], authors: [alice, bob], "#p": [carol, dave] },
      [entry(relay1, [alice], [carol]), entry(relay2, [bob])],
    );

    // The full #p array must survive untouched on every relay.
    expect(result[relay1]![0]["#p"]).toEqual([carol, dave]);
    expect(result[relay2]![0]["#p"]).toEqual([carol, dave]);
  });

  it("returns empty object for empty reasoning", () => {
    expect(
      chunkFiltersByRelay({ kinds: [1], authors: [alice] }, []),
    ).toEqual({});
  });

  it("preserves non-pubkey filter fields", () => {
    const richFilter = {
      kinds: [1, 30023],
      authors: [alice],
      since: 1000,
      until: 2000,
      limit: 50,
      "#t": ["nostr"],
      "#p": [carol],
      search: "hello",
    };
    const result = chunkFiltersByRelay(richFilter, [entry(relay1, [alice])]);

    // Every non-author field carries over verbatim.
    expect(result[relay1]).toEqual([richFilter]);
  });

  it("returns empty object for filter with no authors", () => {
    // Filter only has #p, no authors — nothing to chunk
    const result = chunkFiltersByRelay({ kinds: [1], "#p": [carol] }, [
      entry(relay1, [alice], [carol]),
    ]);

    expect(result).toEqual({});
  });

  it("returns empty object for filter with no authors and no #p", () => {
    expect(
      chunkFiltersByRelay({ kinds: [1] }, [entry(relay1, [alice])]),
    ).toEqual({});
  });

  it("handles filter array input — each filter chunked independently and merged per relay", () => {
    const result = chunkFiltersByRelay(
      [
        { kinds: [1], authors: [alice, bob] },
        { kinds: [7], authors: [alice] },
      ],
      [entry(relay1, [alice], [carol]), entry(relay2, [bob])],
    );

    // relay1 gets alice from both filters
    expect(result[relay1]).toHaveLength(2);
    expect(result[relay1]![0]).toEqual({ kinds: [1], authors: [alice] });
    expect(result[relay1]![1]).toEqual({ kinds: [7], authors: [alice] });

    // relay2 gets bob from first filter only
    expect(result[relay2]).toHaveLength(1);
    expect(result[relay2]![0]).toEqual({ kinds: [1], authors: [bob] });
  });

  it("skips a relay when it has no relevant authors", () => {
    // dave is assigned to relay2 but not in the filter — relay2 gets skipped
    const result = chunkFiltersByRelay({ kinds: [1], authors: [alice] }, [
      entry(relay1, [alice]),
      entry(relay2, [dave]),
    ]);

    expect(result[relay1]).toBeDefined();
    expect(result[relay2]).toBeUndefined();
  });

  it("deduplicates authors that appear in both reasoning and unassigned", () => {
    const result = chunkFiltersByRelay({ kinds: [1], authors: [alice] }, [
      entry(relay1, [alice]),
    ]);

    // alice should appear exactly once
    expect(result[relay1]![0].authors).toEqual([alice]);
  });
});

View File

@@ -0,0 +1,84 @@
/**
* Per-Relay Filter Chunking (Outbox-Aware REQ Splitting)
*
* Splits filters so each relay only receives the authors relevant to it,
* based on NIP-65 relay selection reasoning.
*
* Only `authors` are chunked (by outbox/write relays). All other filter
* fields — including `#p` — are passed through unchanged to every relay.
* `#p` is a content filter ("find events tagging these pubkeys"), not a
* routing signal, so it belongs on all relays.
*/
import type { Filter } from "nostr-tools";
import type { RelaySelectionReasoning } from "@/types/relay-selection";
/**
 * Build per-relay chunked filters from relay selection reasoning.
 *
 * For each non-fallback relay, `authors` is narrowed to the writers that
 * NIP-65 reasoning assigned to that relay, plus any authors with no relay
 * assignment at all (those must be queried everywhere). Fallback relays
 * receive the original filter unmodified. A relay that ends up with zero
 * relevant authors for a filter gets no entry for that filter; filters
 * without an `authors` field are skipped entirely.
 *
 * Returns a plain object (not Map) so useStableValue (JSON.stringify) works.
 */
export function chunkFiltersByRelay(
  filters: Filter | Filter[],
  reasoning: RelaySelectionReasoning[],
): Record<string, Filter[]> {
  if (!reasoning.length) return {};
  const filterArray = Array.isArray(filters) ? filters : [filters];

  // Writers assigned to at least one non-fallback relay. Authors outside
  // this set have no known outbox and therefore go to every relay.
  const allAssignedWriters = new Set<string>();
  for (const r of reasoning) {
    if (!r.isFallback) {
      for (const w of r.writers) allAssignedWriters.add(w);
    }
  }

  const result: Record<string, Filter[]> = {};

  for (const filter of filterArray) {
    const originalAuthors = filter.authors;

    // If filter has no authors, nothing to chunk
    if (!originalAuthors?.length) continue;

    // Hoisted out of the reasoning loop below: this membership set is
    // per-filter, not per-relay, so build it once instead of once per relay.
    const authorSet = new Set(originalAuthors);

    // Find unassigned authors (no kind:10002) — these go to ALL relays
    const unassignedAuthors = originalAuthors.filter(
      (a) => !allAssignedWriters.has(a),
    );

    // Everything except `authors` passes through unchanged to each relay.
    const { authors: _authors, ...base } = filter;

    for (const r of reasoning) {
      // Fallback relays get the full original filter
      if (r.isFallback) {
        (result[r.relay] ??= []).push(filter);
        continue;
      }

      // This relay's share: its assigned writers that appear in the filter,
      // plus every unassigned author (deduplicated via Set).
      const relayAuthors = r.writers.filter((w) => authorSet.has(w));
      const chunkedAuthors = [
        ...new Set([...relayAuthors, ...unassignedAuthors]),
      ];

      // If no authors for this relay, skip it
      if (chunkedAuthors.length === 0) continue;

      (result[r.relay] ??= []).push({ ...base, authors: chunkedAuthors });
    }
  }

  return result;
}