performReq now uses "read" (inbox) relays of the pubkeys referenced in p/P tags when no authors are specified in outbox mode.

This commit is contained in:
fiatjaf
2026-04-02 03:46:54 -03:00
parent 65ddf7b821
commit 2bc1cf3417

144
req.go
View File

@@ -291,6 +291,24 @@ func performReq(
} else if outbox {
defs := make([]nostr.DirectedFilter, 0, len(filter.Authors)*2)
pTagPubkeys := make([]nostr.PubKey, 0, 4)
if len(filter.Authors) == 0 && filter.Tags != nil {
seen := make(map[nostr.PubKey]struct{})
pTags := append([]string{}, filter.Tags["p"]...)
pTags = append(pTags, filter.Tags["P"]...)
for _, value := range pTags {
pubkey, err := parsePubKey(value)
if err != nil {
continue
}
if _, ok := seen[pubkey]; ok {
continue
}
seen[pubkey] = struct{}{}
pTagPubkeys = append(pTagPubkeys, pubkey)
}
}
for _, relayUrl := range relayUrls {
defs = append(defs, nostr.DirectedFilter{
Filter: filter,
@@ -298,54 +316,96 @@ func performReq(
})
}
// relays for each pubkey
errg := errgroup.Group{}
errg.SetLimit(16)
mu := sync.Mutex{}
logverbose("gathering outbox relays for %d authors...\n", len(filter.Authors))
for _, pubkey := range filter.Authors {
errg.Go(func() error {
n := int(outboxRelaysPerPubKey)
for _, url := range sys.FetchOutboxRelays(ctx, pubkey, n) {
if slices.Contains(relayUrls, url) {
// already specified globally, ignore
continue
}
if !nostr.IsValidRelayURL(url) {
continue
}
matchUrl := func(def nostr.DirectedFilter) bool { return def.Relay == url }
idx := slices.IndexFunc(defs, matchUrl)
if idx == -1 {
// new relay, add it
mu.Lock()
// check again after locking to prevent races
idx = slices.IndexFunc(defs, matchUrl)
if idx == -1 {
// then add it
filter := filter.Clone()
filter.Authors = []nostr.PubKey{pubkey}
defs = append(defs, nostr.DirectedFilter{
Filter: filter,
Relay: url,
})
mu.Unlock()
continue // done with this relay url
if len(filter.Authors) == 0 && len(pTagPubkeys) > 0 {
// relays for p tags when no authors are given
errg := errgroup.Group{}
errg.SetLimit(16)
mu := sync.Mutex{}
logverbose("gathering inbox relays for %d p-tags...\n", len(pTagPubkeys))
for _, pubkey := range pTagPubkeys {
errg.Go(func() error {
n := int(outboxRelaysPerPubKey)
for _, url := range sys.FetchInboxRelays(ctx, pubkey, n) {
if slices.Contains(relayUrls, url) {
// already specified globally, ignore
continue
}
if !nostr.IsValidRelayURL(url) {
continue
}
// otherwise we'll just use the idx
mu.Unlock()
matchUrl := func(def nostr.DirectedFilter) bool { return def.Relay == url }
idx := slices.IndexFunc(defs, matchUrl)
if idx == -1 {
// new relay, add it
mu.Lock()
idx = slices.IndexFunc(defs, matchUrl)
if idx == -1 {
defs = append(defs, nostr.DirectedFilter{
Filter: filter,
Relay: url,
})
mu.Unlock()
continue
}
mu.Unlock()
}
}
// existing relay, add this pubkey
defs[idx].Authors = append(defs[idx].Authors, pubkey)
}
return nil
})
}
errg.Wait()
} else {
// relays for each pubkey
errg := errgroup.Group{}
errg.SetLimit(16)
mu := sync.Mutex{}
logverbose("gathering outbox relays for %d authors...\n", len(filter.Authors))
for _, pubkey := range filter.Authors {
errg.Go(func() error {
n := int(outboxRelaysPerPubKey)
for _, url := range sys.FetchOutboxRelays(ctx, pubkey, n) {
if slices.Contains(relayUrls, url) {
// already specified globally, ignore
continue
}
if !nostr.IsValidRelayURL(url) {
continue
}
return nil
})
matchUrl := func(def nostr.DirectedFilter) bool { return def.Relay == url }
idx := slices.IndexFunc(defs, matchUrl)
if idx == -1 {
// new relay, add it
mu.Lock()
// check again after locking to prevent races
idx = slices.IndexFunc(defs, matchUrl)
if idx == -1 {
// then add it
filter := filter.Clone()
filter.Authors = []nostr.PubKey{pubkey}
defs = append(defs, nostr.DirectedFilter{
Filter: filter,
Relay: url,
})
mu.Unlock()
continue // done with this relay url
}
// otherwise we'll just use the idx
mu.Unlock()
}
// existing relay, add this pubkey
defs[idx].Authors = append(defs[idx].Authors, pubkey)
}
return nil
})
}
errg.Wait()
}
errg.Wait()
if stream {
logverbose("running subscription with %d directed filters...\n", len(defs))