move nostr-sdk repository into here because why not?

fiatjaf
2024-09-10 22:37:48 -03:00
parent c6ea51653b
commit 072da132f4
29 changed files with 1970 additions and 33 deletions

sdk/cache/interface.go

@@ -0,0 +1,10 @@
package cache
import "time"
type Cache32[V any] interface {
Get(k string) (v V, ok bool)
Delete(k string)
Set(k string, v V) bool
SetWithTTL(k string, v V, d time.Duration) bool
}

sdk/cache/memory/cache.go

@@ -0,0 +1,48 @@
package cache_memory
import (
"encoding/binary"
"encoding/hex"
"time"
ristretto "github.com/fiatjaf/generic-ristretto"
)
type RistrettoCache[V any] struct {
Cache *ristretto.Cache[string, V]
}
func New32[V any](max int64) *RistrettoCache[V] {
cache, _ := ristretto.NewCache(&ristretto.Config[string, V]{
NumCounters: max * 10,
MaxCost: max,
BufferItems: 64,
KeyToHash: func(key string) (uint64, uint64) { return h32(key), 0 },
})
return &RistrettoCache[V]{Cache: cache}
}
func (s RistrettoCache[V]) Get(k string) (v V, ok bool) { return s.Cache.Get(k) }
func (s RistrettoCache[V]) Delete(k string) { s.Cache.Del(k) }
func (s RistrettoCache[V]) Set(k string, v V) bool { return s.Cache.Set(k, v, 1) }
func (s RistrettoCache[V]) SetWithTTL(k string, v V, d time.Duration) bool {
return s.Cache.SetWithTTL(k, v, 1, d)
}
func h32(key string) uint64 {
// we get an event id or pubkey as hex,
// so just take its last 8 hex characters (4 bytes) and turn them into a uint64
return shortUint64(key)
}
func shortUint64(idOrPubkey string) uint64 {
length := len(idOrPubkey)
if length < 8 {
return 0
}
b, err := hex.DecodeString(idOrPubkey[length-8:])
if err != nil {
return 0
}
return uint64(binary.BigEndian.Uint32(b))
}
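
Below is a minimal usage sketch of this memory cache, assuming the import path introduced by this commit (github.com/nbd-wtf/go-nostr/sdk/cache/memory); the keys and values are made up for illustration.

package main

import (
	"fmt"
	"time"

	cache_memory "github.com/nbd-wtf/go-nostr/sdk/cache/memory"
)

func main() {
	// cache up to ~2000 string values, keyed by hex event ids or pubkeys
	c := cache_memory.New32[string](2000)

	// Set/SetWithTTL only report whether the item was admitted; ristretto is
	// allowed to drop writes, so callers treat the cache as best-effort
	c.Set("3bf0c63fcb93463407af97a5e5ee64fa883d107ef9e558472c4eb9aaaefa459d", "fiatjaf")
	c.SetWithTTL("79be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f81798", "g", time.Hour)

	// writes are applied asynchronously, so an immediate Get may still miss
	time.Sleep(10 * time.Millisecond)
	if v, ok := c.Get("3bf0c63fcb93463407af97a5e5ee64fa883d107ef9e558472c4eb9aaaefa459d"); ok {
		fmt.Println("cached:", v)
	}
}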

sdk/follows.go

@@ -0,0 +1,50 @@
package sdk
import (
"context"
"net/url"
"strings"
"github.com/nbd-wtf/go-nostr"
)
type FollowList = GenericList[Follow]
type Follow struct {
Pubkey string
Relay string
Petname string
}
func (f Follow) Value() string { return f.Pubkey }
func (sys *System) FetchFollowList(ctx context.Context, pubkey string) FollowList {
fl, _ := fetchGenericList[Follow](sys, ctx, pubkey, 3, parseFollow, sys.FollowListCache, false)
return fl
}
func parseFollow(tag nostr.Tag) (fw Follow, ok bool) {
if len(tag) < 2 {
return fw, false
}
if tag[0] != "p" {
return fw, false
}
fw.Pubkey = tag[1]
if !nostr.IsValidPublicKey(fw.Pubkey) {
return fw, false
}
if len(tag) > 2 {
if _, err := url.Parse(tag[2]); err == nil {
fw.Relay = nostr.NormalizeURL(tag[2])
}
if len(tag) > 3 {
fw.Petname = strings.TrimSpace(tag[3])
}
}
return fw, true
}
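
A test-style sketch of the tag shapes parseFollow accepts; the pubkey, relay and petname values are made up, and this test is not part of the commit.

package sdk

import (
	"testing"

	"github.com/nbd-wtf/go-nostr"
	"github.com/stretchr/testify/require"
)

func TestParseFollowSketch(t *testing.T) {
	// full tag: pubkey + relay hint + petname
	fw, ok := parseFollow(nostr.Tag{
		"p",
		"3bf0c63fcb93463407af97a5e5ee64fa883d107ef9e558472c4eb9aaaefa459d",
		"wss://relay.example.com",
		"alice",
	})
	require.True(t, ok)
	require.Equal(t, "alice", fw.Petname)

	// a bare ["p", pubkey] tag is also a valid follow
	_, ok = parseFollow(nostr.Tag{"p", "3bf0c63fcb93463407af97a5e5ee64fa883d107ef9e558472c4eb9aaaefa459d"})
	require.True(t, ok)
}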

sdk/helpers.go

@@ -0,0 +1,21 @@
package sdk
import (
"strings"
)
// IsVirtualRelay returns true if the given normalized relay URL shouldn't be considered for outbox-model calculations.
func IsVirtualRelay(url string) bool {
if len(url) < 6 {
// this is just invalid
return true
}
if strings.HasPrefix(url, "wss://feeds.nostr.band") ||
strings.HasPrefix(url, "wss://filter.nostr.wine") ||
strings.HasPrefix(url, "wss://cache") {
return true
}
return false
}

sdk/hints/interface.go

@@ -0,0 +1,8 @@
package hints
import "github.com/nbd-wtf/go-nostr"
type HintsDB interface {
TopN(pubkey string, n int) []string
Save(pubkey string, relay string, key HintKey, score nostr.Timestamp)
}

sdk/hints/keys.go

@@ -0,0 +1,49 @@
package hints
import "github.com/nbd-wtf/go-nostr"
const END_OF_WORLD nostr.Timestamp = 2208999600 // 2040-01-01
type HintKey int
const (
LastFetchAttempt HintKey = iota
MostRecentEventFetched
LastInRelayList
LastInTag
LastInNprofile
LastInNevent
LastInNIP05
)
var KeyBasePoints = [7]int64{
-500, // attempting has negative power because it may fail
700, // when it succeeds that should cancel the negative effect of trying
350, // a relay list is a very strong indicator
5, // tag hints are often autogenerated so we don't care very much about them (that may change)
22, // it feels like people take nprofiles slightly more seriously so we value these a bit more
8, // these are also not often too bad
7, // nip05 hints should be a strong indicator, although in practice they're kinda bad
}
func (hk HintKey) BasePoints() int64 { return KeyBasePoints[hk] }
func (hk HintKey) String() string {
switch hk {
case LastFetchAttempt:
return "last_fetch_attempt"
case MostRecentEventFetched:
return "most_recent_event_fetched"
case LastInRelayList:
return "last_in_relay_list"
case LastInTag:
return "last_in_tag"
case LastInNprofile:
return "last_in_nprofile"
case LastInNevent:
return "last_in_nevent"
case LastInNIP05:
return "last_in_nip05"
}
return "<unexpected>"
}

@@ -0,0 +1,142 @@
package memory
import (
"fmt"
"math"
"slices"
"sync"
"github.com/nbd-wtf/go-nostr"
"github.com/nbd-wtf/go-nostr/sdk/hints"
)
var _ hints.HintsDB = (*HintDB)(nil)
type HintDB struct {
RelayBySerial []string
OrderedRelaysByPubKey map[string]RelaysForPubKey
sync.Mutex
}
func NewHintDB() *HintDB {
return &HintDB{
RelayBySerial: make([]string, 0, 100),
OrderedRelaysByPubKey: make(map[string]RelaysForPubKey, 100),
}
}
func (db *HintDB) Save(pubkey string, relay string, key hints.HintKey, ts nostr.Timestamp) {
db.Lock()
defer db.Unlock()
// (hints older than ~180 days could be discarded as unusable here, but no such cutoff is applied yet)
relayIndex := slices.Index(db.RelayBySerial, relay)
if relayIndex == -1 {
relayIndex = len(db.RelayBySerial)
db.RelayBySerial = append(db.RelayBySerial, relay)
}
// fmt.Println(" ", relay, "index", relayIndex, "--", "adding", hints.HintKey(key).String(), ts)
rfpk, _ := db.OrderedRelaysByPubKey[pubkey]
entries := rfpk.Entries
entryIndex := slices.IndexFunc(entries, func(re RelayEntry) bool { return re.Relay == relayIndex })
if entryIndex == -1 {
// we don't have an entry for this relay, so add one
entryIndex = len(entries)
entry := RelayEntry{
Relay: relayIndex,
}
entry.Timestamps[key] = ts
entries = append(entries, entry)
} else {
// just update this entry
if entries[entryIndex].Timestamps[key] < ts {
entries[entryIndex].Timestamps[key] = ts
} else {
// no need to update anything
return
}
}
rfpk.Entries = entries
db.OrderedRelaysByPubKey[pubkey] = rfpk
}
func (db *HintDB) TopN(pubkey string, n int) []string {
db.Lock()
defer db.Unlock()
urls := make([]string, 0, n)
if rfpk, ok := db.OrderedRelaysByPubKey[pubkey]; ok {
// sort everything from scratch
slices.SortFunc(rfpk.Entries, func(a, b RelayEntry) int {
return int(b.Sum() - a.Sum())
})
for i, re := range rfpk.Entries {
urls = append(urls, db.RelayBySerial[re.Relay])
if i+1 == n {
break
}
}
}
return urls
}
func (db *HintDB) PrintScores() {
db.Lock()
defer db.Unlock()
fmt.Println("= print scores")
for pubkey, rfpk := range db.OrderedRelaysByPubKey {
fmt.Println("== relay scores for", pubkey)
for i, re := range rfpk.Entries {
fmt.Printf(" %3d :: %30s (%3d) ::> %12d\n", i, db.RelayBySerial[re.Relay], re.Relay, re.Sum())
}
}
}
type RelaysForPubKey struct {
Entries []RelayEntry
}
type RelayEntry struct {
Relay int
Timestamps [8]nostr.Timestamp
}
func (re RelayEntry) Sum() int64 {
now := nostr.Now() + 24*60*60
var sum int64
for i, ts := range re.Timestamps {
if ts == 0 {
continue
}
hk := hints.HintKey(i)
divisor := int64(now - ts)
if divisor == 0 {
divisor = 1
} else {
divisor = int64(math.Pow(float64(divisor), 1.3))
}
multiplier := hk.BasePoints()
value := multiplier * 10000000000 / divisor
// fmt.Println(" ", i, "value:", value)
sum += value
}
return sum
}
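
To make the scoring easier to reason about, here is a standalone sketch that approximates the per-hint contribution computed in Sum above: base points scaled by 1e10 and divided by the hint's age raised to 1.3. The ages are illustrative; note that Sum itself measures age against nostr.Now()+24h, so even brand-new hints have at least a day of age.

package main

import (
	"fmt"
	"math"

	"github.com/nbd-wtf/go-nostr/sdk/hints"
)

// score approximates one term of RelayEntry.Sum: older hints decay
// polynomially while stronger hint kinds start from a bigger base
func score(hk hints.HintKey, ageInSeconds int64) int64 {
	if ageInSeconds <= 0 {
		ageInSeconds = 1
	}
	return hk.BasePoints() * 10000000000 / int64(math.Pow(float64(ageInSeconds), 1.3))
}

func main() {
	day := int64(60 * 60 * 24)
	fmt.Println("relay list, 10 days old:", score(hints.LastInRelayList, 10*day))
	fmt.Println("tag hint, 1 day old:    ", score(hints.LastInTag, 1*day))
}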

@@ -0,0 +1,143 @@
package memory
import (
"testing"
"time"
"github.com/nbd-wtf/go-nostr"
"github.com/nbd-wtf/go-nostr/sdk/hints"
"github.com/stretchr/testify/require"
)
func TestRelayPicking(t *testing.T) {
hdb := NewHintDB()
const key1 = "0000000000000000000000000000000000000000000000000000000000000001"
const key2 = "0000000000000000000000000000000000000000000000000000000000000002"
const key3 = "0000000000000000000000000000000000000000000000000000000000000003"
const key4 = "0000000000000000000000000000000000000000000000000000000000000004"
const relayA = "wss://aaa.com"
const relayB = "wss://bbb.online"
const relayC = "wss://ccc.technology"
hour := nostr.Timestamp((time.Hour).Seconds())
day := hour * 24
// key1: finding out
// add some random parameters things and see what we get
hdb.Save(key1, relayA, hints.LastInTag, nostr.Now()-60*hour)
hdb.Save(key1, relayB, hints.LastInRelayList, nostr.Now()-day*10)
hdb.Save(key1, relayB, hints.LastInNevent, nostr.Now()-day*30)
hdb.Save(key1, relayA, hints.LastInNprofile, nostr.Now()-hour*10)
hdb.PrintScores()
require.Equal(t, []string{relayB, relayA}, hdb.TopN(key1, 3))
hdb.Save(key1, relayA, hints.LastFetchAttempt, nostr.Now()-5*hour)
hdb.Save(key1, relayC, hints.LastInNIP05, nostr.Now()-5*hour)
hdb.PrintScores()
require.Equal(t, []string{relayB, relayC, relayA}, hdb.TopN(key1, 3))
hdb.Save(key1, relayC, hints.LastInTag, nostr.Now()-5*hour)
hdb.Save(key1, relayC, hints.LastFetchAttempt, nostr.Now()-5*hour)
hdb.PrintScores()
require.Equal(t, []string{relayB, relayA, relayC}, hdb.TopN(key1, 3))
hdb.Save(key1, relayA, hints.MostRecentEventFetched, nostr.Now()-day*60)
hdb.PrintScores()
require.Equal(t, []string{relayB, relayA, relayC}, hdb.TopN(key1, 3))
// now let's try a different thing for key2
// key2 has a relay list with A and B
hdb.Save(key2, relayA, hints.LastInRelayList, nostr.Now()-day*25)
hdb.Save(key2, relayB, hints.LastInRelayList, nostr.Now()-day*25)
// but it's old, recently we only see hints for relay C
hdb.Save(key2, relayC, hints.LastInTag, nostr.Now()-5*hour)
hdb.Save(key2, relayC, hints.LastInNIP05, nostr.Now()-5*hour)
hdb.Save(key2, relayC, hints.LastInNevent, nostr.Now()-5*hour)
hdb.Save(key2, relayC, hints.LastInNprofile, nostr.Now()-5*hour)
// at this point we just barely see C coming first
hdb.PrintScores()
require.Equal(t, []string{relayC, relayA, relayB}, hdb.TopN(key2, 3))
// yet a different thing for key3
// it doesn't have relay lists published because it's banned everywhere
// all it has are references to its posts from others
hdb.Save(key3, relayA, hints.LastInTag, nostr.Now()-day*2)
hdb.Save(key3, relayB, hints.LastInNevent, nostr.Now()-day)
hdb.Save(key3, relayB, hints.LastInTag, nostr.Now()-day)
hdb.PrintScores()
require.Equal(t, []string{relayB, relayA}, hdb.TopN(key3, 3))
// we try to fetch events for key3 and we get a very recent one for relay A, an older for relay B
hdb.Save(key3, relayA, hints.LastFetchAttempt, nostr.Now()-5*hour)
hdb.Save(key3, relayA, hints.MostRecentEventFetched, nostr.Now()-day)
hdb.Save(key3, relayB, hints.LastFetchAttempt, nostr.Now()-5*hour)
hdb.Save(key3, relayB, hints.MostRecentEventFetched, nostr.Now()-day*30)
hdb.PrintScores()
require.Equal(t, []string{relayA, relayB}, hdb.TopN(key3, 3))
// for key4 we'll try the alex jones case
// key4 used to publish normally to a bunch of big relays until it got banned
// then it started publishing only to its personal relay
// how long until clients realize that?
banDate := nostr.Now() - day*10
hdb.Save(key4, relayA, hints.LastInRelayList, banDate)
hdb.Save(key4, relayA, hints.LastFetchAttempt, banDate)
hdb.Save(key4, relayA, hints.MostRecentEventFetched, banDate)
hdb.Save(key4, relayA, hints.LastInNprofile, banDate+8*day)
hdb.Save(key4, relayA, hints.LastInNIP05, banDate+5*day)
hdb.Save(key4, relayB, hints.LastInRelayList, banDate)
hdb.Save(key4, relayB, hints.LastFetchAttempt, banDate)
hdb.Save(key4, relayB, hints.MostRecentEventFetched, banDate)
hdb.Save(key4, relayB, hints.LastInNevent, banDate+5*day)
hdb.Save(key4, relayB, hints.LastInNIP05, banDate+8*day)
hdb.Save(key4, relayB, hints.LastInNprofile, banDate+5*day)
hdb.PrintScores()
require.Equal(t, []string{relayA, relayB}, hdb.TopN(key4, 3))
// information about the new relay starts to spread through relay hints in tags only
hdb.Save(key4, relayC, hints.LastInTag, nostr.Now()-5*day)
hdb.Save(key4, relayC, hints.LastInTag, nostr.Now()-5*day)
hdb.Save(key4, relayC, hints.LastInNevent, nostr.Now()-5*day)
hdb.Save(key4, relayC, hints.LastInNIP05, nostr.Now()-5*day)
// as long as we see one tag hint the new relay will already be in our map
hdb.PrintScores()
require.Equal(t, []string{relayA, relayB, relayC}, hdb.TopN(key4, 3))
// client tries to fetch stuff from the old relays, but gets nothing new
hdb.Save(key4, relayA, hints.LastFetchAttempt, nostr.Now()-5*hour)
hdb.Save(key4, relayB, hints.LastFetchAttempt, nostr.Now()-5*hour)
// which is enough for us to transition to the new relay as the toppermost of the uppermost
hdb.PrintScores()
require.Equal(t, []string{relayC, relayA, relayB}, hdb.TopN(key4, 3))
// what if the big relays are attempting to game this algorithm by allowing some of our
// events from time to time while still shadowbanning us?
hdb.Save(key4, relayA, hints.MostRecentEventFetched, nostr.Now()-5*hour)
hdb.Save(key4, relayB, hints.MostRecentEventFetched, nostr.Now()-5*hour)
hdb.PrintScores()
require.Equal(t, []string{relayA, relayB, relayC}, hdb.TopN(key4, 3))
// we'll need overwhelming force from the third relay
// (actually just a relay list with just its name in it will be enough)
hdb.Save(key4, relayC, hints.LastFetchAttempt, nostr.Now()-5*hour)
hdb.Save(key4, relayC, hints.MostRecentEventFetched, nostr.Now()-6*hour)
hdb.Save(key4, relayC, hints.LastInRelayList, nostr.Now()-6*hour)
hdb.PrintScores()
require.Equal(t, []string{relayC, relayA, relayB}, hdb.TopN(key4, 3))
//
//
// things remain the same for key1, key2 and key3
require.Equal(t, []string{relayC, relayA}, hdb.TopN(key2, 2))
require.Equal(t, []string{relayB, relayA, relayC}, hdb.TopN(key1, 3))
require.Equal(t, []string{relayA, relayB}, hdb.TopN(key3, 3))
}

sdk/input.go

@@ -0,0 +1,65 @@
package sdk
import (
"context"
"encoding/hex"
"github.com/nbd-wtf/go-nostr"
"github.com/nbd-wtf/go-nostr/nip05"
"github.com/nbd-wtf/go-nostr/nip19"
)
// InputToProfile turns any npub/nprofile/hex/nip05 input into a ProfilePointer (or nil).
func InputToProfile(ctx context.Context, input string) *nostr.ProfilePointer {
// handle if it is a hex string
if len(input) == 64 {
if _, err := hex.DecodeString(input); err == nil {
return &nostr.ProfilePointer{PublicKey: input}
}
}
// handle nip19 codes, if that's the case
prefix, data, _ := nip19.Decode(input)
switch prefix {
case "npub":
input = data.(string)
return &nostr.ProfilePointer{PublicKey: input}
case "nprofile":
pp := data.(nostr.ProfilePointer)
return &pp
}
// handle nip05 ids, if that's the case
pp, _ := nip05.QueryIdentifier(ctx, input)
if pp != nil {
return pp
}
return nil
}
// InputToEventPointer turns any note/nevent/hex input into an EventPointer (or nil).
func InputToEventPointer(input string) *nostr.EventPointer {
// handle if it is a hex string
if len(input) == 64 {
if _, err := hex.DecodeString(input); err == nil {
return &nostr.EventPointer{ID: input}
}
}
// handle nip19 codes, if that's the case
prefix, data, _ := nip19.Decode(input)
switch prefix {
case "note":
if input, ok := data.(string); ok {
return &nostr.EventPointer{ID: input}
}
case "nevent":
if ep, ok := data.(nostr.EventPointer); ok {
return &ep
}
}
// not a valid event reference
return nil
}
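
A usage sketch for these helpers; the identifiers below are just examples, and the nip05 case performs a network lookup.

package main

import (
	"context"
	"fmt"

	"github.com/nbd-wtf/go-nostr/sdk"
)

func main() {
	ctx := context.Background()

	// hex keys, npub/nprofile codes and nip05 identifiers all resolve through
	// the same entry point
	for _, input := range []string{
		"3bf0c63fcb93463407af97a5e5ee64fa883d107ef9e558472c4eb9aaaefa459d",
		"npub180cvv07tjdrrgpa0j7j7tmnyl2yr6yr7l8j4s3evf6u64th6gkwsyjh6w6",
		"_@fiatjaf.com",
	} {
		if pp := sdk.InputToProfile(ctx, input); pp != nil {
			fmt.Println(input, "->", pp.PublicKey)
		}
	}
}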

sdk/list.go

@@ -0,0 +1,82 @@
package sdk
import (
"context"
"slices"
"time"
"github.com/nbd-wtf/go-nostr"
"github.com/nbd-wtf/go-nostr/sdk/cache"
)
type GenericList[I TagItemWithValue] struct {
PubKey string `json:"-"` // must always be set otherwise things will break
Event *nostr.Event `json:"-"` // may be empty if a contact list event wasn't found
Items []I
}
type TagItemWithValue interface {
Value() string
}
func fetchGenericList[I TagItemWithValue](
sys *System,
ctx context.Context,
pubkey string,
kind int,
parseTag func(nostr.Tag) (I, bool),
cache cache.Cache32[GenericList[I]],
skipFetch bool,
) (fl GenericList[I], fromInternal bool) {
if cache != nil {
if v, ok := cache.Get(pubkey); ok {
return v, true
}
}
events, _ := sys.StoreRelay.QuerySync(ctx, nostr.Filter{Kinds: []int{kind}, Authors: []string{pubkey}})
if len(events) != 0 {
items := parseItemsFromEventTags(events[0], parseTag)
v := GenericList[I]{
PubKey: pubkey,
Event: events[0],
Items: items,
}
if cache != nil {
cache.SetWithTTL(pubkey, v, time.Hour*6)
}
return v, true
}
v := GenericList[I]{PubKey: pubkey}
if !skipFetch {
thunk := sys.replaceableLoaders[kind].Load(ctx, pubkey)
evt, err := thunk()
if err == nil {
items := parseItemsFromEventTags(evt, parseTag)
v.Items = items
if cache != nil {
cache.SetWithTTL(pubkey, v, time.Hour*6)
}
sys.StoreRelay.Publish(ctx, *evt)
}
}
return v, false
}
func parseItemsFromEventTags[I TagItemWithValue](
evt *nostr.Event,
parseTag func(nostr.Tag) (I, bool),
) []I {
result := make([]I, 0, len(evt.Tags))
for _, tag := range evt.Tags {
item, ok := parseTag(tag)
if ok {
// check if this already exists before adding
if slices.IndexFunc(result, func(i I) bool { return i.Value() == item.Value() }) == -1 {
result = append(result, item)
}
}
}
return result
}
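
New list kinds are meant to be thin wrappers over this helper, in the same way follows.go above and mutes.go below are. A hypothetical bookmarks list (NIP-51 kind 10003, `e` tags), not part of this commit, could look like:

package sdk

import (
	"context"

	"github.com/nbd-wtf/go-nostr"
)

type BookmarkList = GenericList[Bookmark]

type Bookmark struct {
	EventID string
	Relay   string
}

func (b Bookmark) Value() string { return b.EventID }

func (sys *System) FetchBookmarkList(ctx context.Context, pubkey string) BookmarkList {
	bl, _ := fetchGenericList[Bookmark](sys, ctx, pubkey, 10003, parseBookmark, nil, false)
	return bl
}

func parseBookmark(tag nostr.Tag) (bm Bookmark, ok bool) {
	if len(tag) < 2 || tag[0] != "e" {
		return bm, false
	}
	bm.EventID = tag[1]
	if len(tag) > 2 && tag[2] != "" {
		bm.Relay = nostr.NormalizeURL(tag[2])
	}
	return bm, true
}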

sdk/metadata.go

@@ -0,0 +1,155 @@
package sdk
import (
"context"
"encoding/json"
"fmt"
"strings"
"sync"
"time"
"github.com/nbd-wtf/go-nostr"
"github.com/nbd-wtf/go-nostr/nip19"
"github.com/nbd-wtf/go-nostr/sdk/hints"
)
type ProfileMetadata struct {
PubKey string `json:"-"` // must always be set otherwise things will break
Event *nostr.Event `json:"-"` // may be empty if a profile metadata event wasn't found
// every one of these may be empty
Name string `json:"name,omitempty"`
DisplayName string `json:"display_name,omitempty"`
About string `json:"about,omitempty"`
Website string `json:"website,omitempty"`
Picture string `json:"picture,omitempty"`
Banner string `json:"banner,omitempty"`
NIP05 string `json:"nip05,omitempty"`
LUD16 string `json:"lud16,omitempty"`
}
func (p ProfileMetadata) Npub() string {
v, _ := nip19.EncodePublicKey(p.PubKey)
return v
}
func (p ProfileMetadata) NpubShort() string {
npub := p.Npub()
return npub[0:7] + "…" + npub[58:]
}
func (p ProfileMetadata) Nprofile(ctx context.Context, sys *System, nrelays int) string {
v, _ := nip19.EncodeProfile(p.PubKey, sys.FetchOutboxRelays(ctx, p.PubKey, 2))
return v
}
func (p ProfileMetadata) ShortName() string {
if p.Name != "" {
return p.Name
}
if p.DisplayName != "" {
return p.DisplayName
}
return p.NpubShort()
}
// FetchProfileFromInput takes an nprofile, npub, nip05 or hex pubkey and returns a ProfileMetadata,
// updating the HintsDB along the way with any relay hints found in the input
func (sys System) FetchProfileFromInput(ctx context.Context, nip19OrNip05Code string) (ProfileMetadata, error) {
p := InputToProfile(ctx, nip19OrNip05Code)
if p == nil {
return ProfileMetadata{}, fmt.Errorf("couldn't decode profile reference")
}
hintType := hints.LastInNIP05
if strings.HasPrefix(nip19OrNip05Code, "nprofile") {
hintType = hints.LastInNprofile
}
for _, r := range p.Relays {
nm := nostr.NormalizeURL(r)
if !IsVirtualRelay(nm) {
sys.Hints.Save(p.PublicKey, nm, hintType, nostr.Now())
}
}
pm := sys.FetchProfileMetadata(ctx, p.PublicKey)
return pm, nil
}
// FetchProfileMetadata fetches metadata for a given user from the local cache, or from the local store,
// or, failing these, from the target user's defined outbox relays -- then caches the result.
func (sys *System) FetchProfileMetadata(ctx context.Context, pubkey string) (pm ProfileMetadata) {
if v, ok := sys.MetadataCache.Get(pubkey); ok {
return v
}
res, _ := sys.StoreRelay.QuerySync(ctx, nostr.Filter{Kinds: []int{0}, Authors: []string{pubkey}})
if len(res) != 0 {
if m, err := ParseMetadata(res[0]); err == nil {
m.PubKey = pubkey
m.Event = res[0]
sys.MetadataCache.SetWithTTL(pubkey, m, time.Hour*6)
return m
}
}
pm.PubKey = pubkey
thunk0 := sys.replaceableLoaders[0].Load(ctx, pubkey)
evt, err := thunk0()
if err == nil {
pm, _ = ParseMetadata(evt)
// save on store even if the metadata json is malformed
if sys.StoreRelay != nil && pm.Event != nil {
sys.StoreRelay.Publish(ctx, *pm.Event)
}
}
// save on cache even if the metadata isn't found (unless the context was canceled)
if err == nil || err != context.Canceled {
sys.MetadataCache.SetWithTTL(pubkey, pm, time.Hour*6)
}
return pm
}
// FetchUserEvents fetches events from each user's outbox relays, grouping queries when possible.
func (sys *System) FetchUserEvents(ctx context.Context, filter nostr.Filter) (map[string][]*nostr.Event, error) {
filters, err := sys.ExpandQueriesByAuthorAndRelays(ctx, filter)
if err != nil {
return nil, fmt.Errorf("failed to expand queries: %w", err)
}
results := make(map[string][]*nostr.Event)
mu := sync.Mutex{} // guards results, which is written from the goroutines below
wg := sync.WaitGroup{}
wg.Add(len(filters))
for relayURL, filter := range filters {
go func(relayURL string, filter nostr.Filter) {
defer wg.Done()
filter.Limit = filter.Limit * len(filter.Authors) // hack
for ie := range sys.Pool.SubManyEose(ctx, []string{relayURL}, nostr.Filters{filter}) {
mu.Lock()
results[ie.PubKey] = append(results[ie.PubKey], ie.Event)
mu.Unlock()
}
}(relayURL, filter)
}
wg.Wait()
return results, nil
}
func ParseMetadata(event *nostr.Event) (meta ProfileMetadata, err error) {
if event.Kind != 0 {
err = fmt.Errorf("event %s is kind %d, not 0", event.ID, event.Kind)
} else if er := json.Unmarshal([]byte(event.Content), &meta); er != nil {
cont := event.Content
if len(cont) > 100 {
cont = cont[0:99]
}
err = fmt.Errorf("failed to parse metadata (%s) from event %s: %w", cont, event.ID, er)
}
meta.PubKey = event.PubKey
meta.Event = event
return meta, err
}
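
A small sketch of ParseMetadata on a hand-made kind-0 event (unsigned and with made-up content, just to show the expected shape):

package main

import (
	"fmt"

	"github.com/nbd-wtf/go-nostr"
	"github.com/nbd-wtf/go-nostr/sdk"
)

func main() {
	evt := &nostr.Event{
		Kind:    0,
		PubKey:  "3bf0c63fcb93463407af97a5e5ee64fa883d107ef9e558472c4eb9aaaefa459d",
		Content: `{"name":"fiatjaf","about":"just an example profile"}`,
	}

	meta, err := sdk.ParseMetadata(evt)
	if err != nil {
		panic(err)
	}
	// ShortName falls back to DisplayName and then to an abbreviated npub
	fmt.Println(meta.ShortName(), "-", meta.About)
}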

sdk/mutes.go

@@ -0,0 +1,10 @@
package sdk
import "context"
type MuteList = GenericList[Follow]
func (sys *System) FetchMuteList(ctx context.Context, pubkey string) MuteList {
ml, _ := fetchGenericList[Follow](sys, ctx, pubkey, 10000, parseFollow, nil, false)
return ml
}

sdk/outbox.go

@@ -0,0 +1,89 @@
package sdk
import (
"context"
"fmt"
"sync"
"time"
"github.com/nbd-wtf/go-nostr"
)
func (sys *System) FetchOutboxRelays(ctx context.Context, pubkey string, n int) []string {
if relays, ok := sys.outboxShortTermCache.Get(pubkey); ok {
if len(relays) > n {
relays = relays[0:n]
}
return relays
}
if rl, ok := sys.RelayListCache.Get(pubkey); !ok || (rl.Event != nil && rl.Event.CreatedAt < nostr.Now()-60*60*24*7) {
// try to fetch the relay list again if we don't have one or if ours is a week old
fetchGenericList(sys, ctx, pubkey, 10002, parseRelayFromKind10002, sys.RelayListCache, false)
}
relays := sys.Hints.TopN(pubkey, 6)
if len(relays) == 0 {
return []string{"wss://relay.damus.io", "wss://nos.lol"}
}
sys.outboxShortTermCache.SetWithTTL(pubkey, relays, time.Minute*2)
if len(relays) > n {
relays = relays[0:n]
}
return relays
}
func (sys *System) ExpandQueriesByAuthorAndRelays(
ctx context.Context,
filter nostr.Filter,
) (map[string]nostr.Filter, error) {
n := len(filter.Authors)
if n == 0 {
return nil, fmt.Errorf("no authors in filter")
}
relaysForPubkey := make(map[string][]string, n)
mu := sync.Mutex{}
wg := sync.WaitGroup{}
wg.Add(n)
for _, pubkey := range filter.Authors {
go func(pubkey string) {
defer wg.Done()
relayURLs := sys.FetchOutboxRelays(ctx, pubkey, 3)
c := 0
for _, r := range relayURLs {
relay, err := sys.Pool.EnsureRelay(r)
if err != nil {
continue
}
mu.Lock()
relaysForPubkey[pubkey] = append(relaysForPubkey[pubkey], relay.URL)
mu.Unlock()
c++
if c == 3 {
return
}
}
}(pubkey)
}
wg.Wait()
filterForRelay := make(map[string]nostr.Filter, n) // { [relay]: filter }
for pubkey, relays := range relaysForPubkey {
for _, relay := range relays {
flt, ok := filterForRelay[relay]
if !ok {
flt = filter.Clone()
filterForRelay[relay] = flt
}
flt.Authors = append(flt.Authors, pubkey)
}
}
return filterForRelay, nil
}
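
A sketch of how the expanded per-relay filters come out (this is essentially what FetchUserEvents in metadata.go consumes); the pubkey is an example and the call goes through the hints/relay-list machinery over the network.

package main

import (
	"context"
	"fmt"

	"github.com/nbd-wtf/go-nostr"
	"github.com/nbd-wtf/go-nostr/sdk"
)

func main() {
	sys := sdk.NewSystem()
	defer sys.Close()

	filters, err := sys.ExpandQueriesByAuthorAndRelays(context.Background(), nostr.Filter{
		Kinds: []int{nostr.KindTextNote},
		Authors: []string{
			"3bf0c63fcb93463407af97a5e5ee64fa883d107ef9e558472c4eb9aaaefa459d",
		},
		Limit: 20,
	})
	if err != nil {
		panic(err)
	}

	// each relay gets its own filter containing only the authors whose outbox
	// relays include that relay
	for relay, filter := range filters {
		fmt.Println(relay, "->", filter.Authors)
	}
}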

sdk/references.go

@@ -0,0 +1,108 @@
package sdk
import (
"regexp"
"strconv"
"strings"
"github.com/nbd-wtf/go-nostr"
"github.com/nbd-wtf/go-nostr/nip19"
)
type Reference struct {
Text string
Start int
End int
Profile *nostr.ProfilePointer
Event *nostr.EventPointer
Entity *nostr.EntityPointer
}
var mentionRegex = regexp.MustCompile(`\bnostr:((note|npub|naddr|nevent|nprofile)1\w+)\b|#\[(\d+)\]`)
// ParseReferences parses both NIP-08 and NIP-27 references in a single unifying interface.
func ParseReferences(evt *nostr.Event) []*Reference {
var references []*Reference
content := evt.Content
for _, ref := range mentionRegex.FindAllStringSubmatchIndex(evt.Content, -1) {
reference := &Reference{
Text: content[ref[0]:ref[1]],
Start: ref[0],
End: ref[1],
}
if ref[6] == -1 {
// didn't find a NIP-08 #[index] reference, so it's a NIP-27 mention
nip19code := content[ref[2]:ref[3]]
if prefix, data, err := nip19.Decode(nip19code); err == nil {
switch prefix {
case "npub":
reference.Profile = &nostr.ProfilePointer{
PublicKey: data.(string), Relays: []string{},
}
case "nprofile":
pp := data.(nostr.ProfilePointer)
reference.Profile = &pp
case "note":
reference.Event = &nostr.EventPointer{ID: data.(string), Relays: []string{}}
case "nevent":
evp := data.(nostr.EventPointer)
reference.Event = &evp
case "naddr":
addr := data.(nostr.EntityPointer)
reference.Entity = &addr
}
}
} else {
// it's a NIP-08 mention.
// parse the number, get data from event tags.
n := content[ref[6]:ref[7]]
idx, err := strconv.Atoi(n)
if err != nil || len(evt.Tags) <= idx {
continue
}
if tag := evt.Tags[idx]; tag != nil && len(tag) >= 2 {
switch tag[0] {
case "p":
relays := make([]string, 0, 1)
if len(tag) > 2 && tag[2] != "" {
relays = append(relays, tag[2])
}
reference.Profile = &nostr.ProfilePointer{
PublicKey: tag[1],
Relays: relays,
}
case "e":
relays := make([]string, 0, 1)
if len(tag) > 2 && tag[2] != "" {
relays = append(relays, tag[2])
}
reference.Event = &nostr.EventPointer{
ID: tag[1],
Relays: relays,
}
case "a":
if parts := strings.Split(tag[1], ":"); len(parts) == 3 {
kind, _ := strconv.Atoi(parts[0])
relays := make([]string, 0, 1)
if len(tag) > 2 && tag[2] != "" {
relays = append(relays, tag[2])
}
reference.Entity = &nostr.EntityPointer{
Identifier: parts[2],
PublicKey: parts[1],
Kind: kind,
Relays: relays,
}
}
}
}
}
references = append(references, reference)
}
return references
}

sdk/references_test.go

@@ -0,0 +1,108 @@
package sdk
import (
"fmt"
"testing"
"github.com/nbd-wtf/go-nostr"
)
func TestParseReferences(t *testing.T) {
evt := nostr.Event{
Tags: nostr.Tags{
{"p", "c9d556c6d3978d112d30616d0d20aaa81410e3653911dd67787b5aaf9b36ade8", "wss://nostr.com"},
{"e", "a84c5de86efc2ec2cff7bad077c4171e09146b633b7ad117fffe088d9579ac33", "wss://other.com", "reply"},
{"e", "31d7c2875b5fc8e6f9c8f9dc1f84de1b6b91d1947ea4c59225e55c325d330fa8", ""},
},
Content: "hello #[0], have you seen #[2]? it was made by nostr:nprofile1qqsvc6ulagpn7kwrcwdqgp797xl7usumqa6s3kgcelwq6m75x8fe8yc5usxdg on nostr:nevent1qqsvc6ulagpn7kwrcwdqgp797xl7usumqa6s3kgcelwq6m75x8fe8ychxp5v4! broken #[3]",
}
expected := []Reference{
{
Text: "#[0]",
Start: 6,
End: 10,
Profile: &nostr.ProfilePointer{
PublicKey: "c9d556c6d3978d112d30616d0d20aaa81410e3653911dd67787b5aaf9b36ade8",
Relays: []string{"wss://nostr.com"},
},
},
{
Text: "#[2]",
Start: 26,
End: 30,
Event: &nostr.EventPointer{
ID: "31d7c2875b5fc8e6f9c8f9dc1f84de1b6b91d1947ea4c59225e55c325d330fa8",
Relays: []string{},
},
},
{
Text: "nostr:nprofile1qqsvc6ulagpn7kwrcwdqgp797xl7usumqa6s3kgcelwq6m75x8fe8yc5usxdg",
Start: 47,
End: 123,
Profile: &nostr.ProfilePointer{
PublicKey: "cc6b9fea033f59c3c39a0407c5f1bfee439b077508d918cfdc0d6fd431d39393",
Relays: []string{},
},
},
{
Text: "nostr:nevent1qqsvc6ulagpn7kwrcwdqgp797xl7usumqa6s3kgcelwq6m75x8fe8ychxp5v4",
Start: 127,
End: 201,
Event: &nostr.EventPointer{
ID: "cc6b9fea033f59c3c39a0407c5f1bfee439b077508d918cfdc0d6fd431d39393",
Relays: []string{},
Author: "",
},
},
}
got := ParseReferences(&evt)
if len(got) != len(expected) {
t.Errorf("got %d references, expected %d", len(got), len(expected))
}
for i, g := range got {
e := expected[i]
if g.Text != e.Text {
t.Errorf("%d: got text %s, expected %s", i, g.Text, e.Text)
}
if g.Start != e.Start {
t.Errorf("%d: got start %d, expected %d", i, g.Start, e.Start)
}
if g.End != e.End {
t.Errorf("%d: got end %d, expected %d", i, g.End, e.End)
}
if (g.Entity == nil && e.Entity != nil) ||
(g.Event == nil && e.Event != nil) ||
(g.Profile == nil && e.Profile != nil) {
t.Errorf("%d: got some unexpected nil", i)
}
if g.Profile != nil && (g.Profile.PublicKey != e.Profile.PublicKey ||
len(g.Profile.Relays) != len(e.Profile.Relays) ||
(len(g.Profile.Relays) > 0 && g.Profile.Relays[0] != e.Profile.Relays[0])) {
t.Errorf("%d: profile value is wrong", i)
}
if g.Event != nil && (g.Event.ID != e.Event.ID ||
g.Event.Author != e.Event.Author ||
len(g.Event.Relays) != len(e.Event.Relays) ||
(len(g.Event.Relays) > 0 && g.Event.Relays[0] != e.Event.Relays[0])) {
fmt.Println(g.Event.ID, g.Event.Relays, len(g.Event.Relays), g.Event.Relays[0] == "")
fmt.Println(e.Event.Relays, len(e.Event.Relays))
t.Errorf("%d: event value is wrong", i)
}
if g.Entity != nil && (g.Entity.PublicKey != e.Entity.PublicKey ||
g.Entity.Identifier != e.Entity.Identifier ||
g.Entity.Kind != e.Entity.Kind ||
len(g.Entity.Relays) != len(e.Entity.Relays)) {
t.Errorf("%d: entity value is wrong", i)
}
}
}

sdk/relays.go

@@ -0,0 +1,41 @@
package sdk
import (
"github.com/nbd-wtf/go-nostr"
)
type RelayList = GenericList[Relay]
type Relay struct {
URL string
Inbox bool
Outbox bool
}
func (r Relay) Value() string { return r.URL }
func parseRelayFromKind10002(tag nostr.Tag) (rl Relay, ok bool) {
if u := tag.Value(); u != "" && tag[0] == "r" {
if !nostr.IsValidRelayURL(u) {
return rl, false
}
u := nostr.NormalizeURL(u)
relay := Relay{
URL: u,
}
if len(tag) == 2 {
relay.Inbox = true
relay.Outbox = true
} else if tag[2] == "write" {
relay.Outbox = true
} else if tag[2] == "read" {
relay.Inbox = true
}
return relay, true
}
return rl, false
}

sdk/replaceable_loader.go

@@ -0,0 +1,194 @@
package sdk
import (
"context"
"fmt"
"strconv"
"sync"
"time"
"github.com/graph-gophers/dataloader/v7"
"github.com/nbd-wtf/go-nostr"
)
type EventResult dataloader.Result[*nostr.Event]
func (sys *System) initializeDataloaders() {
sys.replaceableLoaders = make(map[int]*dataloader.Loader[string, *nostr.Event])
for _, kind := range []int{0, 3, 10000, 10001, 10002, 10003, 10004, 10005, 10006, 10007, 10015, 10030} {
sys.replaceableLoaders[kind] = sys.createReplaceableDataloader(kind)
}
}
func (sys *System) createReplaceableDataloader(kind int) *dataloader.Loader[string, *nostr.Event] {
return dataloader.NewBatchedLoader(
func(
ctx context.Context,
pubkeys []string,
) []*dataloader.Result[*nostr.Event] {
return sys.batchLoadReplaceableEvents(ctx, kind, pubkeys)
},
dataloader.WithBatchCapacity[string, *nostr.Event](60),
dataloader.WithClearCacheOnBatch[string, *nostr.Event](),
dataloader.WithWait[string, *nostr.Event](time.Millisecond*350),
)
}
func (sys *System) batchLoadReplaceableEvents(
ctx context.Context,
kind int,
pubkeys []string,
) []*dataloader.Result[*nostr.Event] {
ctx, cancel := context.WithTimeout(context.Background(), time.Second*4)
defer cancel()
batchSize := len(pubkeys)
results := make([]*dataloader.Result[*nostr.Event], batchSize)
keyPositions := make(map[string]int) // { [pubkey]: slice_index }
relayFilters := make(map[string]nostr.Filter) // { [relayUrl]: filter }
wg := sync.WaitGroup{}
wg.Add(len(pubkeys))
cm := sync.Mutex{}
for i, pubkey := range pubkeys {
// build batched queries for the external relays
keyPositions[pubkey] = i // this is to help us know where to save the result later
go func(i int, pubkey string) {
defer wg.Done()
// if we're attempting this query with a short key (last 8 characters), stop here
if len(pubkey) != 64 {
results[i] = &dataloader.Result[*nostr.Event]{
Error: fmt.Errorf("won't proceed to query relays with a shortened key (%d)", kind),
}
return
}
// save attempts here so we don't try the same failed query over and over
if doItNow := DoThisNotMoreThanOnceAnHour("repl:" + strconv.Itoa(kind) + pubkey); !doItNow {
results[i] = &dataloader.Result[*nostr.Event]{
Error: fmt.Errorf("last attempt failed, waiting more to try again"),
}
return
}
// gather relays we'll use for this pubkey
relays := sys.determineRelaysToQuery(ctx, pubkey, kind)
// by default we will return an error (this will be overwritten when we find an event)
results[i] = &dataloader.Result[*nostr.Event]{
Error: fmt.Errorf("couldn't find a kind %d event anywhere %v", kind, relays),
}
cm.Lock()
for _, relay := range relays {
// each relay will have a custom filter
filter, ok := relayFilters[relay]
if !ok {
filter = nostr.Filter{
Kinds: []int{kind},
Authors: make([]string, 0, batchSize-i /* this and all pubkeys after this can be added */),
}
}
filter.Authors = append(filter.Authors, pubkey)
relayFilters[relay] = filter
}
cm.Unlock()
}(i, pubkey)
}
// query all relays with the prepared filters
wg.Wait()
multiSubs := sys.batchReplaceableRelayQueries(ctx, relayFilters)
for {
select {
case evt, more := <-multiSubs:
if !more {
return results
}
// insert this event at the desired position
pos := keyPositions[evt.PubKey] // @unchecked: it must succeed because it must be a key we passed
if results[pos].Data == nil || results[pos].Data.CreatedAt < evt.CreatedAt {
results[pos] = &dataloader.Result[*nostr.Event]{Data: evt}
}
case <-ctx.Done():
return results
}
}
}
func (sys *System) determineRelaysToQuery(ctx context.Context, pubkey string, kind int) []string {
relays := make([]string, 0, 10)
// search in specific relays for user
if kind == 10002 {
// prevent infinite loops by jumping directly to this
relays = sys.Hints.TopN(pubkey, 3)
} else if kind == 0 {
// leave room for one hardcoded relay because people are stupid
relays = sys.FetchOutboxRelays(ctx, pubkey, 2)
} else {
relays = sys.FetchOutboxRelays(ctx, pubkey, 3)
}
// use a different set of extra relays depending on the kind
for len(relays) < 3 {
switch kind {
case 0:
relays = append(relays, pickNext(sys.MetadataRelays))
case 3:
relays = append(relays, pickNext(sys.FollowListRelays))
case 10002:
relays = append(relays, pickNext(sys.RelayListRelays))
default:
relays = append(relays, pickNext(sys.FallbackRelays))
}
}
return relays
}
// batchReplaceableRelayQueries subscribes to multiple relays using a different filter for each and returns
// a single channel with all results. it closes on EOSE or when all the expected events were returned.
//
// the number of expected events is given by the number of pubkeys in the .Authors filter field.
// because of that, batchReplaceableRelayQueries is only suitable for querying replaceable events -- and
// care must be taken to not include the same pubkey more than once in the filter .Authors array.
func (sys *System) batchReplaceableRelayQueries(
ctx context.Context,
relayFilters map[string]nostr.Filter,
) <-chan *nostr.Event {
all := make(chan *nostr.Event)
wg := sync.WaitGroup{}
wg.Add(len(relayFilters))
for url, filter := range relayFilters {
go func(url string, filter nostr.Filter) {
defer wg.Done()
n := len(filter.Authors)
ctx, cancel := context.WithTimeout(ctx, time.Millisecond*450+time.Millisecond*50*time.Duration(n))
defer cancel()
received := 0
for ie := range sys.Pool.SubManyEose(ctx, []string{url}, nostr.Filters{filter}) {
all <- ie.Event
received++
if received >= n {
// we got all events we asked for, unless the relay is shitty and sent us two from the same author
return
}
}
}(url, filter)
}
go func() {
wg.Wait()
close(all)
}()
return all
}
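
The practical effect of the dataloader above is that many concurrent profile fetches collapse into a few batched relay queries; a rough sketch of that access pattern (pubkeys are examples):

package main

import (
	"context"
	"fmt"
	"sync"

	"github.com/nbd-wtf/go-nostr/sdk"
)

func main() {
	sys := sdk.NewSystem()
	defer sys.Close()
	ctx := context.Background()

	pubkeys := []string{
		"3bf0c63fcb93463407af97a5e5ee64fa883d107ef9e558472c4eb9aaaefa459d",
		"82341f882b6eabcd2ba7f1ef90aad961cf074af15b9ef44a09f9d2a8fbfbe6a2",
	}

	// both calls land on the same kind-0 dataloader, which waits up to ~350ms
	// and then issues one batched query per relay instead of one per pubkey
	wg := sync.WaitGroup{}
	for _, pk := range pubkeys {
		wg.Add(1)
		go func(pk string) {
			defer wg.Done()
			meta := sys.FetchProfileMetadata(ctx, pk)
			fmt.Println(pk[:8], "->", meta.ShortName())
		}(pk)
	}
	wg.Wait()
}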

sdk/search.go

@@ -0,0 +1,24 @@
package sdk
import (
"context"
"github.com/nbd-wtf/go-nostr"
)
func (sys *System) SearchUsers(ctx context.Context, query string) []ProfileMetadata {
limit := 10
profiles := make([]ProfileMetadata, 0, limit*len(sys.UserSearchRelays))
for ie := range sys.Pool.SubManyEose(ctx, sys.UserSearchRelays, nostr.Filters{
{
Search: query,
Limit: limit,
},
}) {
m, _ := ParseMetadata(ie.Event)
profiles = append(profiles, m)
}
return profiles
}

sdk/system.go

@@ -0,0 +1,158 @@
package sdk
import (
"context"
"github.com/fiatjaf/eventstore"
"github.com/fiatjaf/eventstore/slicestore"
"github.com/graph-gophers/dataloader/v7"
"github.com/nbd-wtf/go-nostr"
"github.com/nbd-wtf/go-nostr/sdk/cache"
cache_memory "github.com/nbd-wtf/go-nostr/sdk/cache/memory"
"github.com/nbd-wtf/go-nostr/sdk/hints"
memory_hints "github.com/nbd-wtf/go-nostr/sdk/hints/memory"
)
type System struct {
RelayListCache cache.Cache32[RelayList]
FollowListCache cache.Cache32[FollowList]
MetadataCache cache.Cache32[ProfileMetadata]
Hints hints.HintsDB
Pool *nostr.SimplePool
RelayListRelays []string
FollowListRelays []string
MetadataRelays []string
FallbackRelays []string
UserSearchRelays []string
NoteSearchRelays []string
Store eventstore.Store
StoreRelay nostr.RelayStore
replaceableLoaders map[int]*dataloader.Loader[string, *nostr.Event]
outboxShortTermCache cache.Cache32[[]string]
}
type SystemModifier func(sys *System)
func NewSystem(mods ...SystemModifier) *System {
sys := &System{
RelayListCache: cache_memory.New32[RelayList](1000),
FollowListCache: cache_memory.New32[FollowList](1000),
MetadataCache: cache_memory.New32[ProfileMetadata](1000),
RelayListRelays: []string{"wss://purplepag.es", "wss://user.kindpag.es", "wss://relay.nos.social"},
FollowListRelays: []string{"wss://purplepag.es", "wss://user.kindpag.es", "wss://relay.nos.social"},
MetadataRelays: []string{"wss://purplepag.es", "wss://user.kindpag.es", "wss://relay.nos.social"},
FallbackRelays: []string{
"wss://relay.primal.net",
"wss://relay.damus.io",
"wss://nostr.wine",
"wss://nostr.mom",
"wss://offchain.pub",
"wss://nos.lol",
"wss://mostr.pub",
"wss://relay.nostr.band",
"wss://nostr21.com",
},
UserSearchRelays: []string{
"wss://nostr.wine",
"wss://relay.nostr.band",
"wss://relay.noswhere.com",
},
NoteSearchRelays: []string{
"wss://nostr.wine",
"wss://relay.nostr.band",
"wss://relay.noswhere.com",
},
Hints: memory_hints.NewHintDB(),
outboxShortTermCache: cache_memory.New32[[]string](1000),
}
sys.Pool = nostr.NewSimplePool(context.Background(),
nostr.WithEventMiddleware(sys.trackEventHints),
nostr.WithPenaltyBox(),
)
for _, mod := range mods {
mod(sys)
}
if sys.Store == nil {
sys.Store = &slicestore.SliceStore{}
sys.Store.Init()
}
sys.StoreRelay = eventstore.RelayWrapper{Store: sys.Store}
sys.initializeDataloaders()
return sys
}
func (sys *System) Close() {}
func WithHintsDB(hdb hints.HintsDB) SystemModifier {
return func(sys *System) {
sys.Hints = hdb
}
}
func WithRelayListRelays(list []string) SystemModifier {
return func(sys *System) {
sys.RelayListRelays = list
}
}
func WithMetadataRelays(list []string) SystemModifier {
return func(sys *System) {
sys.MetadataRelays = list
}
}
func WithFollowListRelays(list []string) SystemModifier {
return func(sys *System) {
sys.FollowListRelays = list
}
}
func WithFallbackRelays(list []string) SystemModifier {
return func(sys *System) {
sys.FallbackRelays = list
}
}
func WithUserSearchRelays(list []string) SystemModifier {
return func(sys *System) {
sys.UserSearchRelays = list
}
}
func WithNoteSearchRelays(list []string) SystemModifier {
return func(sys *System) {
sys.NoteSearchRelays = list
}
}
func WithStore(store eventstore.Store) SystemModifier {
return func(sys *System) {
sys.Store = store
}
}
func WithRelayListCache(cache cache.Cache32[RelayList]) SystemModifier {
return func(sys *System) {
sys.RelayListCache = cache
}
}
func WithFollowListCache(cache cache.Cache32[FollowList]) SystemModifier {
return func(sys *System) {
sys.FollowListCache = cache
}
}
func WithMetadataCache(cache cache.Cache32[ProfileMetadata]) SystemModifier {
return func(sys *System) {
sys.MetadataCache = cache
}
}
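
A construction sketch for the functional options above; the relay URLs are illustrative, and anything satisfying hints.HintsDB or eventstore.Store can be swapped in the same way.

package main

import (
	"context"
	"fmt"

	"github.com/nbd-wtf/go-nostr/sdk"
)

func main() {
	sys := sdk.NewSystem(
		// override only what you care about; everything else keeps the
		// defaults filled in by NewSystem
		sdk.WithRelayListRelays([]string{"wss://purplepag.es"}),
		sdk.WithFallbackRelays([]string{"wss://relay.damus.io", "wss://nos.lol"}),
	)
	defer sys.Close()

	relays := sys.FetchOutboxRelays(
		context.Background(),
		"3bf0c63fcb93463407af97a5e5ee64fa883d107ef9e558472c4eb9aaaefa459d",
		3,
	)
	fmt.Println("outbox relays:", relays)
}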

sdk/tracker.go

@@ -0,0 +1,86 @@
package sdk
import (
"net/url"
"github.com/nbd-wtf/go-nostr"
"github.com/nbd-wtf/go-nostr/sdk/hints"
)
func (sys *System) trackEventHints(ie nostr.IncomingEvent) {
if IsVirtualRelay(ie.Relay.URL) {
return
}
switch ie.Kind {
case nostr.KindRelayListMetadata:
for _, tag := range ie.Tags {
if len(tag) < 2 || tag[0] != "r" {
continue
}
if len(tag) == 2 || (tag[2] == "" || tag[2] == "write") {
sys.Hints.Save(ie.PubKey, tag[1], hints.LastInRelayList, ie.CreatedAt)
}
}
case nostr.KindContactList:
sys.Hints.Save(ie.PubKey, ie.Relay.URL, hints.MostRecentEventFetched, ie.CreatedAt)
for _, tag := range ie.Tags {
if len(tag) < 3 {
continue
}
if IsVirtualRelay(tag[2]) {
continue
}
if p, err := url.Parse(tag[2]); err != nil || (p.Scheme != "wss" && p.Scheme != "ws") {
continue
}
if tag[0] == "p" && nostr.IsValidPublicKey(tag[1]) {
sys.Hints.Save(tag[1], tag[2], hints.LastInTag, ie.CreatedAt)
}
}
case nostr.KindTextNote:
sys.Hints.Save(ie.PubKey, ie.Relay.URL, hints.MostRecentEventFetched, ie.CreatedAt)
for _, tag := range ie.Tags {
if len(tag) < 3 {
continue
}
if IsVirtualRelay(tag[2]) {
continue
}
if p, err := url.Parse(tag[2]); err != nil || (p.Scheme != "wss" && p.Scheme != "ws") {
continue
}
if tag[0] == "p" && nostr.IsValidPublicKey(tag[1]) {
sys.Hints.Save(tag[1], tag[2], hints.LastInTag, ie.CreatedAt)
}
}
for _, ref := range ParseReferences(ie.Event) {
if ref.Profile != nil {
for _, relay := range ref.Profile.Relays {
if IsVirtualRelay(relay) {
continue
}
if p, err := url.Parse(relay); err != nil || (p.Scheme != "wss" && p.Scheme != "ws") {
continue
}
if nostr.IsValidPublicKey(ref.Profile.PublicKey) {
sys.Hints.Save(ref.Profile.PublicKey, relay, hints.LastInNprofile, ie.CreatedAt)
}
}
} else if ref.Event != nil && nostr.IsValidPublicKey(ref.Event.Author) {
for _, relay := range ref.Event.Relays {
if IsVirtualRelay(relay) {
continue
}
if p, err := url.Parse(relay); err != nil || (p.Scheme != "wss" && p.Scheme != "ws") {
continue
}
sys.Hints.Save(ref.Event.Author, relay, hints.LastInNevent, ie.CreatedAt)
}
}
}
}
}

sdk/utils.go

@@ -0,0 +1,43 @@
package sdk
import (
"sync"
"time"
)
var (
_dtnmtoah map[string]time.Time
_dtnmtoahLock sync.Mutex
)
func DoThisNotMoreThanOnceAnHour(key string) (doItNow bool) {
_dtnmtoahLock.Lock()
defer _dtnmtoahLock.Unlock()
if _dtnmtoah == nil {
_dtnmtoah = make(map[string]time.Time)
// sweep expired entries every once in a while
go func() {
for {
time.Sleep(time.Minute * 10)
_dtnmtoahLock.Lock()
now := time.Now()
for k, expiry := range _dtnmtoah {
if expiry.Before(now) {
delete(_dtnmtoah, k)
}
}
_dtnmtoahLock.Unlock()
}
}()
}
if _, exists := _dtnmtoah[key]; exists {
return false
}
// remember this key for an hour so subsequent calls return false
_dtnmtoah[key] = time.Now().Add(time.Hour)
return true
}
var serial = 0
func pickNext(list []string) string {
serial++
return list[serial%len(list)]
}
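
A quick sketch of the intended throttle behavior (the key string is arbitrary):

package main

import (
	"fmt"

	"github.com/nbd-wtf/go-nostr/sdk"
)

func main() {
	// the first call for a key returns true; repeat calls within the
	// following hour return false
	fmt.Println(sdk.DoThisNotMoreThanOnceAnHour("repl:0:somekey")) // true
	fmt.Println(sdk.DoThisNotMoreThanOnceAnHour("repl:0:somekey")) // false
}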