move all the binary stuff to github.com/fiatjaf/eventstore.

This commit is contained in:
fiatjaf 2024-09-08 12:00:30 -03:00
parent 6ebdf9cc00
commit b2692a2584
8 changed files with 0 additions and 511 deletions

View File

@@ -1,26 +0,0 @@
# The simplest binary encoding for Nostr events
Some benchmarks:
```
goos: linux
goarch: amd64
pkg: github.com/nbd-wtf/go-nostr/binary
cpu: AMD Ryzen 3 3200G with Radeon Vega Graphics
BenchmarkBinaryEncoding/easyjson.Marshal-4 12756 109437 ns/op 66058 B/op 227 allocs/op
BenchmarkBinaryEncoding/gob.Encode-4 3807 367426 ns/op 171456 B/op 1501 allocs/op
BenchmarkBinaryEncoding/binary.Marshal-4 2568 486766 ns/op 2736133 B/op 37 allocs/op
BenchmarkBinaryEncoding/binary.MarshalBinary-4 2150 525876 ns/op 2736135 B/op 37 allocs/op
BenchmarkBinaryDecoding/easyjson.Unmarshal-4 13719 92516 ns/op 82680 B/op 360 allocs/op
BenchmarkBinaryDecoding/gob.Decode-4 938 1469278 ns/op 386459 B/op 8549 allocs/op
BenchmarkBinaryDecoding/binary.Unmarshal-4 49454 29724 ns/op 21776 B/op 282 allocs/op
BenchmarkBinaryDecoding/binary.UnmarshalBinary-4 230827 6876 ns/op 2832 B/op 60 allocs/op
BenchmarkBinaryDecoding/easyjson.Unmarshal+sig-4 177 7038434 ns/op 209834 B/op 939 allocs/op
BenchmarkBinaryDecoding/binary.Unmarshal+sig-4 180 6727125 ns/op 148841 B/op 861 allocs/op
PASS
ok github.com/nbd-wtf/go-nostr/binary 16.937s
```
This is 2~5x faster than [NSON](../nson) decoding, which makes it roughly 8x faster than the default easyjson decoding.
But, just like with NSON, the performance gain from this encoding becomes negligible once you add the cost of
signature verification, which means this encoding should only be used in internal processes.
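
For reference (the package has now moved to github.com/fiatjaf/eventstore, per the commit message), here is a minimal usage sketch assuming the `Marshal`/`Unmarshal` functions and `nostr.Event` type from the files below; the key generation and signing helpers are ordinary go-nostr calls:

```go
package main

import (
	"fmt"

	"github.com/nbd-wtf/go-nostr"
	"github.com/nbd-wtf/go-nostr/binary" // now lives in github.com/fiatjaf/eventstore
)

func main() {
	// build and sign a throwaway event (Sign fills in ID, PubKey and Sig)
	sk := nostr.GeneratePrivateKey()
	evt := &nostr.Event{
		Kind:      1,
		Content:   "hello world",
		CreatedAt: nostr.Now(),
		Tags:      nostr.Tags{{"t", "example"}},
	}
	if err := evt.Sign(sk); err != nil {
		panic(err)
	}

	// encode to the compact binary form used for internal storage
	b, err := binary.Marshal(evt)
	if err != nil {
		panic(err)
	}
	fmt.Printf("binary size: %d bytes\n", len(b))

	// decode it back; signature verification still has to happen separately
	decoded := &nostr.Event{}
	if err := binary.Unmarshal(b, decoded); err != nil {
		panic(err)
	}
	fmt.Println(decoded.Kind, decoded.Content)
}
```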

View File

@@ -1,125 +0,0 @@
package binary
import (
"bytes"
"encoding/gob"
"encoding/json"
"testing"
"github.com/mailru/easyjson"
"github.com/nbd-wtf/go-nostr"
"github.com/nbd-wtf/go-nostr/test_common"
)
func BenchmarkBinaryEncoding(b *testing.B) {
events := make([]*nostr.Event, len(test_common.NormalEvents))
binaryEvents := make([]*Event, len(test_common.NormalEvents))
for i, jevt := range test_common.NormalEvents {
evt := &nostr.Event{}
json.Unmarshal([]byte(jevt), evt)
events[i] = evt
binaryEvents[i] = BinaryEvent(evt)
}
b.Run("easyjson.Marshal", func(b *testing.B) {
for i := 0; i < b.N; i++ {
for _, evt := range events {
easyjson.Marshal(evt)
}
}
})
b.Run("gob.Encode", func(b *testing.B) {
for i := 0; i < b.N; i++ {
for _, evt := range events {
var buf bytes.Buffer
gob.NewEncoder(&buf).Encode(evt)
_ = buf.Bytes()
}
}
})
b.Run("binary.Marshal", func(b *testing.B) {
for i := 0; i < b.N; i++ {
for _, evt := range events {
Marshal(evt)
}
}
})
}
func BenchmarkBinaryDecoding(b *testing.B) {
events := make([][]byte, len(test_common.NormalEvents))
gevents := make([][]byte, len(test_common.NormalEvents))
for i, jevt := range test_common.NormalEvents {
evt := &nostr.Event{}
json.Unmarshal([]byte(jevt), evt)
bevt, _ := Marshal(evt)
events[i] = bevt
var buf bytes.Buffer
gob.NewEncoder(&buf).Encode(evt)
gevents[i] = buf.Bytes()
}
b.Run("easyjson.Unmarshal", func(b *testing.B) {
for i := 0; i < b.N; i++ {
for _, jevt := range test_common.NormalEvents {
evt := &nostr.Event{}
err := easyjson.Unmarshal([]byte(jevt), evt)
if err != nil {
b.Fatalf("failed to unmarshal: %s", err)
}
}
}
})
b.Run("gob.Decode", func(b *testing.B) {
for i := 0; i < b.N; i++ {
for _, gevt := range gevents {
evt := &nostr.Event{}
buf := bytes.NewBuffer(gevt)
gob.NewDecoder(buf).Decode(evt)
}
}
})
b.Run("binary.Unmarshal", func(b *testing.B) {
for i := 0; i < b.N; i++ {
for _, bevt := range events {
evt := &nostr.Event{}
err := Unmarshal(bevt, evt)
if err != nil {
b.Fatalf("failed to unmarshal: %s", err)
}
}
}
})
b.Run("easyjson.Unmarshal+sig", func(b *testing.B) {
for i := 0; i < b.N; i++ {
for _, nevt := range test_common.NormalEvents {
evt := &nostr.Event{}
err := easyjson.Unmarshal([]byte(nevt), evt)
if err != nil {
b.Fatalf("failed to unmarshal: %s", err)
}
evt.CheckSignature()
}
}
})
b.Run("binary.Unmarshal+sig", func(b *testing.B) {
for i := 0; i < b.N; i++ {
for _, bevt := range events {
evt := &nostr.Event{}
err := Unmarshal(bevt, evt)
if err != nil {
b.Fatalf("failed to unmarshal: %s", err)
}
evt.CheckSignature()
}
}
})
}

View File

@@ -1,136 +0,0 @@
package binary
import (
"encoding/base64"
"encoding/binary"
"encoding/hex"
"encoding/json"
"fmt"
"testing"
"github.com/nbd-wtf/go-nostr"
"github.com/nbd-wtf/go-nostr/test_common"
"github.com/stretchr/testify/require"
)
func TestBinaryPartialGet(t *testing.T) {
for _, jevt := range test_common.NormalEvents {
evt := &nostr.Event{}
json.Unmarshal([]byte(jevt), &evt)
bevt, err := Marshal(evt)
if err != nil {
t.Fatalf("error marshalling binary: %s", err)
}
if id := hex.EncodeToString(bevt[0:32]); id != evt.ID {
t.Fatalf("partial id wrong. got %v, expected %v", id, evt.ID)
}
if pubkey := hex.EncodeToString(bevt[32:64]); pubkey != evt.PubKey {
t.Fatalf("partial pubkey wrong. got %v, expected %v", pubkey, evt.PubKey)
}
if sig := hex.EncodeToString(bevt[64:128]); sig != evt.Sig {
t.Fatalf("partial sig wrong. got %v, expected %v", sig, evt.Sig)
}
if createdAt := nostr.Timestamp(binary.BigEndian.Uint32(bevt[128:132])); createdAt != evt.CreatedAt {
t.Fatalf("partial created_at wrong. got %v, expected %v", createdAt, evt.CreatedAt)
}
if kind := int(binary.BigEndian.Uint16(bevt[132:134])); kind != evt.Kind {
t.Fatalf("partial kind wrong. got %v, expected %v", kind, evt.Kind)
}
if content := string(bevt[136 : 136+int(binary.BigEndian.Uint16(bevt[134:136]))]); content != evt.Content {
t.Fatalf("partial content wrong. got %v, expected %v", content, evt.Content)
}
}
}
func TestBinaryEncodeBackwardsCompatible(t *testing.T) {
for i, jevt := range test_common.NormalEvents {
t.Run(fmt.Sprintf("%d", i), func(t *testing.T) {
b64bevt := test_common.BinaryEventsBase64[i]
bevt, err := base64.StdEncoding.DecodeString(b64bevt)
require.NoError(t, err)
pevt := &nostr.Event{}
err = json.Unmarshal([]byte(jevt), pevt)
require.NoError(t, err)
encoded, err := Marshal(pevt)
require.NoError(t, err)
require.Equal(t, bevt, encoded)
})
}
}
func TestBinaryEncode(t *testing.T) {
for i, jevt := range test_common.NormalEvents {
t.Run(fmt.Sprintf("%d", i), func(t *testing.T) {
pevt := &nostr.Event{}
if err := json.Unmarshal([]byte(jevt), pevt); err != nil {
t.Fatalf("failed to decode normal json: %s", err)
}
bevt, err := Marshal(pevt)
if err != nil {
t.Fatalf("failed to encode binary: %s", err)
}
evt := &nostr.Event{}
if err := Unmarshal(bevt, evt); err != nil {
t.Fatalf("error unmarshalling binary: %s", err)
}
checkParsedCorrectly(t, pevt, jevt)
checkParsedCorrectly(t, evt, jevt)
})
}
}
func checkParsedCorrectly(t *testing.T, evt *nostr.Event, jevt string) (isBad bool) {
var canonical nostr.Event
err := json.Unmarshal([]byte(jevt), &canonical)
if err != nil {
t.Fatalf("error unmarshalling normal json: %s", err)
}
if evt.ID != canonical.ID {
t.Fatalf("id is wrong: %s != %s", evt.ID, canonical.ID)
isBad = true
}
if evt.PubKey != canonical.PubKey {
t.Fatalf("pubkey is wrong: %s != %s", evt.PubKey, canonical.PubKey)
isBad = true
}
if evt.Sig != canonical.Sig {
t.Fatalf("sig is wrong: %s != %s", evt.Sig, canonical.Sig)
isBad = true
}
if evt.Content != canonical.Content {
t.Fatalf("content is wrong: %s != %s", evt.Content, canonical.Content)
isBad = true
}
if evt.Kind != canonical.Kind {
t.Fatalf("kind is wrong: %d != %d", evt.Kind, canonical.Kind)
isBad = true
}
if evt.CreatedAt != canonical.CreatedAt {
t.Fatalf("created_at is wrong: %v != %v", evt.CreatedAt, canonical.CreatedAt)
isBad = true
}
if len(evt.Tags) != len(canonical.Tags) {
t.Fatalf("tag number is wrong: %v != %v", len(evt.Tags), len(canonical.Tags))
isBad = true
}
for i := range evt.Tags {
if len(evt.Tags[i]) != len(canonical.Tags[i]) {
t.Fatalf("tag[%d] length is wrong: `%v` != `%v`", i, len(evt.Tags[i]), len(canonical.Tags[i]))
isBad = true
}
for j := range evt.Tags[i] {
if evt.Tags[i][j] != canonical.Tags[i][j] {
t.Fatalf("tag[%d][%d] is wrong: `%s` != `%s`", i, j, evt.Tags[i][j], canonical.Tags[i][j])
isBad = true
}
}
}
return isBad
}

View File

@@ -1,44 +0,0 @@
package binary
import (
"encoding/hex"
"github.com/nbd-wtf/go-nostr"
)
type Event struct {
PubKey [32]byte
Sig [64]byte
ID [32]byte
Kind uint16
CreatedAt nostr.Timestamp
Content string
Tags nostr.Tags
}
func BinaryEvent(evt *nostr.Event) *Event {
bevt := Event{
Tags: evt.Tags,
Content: evt.Content,
Kind: uint16(evt.Kind),
CreatedAt: evt.CreatedAt,
}
hex.Decode(bevt.ID[:], []byte(evt.ID))
hex.Decode(bevt.PubKey[:], []byte(evt.PubKey))
hex.Decode(bevt.Sig[:], []byte(evt.Sig))
return &bevt
}
func (bevt *Event) ToNormalEvent() *nostr.Event {
return &nostr.Event{
Tags: bevt.Tags,
Content: bevt.Content,
Kind: int(bevt.Kind),
CreatedAt: bevt.CreatedAt,
ID: hex.EncodeToString(bevt.ID[:]),
PubKey: hex.EncodeToString(bevt.PubKey[:]),
Sig: hex.EncodeToString(bevt.Sig[:]),
}
}

View File

@@ -1,103 +0,0 @@
package binary
import (
"encoding/binary"
"encoding/hex"
"fmt"
"github.com/nbd-wtf/go-nostr"
)
// Deprecated: the encoding used here is not very elegant; a better binary format will come later.
func Unmarshal(data []byte, evt *nostr.Event) (err error) {
defer func() {
if r := recover(); r != nil {
err = fmt.Errorf("failed to decode binary: %v", r)
}
}()
evt.ID = hex.EncodeToString(data[0:32])
evt.PubKey = hex.EncodeToString(data[32:64])
evt.Sig = hex.EncodeToString(data[64:128])
evt.CreatedAt = nostr.Timestamp(binary.BigEndian.Uint32(data[128:132]))
evt.Kind = int(binary.BigEndian.Uint16(data[132:134]))
contentLength := int(binary.BigEndian.Uint16(data[134:136]))
evt.Content = string(data[136 : 136+contentLength])
curr := 136 + contentLength
nTags := binary.BigEndian.Uint16(data[curr : curr+2])
curr++
evt.Tags = make(nostr.Tags, nTags)
for t := range evt.Tags {
curr++
nItems := int(data[curr])
tag := make(nostr.Tag, nItems)
for i := range tag {
curr = curr + 1
itemSize := int(binary.BigEndian.Uint16(data[curr : curr+2]))
itemStart := curr + 2
item := string(data[itemStart : itemStart+itemSize])
tag[i] = item
curr = itemStart + itemSize
}
evt.Tags[t] = tag
}
return err
}
// Deprecated: the encoding used here is not very elegant; a better binary format will come later.
func Marshal(evt *nostr.Event) ([]byte, error) {
content := []byte(evt.Content)
buf := make([]byte, 32+32+64+4+2+2+len(content)+65536+len(evt.Tags)*40 /* blergh */)
hex.Decode(buf[0:32], []byte(evt.ID))
hex.Decode(buf[32:64], []byte(evt.PubKey))
hex.Decode(buf[64:128], []byte(evt.Sig))
if evt.CreatedAt > MaxCreatedAt {
return nil, fmt.Errorf("created_at is too big: %d, max is %d", evt.CreatedAt, MaxCreatedAt)
}
binary.BigEndian.PutUint32(buf[128:132], uint32(evt.CreatedAt))
if evt.Kind > MaxKind {
return nil, fmt.Errorf("kind is too big: %d, max is %d", evt.Kind, MaxKind)
}
binary.BigEndian.PutUint16(buf[132:134], uint16(evt.Kind))
if contentLength := len(content); contentLength > MaxContentSize {
return nil, fmt.Errorf("content is too large: %d, max is %d", contentLength, MaxContentSize)
} else {
binary.BigEndian.PutUint16(buf[134:136], uint16(contentLength))
}
copy(buf[136:], content)
if tagCount := len(evt.Tags); tagCount > MaxTagCount {
return nil, fmt.Errorf("can't encode too many tags: %d, max is %d", tagCount, MaxTagCount)
} else {
binary.BigEndian.PutUint16(buf[136+len(content):136+len(content)+2], uint16(tagCount))
}
buf = buf[0 : 136+len(content)+2]
for _, tag := range evt.Tags {
if itemCount := len(tag); itemCount > MaxTagItemCount {
return nil, fmt.Errorf("can't encode a tag with so many items: %d, max is %d", itemCount, MaxTagItemCount)
} else {
buf = append(buf, uint8(itemCount))
}
for _, item := range tag {
itemb := []byte(item)
itemSize := len(itemb)
if itemSize > MaxTagItemSize {
return nil, fmt.Errorf("tag item is too large: %d, max is %d", itemSize, MaxTagItemSize)
}
buf = binary.BigEndian.AppendUint16(buf, uint16(itemSize))
buf = append(buf, itemb...)
buf = append(buf, 0)
}
}
return buf, nil
}
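
One property of this format worth noting: id, pubkey, sig, created_at, kind and the content length all sit at fixed offsets (the same offsets `TestBinaryPartialGet` checks above), so individual fields can be read straight from the encoded bytes without a full `Unmarshal`. A small sketch with a hypothetical helper name:

```go
package binary

import (
	"encoding/binary"

	"github.com/nbd-wtf/go-nostr"
)

// peekKindAndCreatedAt is an illustrative helper (not part of this package):
// it reads kind and created_at straight from the fixed-offset header written
// by Marshal, without decoding tags or content.
func peekKindAndCreatedAt(data []byte) (int, nostr.Timestamp) {
	createdAt := nostr.Timestamp(binary.BigEndian.Uint32(data[128:132]))
	kind := int(binary.BigEndian.Uint16(data[132:134]))
	return kind, createdAt
}
```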

View File

@@ -1,35 +0,0 @@
package binary
import (
"math"
"github.com/nbd-wtf/go-nostr"
)
const (
MaxKind = math.MaxUint16
MaxCreatedAt = math.MaxUint32
MaxContentSize = math.MaxUint16
MaxTagCount = math.MaxUint16
MaxTagItemCount = math.MaxUint8
MaxTagItemSize = math.MaxUint16
)
func EventEligibleForBinaryEncoding(event *nostr.Event) bool {
if len(event.Content) > MaxContentSize || event.Kind > MaxKind || event.CreatedAt > MaxCreatedAt || len(event.Tags) > MaxTagCount {
return false
}
for _, tag := range event.Tags {
if len(tag) > MaxTagItemCount {
return false
}
for _, item := range tag {
if len(item) > MaxTagItemSize {
return false
}
}
}
return true
}
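
Because kind, created_at, content size and tag dimensions are capped by the integer widths above, a caller would typically gate `Marshal` behind this check. A sketch of how that could look, with a hypothetical one-byte format marker (the marker and the helper name are assumptions for the example, not something this package defines):

```go
package binary

import (
	"github.com/mailru/easyjson"
	"github.com/nbd-wtf/go-nostr"
)

// encodeForStorage is an illustrative helper, not part of the package: it
// stores eligible events in the compact binary form and falls back to JSON
// otherwise, prefixing a marker byte so the reader knows which decoder to use.
func encodeForStorage(evt *nostr.Event) ([]byte, error) {
	if EventEligibleForBinaryEncoding(evt) {
		b, err := Marshal(evt)
		if err != nil {
			return nil, err
		}
		return append([]byte{1}, b...), nil
	}
	j, err := easyjson.Marshal(evt)
	if err != nil {
		return nil, err
	}
	return append([]byte{0}, j...), nil
}
```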

File diff suppressed because one or more lines are too long