migration30: add benchmark test
@@ -7,8 +7,10 @@ import (
	mig25 "github.com/lightningnetwork/lnd/channeldb/migration25"
	mig26 "github.com/lightningnetwork/lnd/channeldb/migration26"
	mig "github.com/lightningnetwork/lnd/channeldb/migration_01_to_11"
	"github.com/lightningnetwork/lnd/channeldb/migtest"
	"github.com/lightningnetwork/lnd/kvdb"
	"github.com/stretchr/testify/mock"
	"github.com/stretchr/testify/require"
)
@@ -448,3 +450,55 @@ func assertRevocationLog(t testing.TB, want, got RevocationLog) {
		"wrong RefundTimeout")
	}
}

// BenchmarkMigration creates a benchmark test for the migration. The test uses
// the flag `-benchtime` to specify how many revocation logs we want to test.
func BenchmarkMigration(b *testing.B) {
	// Stop the timer and start it again later when the actual migration
	// starts.
	b.StopTimer()

	// Gather the number of records by reading the `-benchtime` flag.
	numLogs := b.N

	// Create a mock store.
	mockStore := &mockStore{}
	mockStore.On("AddNextEntry", mock.Anything).Return(nil)
	mockStore.On("Encode", mock.Anything).Return(nil)

	// Build the test data.
	oldLogs := make([]mig.ChannelCommitment, numLogs)
	beforeMigration := func(db kvdb.Backend) error {
		fmt.Printf("\nBuilding test data for %d logs...\n", numLogs)
		defer fmt.Println("Finished building test data, migrating...")

		// We use a mock store here to bypass the check in
		// `AddNextEntry` so we don't need a real preimage here. This
		// shouldn't affect our benchmark result as the migration will
		// load the actual store from db.
		c := createTestChannel(nil)
		c.RevocationStore = mockStore

		// Create the test logs.
		for i := 0; i < numLogs; i++ {
			oldLog := oldLog2
			oldLog.CommitHeight = uint64(i)
			oldLogs[i] = oldLog
		}

		return setupTestLogs(db, c, oldLogs, nil)
	}

	// Run the migration test.
	migtest.ApplyMigrationWithDb(
		b,
		beforeMigration,
		nil,
		func(db kvdb.Backend) error {
			b.StartTimer()
			defer b.StopTimer()

			return MigrateRevocationLog(db)
		},
	)
}
channeldb/migration30/test_mock.go (new file, 51 lines)

@@ -0,0 +1,51 @@
package migration30

import (
	"encoding/binary"
	"io"

	"github.com/btcsuite/btcd/chaincfg/chainhash"
	"github.com/lightningnetwork/lnd/shachain"
	"github.com/stretchr/testify/mock"
)

// mockStore mocks the shachain.Store.
type mockStore struct {
	mock.Mock
}

// A compile time check to ensure mockStore implements the Store interface.
var _ shachain.Store = (*mockStore)(nil)

// LookUp returns the hash configured on the mock for the given height.
func (m *mockStore) LookUp(height uint64) (*chainhash.Hash, error) {
	args := m.Called(height)

	if args.Get(0) == nil {
		return nil, args.Error(1)
	}

	return args.Get(0).(*chainhash.Hash), args.Error(1)
}

// AddNextEntry records the call and returns the configured error, skipping
// the preimage validation a real shachain store would perform.
func (m *mockStore) AddNextEntry(preimage *chainhash.Hash) error {
	args := m.Called(preimage)

	return args.Error(0)
}

// Encode encodes a series of dummy values to pass the serialize/deserialize
// process.
func (m *mockStore) Encode(w io.Writer) error {
	err := binary.Write(w, binary.BigEndian, int8(1))
	if err != nil {
		return err
	}

	if err := binary.Write(w, binary.BigEndian, uint64(0)); err != nil {
		return err
	}

	if _, err = w.Write(preimage2); err != nil {
		return err
	}

	return binary.Write(w, binary.BigEndian, uint64(0))
}
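To make the mock's behaviour concrete, here is a small illustrative test (not part of the commit) showing how the testify expectations used by the benchmark drive mockStore. Note that Encode writes its dummy payload directly (a 1-byte tag, an 8-byte index, the preimage bytes, and a trailing 8-byte value) and never calls m.Called, so it needs no expectation to succeed:

package migration30

import (
	"bytes"
	"testing"

	"github.com/btcsuite/btcd/chaincfg/chainhash"
	"github.com/stretchr/testify/mock"
	"github.com/stretchr/testify/require"
)

// TestMockStoreSketch is an illustrative sketch only.
func TestMockStoreSketch(t *testing.T) {
	store := &mockStore{}

	// AddNextEntry forwards to testify, so this expectation turns it into
	// a no-op that always succeeds, bypassing preimage validation.
	store.On("AddNextEntry", mock.Anything).Return(nil)
	require.NoError(t, store.AddNextEntry(&chainhash.Hash{}))

	// LookUp returns whatever the matching expectation provides.
	hash := &chainhash.Hash{}
	store.On("LookUp", uint64(1)).Return(hash, nil)
	got, err := store.LookUp(1)
	require.NoError(t, err)
	require.Equal(t, hash, got)

	// Encode writes its fixed dummy bytes without consulting the mock.
	var buf bytes.Buffer
	require.NoError(t, store.Encode(&buf))
	require.NotZero(t, buf.Len())
}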