diff --git a/dot/core/messages_test.go b/dot/core/messages_test.go
index c668b9fcb4..b471014c1a 100644
--- a/dot/core/messages_test.go
+++ b/dot/core/messages_test.go
@@ -93,13 +93,17 @@ func TestService_ProcessBlockAnnounceMessage(t *testing.T) {
require.Nil(t, err)
// simulate block sent from BABE session
- newBlock := &types.Block{
- Header: &types.Header{
+ digest := types.NewDigest()
+ err = digest.Add(*types.NewBabeSecondaryPlainPreDigest(0, 1).ToPreRuntimeDigest())
+ require.NoError(t, err)
+
+ newBlock := types.Block{
+ Header: types.Header{
Number: big.NewInt(1),
ParentHash: s.blockState.BestBlockHash(),
- Digest: types.Digest{types.NewBabeSecondaryPlainPreDigest(0, 1).ToPreRuntimeDigest()},
+ Digest: digest,
},
- Body: types.NewBody([]byte{}),
+ Body: *types.NewBody([]byte{}),
}
expected := &network.BlockAnnounceMessage{
@@ -107,7 +111,7 @@ func TestService_ProcessBlockAnnounceMessage(t *testing.T) {
Number: newBlock.Header.Number,
StateRoot: newBlock.Header.StateRoot,
ExtrinsicsRoot: newBlock.Header.ExtrinsicsRoot,
- Digest: newBlock.Header.Digest,
+ Digest: digest,
BestBlock: true,
}
@@ -116,7 +120,7 @@ func TestService_ProcessBlockAnnounceMessage(t *testing.T) {
state, err := s.storageState.TrieState(nil)
require.NoError(t, err)
- err = s.HandleBlockProduced(newBlock, state)
+ err = s.HandleBlockProduced(&newBlock, state)
require.NoError(t, err)
time.Sleep(time.Second)
diff --git a/dot/core/service.go b/dot/core/service.go
index 1bac4a4f26..b0890a4e76 100644
--- a/dot/core/service.go
+++ b/dot/core/service.go
@@ -17,6 +17,7 @@ package core
import (
"context"
+ "fmt"
"math/big"
"os"
"sync"
@@ -176,12 +177,20 @@ func (s *Service) HandleBlockImport(block *types.Block, state *rtstorage.TrieSta
// It is handled the same as an imported block in terms of state updates; the only difference
// is we send a BlockAnnounceMessage to our peers.
func (s *Service) HandleBlockProduced(block *types.Block, state *rtstorage.TrieState) error {
+ digest := types.NewDigest()
+ for i := range block.Header.Digest.Types {
+ err := digest.Add(block.Header.Digest.Types[i].Value())
+ if err != nil {
+ return err
+ }
+ }
+
msg := &network.BlockAnnounceMessage{
ParentHash: block.Header.ParentHash,
Number: block.Header.Number,
StateRoot: block.Header.StateRoot,
ExtrinsicsRoot: block.Header.ExtrinsicsRoot,
- Digest: block.Header.Digest,
+ Digest: digest,
BestBlock: true,
}
@@ -190,12 +199,12 @@ func (s *Service) HandleBlockProduced(block *types.Block, state *rtstorage.TrieS
}
func (s *Service) handleBlock(block *types.Block, state *rtstorage.TrieState) error {
- if block == nil || block.Header == nil || state == nil {
- return nil
+ if block == nil || state == nil {
+ return fmt.Errorf("unable to handle block due to nil parameter")
}
// store updates state trie nodes in database
- err := s.storageState.StoreTrie(state, block.Header)
+ err := s.storageState.StoreTrie(state, &block.Header)
if err != nil {
logger.Warn("failed to store state trie for imported block", "block", block.Header.Hash(), "error", err)
return err
@@ -215,7 +224,7 @@ func (s *Service) handleBlock(block *types.Block, state *rtstorage.TrieState) er
logger.Debug("imported block and stored state trie", "block", block.Header.Hash(), "state root", state.MustRoot())
// handle consensus digests
- s.digestHandler.HandleDigests(block.Header)
+ s.digestHandler.HandleDigests(&block.Header)
rt, err := s.blockState.GetRuntime(&block.Header.ParentHash)
if err != nil {
@@ -235,7 +244,7 @@ func (s *Service) handleBlock(block *types.Block, state *rtstorage.TrieState) er
}
// check if block production epoch transitioned
- if err := s.handleCurrentSlot(block.Header); err != nil {
+ if err := s.handleCurrentSlot(&block.Header); err != nil {
logger.Warn("failed to handle epoch for block", "block", block.Header.Hash(), "error", err)
return err
}
diff --git a/dot/core/service_test.go b/dot/core/service_test.go
index e5ef9188d8..5988da2393 100644
--- a/dot/core/service_test.go
+++ b/dot/core/service_test.go
@@ -40,7 +40,6 @@ import (
"github.com/ChainSafe/gossamer/lib/runtime/wasmer"
"github.com/ChainSafe/gossamer/lib/transaction"
"github.com/ChainSafe/gossamer/lib/trie"
- "github.com/ChainSafe/gossamer/lib/utils"
log "github.com/ChainSafe/log15"
"github.com/stretchr/testify/mock"
"github.com/stretchr/testify/require"
@@ -61,12 +60,12 @@ func addTestBlocksToStateWithParent(t *testing.T, previousHash common.Hash, dept
for i := 1; i <= depth; i++ {
block := &types.Block{
- Header: &types.Header{
+ Header: types.Header{
ParentHash: previousHash,
Number: big.NewInt(int64(i)).Add(previousNum, big.NewInt(int64(i))),
- Digest: types.Digest{},
+ Digest: types.NewDigest(),
},
- Body: &types.Body{},
+ Body: types.Body{},
}
previousHash = block.Header.Hash()
@@ -74,7 +73,7 @@ func addTestBlocksToStateWithParent(t *testing.T, previousHash common.Hash, dept
blockState.StoreRuntime(block.Header.Hash(), rt)
err := blockState.AddBlock(block)
require.NoError(t, err)
- headers = append(headers, block.Header)
+ headers = append(headers, &block.Header)
}
return headers
@@ -119,13 +118,17 @@ func TestAnnounceBlock(t *testing.T) {
defer s.Stop()
// simulate block sent from BABE session
- newBlock := &types.Block{
- Header: &types.Header{
- ParentHash: s.blockState.BestBlockHash(),
+ digest := types.NewDigest()
+ err = digest.Add(*types.NewBabeSecondaryPlainPreDigest(0, 1).ToPreRuntimeDigest())
+ require.NoError(t, err)
+
+ newBlock := types.Block{
+ Header: types.Header{
Number: big.NewInt(1),
- Digest: types.Digest{types.NewBabeSecondaryPlainPreDigest(0, 1).ToPreRuntimeDigest()},
+ ParentHash: s.blockState.BestBlockHash(),
+ Digest: digest,
},
- Body: &types.Body{},
+ Body: *types.NewBody([]byte{}),
}
expected := &network.BlockAnnounceMessage{
@@ -133,7 +136,7 @@ func TestAnnounceBlock(t *testing.T) {
Number: newBlock.Header.Number,
StateRoot: newBlock.Header.StateRoot,
ExtrinsicsRoot: newBlock.Header.ExtrinsicsRoot,
- Digest: newBlock.Header.Digest,
+ Digest: digest,
BestBlock: true,
}
@@ -142,7 +145,7 @@ func TestAnnounceBlock(t *testing.T) {
state, err := s.storageState.TrieState(nil)
require.NoError(t, err)
- err = s.HandleBlockProduced(newBlock, state)
+ err = s.HandleBlockProduced(&newBlock, state)
require.NoError(t, err)
time.Sleep(time.Second)
@@ -208,27 +211,27 @@ func TestHandleChainReorg_WithReorg_Trans(t *testing.T) {
err = bs.AddBlock(block1)
require.NoError(t, err)
- block2 := sync.BuildBlock(t, rt, block1.Header, nil)
+ block2 := sync.BuildBlock(t, rt, &block1.Header, nil)
bs.StoreRuntime(block2.Header.Hash(), rt)
err = bs.AddBlock(block2)
require.NoError(t, err)
- block3 := sync.BuildBlock(t, rt, block2.Header, nil)
+ block3 := sync.BuildBlock(t, rt, &block2.Header, nil)
bs.StoreRuntime(block3.Header.Hash(), rt)
err = bs.AddBlock(block3)
require.NoError(t, err)
- block4 := sync.BuildBlock(t, rt, block3.Header, nil)
+ block4 := sync.BuildBlock(t, rt, &block3.Header, nil)
bs.StoreRuntime(block4.Header.Hash(), rt)
err = bs.AddBlock(block4)
require.NoError(t, err)
- block5 := sync.BuildBlock(t, rt, block4.Header, nil)
+ block5 := sync.BuildBlock(t, rt, &block4.Header, nil)
bs.StoreRuntime(block5.Header.Hash(), rt)
err = bs.AddBlock(block5)
require.NoError(t, err)
- block31 := sync.BuildBlock(t, rt, block2.Header, nil)
+ block31 := sync.BuildBlock(t, rt, &block2.Header, nil)
bs.StoreRuntime(block31.Header.Hash(), rt)
err = bs.AddBlock(block31)
require.NoError(t, err)
@@ -238,7 +241,7 @@ func TestHandleChainReorg_WithReorg_Trans(t *testing.T) {
// Add extrinsic to block `block31`
ext := createExtrinsic(t, rt, bs.GenesisHash(), nonce)
- block41 := sync.BuildBlock(t, rt, block31.Header, ext)
+ block41 := sync.BuildBlock(t, rt, &block31.Header, ext)
bs.StoreRuntime(block41.Header.Hash(), rt)
err = bs.AddBlock(block41)
require.NoError(t, err)
@@ -304,15 +307,14 @@ func TestHandleChainReorg_WithReorg_Transactions(t *testing.T) {
body, err := types.NewBodyFromExtrinsics([]types.Extrinsic{tx})
require.NoError(t, err)
+ digest := types.NewDigest()
block := &types.Block{
- Header: &types.Header{
+ Header: types.Header{
ParentHash: ancestor.Header.Hash(),
Number: big.NewInt(0).Add(ancestor.Header.Number, big.NewInt(1)),
- Digest: types.Digest{
- utils.NewMockDigestItem(1),
- },
+ Digest: digest,
},
- Body: body,
+ Body: *body,
}
s.blockState.StoreRuntime(block.Header.Hash(), rt)
@@ -376,7 +378,7 @@ func TestMaintainTransactionPool_EmptyBlock(t *testing.T) {
}
err := s.maintainTransactionPool(&types.Block{
- Body: types.NewBody([]byte{}),
+ Body: *types.NewBody([]byte{}),
})
require.NoError(t, err)
@@ -425,7 +427,7 @@ func TestMaintainTransactionPool_BlockWithExtrinsics(t *testing.T) {
require.NoError(t, err)
err = s.maintainTransactionPool(&types.Block{
- Body: body,
+ Body: *body,
})
require.NoError(t, err)
@@ -501,20 +503,28 @@ func TestService_HandleRuntimeChanges(t *testing.T) {
currSpecVersion := v.SpecVersion() // genesis runtime version.
hash := s.blockState.BestBlockHash() // genesisHash
+ digest := types.NewDigest()
+ err = digest.Add(types.PreRuntimeDigest{
+ ConsensusEngineID: types.BabeEngineID,
+ Data: common.MustHexToBytes("0x0201000000ef55a50f00000000"),
+ })
+ require.NoError(t, err)
+
newBlock1 := &types.Block{
- Header: &types.Header{
+ Header: types.Header{
ParentHash: hash,
Number: big.NewInt(1),
- Digest: types.Digest{utils.NewMockDigestItem(1)}},
- Body: types.NewBody([]byte("Old Runtime")),
+ Digest: types.NewDigest()},
+ Body: *types.NewBody([]byte("Old Runtime")),
}
newBlockRTUpdate := &types.Block{
- Header: &types.Header{
+ Header: types.Header{
ParentHash: hash,
Number: big.NewInt(1),
- Digest: types.Digest{utils.NewMockDigestItem(2)}},
- Body: types.NewBody([]byte("Updated Runtime")),
+ Digest: digest,
+ },
+ Body: *types.NewBody([]byte("Updated Runtime")),
}
ts, err := s.storageState.TrieState(nil) // Pass genesis root
@@ -590,11 +600,12 @@ func TestService_HandleRuntimeChangesAfterCodeSubstitutes(t *testing.T) {
blockHash := common.MustHexToHash("0x86aa36a140dfc449c30dbce16ce0fea33d5c3786766baa764e33f336841b9e29") // hash for known test code substitution
newBlock := &types.Block{
- Header: &types.Header{
+ Header: types.Header{
ParentHash: blockHash,
Number: big.NewInt(1),
- Digest: types.Digest{utils.NewMockDigestItem(1)}},
- Body: types.NewBody([]byte("Updated Runtime")),
+ Digest: types.NewDigest(),
+ },
+ Body: *types.NewBody([]byte("Updated Runtime")),
}
err = s.handleCodeSubstitution(blockHash)
@@ -629,15 +640,15 @@ func TestTryQueryStore_WhenThereIsDataToRetrieve(t *testing.T) {
require.NoError(t, err)
header, err := types.NewHeader(s.blockState.GenesisHash(), storageStateTrie.MustRoot(),
- common.Hash{}, big.NewInt(1), nil)
+ common.Hash{}, big.NewInt(1), types.NewDigest())
require.NoError(t, err)
err = s.storageState.StoreTrie(storageStateTrie, header)
require.NoError(t, err)
testBlock := &types.Block{
- Header: header,
- Body: types.NewBody([]byte{}),
+ Header: *header,
+ Body: *types.NewBody([]byte{}),
}
err = s.blockState.AddBlock(testBlock)
@@ -659,15 +670,15 @@ func TestTryQueryStore_WhenDoesNotHaveDataToRetrieve(t *testing.T) {
require.NoError(t, err)
header, err := types.NewHeader(s.blockState.GenesisHash(), storageStateTrie.MustRoot(),
- common.Hash{}, big.NewInt(1), nil)
+ common.Hash{}, big.NewInt(1), types.NewDigest())
require.NoError(t, err)
err = s.storageState.StoreTrie(storageStateTrie, header)
require.NoError(t, err)
testBlock := &types.Block{
- Header: header,
- Body: types.NewBody([]byte{}),
+ Header: *header,
+ Body: *types.NewBody([]byte{}),
}
err = s.blockState.AddBlock(testBlock)
@@ -687,12 +698,12 @@ func TestTryQueryStore_WhenDoesNotHaveDataToRetrieve(t *testing.T) {
func TestTryQueryState_WhenDoesNotHaveStateRoot(t *testing.T) {
s := NewTestService(t, nil)
- header, err := types.NewHeader(s.blockState.GenesisHash(), common.Hash{}, common.Hash{}, big.NewInt(1), nil)
+ header, err := types.NewHeader(s.blockState.GenesisHash(), common.Hash{}, common.Hash{}, big.NewInt(1), types.NewDigest())
require.NoError(t, err)
testBlock := &types.Block{
- Header: header,
- Body: types.NewBody([]byte{}),
+ Header: *header,
+ Body: *types.NewBody([]byte{}),
}
err = s.blockState.AddBlock(testBlock)
@@ -769,16 +780,15 @@ func createNewBlockAndStoreDataAtBlock(t *testing.T, s *Service, key, value []by
storageStateTrie.Set(key, value)
require.NoError(t, err)
- header, err := types.NewHeader(parentHash, storageStateTrie.MustRoot(),
- common.Hash{}, big.NewInt(number), nil)
+ header, err := types.NewHeader(parentHash, storageStateTrie.MustRoot(), common.Hash{}, big.NewInt(number), types.NewDigest())
require.NoError(t, err)
err = s.storageState.StoreTrie(storageStateTrie, header)
require.NoError(t, err)
testBlock := &types.Block{
- Header: header,
- Body: types.NewBody([]byte{}),
+ Header: *header,
+ Body: *types.NewBody([]byte{}),
}
err = s.blockState.AddBlock(testBlock)
diff --git a/dot/digest/digest.go b/dot/digest/digest.go
index ebd5dec899..34343d243c 100644
--- a/dot/digest/digest.go
+++ b/dot/digest/digest.go
@@ -23,8 +23,8 @@ import (
"math/big"
"github.com/ChainSafe/gossamer/dot/types"
- "github.com/ChainSafe/gossamer/lib/scale"
"github.com/ChainSafe/gossamer/lib/services"
+ "github.com/ChainSafe/gossamer/pkg/scale"
log "github.com/ChainSafe/log15"
)
@@ -60,7 +60,7 @@ type Handler struct {
}
type grandpaChange struct {
- auths []*types.Authority
+ auths []types.Authority
atBlock *big.Int
}
@@ -144,63 +144,83 @@ func (h *Handler) NextGrandpaAuthorityChange() uint64 {
// HandleDigests handles consensus digests for an imported block
func (h *Handler) HandleDigests(header *types.Header) {
- for i, d := range header.Digest {
- if d.Type() == types.ConsensusDigestType {
- cd, ok := d.(*types.ConsensusDigest)
- if !ok {
- logger.Error("handleDigests", "block number", header.Number, "index", i, "error", "cannot cast invalid consensus digest item")
- continue
- }
-
- err := h.handleConsensusDigest(cd, header)
+ for i, d := range header.Digest.Types {
+ val, ok := d.Value().(types.ConsensusDigest)
+ if ok {
+ err := h.handleConsensusDigest(&val, header)
if err != nil {
- logger.Error("handleDigests", "block number", header.Number, "index", i, "digest", cd, "error", err)
+ logger.Error("handleDigests", "block number", header.Number, "index", i, "digest", d.Value(), "error", err)
}
}
}
}
func (h *Handler) handleConsensusDigest(d *types.ConsensusDigest, header *types.Header) error {
- t := d.DataType()
-
- if d.ConsensusEngineID == types.GrandpaEngineID {
- switch t {
- case types.GrandpaScheduledChangeType:
- return h.handleScheduledChange(d, header)
- case types.GrandpaForcedChangeType:
- return h.handleForcedChange(d, header)
- case types.GrandpaOnDisabledType:
- return nil // do nothing, as this is not implemented in substrate
- case types.GrandpaPauseType:
- return h.handlePause(d)
- case types.GrandpaResumeType:
- return h.handleResume(d)
- default:
- return errors.New("invalid consensus digest data")
+ switch d.ConsensusEngineID {
+ case types.GrandpaEngineID:
+ data := types.NewGrandpaConsensusDigest()
+ err := scale.Unmarshal(d.Data, &data)
+ if err != nil {
+ return err
}
- }
-
- if d.ConsensusEngineID == types.BabeEngineID {
- switch t {
- case types.NextEpochDataType:
- return h.handleNextEpochData(d, header)
- case types.BABEOnDisabledType:
- return h.handleBABEOnDisabled(d, header)
- case types.NextConfigDataType:
- return h.handleNextConfigData(d, header)
- default:
- return errors.New("invalid consensus digest data")
+ err = h.handleGrandpaConsensusDigest(data, header)
+ if err != nil {
+ return err
}
+ return nil
+ case types.BabeEngineID:
+ data := types.NewBabeConsensusDigest()
+ err := scale.Unmarshal(d.Data, &data)
+ if err != nil {
+ return err
+ }
+ err = h.handleBabeConsensusDigest(data, header)
+ if err != nil {
+ return err
+ }
+ return nil
}
return errors.New("unknown consensus engine ID")
}
+func (h *Handler) handleGrandpaConsensusDigest(digest scale.VaryingDataType, header *types.Header) error {
+ switch val := digest.Value().(type) {
+ case types.GrandpaScheduledChange:
+ return h.handleScheduledChange(val, header)
+ case types.GrandpaForcedChange:
+ return h.handleForcedChange(val, header)
+ case types.GrandpaOnDisabled:
+ return nil // do nothing, as this is not implemented in substrate
+ case types.GrandpaPause:
+ return h.handlePause(val)
+ case types.GrandpaResume:
+ return h.handleResume(val)
+ }
+
+ return errors.New("invalid consensus digest data")
+}
+
+func (h *Handler) handleBabeConsensusDigest(digest scale.VaryingDataType, header *types.Header) error {
+ switch val := digest.Value().(type) {
+ case types.NextEpochData:
+ logger.Debug("handling BABENextEpochData", "data", digest)
+ return h.handleNextEpochData(val, header)
+ case types.BABEOnDisabled:
+ return h.handleBABEOnDisabled(val, header)
+ case types.NextConfigData:
+ logger.Debug("handling BABENextConfigData", "data", digest)
+ return h.handleNextConfigData(val, header)
+ }
+
+ return errors.New("invalid consensus digest data")
+}
+
func (h *Handler) handleBlockImport(ctx context.Context) {
for {
select {
case block := <-h.imported:
- if block == nil || block.Header == nil {
+ if block == nil {
continue
}
@@ -218,7 +238,7 @@ func (h *Handler) handleBlockFinalisation(ctx context.Context) {
for {
select {
case info := <-h.finalised:
- if info == nil || info.Header == nil {
+ if info == nil {
continue
}
@@ -284,27 +304,16 @@ func (h *Handler) handleGrandpaChangesOnFinalization(num *big.Int) error {
return nil
}
-func (h *Handler) handleScheduledChange(d *types.ConsensusDigest, header *types.Header) error {
+func (h *Handler) handleScheduledChange(sc types.GrandpaScheduledChange, header *types.Header) error {
curr, err := h.blockState.BestBlockHeader()
if err != nil {
return err
}
- if d.ConsensusEngineID != types.GrandpaEngineID {
- return nil
- }
-
if h.grandpaScheduledChange != nil {
return nil
}
- sc := &types.GrandpaScheduledChange{}
- dec, err := scale.Decode(d.Data[1:], sc)
- if err != nil {
- return err
- }
- sc = dec.(*types.GrandpaScheduledChange)
-
logger.Debug("handling GrandpaScheduledChange", "data", sc)
c, err := newGrandpaChange(sc.Auths, sc.Delay, curr.Number)
@@ -318,7 +327,6 @@ func (h *Handler) handleScheduledChange(d *types.ConsensusDigest, header *types.
if err != nil {
return err
}
-
logger.Debug("setting GrandpaScheduledChange", "at block", big.NewInt(0).Add(header.Number, big.NewInt(int64(sc.Delay))))
return h.grandpaState.SetNextChange(
types.NewGrandpaVotersFromAuthorities(auths),
@@ -326,11 +334,7 @@ func (h *Handler) handleScheduledChange(d *types.ConsensusDigest, header *types.
)
}
-func (h *Handler) handleForcedChange(d *types.ConsensusDigest, header *types.Header) error {
- if d.ConsensusEngineID != types.GrandpaEngineID {
- return nil
- }
-
+func (h *Handler) handleForcedChange(fc types.GrandpaForcedChange, header *types.Header) error {
if header == nil {
return errors.New("header is nil")
}
@@ -339,13 +343,6 @@ func (h *Handler) handleForcedChange(d *types.ConsensusDigest, header *types.Hea
return errors.New("already have forced change scheduled")
}
- fc := &types.GrandpaForcedChange{}
- dec, err := scale.Decode(d.Data[1:], fc)
- if err != nil {
- return err
- }
- fc = dec.(*types.GrandpaForcedChange)
-
logger.Debug("handling GrandpaForcedChange", "data", fc)
c, err := newGrandpaChange(fc.Auths, fc.Delay, header.Number)
@@ -367,19 +364,12 @@ func (h *Handler) handleForcedChange(d *types.ConsensusDigest, header *types.Hea
)
}
-func (h *Handler) handlePause(d *types.ConsensusDigest) error {
+func (h *Handler) handlePause(p types.GrandpaPause) error {
curr, err := h.blockState.BestBlockHeader()
if err != nil {
return err
}
- p := &types.GrandpaPause{}
- dec, err := scale.Decode(d.Data[1:], p)
- if err != nil {
- return err
- }
- p = dec.(*types.GrandpaPause)
-
delay := big.NewInt(int64(p.Delay))
h.grandpaPause = &pause{
@@ -389,21 +379,13 @@ func (h *Handler) handlePause(d *types.ConsensusDigest) error {
return h.grandpaState.SetNextPause(h.grandpaPause.atBlock)
}
-func (h *Handler) handleResume(d *types.ConsensusDigest) error {
+func (h *Handler) handleResume(r types.GrandpaResume) error {
curr, err := h.blockState.BestBlockHeader()
if err != nil {
return err
}
- p := &types.GrandpaResume{}
- dec, err := scale.Decode(d.Data[1:], p)
- if err != nil {
- return err
- }
- p = dec.(*types.GrandpaResume)
-
- delay := big.NewInt(int64(p.Delay))
-
+ delay := big.NewInt(int64(r.Delay))
h.grandpaResume = &resume{
atBlock: big.NewInt(-1).Add(curr.Number, delay),
}
@@ -411,7 +393,7 @@ func (h *Handler) handleResume(d *types.ConsensusDigest) error {
return h.grandpaState.SetNextResume(h.grandpaResume.atBlock)
}
-func newGrandpaChange(raw []*types.GrandpaAuthoritiesRaw, delay uint32, currBlock *big.Int) (*grandpaChange, error) {
+func newGrandpaChange(raw []types.GrandpaAuthoritiesRaw, delay uint32, currBlock *big.Int) (*grandpaChange, error) {
auths, err := types.GrandpaAuthoritiesRawToAuthorities(raw)
if err != nil {
return nil, err
@@ -425,29 +407,20 @@ func newGrandpaChange(raw []*types.GrandpaAuthoritiesRaw, delay uint32, currBloc
}, nil
}
-func (h *Handler) handleBABEOnDisabled(d *types.ConsensusDigest, _ *types.Header) error {
+func (h *Handler) handleBABEOnDisabled(d types.BABEOnDisabled, _ *types.Header) error {
od := &types.BABEOnDisabled{}
logger.Debug("handling BABEOnDisabled", "data", od)
return nil
}
-func (h *Handler) handleNextEpochData(d *types.ConsensusDigest, header *types.Header) error {
- od := &types.NextEpochData{}
- dec, err := scale.Decode(d.Data[1:], od)
- if err != nil {
- return err
- }
- od = dec.(*types.NextEpochData)
-
- logger.Debug("handling BABENextEpochData", "data", od)
-
+func (h *Handler) handleNextEpochData(act types.NextEpochData, header *types.Header) error {
currEpoch, err := h.epochState.GetEpochForBlock(header)
if err != nil {
return err
}
// set EpochState epoch data for upcoming epoch
- data, err := od.ToEpochData()
+ data, err := act.ToEpochData()
if err != nil {
return err
}
@@ -456,22 +429,13 @@ func (h *Handler) handleNextEpochData(d *types.ConsensusDigest, header *types.He
return h.epochState.SetEpochData(currEpoch+1, data)
}
-func (h *Handler) handleNextConfigData(d *types.ConsensusDigest, header *types.Header) error {
- od := &types.NextConfigData{}
- dec, err := scale.Decode(d.Data[1:], od)
- if err != nil {
- return err
- }
- od = dec.(*types.NextConfigData)
-
- logger.Debug("handling BABENextConfigData", "data", od)
-
+func (h *Handler) handleNextConfigData(config types.NextConfigData, header *types.Header) error {
currEpoch, err := h.epochState.GetEpochForBlock(header)
if err != nil {
return err
}
- logger.Debug("setting BABE config data", "blocknum", header.Number, "epoch", currEpoch+1, "data", od.ToConfigData())
+ logger.Debug("setting BABE config data", "blocknum", header.Number, "epoch", currEpoch+1, "data", config.ToConfigData())
// set EpochState config data for upcoming epoch
- return h.epochState.SetConfigData(currEpoch+1, od.ToConfigData())
+ return h.epochState.SetConfigData(currEpoch+1, config.ToConfigData())
}
diff --git a/dot/digest/digest_test.go b/dot/digest/digest_test.go
index 5b4423b456..39ac8896cb 100644
--- a/dot/digest/digest_test.go
+++ b/dot/digest/digest_test.go
@@ -29,6 +29,8 @@ import (
"github.com/ChainSafe/gossamer/lib/crypto/sr25519"
"github.com/ChainSafe/gossamer/lib/genesis"
"github.com/ChainSafe/gossamer/lib/keystore"
+ "github.com/ChainSafe/gossamer/pkg/scale"
+
log "github.com/ChainSafe/log15"
"github.com/stretchr/testify/require"
)
@@ -46,22 +48,23 @@ func addTestBlocksToStateWithParent(t *testing.T, previousHash common.Hash, dept
headers := []*types.Header{}
for i := 1; i <= depth; i++ {
+ digest := types.NewDigest()
+ err = digest.Add(*types.NewBabeSecondaryPlainPreDigest(0, uint64(i)).ToPreRuntimeDigest())
+ require.NoError(t, err)
+
block := &types.Block{
- Header: &types.Header{
+ Header: types.Header{
ParentHash: previousHash,
Number: big.NewInt(int64(i)).Add(previousNum, big.NewInt(int64(i))),
- Digest: types.Digest{
- types.NewBabeSecondaryPlainPreDigest(0, uint64(i)).ToPreRuntimeDigest(),
- },
+ Digest: digest,
},
- Body: &types.Body{},
+ Body: types.Body{},
}
previousHash = block.Header.Hash()
-
- err := blockState.(*state.BlockState).AddBlock(block)
+ err = blockState.(*state.BlockState).AddBlock(block)
require.NoError(t, err)
- headers = append(headers, block.Header)
+ headers = append(headers, &block.Header)
}
return headers
@@ -98,14 +101,18 @@ func TestHandler_GrandpaScheduledChange(t *testing.T) {
kr, err := keystore.NewEd25519Keyring()
require.NoError(t, err)
- sc := &types.GrandpaScheduledChange{
- Auths: []*types.GrandpaAuthoritiesRaw{
+ sc := types.GrandpaScheduledChange{
+ Auths: []types.GrandpaAuthoritiesRaw{
{Key: kr.Alice().Public().(*ed25519.PublicKey).AsBytes(), ID: 0},
},
Delay: 3,
}
- data, err := sc.Encode()
+ var digest = types.NewGrandpaConsensusDigest()
+ err = digest.Set(sc)
+ require.NoError(t, err)
+
+ data, err := scale.Marshal(digest)
require.NoError(t, err)
d := &types.ConsensusDigest{
@@ -151,14 +158,18 @@ func TestHandler_GrandpaForcedChange(t *testing.T) {
kr, err := keystore.NewEd25519Keyring()
require.NoError(t, err)
- fc := &types.GrandpaForcedChange{
- Auths: []*types.GrandpaAuthoritiesRaw{
+ fc := types.GrandpaForcedChange{
+ Auths: []types.GrandpaAuthoritiesRaw{
{Key: kr.Alice().Public().(*ed25519.PublicKey).AsBytes(), ID: 0},
},
Delay: 3,
}
- data, err := fc.Encode()
+ var digest = types.NewGrandpaConsensusDigest()
+ err = digest.Set(fc)
+ require.NoError(t, err)
+
+ data, err := scale.Marshal(digest)
require.NoError(t, err)
d := &types.ConsensusDigest{
@@ -195,11 +206,15 @@ func TestHandler_GrandpaPauseAndResume(t *testing.T) {
handler.Start()
defer handler.Stop()
- p := &types.GrandpaPause{
+ p := types.GrandpaPause{
Delay: 3,
}
- data, err := p.Encode()
+ var digest = types.NewGrandpaConsensusDigest()
+ err := digest.Set(p)
+ require.NoError(t, err)
+
+ data, err := scale.Marshal(digest)
require.NoError(t, err)
d := &types.ConsensusDigest{
@@ -221,11 +236,15 @@ func TestHandler_GrandpaPauseAndResume(t *testing.T) {
time.Sleep(time.Millisecond * 100)
require.Nil(t, handler.grandpaPause)
- r := &types.GrandpaResume{
+ r := types.GrandpaResume{
Delay: 3,
}
- data, err = r.Encode()
+ var digest2 = types.NewGrandpaConsensusDigest()
+ err = digest2.Set(r)
+ require.NoError(t, err)
+
+ data, err = scale.Marshal(digest2)
require.NoError(t, err)
d = &types.ConsensusDigest{
@@ -251,12 +270,16 @@ func TestNextGrandpaAuthorityChange_OneChange(t *testing.T) {
defer handler.Stop()
block := uint32(3)
- sc := &types.GrandpaScheduledChange{
- Auths: []*types.GrandpaAuthoritiesRaw{},
+ sc := types.GrandpaScheduledChange{
+ Auths: []types.GrandpaAuthoritiesRaw{},
Delay: block,
}
- data, err := sc.Encode()
+ var digest = types.NewGrandpaConsensusDigest()
+ err := digest.Set(sc)
+ require.NoError(t, err)
+
+ data, err := scale.Marshal(digest)
require.NoError(t, err)
d := &types.ConsensusDigest{
@@ -290,12 +313,16 @@ func TestNextGrandpaAuthorityChange_MultipleChanges(t *testing.T) {
require.NoError(t, err)
later := uint32(6)
- sc := &types.GrandpaScheduledChange{
- Auths: []*types.GrandpaAuthoritiesRaw{},
+ sc := types.GrandpaScheduledChange{
+ Auths: []types.GrandpaAuthoritiesRaw{},
Delay: later,
}
- data, err := sc.Encode()
+ var digest = types.NewGrandpaConsensusDigest()
+ err = digest.Set(sc)
+ require.NoError(t, err)
+
+ data, err := scale.Marshal(digest)
require.NoError(t, err)
d := &types.ConsensusDigest{
@@ -318,14 +345,18 @@ func TestNextGrandpaAuthorityChange_MultipleChanges(t *testing.T) {
require.Equal(t, expected, auths)
earlier := uint32(4)
- fc := &types.GrandpaForcedChange{
- Auths: []*types.GrandpaAuthoritiesRaw{
+ fc := types.GrandpaForcedChange{
+ Auths: []types.GrandpaAuthoritiesRaw{
{Key: kr.Alice().Public().(*ed25519.PublicKey).AsBytes(), ID: 0},
},
Delay: earlier,
}
- data, err = fc.Encode()
+ digest = types.NewGrandpaConsensusDigest()
+ err = digest.Set(fc)
+ require.NoError(t, err)
+
+ data, err = scale.Marshal(digest)
require.NoError(t, err)
d = &types.ConsensusDigest{
@@ -352,11 +383,13 @@ func TestHandler_HandleBABEOnDisabled(t *testing.T) {
Number: big.NewInt(1),
}
- digest := &types.BABEOnDisabled{
+ var digest = types.NewBabeConsensusDigest()
+ err := digest.Set(types.BABEOnDisabled{
ID: 7,
- }
+ })
+ require.NoError(t, err)
- data, err := digest.Encode()
+ data, err := scale.Marshal(digest)
require.NoError(t, err)
d := &types.ConsensusDigest{
@@ -368,20 +401,25 @@ func TestHandler_HandleBABEOnDisabled(t *testing.T) {
require.NoError(t, err)
}
-func createHeaderWithPreDigest(slotNumber uint64) *types.Header {
+func createHeaderWithPreDigest(t *testing.T, slotNumber uint64) *types.Header {
babeHeader := types.NewBabePrimaryPreDigest(0, slotNumber, [32]byte{}, [64]byte{})
enc := babeHeader.Encode()
- digest := &types.PreRuntimeDigest{
+ d := &types.PreRuntimeDigest{
Data: enc,
}
+ digest := types.NewDigest()
+ err := digest.Add(*d)
+ require.NoError(t, err)
return &types.Header{
- Digest: types.Digest{digest},
+ Digest: digest,
}
}
func TestHandler_HandleNextEpochData(t *testing.T) {
+ expData := common.MustHexToBytes("0x0108d43593c715fdd31c61141abd04a99fd6822c8558854ccde39a5684e7a56da27d01000000000000008eaf04151687736326c9fea17e25fc5287613693c912909cb226aa4794f26a4801000000000000004d58630000000000000000000000000000000000000000000000000000000000")
+
handler := newTestHandler(t, true, false)
handler.Start()
defer handler.Stop()
@@ -389,37 +427,47 @@ func TestHandler_HandleNextEpochData(t *testing.T) {
keyring, err := keystore.NewSr25519Keyring()
require.NoError(t, err)
- authA := &types.AuthorityRaw{
+ authA := types.AuthorityRaw{
Key: keyring.Alice().Public().(*sr25519.PublicKey).AsBytes(),
Weight: 1,
}
- authB := &types.AuthorityRaw{
+ authB := types.AuthorityRaw{
Key: keyring.Bob().Public().(*sr25519.PublicKey).AsBytes(),
Weight: 1,
}
- digest := &types.NextEpochData{
- Authorities: []*types.AuthorityRaw{authA, authB},
+ var digest = types.NewBabeConsensusDigest()
+ err = digest.Set(types.NextEpochData{
+ Authorities: []types.AuthorityRaw{authA, authB},
Randomness: [32]byte{77, 88, 99},
- }
+ })
+ require.NoError(t, err)
- data, err := digest.Encode()
+ data, err := scale.Marshal(digest)
require.NoError(t, err)
+ require.Equal(t, expData, data)
+
d := &types.ConsensusDigest{
ConsensusEngineID: types.BabeEngineID,
Data: data,
}
- header := createHeaderWithPreDigest(10)
+ header := createHeaderWithPreDigest(t, 10)
err = handler.handleConsensusDigest(d, header)
require.NoError(t, err)
stored, err := handler.epochState.(*state.EpochState).GetEpochData(1)
require.NoError(t, err)
- res, err := digest.ToEpochData()
+
+ act, ok := digest.Value().(types.NextEpochData)
+ if !ok {
+ t.Fatal()
+ }
+
+ res, err := act.ToEpochData()
require.NoError(t, err)
require.Equal(t, res, stored)
}
@@ -429,13 +477,15 @@ func TestHandler_HandleNextConfigData(t *testing.T) {
handler.Start()
defer handler.Stop()
- digest := &types.NextConfigData{
+ var digest = types.NewBabeConsensusDigest()
+ err := digest.Set(types.NextConfigData{
C1: 1,
C2: 8,
SecondarySlots: 1,
- }
+ })
+ require.NoError(t, err)
- data, err := digest.Encode()
+ data, err := scale.Marshal(digest)
require.NoError(t, err)
d := &types.ConsensusDigest{
@@ -443,12 +493,17 @@ func TestHandler_HandleNextConfigData(t *testing.T) {
Data: data,
}
- header := createHeaderWithPreDigest(10)
+ header := createHeaderWithPreDigest(t, 10)
err = handler.handleConsensusDigest(d, header)
require.NoError(t, err)
+ act, ok := digest.Value().(types.NextConfigData)
+ if !ok {
+ t.Fatal()
+ }
+
stored, err := handler.epochState.(*state.EpochState).GetConfigData(1)
require.NoError(t, err)
- require.Equal(t, digest.ToConfigData(), stored)
+ require.Equal(t, act.ToConfigData(), stored)
}
diff --git a/dot/digest/interface.go b/dot/digest/interface.go
index bd42d9b883..fbc31cd350 100644
--- a/dot/digest/interface.go
+++ b/dot/digest/interface.go
@@ -41,7 +41,7 @@ type EpochState interface {
// GrandpaState is the interface for the state.GrandpaState
type GrandpaState interface {
- SetNextChange(authorities []*grandpa.Voter, number *big.Int) error
+ SetNextChange(authorities []grandpa.Voter, number *big.Int) error
IncrementSetID() error
SetNextPause(number *big.Int) error
SetNextResume(number *big.Int) error
diff --git a/dot/import.go b/dot/import.go
index 6546871b42..35ccb9206d 100644
--- a/dot/import.go
+++ b/dot/import.go
@@ -17,7 +17,6 @@
package dot
import (
- "bytes"
"encoding/json"
"errors"
"io/ioutil"
@@ -27,6 +26,7 @@ import (
"github.com/ChainSafe/gossamer/dot/types"
"github.com/ChainSafe/gossamer/lib/common"
"github.com/ChainSafe/gossamer/lib/trie"
+ "github.com/ChainSafe/gossamer/pkg/scale"
log "github.com/ChainSafe/log15"
)
@@ -126,18 +126,20 @@ func newHeaderFromFile(filename string) (*types.Header, error) {
}
logs := digestRaw["logs"].([]interface{})
- digest := types.Digest{}
+ digest := types.NewDigest()
for _, log := range logs {
digestBytes := common.MustHexToBytes(log.(string))
- r := &bytes.Buffer{}
- _, _ = r.Write(digestBytes)
- digestItem, err := types.DecodeDigestItem(r)
+ var digestItem = types.NewDigestItem()
+ err := scale.Unmarshal(digestBytes, &digestItem)
if err != nil {
return nil, err
}
- digest = append(digest, digestItem)
+ err = digest.Add(digestItem.Value())
+ if err != nil {
+ return nil, err
+ }
}
header := &types.Header{
diff --git a/dot/import_test.go b/dot/import_test.go
index a00111e487..30527edf21 100644
--- a/dot/import_test.go
+++ b/dot/import_test.go
@@ -17,7 +17,6 @@
package dot
import (
- "bytes"
"encoding/json"
"io/ioutil"
"math/big"
@@ -26,6 +25,7 @@ import (
"github.com/ChainSafe/gossamer/dot/types"
"github.com/ChainSafe/gossamer/lib/common"
"github.com/ChainSafe/gossamer/lib/utils"
+ "github.com/ChainSafe/gossamer/pkg/scale"
"github.com/stretchr/testify/require"
)
@@ -74,11 +74,10 @@ func TestNewHeaderFromFile(t *testing.T) {
require.NoError(t, err)
digestBytes := common.MustHexToBytes("0x080642414245b501013c0000009659bd0f0000000070edad1c9064fff78cb18435223d8adaf5ea04c24b1a8766e3dc01eb03cc6a0c11b79793d4e31cc0990838229c44fed1669a7c7c79e1e6d0a96374d6496728069d1ef739e290497a0e3b728fa88fcbdd3a5504e0efde0242e7a806dd4fa9260c054241424501019e7f28dddcf27c1e6b328d5694c368d5b2ec5dbe0e412ae1c98f88d53be4d8502fac571f3f19c9caaf281a673319241e0c5095a683ad34316204088a36a4bd86")
- r := &bytes.Buffer{}
- _, _ = r.Write(digestBytes)
- digest, err := types.DecodeDigest(r)
+ digest := types.NewDigest()
+ err = scale.Unmarshal(digestBytes, &digest)
require.NoError(t, err)
- require.Equal(t, 2, len(digest))
+ require.Equal(t, 2, len(digest.Types))
expected := &types.Header{
ParentHash: common.MustHexToHash("0x3b45c9c22dcece75a30acc9c2968cb311e6b0557350f83b430f47559db786975"),
diff --git a/dot/network/block_announce.go b/dot/network/block_announce.go
index 1ccc47116a..1b2ead809f 100644
--- a/dot/network/block_announce.go
+++ b/dot/network/block_announce.go
@@ -17,14 +17,13 @@
package network
import (
- "bytes"
"errors"
"fmt"
"math/big"
"github.com/ChainSafe/gossamer/dot/types"
"github.com/ChainSafe/gossamer/lib/common"
- "github.com/ChainSafe/gossamer/lib/scale"
+ "github.com/ChainSafe/gossamer/pkg/scale"
"github.com/libp2p/go-libp2p-core/peer"
)
@@ -40,7 +39,7 @@ type BlockAnnounceMessage struct {
Number *big.Int
StateRoot common.Hash
ExtrinsicsRoot common.Hash
- Digest types.Digest
+ Digest scale.VaryingDataTypeSlice
BestBlock bool
}
@@ -66,7 +65,7 @@ func (bm *BlockAnnounceMessage) String() string {
// Encode a BlockAnnounce Msg Type containing the BlockAnnounceMessage using scale.Encode
func (bm *BlockAnnounceMessage) Encode() ([]byte, error) {
- enc, err := scale.Encode(bm)
+ enc, err := scale.Marshal(*bm)
if err != nil {
return enc, err
}
@@ -75,24 +74,10 @@ func (bm *BlockAnnounceMessage) Encode() ([]byte, error) {
// Decode the message into a BlockAnnounceMessage
func (bm *BlockAnnounceMessage) Decode(in []byte) error {
- r := &bytes.Buffer{}
- _, _ = r.Write(in)
- h, err := types.NewEmptyHeader().Decode(r)
+ err := scale.Unmarshal(in, bm)
if err != nil {
return err
}
-
- bm.ParentHash = h.ParentHash
- bm.Number = h.Number
- bm.StateRoot = h.StateRoot
- bm.ExtrinsicsRoot = h.ExtrinsicsRoot
- bm.Digest = h.Digest
- bestBlock, err := common.ReadByte(r)
- if err != nil {
- return err
- }
-
- bm.BestBlock = bestBlock == 1
return nil
}
@@ -110,22 +95,26 @@ func (*BlockAnnounceMessage) IsHandshake() bool {
}
func decodeBlockAnnounceHandshake(in []byte) (Handshake, error) {
- hs, err := scale.Decode(in, new(BlockAnnounceHandshake))
+ hs := BlockAnnounceHandshake{}
+ err := scale.Unmarshal(in, &hs)
if err != nil {
return nil, err
}
- return hs.(*BlockAnnounceHandshake), err
+ return &hs, err
}
func decodeBlockAnnounceMessage(in []byte) (NotificationsMessage, error) {
- msg := new(BlockAnnounceMessage)
+ msg := BlockAnnounceMessage{
+ Number: big.NewInt(0),
+ Digest: types.NewDigest(),
+ }
err := msg.Decode(in)
if err != nil {
return nil, err
}
- return msg, nil
+ return &msg, nil
}
// BlockAnnounceHandshake is exchanged by nodes that are beginning the BlockAnnounce protocol
@@ -152,20 +141,15 @@ func (hs *BlockAnnounceHandshake) String() string {
// Encode encodes a BlockAnnounceHandshake message using SCALE
func (hs *BlockAnnounceHandshake) Encode() ([]byte, error) {
- return scale.Encode(hs)
+ return scale.Marshal(*hs)
}
// Decode the message into a BlockAnnounceHandshake
func (hs *BlockAnnounceHandshake) Decode(in []byte) error {
- msg, err := scale.Decode(in, hs)
+ err := scale.Unmarshal(in, hs)
if err != nil {
return err
}
-
- hs.Roles = msg.(*BlockAnnounceHandshake).Roles
- hs.BestBlockNumber = msg.(*BlockAnnounceHandshake).BestBlockNumber
- hs.BestBlockHash = msg.(*BlockAnnounceHandshake).BestBlockHash
- hs.GenesisHash = msg.(*BlockAnnounceHandshake).GenesisHash
return nil
}
diff --git a/dot/network/block_announce_test.go b/dot/network/block_announce_test.go
index 216153f359..bc13558040 100644
--- a/dot/network/block_announce_test.go
+++ b/dot/network/block_announce_test.go
@@ -24,63 +24,111 @@ import (
"github.com/ChainSafe/gossamer/dot/types"
"github.com/ChainSafe/gossamer/lib/common"
"github.com/ChainSafe/gossamer/lib/utils"
+ "github.com/ChainSafe/gossamer/pkg/scale"
"github.com/libp2p/go-libp2p-core/peer"
"github.com/stretchr/testify/require"
)
-func TestBlockAnnounce_Encode(t *testing.T) {
- testBlockAnnounce := &BlockAnnounceMessage{
+func TestEncodeBlockAnnounce(t *testing.T) {
+ expected := common.MustHexToBytes("0x01000000000000000000000000000000000000000000000000000000000000003501020000000000000000000000000000000000000000000000000000000000000003000000000000000000000000000000000000000000000000000000000000000c0642414245340201000000ef55a50f00000000044241424549040118ca239392960473fe1bc65f94ee27d890a49c1b200c006ff5dcc525330ecc16770100000000000000b46f01874ce7abbb5220e8fd89bede0adad14c73039d91e28e881823433e723f0100000000000000d684d9176d6eb69887540c9a89fa6097adea82fc4b0ff26d1062b488f352e179010000000000000068195a71bdde49117a616424bdc60a1733e96acb1da5aeab5d268cf2a572e94101000000000000001a0575ef4ae24bdfd31f4cb5bd61239ae67c12d4e64ae51ac756044aa6ad8200010000000000000018168f2aad0081a25728961ee00627cfe35e39833c805016632bf7c14da5800901000000000000000000000000000000000000000000000000000000000000000000000000000000054241424501014625284883e564bc1e4063f5ea2b49846cdddaa3761d04f543b698c1c3ee935c40d25b869247c36c6b8a8cbbd7bb2768f560ab7c276df3c62df357a7e3b1ec8d00")
+
+ digestVdt := types.NewDigest()
+ err := digestVdt.Add(
+ types.PreRuntimeDigest{
+ ConsensusEngineID: types.BabeEngineID,
+ Data: common.MustHexToBytes("0x0201000000ef55a50f00000000"),
+ },
+ types.ConsensusDigest{
+ ConsensusEngineID: types.BabeEngineID,
+ Data: common.MustHexToBytes("0x0118ca239392960473fe1bc65f94ee27d890a49c1b200c006ff5dcc525330ecc16770100000000000000b46f01874ce7abbb5220e8fd89bede0adad14c73039d91e28e881823433e723f0100000000000000d684d9176d6eb69887540c9a89fa6097adea82fc4b0ff26d1062b488f352e179010000000000000068195a71bdde49117a616424bdc60a1733e96acb1da5aeab5d268cf2a572e94101000000000000001a0575ef4ae24bdfd31f4cb5bd61239ae67c12d4e64ae51ac756044aa6ad8200010000000000000018168f2aad0081a25728961ee00627cfe35e39833c805016632bf7c14da5800901000000000000000000000000000000000000000000000000000000000000000000000000000000"),
+ },
+ types.SealDigest{
+ ConsensusEngineID: types.BabeEngineID,
+ Data: common.MustHexToBytes("0x4625284883e564bc1e4063f5ea2b49846cdddaa3761d04f543b698c1c3ee935c40d25b869247c36c6b8a8cbbd7bb2768f560ab7c276df3c62df357a7e3b1ec8d"),
+ },
+ )
+ require.NoError(t, err)
+
+ testBlockAnnounce := BlockAnnounceMessage{
ParentHash: common.Hash{1},
Number: big.NewInt(77),
StateRoot: common.Hash{2},
ExtrinsicsRoot: common.Hash{3},
- Digest: types.Digest{},
+ Digest: digestVdt,
}
- enc, err := testBlockAnnounce.Encode()
+ enc, err := scale.Marshal(testBlockAnnounce)
require.NoError(t, err)
- res := &BlockAnnounceMessage{
+ require.Equal(t, expected, enc)
+}
+
+func TestDecodeBlockAnnounce(t *testing.T) {
+ enc := common.MustHexToBytes("0x01000000000000000000000000000000000000000000000000000000000000003501020000000000000000000000000000000000000000000000000000000000000003000000000000000000000000000000000000000000000000000000000000000c0642414245340201000000ef55a50f00000000044241424549040118ca239392960473fe1bc65f94ee27d890a49c1b200c006ff5dcc525330ecc16770100000000000000b46f01874ce7abbb5220e8fd89bede0adad14c73039d91e28e881823433e723f0100000000000000d684d9176d6eb69887540c9a89fa6097adea82fc4b0ff26d1062b488f352e179010000000000000068195a71bdde49117a616424bdc60a1733e96acb1da5aeab5d268cf2a572e94101000000000000001a0575ef4ae24bdfd31f4cb5bd61239ae67c12d4e64ae51ac756044aa6ad8200010000000000000018168f2aad0081a25728961ee00627cfe35e39833c805016632bf7c14da5800901000000000000000000000000000000000000000000000000000000000000000000000000000000054241424501014625284883e564bc1e4063f5ea2b49846cdddaa3761d04f543b698c1c3ee935c40d25b869247c36c6b8a8cbbd7bb2768f560ab7c276df3c62df357a7e3b1ec8d00")
+
+ digestVdt := types.NewDigest()
+ err := digestVdt.Add(
+ types.PreRuntimeDigest{
+ ConsensusEngineID: types.BabeEngineID,
+ Data: common.MustHexToBytes("0x0201000000ef55a50f00000000"),
+ },
+ types.ConsensusDigest{
+ ConsensusEngineID: types.BabeEngineID,
+ Data: common.MustHexToBytes("0x0118ca239392960473fe1bc65f94ee27d890a49c1b200c006ff5dcc525330ecc16770100000000000000b46f01874ce7abbb5220e8fd89bede0adad14c73039d91e28e881823433e723f0100000000000000d684d9176d6eb69887540c9a89fa6097adea82fc4b0ff26d1062b488f352e179010000000000000068195a71bdde49117a616424bdc60a1733e96acb1da5aeab5d268cf2a572e94101000000000000001a0575ef4ae24bdfd31f4cb5bd61239ae67c12d4e64ae51ac756044aa6ad8200010000000000000018168f2aad0081a25728961ee00627cfe35e39833c805016632bf7c14da5800901000000000000000000000000000000000000000000000000000000000000000000000000000000"),
+ },
+ types.SealDigest{
+ ConsensusEngineID: types.BabeEngineID,
+ Data: common.MustHexToBytes("0x4625284883e564bc1e4063f5ea2b49846cdddaa3761d04f543b698c1c3ee935c40d25b869247c36c6b8a8cbbd7bb2768f560ab7c276df3c62df357a7e3b1ec8d"),
+ },
+ )
+ require.NoError(t, err)
+
+ expected := BlockAnnounceMessage{
+ ParentHash: common.Hash{1},
+ Number: big.NewInt(77),
+ StateRoot: common.Hash{2},
+ ExtrinsicsRoot: common.Hash{3},
+ Digest: digestVdt,
+ }
+
+ act := BlockAnnounceMessage{
Number: big.NewInt(0),
- Digest: types.Digest{},
+ Digest: types.NewDigest(),
}
- err = res.Decode(enc)
+ err = scale.Unmarshal(enc, &act)
require.NoError(t, err)
- require.Equal(t, testBlockAnnounce, res)
+
+ require.Equal(t, expected, act)
}
-func TestDecodeBlockAnnounceHandshake(t *testing.T) {
- testHandshake := &BlockAnnounceHandshake{
+func TestEncodeBlockAnnounceHandshake(t *testing.T) {
+ expected := common.MustHexToBytes("0x044d00000001000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000")
+ testHandshake := BlockAnnounceHandshake{
Roles: 4,
BestBlockNumber: 77,
BestBlockHash: common.Hash{1},
GenesisHash: common.Hash{2},
}
- enc, err := testHandshake.Encode()
+ enc, err := scale.Marshal(testHandshake)
require.NoError(t, err)
-
- msg, err := decodeBlockAnnounceHandshake(enc)
- require.NoError(t, err)
- require.Equal(t, testHandshake, msg)
+ require.Equal(t, expected, enc)
}
-func TestDecodeBlockAnnounceMessage(t *testing.T) {
- testBlockAnnounce := &BlockAnnounceMessage{
- ParentHash: common.Hash{1},
- Number: big.NewInt(77),
- StateRoot: common.Hash{2},
- ExtrinsicsRoot: common.Hash{3},
- Digest: types.Digest{},
+func TestDecodeBlockAnnounceHandshake(t *testing.T) {
+ enc := common.MustHexToBytes("0x044d00000001000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000")
+ expected := BlockAnnounceHandshake{
+ Roles: 4,
+ BestBlockNumber: 77,
+ BestBlockHash: common.Hash{1},
+ GenesisHash: common.Hash{2},
}
- enc, err := testBlockAnnounce.Encode()
- require.NoError(t, err)
-
- msg, err := decodeBlockAnnounceMessage(enc)
+ msg := BlockAnnounceHandshake{}
+ err := scale.Unmarshal(enc, &msg)
require.NoError(t, err)
- require.Equal(t, testBlockAnnounce, msg)
+ require.Equal(t, expected, msg)
}
func TestHandleBlockAnnounceMessage(t *testing.T) {
@@ -98,6 +146,7 @@ func TestHandleBlockAnnounceMessage(t *testing.T) {
peerID := peer.ID("noot")
msg := &BlockAnnounceMessage{
Number: big.NewInt(10),
+ Digest: types.NewDigest(),
}
propagate, err := s.handleBlockAnnounceMessage(peerID, msg)
diff --git a/dot/network/message.go b/dot/network/message.go
index c50e8b971f..a6671acaa1 100644
--- a/dot/network/message.go
+++ b/dot/network/message.go
@@ -24,10 +24,8 @@ import (
pb "github.com/ChainSafe/gossamer/dot/network/proto"
"github.com/ChainSafe/gossamer/dot/types"
"github.com/ChainSafe/gossamer/lib/common"
- "github.com/ChainSafe/gossamer/lib/common/optional"
"github.com/ChainSafe/gossamer/lib/common/variadic"
- "github.com/ChainSafe/gossamer/lib/scale"
-
+ "github.com/ChainSafe/gossamer/pkg/scale"
"google.golang.org/protobuf/proto"
)
@@ -68,10 +66,10 @@ var _ Message = &BlockRequestMessage{}
// BlockRequestMessage is sent to request some blocks from a peer
type BlockRequestMessage struct {
RequestedData byte
- StartingBlock *variadic.Uint64OrHash // first byte 0 = block hash (32 byte), first byte 1 = block number (int64)
- EndBlockHash *optional.Hash
+ StartingBlock variadic.Uint64OrHash // first byte 0 = block hash (32 byte), first byte 1 = block number (int64)
+ EndBlockHash *common.Hash
Direction byte // 0 = ascending, 1 = descending
- Max *optional.Uint32
+ Max *uint32
}
// SubProtocol returns the sync sub-protocol
@@ -81,12 +79,20 @@ func (bm *BlockRequestMessage) SubProtocol() string {
// String formats a BlockRequestMessage as a string
func (bm *BlockRequestMessage) String() string {
- return fmt.Sprintf("BlockRequestMessage RequestedData=%d StartingBlock=0x%x EndBlockHash=%s Direction=%d Max=%s",
+ hash := common.Hash{}
+ max := uint32(0)
+ if bm.EndBlockHash != nil {
+ hash = *bm.EndBlockHash
+ }
+ if bm.Max != nil {
+ max = *bm.Max
+ }
+ return fmt.Sprintf("BlockRequestMessage RequestedData=%d StartingBlock=%v EndBlockHash=%s Direction=%d Max=%d",
bm.RequestedData,
bm.StartingBlock,
- bm.EndBlockHash.String(),
+ hash.String(),
bm.Direction,
- bm.Max.String())
+ max)
}
// Encode returns the protobuf encoded BlockRequestMessage
@@ -96,13 +102,13 @@ func (bm *BlockRequestMessage) Encode() ([]byte, error) {
max uint32
)
- if bm.EndBlockHash.Exists() {
- hash := bm.EndBlockHash.Value()
+ if bm.EndBlockHash != nil {
+ hash := bm.EndBlockHash
toBlock = hash[:]
}
- if bm.Max.Exists() {
- max = bm.Max.Value()
+ if bm.Max != nil {
+ max = *bm.Max
}
msg := &pb.BlockRequest{
@@ -140,8 +146,8 @@ func (bm *BlockRequestMessage) Decode(in []byte) error {
var (
startingBlock *variadic.Uint64OrHash
- endBlockHash *optional.Hash
- max *optional.Uint32
+ endBlockHash *common.Hash
+ max *uint32
)
switch from := msg.FromBlock.(type) {
@@ -162,19 +168,20 @@ func (bm *BlockRequestMessage) Decode(in []byte) error {
}
if len(msg.ToBlock) != 0 {
- endBlockHash = optional.NewHash(true, common.BytesToHash(msg.ToBlock))
+ hash := common.NewHash(msg.ToBlock)
+ endBlockHash = &hash
} else {
- endBlockHash = optional.NewHash(false, common.Hash{})
+ endBlockHash = nil
}
if msg.MaxBlocks != 0 {
- max = optional.NewUint32(true, msg.MaxBlocks)
+ max = &msg.MaxBlocks
} else {
- max = optional.NewUint32(false, 0)
+ max = nil
}
bm.RequestedData = byte(msg.Fields >> 24)
- bm.StartingBlock = startingBlock
+ bm.StartingBlock = *startingBlock
bm.EndBlockHash = endBlockHash
bm.Direction = byte(msg.Direction)
bm.Max = max
@@ -202,7 +209,7 @@ func (bm *BlockResponseMessage) getStartAndEnd() (int64, int64, error) {
return 0, 0, errors.New("last BlockData in BlockResponseMessage does not contain header")
}
- return bm.BlockData[0].Header.Value().Number.Int64(), bm.BlockData[len(bm.BlockData)-1].Header.Value().Number.Int64(), nil
+ return bm.BlockData[0].Header.Number.Int64(), bm.BlockData[len(bm.BlockData)-1].Header.Number.Int64(), nil
}
// SubProtocol returns the sync sub-protocol
@@ -250,10 +257,11 @@ func (bm *BlockResponseMessage) Decode(in []byte) (err error) {
bm.BlockData = make([]*types.BlockData, len(msg.Blocks))
for i, bd := range msg.Blocks {
- bm.BlockData[i], err = protobufToBlockData(bd)
+ block, err := protobufToBlockData(bd)
if err != nil {
return err
}
+ bm.BlockData[i] = block
}
return nil
@@ -265,20 +273,16 @@ func blockDataToProtobuf(bd *types.BlockData) (*pb.BlockData, error) {
Hash: bd.Hash[:],
}
- if bd.Header != nil && bd.Header.Exists() {
- header, err := types.NewHeaderFromOptional(bd.Header)
- if err != nil {
- return nil, err
- }
-
- p.Header, err = header.Encode()
+ if bd.Header != nil {
+ header, err := scale.Marshal(*bd.Header)
if err != nil {
return nil, err
}
+ p.Header = header
}
- if bd.Body != nil && bd.Body.Exists() {
- body := types.Body(bd.Body.Value())
+ if bd.Body != nil {
+ body := bd.Body
exts, err := body.AsEncodedExtrinsics()
if err != nil {
return nil, err
@@ -287,17 +291,17 @@ func blockDataToProtobuf(bd *types.BlockData) (*pb.BlockData, error) {
p.Body = types.ExtrinsicsArrayToBytesArray(exts)
}
- if bd.Receipt != nil && bd.Receipt.Exists() {
- p.Receipt = bd.Receipt.Value()
+ if bd.Receipt != nil {
+ p.Receipt = *bd.Receipt
}
- if bd.MessageQueue != nil && bd.MessageQueue.Exists() {
- p.MessageQueue = bd.MessageQueue.Value()
+ if bd.MessageQueue != nil {
+ p.MessageQueue = *bd.MessageQueue
}
- if bd.Justification != nil && bd.Justification.Exists() {
- p.Justification = bd.Justification.Value()
- if len(bd.Justification.Value()) == 0 {
+ if bd.Justification != nil {
+ p.Justification = *bd.Justification
+ if len(*bd.Justification) == 0 {
p.IsEmptyJustification = true
}
}
@@ -307,16 +311,18 @@ func blockDataToProtobuf(bd *types.BlockData) (*pb.BlockData, error) {
func protobufToBlockData(pbd *pb.BlockData) (*types.BlockData, error) {
bd := &types.BlockData{
- Hash: common.BytesToHash(pbd.Hash),
+ Hash: common.BytesToHash(pbd.Hash),
+ Header: types.NewEmptyHeader(),
}
if pbd.Header != nil {
- header, err := scale.Decode(pbd.Header, types.NewEmptyHeader())
+ header := types.NewEmptyHeader()
+ err := scale.Unmarshal(pbd.Header, header)
if err != nil {
return nil, err
}
- bd.Header = header.(*types.Header).AsOptional()
+ bd.Header = header
}
if pbd.Body != nil {
@@ -325,31 +331,31 @@ func protobufToBlockData(pbd *pb.BlockData) (*types.BlockData, error) {
return nil, err
}
- bd.Body = body.AsOptional()
+ bd.Body = body
} else {
- bd.Body = optional.NewBody(false, nil)
+ bd.Body = nil
}
if pbd.Receipt != nil {
- bd.Receipt = optional.NewBytes(true, pbd.Receipt)
+ bd.Receipt = &pbd.Receipt
} else {
- bd.Receipt = optional.NewBytes(false, nil)
+ bd.Receipt = nil
}
if pbd.MessageQueue != nil {
- bd.MessageQueue = optional.NewBytes(true, pbd.MessageQueue)
+ bd.MessageQueue = &pbd.MessageQueue
} else {
- bd.MessageQueue = optional.NewBytes(false, nil)
+ bd.MessageQueue = nil
}
if pbd.Justification != nil {
- bd.Justification = optional.NewBytes(true, pbd.Justification)
+ bd.Justification = &pbd.Justification
} else {
- bd.Justification = optional.NewBytes(false, nil)
+ bd.Justification = nil
}
if pbd.Justification == nil && pbd.IsEmptyJustification {
- bd.Justification = optional.NewBytes(true, []byte{})
+ bd.Justification = &[]byte{}
}
return bd, nil
diff --git a/dot/network/message_cache_test.go b/dot/network/message_cache_test.go
index eb32d120dc..0d24062479 100644
--- a/dot/network/message_cache_test.go
+++ b/dot/network/message_cache_test.go
@@ -30,7 +30,7 @@ func TestMessageCache(t *testing.T) {
Number: big.NewInt(77),
StateRoot: common.Hash{2},
ExtrinsicsRoot: common.Hash{3},
- Digest: types.Digest{},
+ Digest: types.NewDigest(),
}
ok, err := msgCache.put(peerID, msg)
diff --git a/dot/network/message_test.go b/dot/network/message_test.go
index 9c870ca1c4..aedcebb58a 100644
--- a/dot/network/message_test.go
+++ b/dot/network/message_test.go
@@ -23,12 +23,42 @@ import (
"github.com/ChainSafe/gossamer/dot/types"
"github.com/ChainSafe/gossamer/lib/common"
- "github.com/ChainSafe/gossamer/lib/common/optional"
"github.com/ChainSafe/gossamer/lib/common/variadic"
"github.com/stretchr/testify/require"
)
+func TestEncodeBlockRequestMessage(t *testing.T) {
+ expected, err := common.HexToBytes("0x08808080082220fd19d9ebac759c993fd2e05a1cff9e757d8741c2704c8682c15b5503496b6aa1280130011220dcd1346701ca8396496e52aa2785b1748deb6db09551b72159dcb3e08991025b")
+ require.Nil(t, err)
+
+ genesisHash, err := common.HexToBytes("0xdcd1346701ca8396496e52aa2785b1748deb6db09551b72159dcb3e08991025b")
+ require.Nil(t, err)
+
+ endBlock, err := common.HexToHash("0xfd19d9ebac759c993fd2e05a1cff9e757d8741c2704c8682c15b5503496b6aa1")
+ require.NoError(t, err)
+
+ one := uint32(1)
+
+ bm := &BlockRequestMessage{
+ RequestedData: 1,
+ StartingBlock: *variadic.NewUint64OrHashFromBytes(append([]byte{0}, genesisHash...)),
+ EndBlockHash: &endBlock,
+ Direction: 1,
+ Max: &one,
+ }
+
+ encMsg, err := bm.Encode()
+ require.NoError(t, err)
+
+ require.Equal(t, expected, encMsg) // Pass!
+
+ res := new(BlockRequestMessage)
+ err = res.Decode(encMsg)
+ require.NoError(t, err)
+ require.Equal(t, bm, res)
+}
+
func TestEncodeBlockRequestMessage_BlockHash(t *testing.T) {
genesisHash, err := common.HexToBytes("0xdcd1346701ca8396496e52aa2785b1748deb6db09551b72159dcb3e08991025b")
require.Nil(t, err)
@@ -36,12 +66,13 @@ func TestEncodeBlockRequestMessage_BlockHash(t *testing.T) {
endBlock, err := common.HexToHash("0xfd19d9ebac759c993fd2e05a1cff9e757d8741c2704c8682c15b5503496b6aa1")
require.NoError(t, err)
+ one := uint32(1)
bm := &BlockRequestMessage{
RequestedData: 1,
- StartingBlock: variadic.NewUint64OrHashFromBytes(append([]byte{0}, genesisHash...)),
- EndBlockHash: optional.NewHash(true, endBlock),
+ StartingBlock: *variadic.NewUint64OrHashFromBytes(append([]byte{0}, genesisHash...)),
+ EndBlockHash: &endBlock,
Direction: 1,
- Max: optional.NewUint32(true, 1),
+ Max: &one,
}
encMsg, err := bm.Encode()
@@ -57,12 +88,13 @@ func TestEncodeBlockRequestMessage_BlockNumber(t *testing.T) {
endBlock, err := common.HexToHash("0xfd19d9ebac759c993fd2e05a1cff9e757d8741c2704c8682c15b5503496b6aa1")
require.NoError(t, err)
+ one := uint32(1)
bm := &BlockRequestMessage{
RequestedData: 1,
- StartingBlock: variadic.NewUint64OrHashFromBytes([]byte{1, 1}),
- EndBlockHash: optional.NewHash(true, endBlock),
+ StartingBlock: *variadic.NewUint64OrHashFromBytes([]byte{1, 1}),
+ EndBlockHash: &endBlock,
Direction: 1,
- Max: optional.NewUint32(true, 1),
+ Max: &one,
}
encMsg, err := bm.Encode()
@@ -74,16 +106,31 @@ func TestEncodeBlockRequestMessage_BlockNumber(t *testing.T) {
require.Equal(t, bm, res)
}
+func TestBlockRequestString(t *testing.T) {
+ genesisHash, err := common.HexToBytes("0xdcd1346701ca8396496e52aa2785b1748deb6db09551b72159dcb3e08991025b")
+ require.Nil(t, err)
+
+ bm := &BlockRequestMessage{
+ RequestedData: 1,
+ StartingBlock: *variadic.NewUint64OrHashFromBytes(append([]byte{0}, genesisHash...)),
+ EndBlockHash: nil,
+ Direction: 1,
+ Max: nil,
+ }
+
+ _ = bm.String()
+}
+
func TestEncodeBlockRequestMessage_NoOptionals(t *testing.T) {
genesisHash, err := common.HexToBytes("0xdcd1346701ca8396496e52aa2785b1748deb6db09551b72159dcb3e08991025b")
require.Nil(t, err)
bm := &BlockRequestMessage{
RequestedData: 1,
- StartingBlock: variadic.NewUint64OrHashFromBytes(append([]byte{0}, genesisHash...)),
- EndBlockHash: optional.NewHash(false, common.Hash{}),
+ StartingBlock: *variadic.NewUint64OrHashFromBytes(append([]byte{0}, genesisHash...)),
+ EndBlockHash: nil,
Direction: 1,
- Max: optional.NewUint32(false, 0),
+ Max: nil,
}
encMsg, err := bm.Encode()
@@ -95,51 +142,41 @@ func TestEncodeBlockRequestMessage_NoOptionals(t *testing.T) {
require.Equal(t, bm, res)
}
-func TestEncodeBlockResponseMessage_WithHeader(t *testing.T) {
- hash := common.NewHash([]byte{0})
- testHash := common.NewHash([]byte{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0xa, 0xb, 0xc, 0xd, 0xe, 0xf, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0xa, 0xb, 0xc, 0xd, 0xe, 0xf})
-
- header := &optional.CoreHeader{
- ParentHash: testHash,
- Number: big.NewInt(1),
- StateRoot: testHash,
- ExtrinsicsRoot: testHash,
- Digest: &types.Digest{},
- }
-
- bd := &types.BlockData{
- Hash: hash,
- Header: optional.NewHeader(true, header),
- Body: optional.NewBody(false, nil),
- Receipt: optional.NewBytes(false, nil),
- MessageQueue: optional.NewBytes(false, nil),
- Justification: optional.NewBytes(false, nil),
- }
+func TestEncodeBlockResponseMessage_Empty(t *testing.T) {
+ bd := types.NewEmptyBlockData()
+ bd.Header = types.NewEmptyHeader()
+ bd.Header.Hash()
bm := &BlockResponseMessage{
BlockData: []*types.BlockData{bd},
}
- encMsg, err := bm.Encode()
+ enc, err := bm.Encode()
require.NoError(t, err)
- res := new(BlockResponseMessage)
- err = res.Decode(encMsg)
+ empty := types.NewEmptyBlockData()
+ empty.Header = types.NewEmptyHeader()
+
+ act := &BlockResponseMessage{
+ BlockData: []*types.BlockData{empty},
+ }
+ err = act.Decode(enc)
require.NoError(t, err)
- require.Equal(t, bm, res)
+
+ for _, b := range act.BlockData {
+ if b.Header != nil {
+ _ = b.Header.Hash()
+ }
+ }
+
+ require.Equal(t, bm, act)
}
func TestEncodeBlockResponseMessage_WithBody(t *testing.T) {
hash := common.NewHash([]byte{0})
testHash := common.NewHash([]byte{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0xa, 0xb, 0xc, 0xd, 0xe, 0xf, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0xa, 0xb, 0xc, 0xd, 0xe, 0xf})
-
- header := &optional.CoreHeader{
- ParentHash: testHash,
- Number: big.NewInt(1),
- StateRoot: testHash,
- ExtrinsicsRoot: testHash,
- Digest: &types.Digest{},
- }
+ header, err := types.NewHeader(testHash, testHash, testHash, big.NewInt(1), types.NewDigest())
+ require.NoError(t, err)
exts := [][]byte{{1, 3, 5, 7}, {9, 1, 2}, {3, 4, 5}}
body, err := types.NewBodyFromBytes(exts)
@@ -147,62 +184,84 @@ func TestEncodeBlockResponseMessage_WithBody(t *testing.T) {
bd := &types.BlockData{
Hash: hash,
- Header: optional.NewHeader(true, header),
- Body: body.AsOptional(),
- Receipt: optional.NewBytes(false, nil),
- MessageQueue: optional.NewBytes(false, nil),
- Justification: optional.NewBytes(false, nil),
+ Header: header,
+ Body: body,
+ Receipt: nil,
+ MessageQueue: nil,
+ Justification: nil,
}
bm := &BlockResponseMessage{
BlockData: []*types.BlockData{bd},
}
- encMsg, err := bm.Encode()
+ enc, err := bm.Encode()
require.NoError(t, err)
- res := new(BlockResponseMessage)
- err = res.Decode(encMsg)
+ empty := types.NewEmptyBlockData()
+ empty.Header = types.NewEmptyHeader()
+
+ act := &BlockResponseMessage{
+ BlockData: []*types.BlockData{empty},
+ }
+ err = act.Decode(enc)
require.NoError(t, err)
- require.Equal(t, bm, res)
+
+ for _, bd := range act.BlockData {
+ if bd.Header != nil {
+ _ = bd.Header.Hash()
+ }
+ }
+
+ require.Equal(t, bm, act)
+
}
func TestEncodeBlockResponseMessage_WithAll(t *testing.T) {
+ exp := common.MustHexToBytes("0x0aa2010a2000000000000000000000000000000000000000000000000000000000000000001262000102030405060708090a0b0c0d0e0f000102030405060708090a0b0c0d0e0f04000102030405060708090a0b0c0d0e0f000102030405060708090a0b0c0d0e0f000102030405060708090a0b0c0d0e0f000102030405060708090a0b0c0d0e0f001a0510010305071a040c0901021a040c0304052201012a0102320103")
hash := common.NewHash([]byte{0})
testHash := common.NewHash([]byte{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0xa, 0xb, 0xc, 0xd, 0xe, 0xf, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0xa, 0xb, 0xc, 0xd, 0xe, 0xf})
- header := &optional.CoreHeader{
- ParentHash: testHash,
- Number: big.NewInt(1),
- StateRoot: testHash,
- ExtrinsicsRoot: testHash,
- Digest: &types.Digest{},
- }
+ header, err := types.NewHeader(testHash, testHash, testHash, big.NewInt(1), types.NewDigest())
+ require.NoError(t, err)
- exts := [][]byte{{16, 1, 3, 5, 7}, {12, 9, 1, 2}, {12, 3, 4, 5}}
- body, err := types.NewBodyFromEncodedBytes(exts)
+ exts := [][]byte{{1, 3, 5, 7}, {9, 1, 2}, {3, 4, 5}}
+ body, err := types.NewBodyFromBytes(exts)
require.NoError(t, err)
bd := &types.BlockData{
Hash: hash,
- Header: optional.NewHeader(true, header),
- Body: body.AsOptional(),
- Receipt: optional.NewBytes(true, []byte{77}),
- MessageQueue: optional.NewBytes(true, []byte{88, 99}),
- Justification: optional.NewBytes(true, []byte{11, 22, 33}),
+ Header: header,
+ Body: body,
+ Receipt: &[]byte{1},
+ MessageQueue: &[]byte{2},
+ Justification: &[]byte{3},
}
bm := &BlockResponseMessage{
BlockData: []*types.BlockData{bd},
}
- encMsg, err := bm.Encode()
+ enc, err := bm.Encode()
require.NoError(t, err)
+ require.Equal(t, exp, enc)
- res := new(BlockResponseMessage)
- err = res.Decode(encMsg)
+ empty := types.NewEmptyBlockData()
+ empty.Header = types.NewEmptyHeader()
+
+ act := &BlockResponseMessage{
+ BlockData: []*types.BlockData{empty},
+ }
+ err = act.Decode(enc)
require.NoError(t, err)
- require.Equal(t, bm, res)
+
+ for _, bd := range act.BlockData {
+ if bd.Header != nil {
+ _ = bd.Header.Hash()
+ }
+ }
+
+ require.Equal(t, bm, act)
}
func TestEncodeBlockAnnounceMessage(t *testing.T) {
@@ -231,20 +290,22 @@ func TestEncodeBlockAnnounceMessage(t *testing.T) {
Number: big.NewInt(1),
StateRoot: stateRoot,
ExtrinsicsRoot: extrinsicsRoot,
- Digest: types.Digest{},
+ Digest: types.NewDigest(),
}
encMsg, err := bhm.Encode()
require.Nil(t, err)
require.Equal(t, expected, encMsg)
-
}
func TestDecode_BlockAnnounceMessage(t *testing.T) {
announceMessage, err := common.HexToBytes("0x454545454545454545454545454545454545454545454545454545454545454504b3266de137d20a5d0ff3a6401eb57127525fd9b2693701f0bf5a8a853fa3ebe003170a2e7597b7b7e3d84c05391d139a62b157e78786d8c082f29dcf4c1113140000")
require.Nil(t, err)
- bhm := new(BlockAnnounceMessage)
+ bhm := BlockAnnounceMessage{
+ Number: big.NewInt(0),
+ Digest: types.NewDigest(),
+ }
err = bhm.Decode(announceMessage)
require.Nil(t, err)
@@ -257,12 +318,12 @@ func TestDecode_BlockAnnounceMessage(t *testing.T) {
extrinsicsRoot, err := common.HexToHash("0x03170a2e7597b7b7e3d84c05391d139a62b157e78786d8c082f29dcf4c111314")
require.Nil(t, err)
- expected := &BlockAnnounceMessage{
+ expected := BlockAnnounceMessage{
ParentHash: parentHash,
Number: big.NewInt(1),
StateRoot: stateRoot,
ExtrinsicsRoot: extrinsicsRoot,
- Digest: types.Digest{},
+ Digest: types.NewDigest(),
}
require.Equal(t, expected, bhm)
diff --git a/dot/network/notifications_test.go b/dot/network/notifications_test.go
index d971cbf0f2..b7c4ca6b45 100644
--- a/dot/network/notifications_test.go
+++ b/dot/network/notifications_test.go
@@ -85,7 +85,7 @@ func TestCreateDecoder_BlockAnnounce(t *testing.T) {
Number: big.NewInt(77),
StateRoot: common.Hash{2},
ExtrinsicsRoot: common.Hash{3},
- Digest: types.Digest{},
+ Digest: types.NewDigest(),
}
enc, err = testBlockAnnounce.Encode()
@@ -151,8 +151,10 @@ func TestCreateNotificationsMessageHandler_BlockAnnounce(t *testing.T) {
received: true,
validated: true,
})
+
msg := &BlockAnnounceMessage{
Number: big.NewInt(10),
+ Digest: types.NewDigest(),
}
err = handler(stream, msg)
diff --git a/dot/network/sync.go b/dot/network/sync.go
index b650d10f6a..7f00e57409 100644
--- a/dot/network/sync.go
+++ b/dot/network/sync.go
@@ -29,7 +29,6 @@ import (
"github.com/ChainSafe/gossamer/dot/types"
"github.com/ChainSafe/gossamer/lib/blocktree"
"github.com/ChainSafe/gossamer/lib/common"
- "github.com/ChainSafe/gossamer/lib/common/optional"
"github.com/ChainSafe/gossamer/lib/common/variadic"
"github.com/ChainSafe/chaindb"
@@ -503,7 +502,7 @@ func (q *syncQueue) pushResponse(resp *BlockResponseMessage, pid peer.ID) error
justificationResponses := []*types.BlockData{}
for _, bd := range resp.BlockData {
- if bd.Justification.Exists() {
+ if bd.Justification != nil {
justificationResponses = append(justificationResponses, bd)
numJustifications++
}
@@ -533,7 +532,7 @@ func (q *syncQueue) pushResponse(resp *BlockResponseMessage, pid peer.ID) error
return fmt.Errorf("response doesn't contain block headers")
}
- if resp.BlockData[0].Body == nil || !resp.BlockData[0].Body.Exists() {
+ if resp.BlockData[0].Body == nil {
// update peer's score
q.updatePeerScore(pid, -1)
return fmt.Errorf("response doesn't contain block bodies")
@@ -604,7 +603,7 @@ func (q *syncQueue) processBlockRequests() {
continue
}
- reqData, ok := q.isRequestDataCached(req.req.StartingBlock)
+ reqData, ok := q.isRequestDataCached(&req.req.StartingBlock)
if !ok {
q.trySync(req)
@@ -714,6 +713,11 @@ func (q *syncQueue) receiveBlockResponse(stream libp2pnetwork.Stream) (*BlockRes
msg := new(BlockResponseMessage)
err = msg.Decode(q.buf[:n])
+ for _, bd := range msg.BlockData {
+ if bd.Header != nil {
+ bd.Header.Hash()
+ }
+ }
return msg, err
}
@@ -814,11 +818,7 @@ func (q *syncQueue) handleBlockDataFailure(idx int, err error, data []*types.Blo
panic(err)
}
- header, err := types.NewHeaderFromOptional(data[idx].Header)
- if err != nil {
- logger.Debug("failed to get header from BlockData", "idx", idx, "error", err)
- return
- }
+ header := data[idx].Header
// don't request a chain that's been dropped
if header.Number.Int64() <= finalised.Number.Int64() {
@@ -890,19 +890,19 @@ func (q *syncQueue) handleBlockAnnounce(msg *BlockAnnounceMessage, from peer.ID)
}
func createBlockRequest(startInt int64, size uint32) *BlockRequestMessage {
- var max *optional.Uint32
+ var max *uint32
if size != 0 {
- max = optional.NewUint32(true, size)
+ max = &size
} else {
- max = optional.NewUint32(false, 0)
+ max = nil
}
start, _ := variadic.NewUint64OrHash(uint64(startInt))
blockRequest := &BlockRequestMessage{
RequestedData: RequestedDataHeader + RequestedDataBody + RequestedDataJustification,
- StartingBlock: start,
- EndBlockHash: optional.NewHash(false, common.Hash{}),
+ StartingBlock: *start,
+ EndBlockHash: nil,
Direction: 0, // TODO: define this somewhere
Max: max,
}
@@ -911,19 +911,19 @@ func createBlockRequest(startInt int64, size uint32) *BlockRequestMessage {
}
func createBlockRequestWithHash(startHash common.Hash, size uint32) *BlockRequestMessage {
- var max *optional.Uint32
+ var max *uint32
if size != 0 {
- max = optional.NewUint32(true, size)
+ max = &size
} else {
- max = optional.NewUint32(false, 0)
+ max = nil
}
start, _ := variadic.NewUint64OrHash(startHash)
blockRequest := &BlockRequestMessage{
RequestedData: RequestedDataHeader + RequestedDataBody + RequestedDataJustification,
- StartingBlock: start,
- EndBlockHash: optional.NewHash(false, common.Hash{}),
+ StartingBlock: *start,
+ EndBlockHash: nil,
Direction: 0, // TODO: define this somewhere
Max: max,
}
diff --git a/dot/network/sync_justification_test.go b/dot/network/sync_justification_test.go
index 4c3caf41b9..782bb1ed0f 100644
--- a/dot/network/sync_justification_test.go
+++ b/dot/network/sync_justification_test.go
@@ -23,7 +23,6 @@ import (
"github.com/ChainSafe/gossamer/dot/types"
"github.com/ChainSafe/gossamer/lib/common"
- "github.com/ChainSafe/gossamer/lib/common/optional"
"github.com/ChainSafe/gossamer/lib/utils"
"github.com/libp2p/go-libp2p-core/peer"
@@ -49,10 +48,10 @@ func TestSyncQueue_PushResponse_Justification(t *testing.T) {
}
for i := 0; i < int(blockRequestSize); i++ {
- msg.BlockData = append(msg.BlockData, &types.BlockData{
- Hash: common.Hash{byte(i)},
- Justification: optional.NewBytes(true, []byte{1}),
- })
+ bd := types.NewEmptyBlockData()
+ bd.Hash = common.Hash{byte(i)}
+ bd.Justification = &[]byte{1}
+ msg.BlockData = append(msg.BlockData, bd)
}
s.syncQueue.justificationRequestData.Store(common.Hash{byte(0)}, requestData{})
@@ -87,10 +86,9 @@ func TestSyncQueue_PushResponse_EmptyJustification(t *testing.T) {
}
for i := 0; i < int(blockRequestSize); i++ {
- msg.BlockData = append(msg.BlockData, &types.BlockData{
- Hash: common.Hash{byte(i)},
- Justification: optional.NewBytes(false, nil),
- })
+ bd := types.NewEmptyBlockData()
+ bd.Hash = common.Hash{byte(i)}
+ msg.BlockData = append(msg.BlockData, bd)
}
s.syncQueue.justificationRequestData.Store(common.Hash{byte(0)}, &requestData{})
@@ -108,11 +106,7 @@ func TestSyncQueue_processBlockResponses_Justification(t *testing.T) {
q.responseCh <- []*types.BlockData{
{
Hash: common.Hash{byte(0)},
- Header: optional.NewHeader(false, nil),
- Body: optional.NewBody(false, nil),
- Receipt: optional.NewBytes(false, nil),
- MessageQueue: optional.NewBytes(false, nil),
- Justification: optional.NewBytes(true, []byte{1}),
+ Justification: &[]byte{1},
},
}
}()
diff --git a/dot/network/sync_test.go b/dot/network/sync_test.go
index 26bf8410c1..c979c648c1 100644
--- a/dot/network/sync_test.go
+++ b/dot/network/sync_test.go
@@ -25,7 +25,6 @@ import (
"time"
"github.com/ChainSafe/gossamer/dot/types"
- "github.com/ChainSafe/gossamer/lib/common/optional"
"github.com/ChainSafe/gossamer/lib/common/variadic"
"github.com/ChainSafe/gossamer/lib/utils"
@@ -88,13 +87,12 @@ func TestSyncQueue_PushResponse(t *testing.T) {
}
for i := 0; i < int(blockRequestSize); i++ {
- testHeader := types.Header{
- Number: big.NewInt(int64(77 + i)),
- }
+ testHeader := types.NewEmptyHeader()
+ testHeader.Number = big.NewInt(int64(77 + i))
msg.BlockData = append(msg.BlockData, &types.BlockData{
- Header: testHeader.AsOptional(),
- Body: optional.NewBody(true, []byte{0}),
+ Header: testHeader,
+ Body: types.NewBody([]byte{0}),
})
}
@@ -141,45 +139,48 @@ func TestSortRequests_RemoveDuplicates(t *testing.T) {
}
func TestSortResponses(t *testing.T) {
- testHeader0 := types.Header{
+ testHeader0 := &types.Header{
Number: big.NewInt(77),
+ Digest: types.NewDigest(),
}
- testHeader1 := types.Header{
+ testHeader1 := &types.Header{
Number: big.NewInt(78),
+ Digest: types.NewDigest(),
}
- testHeader2 := types.Header{
+ testHeader2 := &types.Header{
Number: big.NewInt(79),
+ Digest: types.NewDigest(),
}
data := []*types.BlockData{
{
Hash: testHeader2.Hash(),
- Header: testHeader2.AsOptional(),
+ Header: testHeader2,
},
{
Hash: testHeader0.Hash(),
- Header: testHeader0.AsOptional(),
+ Header: testHeader0,
},
{
Hash: testHeader1.Hash(),
- Header: testHeader1.AsOptional(),
+ Header: testHeader1,
},
}
expected := []*types.BlockData{
{
Hash: testHeader0.Hash(),
- Header: testHeader0.AsOptional(),
+ Header: testHeader0,
},
{
Hash: testHeader1.Hash(),
- Header: testHeader1.AsOptional(),
+ Header: testHeader1,
},
{
Hash: testHeader2.Hash(),
- Header: testHeader2.AsOptional(),
+ Header: testHeader2,
},
}
@@ -188,30 +189,33 @@ func TestSortResponses(t *testing.T) {
}
func TestSortResponses_RemoveDuplicated(t *testing.T) {
- testHeader0 := types.Header{
+ testHeader0 := &types.Header{
Number: big.NewInt(77),
+ Digest: types.NewDigest(),
}
- testHeader1 := types.Header{
+ testHeader1 := &types.Header{
Number: big.NewInt(78),
+ Digest: types.NewDigest(),
}
- testHeader2 := types.Header{
+ testHeader2 := &types.Header{
Number: big.NewInt(79),
+ Digest: types.NewDigest(),
}
data := []*types.BlockData{
{
Hash: testHeader0.Hash(),
- Header: testHeader2.AsOptional(),
+ Header: testHeader2,
},
{
Hash: testHeader0.Hash(),
- Header: testHeader0.AsOptional(),
+ Header: testHeader0,
},
{
Hash: testHeader0.Hash(),
- Header: testHeader1.AsOptional(),
+ Header: testHeader1,
},
}
@@ -219,7 +223,7 @@ func TestSortResponses_RemoveDuplicated(t *testing.T) {
expected := []*types.BlockData{
{
Hash: testHeader0.Hash(),
- Header: testHeader0.AsOptional(),
+ Header: testHeader0,
},
}
@@ -361,17 +365,17 @@ func TestSyncQueue_handleResponseQueue_responseQueueAhead(t *testing.T) {
q.goal = int64(blockRequestSize) * 10
q.ctx = context.Background()
- testHeader0 := types.Header{
+ testHeader0 := &types.Header{
Number: big.NewInt(77),
- Digest: types.Digest{},
+ Digest: types.NewDigest(),
}
q.responses = append(q.responses, &types.BlockData{
Hash: testHeader0.Hash(),
- Header: testHeader0.AsOptional(),
- Body: optional.NewBody(true, []byte{4, 4, 2}),
- Receipt: optional.NewBytes(false, nil),
- MessageQueue: optional.NewBytes(false, nil),
- Justification: optional.NewBytes(false, nil),
+ Header: testHeader0,
+ Body: types.NewBody([]byte{4, 4, 2}),
+ Receipt: nil,
+ MessageQueue: nil,
+ Justification: nil,
})
go q.handleResponseQueue()
@@ -387,19 +391,19 @@ func TestSyncQueue_processBlockResponses(t *testing.T) {
q.goal = int64(blockRequestSize) * 10
q.ctx = context.Background()
- testHeader0 := types.Header{
+ testHeader0 := &types.Header{
Number: big.NewInt(0),
- Digest: types.Digest{},
+ Digest: types.NewDigest(),
}
go func() {
q.responseCh <- []*types.BlockData{
{
Hash: testHeader0.Hash(),
- Header: testHeader0.AsOptional(),
- Body: optional.NewBody(true, []byte{4, 4, 2}),
- Receipt: optional.NewBytes(false, nil),
- MessageQueue: optional.NewBytes(false, nil),
- Justification: optional.NewBytes(false, nil),
+ Header: testHeader0,
+ Body: types.NewBody([]byte{4, 4, 2}),
+ Receipt: nil,
+ MessageQueue: nil,
+ Justification: nil,
},
}
}()
diff --git a/dot/network/test_helpers.go b/dot/network/test_helpers.go
index 3eb8ba85a1..cf0a31dca0 100644
--- a/dot/network/test_helpers.go
+++ b/dot/network/test_helpers.go
@@ -7,7 +7,6 @@ import (
"github.com/ChainSafe/gossamer/dot/types"
"github.com/ChainSafe/gossamer/lib/common"
- "github.com/ChainSafe/gossamer/lib/common/optional"
"github.com/ChainSafe/gossamer/lib/common/variadic"
"github.com/stretchr/testify/mock"
@@ -29,7 +28,7 @@ func NewMockBlockState(n *big.Int) *mockBlockState {
Number: n,
StateRoot: stateRoot,
ExtrinsicsRoot: extrinsicsRoot,
- Digest: types.Digest{},
+ Digest: types.NewDigest(),
}
m := new(mockBlockState)
@@ -69,18 +68,18 @@ func testBlockResponseMessage() *BlockResponseMessage {
}
for i := 0; i < int(blockRequestSize); i++ {
- testHeader := types.Header{
+ testHeader := &types.Header{
Number: big.NewInt(int64(77 + i)),
- Digest: types.Digest{},
+ Digest: types.NewDigest(),
}
msg.BlockData = append(msg.BlockData, &types.BlockData{
Hash: testHeader.Hash(),
- Header: testHeader.AsOptional(),
- Body: optional.NewBody(true, []byte{4, 4, 2}),
- MessageQueue: optional.NewBytes(false, nil),
- Receipt: optional.NewBytes(false, nil),
- Justification: optional.NewBytes(false, nil),
+ Header: testHeader,
+ Body: types.NewBody([]byte{4, 4, 2}),
+ MessageQueue: nil,
+ Receipt: nil,
+ Justification: nil,
})
}
@@ -168,14 +167,16 @@ func (s *testStreamHandler) readStream(stream libp2pnetwork.Stream, peer peer.ID
}
}
-var start, _ = variadic.NewUint64OrHash(uint64(1))
+var starting, _ = variadic.NewUint64OrHash(uint64(1))
+
+var one = uint32(1)
var testBlockRequestMessage = &BlockRequestMessage{
RequestedData: RequestedDataHeader + RequestedDataBody + RequestedDataJustification,
- StartingBlock: start,
- EndBlockHash: optional.NewHash(true, common.Hash{}),
+ StartingBlock: *starting,
+ EndBlockHash: &common.Hash{},
Direction: 1,
- Max: optional.NewUint32(true, 1),
+ Max: &one,
}
func testBlockRequestMessageDecoder(in []byte, _ peer.ID, _ bool) (Message, error) {
@@ -186,6 +187,7 @@ func testBlockRequestMessageDecoder(in []byte, _ peer.ID, _ bool) (Message, erro
var testBlockAnnounceMessage = &BlockAnnounceMessage{
Number: big.NewInt(128 * 7),
+ Digest: types.NewDigest(),
}
var testBlockAnnounceHandshake = &BlockAnnounceHandshake{
@@ -193,9 +195,12 @@ var testBlockAnnounceHandshake = &BlockAnnounceHandshake{
}
func testBlockAnnounceMessageDecoder(in []byte, _ peer.ID, _ bool) (Message, error) {
- msg := new(BlockAnnounceMessage)
+ msg := BlockAnnounceMessage{
+ Number: big.NewInt(0),
+ Digest: types.NewDigest(),
+ }
err := msg.Decode(in)
- return msg, err
+ return &msg, err
}
func testBlockAnnounceHandshakeDecoder(in []byte, _ peer.ID, _ bool) (Message, error) {
diff --git a/dot/network/transaction_test.go b/dot/network/transaction_test.go
index 3abda4c5c0..cebc19f606 100644
--- a/dot/network/transaction_test.go
+++ b/dot/network/transaction_test.go
@@ -38,19 +38,6 @@ func TestDecodeTransactionHandshake(t *testing.T) {
require.Equal(t, testHandshake, msg)
}
-func TestDecodeTransactionMessage(t *testing.T) {
- testTxMsg := &TransactionMessage{
- Extrinsics: []types.Extrinsic{{1, 1}, {2, 2}},
- }
-
- enc, err := testTxMsg.Encode()
- require.NoError(t, err)
-
- msg, err := decodeTransactionMessage(enc)
- require.NoError(t, err)
- require.Equal(t, testTxMsg, msg)
-}
-
func TestHandleTransactionMessage(t *testing.T) {
basePath := utils.NewTestBasePath(t, "nodeA")
mockhandler := &MockTransactionHandler{}
diff --git a/dot/node_test.go b/dot/node_test.go
index 16a0da970a..855c75f239 100644
--- a/dot/node_test.go
+++ b/dot/node_test.go
@@ -223,7 +223,7 @@ func TestInitNode_LoadGenesisData(t *testing.T) {
genTrie, err := genesis.NewTrieFromGenesis(gen)
require.NoError(t, err)
- genesisHeader, err := types.NewHeader(common.NewHash([]byte{0}), genTrie.MustHash(), trie.EmptyHash, big.NewInt(0), types.Digest{})
+ genesisHeader, err := types.NewHeader(common.NewHash([]byte{0}), genTrie.MustHash(), trie.EmptyHash, big.NewInt(0), types.NewDigest())
require.NoError(t, err)
err = stateSrvc.Initialise(gen, genesisHeader, genTrie)
@@ -254,7 +254,7 @@ func TestInitNode_LoadGenesisData(t *testing.T) {
require.NoError(t, err)
stateRoot := genesisHeader.StateRoot
- expectedHeader, err := types.NewHeader(common.NewHash([]byte{0}), stateRoot, trie.EmptyHash, big.NewInt(0), types.NewEmptyDigest())
+ expectedHeader, err := types.NewHeader(common.NewHash([]byte{0}), stateRoot, trie.EmptyHash, big.NewInt(0), types.NewDigest())
require.NoError(t, err)
require.Equal(t, expectedHeader.Hash(), genesisHeader.Hash())
}
diff --git a/dot/rpc/modules/chain.go b/dot/rpc/modules/chain.go
index f2a9cea94b..782a078d28 100644
--- a/dot/rpc/modules/chain.go
+++ b/dot/rpc/modules/chain.go
@@ -24,6 +24,7 @@ import (
"github.com/ChainSafe/gossamer/dot/types"
"github.com/ChainSafe/gossamer/lib/common"
+ "github.com/ChainSafe/gossamer/pkg/scale"
)
// ChainHashRequest Hash as a string
@@ -91,12 +92,12 @@ func (cm *ChainModule) GetBlock(r *http.Request, req *ChainHashRequest, res *Cha
return err
}
- res.Block.Header, err = HeaderToJSON(*block.Header)
+ res.Block.Header, err = HeaderToJSON(block.Header)
if err != nil {
return err
}
- if *block.Body != nil {
+ if block.Body != nil {
ext, err := block.Body.AsEncodedExtrinsics()
if err != nil {
return err
@@ -257,8 +258,9 @@ func HeaderToJSON(header types.Header) (ChainBlockHeaderResponse, error) {
} else {
res.Number = common.BytesToHex(header.Number.Bytes())
}
- for _, item := range header.Digest {
- enc, err := item.Encode()
+
+ for _, item := range header.Digest.Types {
+ enc, err := scale.Marshal(item)
if err != nil {
return ChainBlockHeaderResponse{}, err
}
diff --git a/dot/rpc/modules/chain_test.go b/dot/rpc/modules/chain_test.go
index 3551406e5f..73254f5849 100644
--- a/dot/rpc/modules/chain_test.go
+++ b/dot/rpc/modules/chain_test.go
@@ -27,6 +27,7 @@ import (
"github.com/ChainSafe/gossamer/lib/genesis"
"github.com/ChainSafe/gossamer/lib/runtime"
"github.com/ChainSafe/gossamer/lib/trie"
+ "github.com/ChainSafe/gossamer/pkg/scale"
database "github.com/ChainSafe/chaindb"
log "github.com/ChainSafe/log15"
@@ -40,7 +41,10 @@ func TestChainGetHeader_Genesis(t *testing.T) {
header, err := state.Block.BestBlockHeader()
require.NoError(t, err)
- d, err := types.NewBabeSecondaryPlainPreDigest(0, 1).ToPreRuntimeDigest().Encode()
+ di := types.NewDigestItem()
+ di.Set(*types.NewBabeSecondaryPlainPreDigest(0, 1).ToPreRuntimeDigest())
+
+ d, err := scale.Marshal(di)
require.NoError(t, err)
expected := &ChainBlockHeaderResponse{
@@ -70,7 +74,10 @@ func TestChainGetHeader_Latest(t *testing.T) {
header, err := state.Block.BestBlockHeader()
require.NoError(t, err)
- d, err := types.NewBabeSecondaryPlainPreDigest(0, 1).ToPreRuntimeDigest().Encode()
+ di := types.NewDigestItem()
+ di.Set(*types.NewBabeSecondaryPlainPreDigest(0, 1).ToPreRuntimeDigest())
+
+ d, err := scale.Marshal(di)
require.NoError(t, err)
expected := &ChainBlockHeaderResponse{
@@ -112,7 +119,10 @@ func TestChainGetBlock_Genesis(t *testing.T) {
header, err := state.Block.BestBlockHeader()
require.NoError(t, err)
- d, err := types.NewBabeSecondaryPlainPreDigest(0, 1).ToPreRuntimeDigest().Encode()
+ di := types.NewDigestItem()
+ di.Set(*types.NewBabeSecondaryPlainPreDigest(0, 1).ToPreRuntimeDigest())
+
+ d, err := scale.Marshal(di)
require.NoError(t, err)
expectedHeader := &ChainBlockHeaderResponse{
@@ -150,7 +160,10 @@ func TestChainGetBlock_Latest(t *testing.T) {
header, err := state.Block.BestBlockHeader()
require.NoError(t, err)
- d, err := types.NewBabeSecondaryPlainPreDigest(0, 1).ToPreRuntimeDigest().Encode()
+ di := types.NewDigestItem()
+ di.Set(*types.NewBabeSecondaryPlainPreDigest(0, 1).ToPreRuntimeDigest())
+
+ d, err := scale.Marshal(di)
require.NoError(t, err)
expectedHeader := &ChainBlockHeaderResponse{
@@ -288,11 +301,11 @@ func TestChainGetFinalizedHeadByRound(t *testing.T) {
expected := genesisHeader.Hash()
require.Equal(t, common.BytesToHex(expected[:]), res)
+ digest := types.NewDigest()
+ digest.Add(*types.NewBabeSecondaryPlainPreDigest(0, 1).ToPreRuntimeDigest())
header := &types.Header{
Number: big.NewInt(1),
- Digest: types.Digest{
- types.NewBabeSecondaryPlainPreDigest(0, 1).ToPreRuntimeDigest(),
- },
+ Digest: digest,
}
err = state.Block.SetHeader(header)
require.NoError(t, err)
@@ -328,7 +341,7 @@ func newTestStateService(t *testing.T) *state.Service {
rt, err := stateSrvc.CreateGenesisRuntime(genTrie, gen)
require.NoError(t, err)
- err = loadTestBlocks(genesisHeader.Hash(), stateSrvc.Block, rt)
+ err = loadTestBlocks(t, genesisHeader.Hash(), stateSrvc.Block, rt)
require.NoError(t, err)
t.Cleanup(func() {
@@ -337,11 +350,11 @@ func newTestStateService(t *testing.T) *state.Service {
return stateSrvc
}
-func loadTestBlocks(gh common.Hash, bs *state.BlockState, rt runtime.Instance) error {
+func loadTestBlocks(t *testing.T, gh common.Hash, bs *state.BlockState, rt runtime.Instance) error {
// Create header
header0 := &types.Header{
Number: big.NewInt(0),
- Digest: types.Digest{},
+ Digest: types.NewDigest(),
ParentHash: gh,
StateRoot: trie.EmptyHash,
}
@@ -351,8 +364,8 @@ func loadTestBlocks(gh common.Hash, bs *state.BlockState, rt runtime.Instance) e
blockBody0 := types.Body{0, 1, 2, 3, 4, 5, 6, 7, 8, 9}
block0 := &types.Block{
- Header: header0,
- Body: &blockBody0,
+ Header: *header0,
+ Body: blockBody0,
}
err := bs.AddBlock(block0)
@@ -363,11 +376,12 @@ func loadTestBlocks(gh common.Hash, bs *state.BlockState, rt runtime.Instance) e
bs.StoreRuntime(block0.Header.Hash(), rt)
// Create header & blockData for block 1
+ digest := types.NewDigest()
+ err = digest.Add(*types.NewBabeSecondaryPlainPreDigest(0, 1).ToPreRuntimeDigest())
+ require.NoError(t, err)
header1 := &types.Header{
- Number: big.NewInt(1),
- Digest: types.Digest{
- types.NewBabeSecondaryPlainPreDigest(0, 1).ToPreRuntimeDigest(),
- },
+ Number: big.NewInt(1),
+ Digest: digest,
ParentHash: blockHash0,
StateRoot: trie.EmptyHash,
}
@@ -376,8 +390,8 @@ func loadTestBlocks(gh common.Hash, bs *state.BlockState, rt runtime.Instance) e
blockBody1 := types.Body{0, 1, 2, 3, 4, 5, 6, 7, 8, 9}
block1 := &types.Block{
- Header: header1,
- Body: &blockBody1,
+ Header: *header1,
+ Body: blockBody1,
}
// Add the block1 to the DB
diff --git a/dot/rpc/modules/dev_test.go b/dot/rpc/modules/dev_test.go
index 50a1768487..06fc9ee416 100644
--- a/dot/rpc/modules/dev_test.go
+++ b/dot/rpc/modules/dev_test.go
@@ -24,7 +24,7 @@ var genesisBABEConfig = &types.BabeConfiguration{
EpochLength: 200,
C1: 1,
C2: 4,
- GenesisAuthorities: []*types.AuthorityRaw{},
+ GenesisAuthorities: []types.AuthorityRaw{},
Randomness: [32]byte{},
SecondarySlots: 0,
}
diff --git a/dot/rpc/modules/grandpa_test.go b/dot/rpc/modules/grandpa_test.go
index b434e704eb..4768ec7f88 100644
--- a/dot/rpc/modules/grandpa_test.go
+++ b/dot/rpc/modules/grandpa_test.go
@@ -69,7 +69,7 @@ func TestRoundState(t *testing.T) {
var voters grandpa.Voters
for _, k := range kr.Keys {
- voters = append(voters, &types.GrandpaVoter{
+ voters = append(voters, types.GrandpaVoter{
Key: k.Public().(*ed25519.PublicKey),
ID: 1,
})
diff --git a/dot/rpc/modules/state_test.go b/dot/rpc/modules/state_test.go
index 19a844d80e..c5cf4b169f 100644
--- a/dot/rpc/modules/state_test.go
+++ b/dot/rpc/modules/state_test.go
@@ -493,12 +493,12 @@ func setupStateModule(t *testing.T) (*StateModule, *common.Hash, *common.Hash) {
require.NoError(t, err)
b := &types.Block{
- Header: &types.Header{
+ Header: types.Header{
ParentHash: chain.Block.BestBlockHash(),
Number: big.NewInt(2),
StateRoot: sr1,
},
- Body: types.NewBody([]byte{}),
+ Body: *types.NewBody([]byte{}),
}
err = chain.Block.AddBlock(b)
diff --git a/dot/rpc/modules/system_test.go b/dot/rpc/modules/system_test.go
index afd04255f3..efcd0c7318 100644
--- a/dot/rpc/modules/system_test.go
+++ b/dot/rpc/modules/system_test.go
@@ -307,12 +307,12 @@ func setupSystemModule(t *testing.T) *SystemModule {
err = chain.Storage.StoreTrie(ts, nil)
require.NoError(t, err)
err = chain.Block.AddBlock(&types.Block{
- Header: &types.Header{
+ Header: types.Header{
Number: big.NewInt(1),
ParentHash: chain.Block.BestBlockHash(),
StateRoot: ts.MustRoot(),
},
- Body: &types.Body{},
+ Body: types.Body{},
})
require.NoError(t, err)
diff --git a/dot/rpc/subscription/listeners.go b/dot/rpc/subscription/listeners.go
index ddc18a3c98..7528f71630 100644
--- a/dot/rpc/subscription/listeners.go
+++ b/dot/rpc/subscription/listeners.go
@@ -144,7 +144,7 @@ func (l *BlockListener) Listen() {
if block == nil {
continue
}
- head, err := modules.HeaderToJSON(*block.Header)
+ head, err := modules.HeaderToJSON(block.Header)
if err != nil {
logger.Error("failed to convert header to JSON", "error", err)
}
@@ -192,10 +192,10 @@ func (l *BlockFinalizedListener) Listen() {
return
}
- if info == nil || info.Header == nil {
+ if info == nil {
continue
}
- head, err := modules.HeaderToJSON(*info.Header)
+ head, err := modules.HeaderToJSON(info.Header)
if err != nil {
logger.Error("failed to convert header to JSON", "error", err)
}
@@ -260,11 +260,11 @@ func (l *AllBlocksListener) Listen() {
return
}
- if fin == nil || fin.Header == nil {
+ if fin == nil {
continue
}
- finHead, err := modules.HeaderToJSON(*fin.Header)
+ finHead, err := modules.HeaderToJSON(fin.Header)
if err != nil {
logger.Error("failed to convert finalised block header to JSON", "error", err)
continue
@@ -277,11 +277,11 @@ func (l *AllBlocksListener) Listen() {
return
}
- if imp == nil || imp.Header == nil {
+ if imp == nil {
continue
}
- impHead, err := modules.HeaderToJSON(*imp.Header)
+ impHead, err := modules.HeaderToJSON(imp.Header)
if err != nil {
logger.Error("failed to convert imported block header to JSON", "error", err)
continue
diff --git a/dot/rpc/subscription/listeners_test.go b/dot/rpc/subscription/listeners_test.go
index 6a4c612ad2..589b044751 100644
--- a/dot/rpc/subscription/listeners_test.go
+++ b/dot/rpc/subscription/listeners_test.go
@@ -110,7 +110,8 @@ func TestBlockListener_Listen(t *testing.T) {
cancelTimeout: time.Second * 5,
}
- block := types.NewEmptyBlock()
+ //block := types.NewEmptyBlock()
+ block := types.NewBlock(*types.NewEmptyHeader(), *new(types.Body))
block.Header.Number = big.NewInt(1)
go bl.Listen()
@@ -120,13 +121,13 @@ func TestBlockListener_Listen(t *testing.T) {
BlockAPI.AssertCalled(t, "UnregisterImportedChannel", mock.AnythingOfType("uint8"))
}()
- notifyChan <- block
+ notifyChan <- &block
time.Sleep(time.Second * 2)
_, msg, err := ws.ReadMessage()
require.NoError(t, err)
- head, err := modules.HeaderToJSON(*block.Header)
+ head, err := modules.HeaderToJSON(block.Header)
require.NoError(t, err)
expectedResposnse := newSubcriptionBaseResponseJSON()
@@ -167,7 +168,7 @@ func TestBlockFinalizedListener_Listen(t *testing.T) {
}()
notifyChan <- &types.FinalisationInfo{
- Header: header,
+ Header: *header,
}
time.Sleep(time.Second * 2)
@@ -216,8 +217,8 @@ func TestExtrinsicSubmitListener_Listen(t *testing.T) {
require.NoError(t, err)
block := &types.Block{
- Header: header,
- Body: body,
+ Header: *header,
+ Body: *body,
}
esl.Listen()
@@ -240,7 +241,7 @@ func TestExtrinsicSubmitListener_Listen(t *testing.T) {
require.Equal(t, string(expectedImportedBytes)+"\n", string(msg))
notifyFinalizedChan <- &types.FinalisationInfo{
- Header: header,
+ Header: *header,
}
time.Sleep(time.Second * 2)
@@ -286,7 +287,7 @@ func TestGrandpaJustification_Listen(t *testing.T) {
sub.Listen()
finchannel <- &types.FinalisationInfo{
- Header: types.NewEmptyHeader(),
+ Header: *types.NewEmptyHeader(),
}
time.Sleep(time.Second * 3)
diff --git a/dot/rpc/subscription/websocket_test.go b/dot/rpc/subscription/websocket_test.go
index cac0e9db0a..0f87143adc 100644
--- a/dot/rpc/subscription/websocket_test.go
+++ b/dot/rpc/subscription/websocket_test.go
@@ -258,7 +258,7 @@ func TestWSConn_HandleComm(t *testing.T) {
}
fCh <- &types.FinalisationInfo{
- Header: header,
+ Header: *header,
}
time.Sleep(time.Second * 2)
@@ -347,13 +347,16 @@ func TestSubscribeAllHeads(t *testing.T) {
common.EmptyHash,
)
+ digest := types.NewDigest()
+ err = digest.Add(*types.NewBABEPreRuntimeDigest([]byte{0xff}))
+ require.NoError(t, err)
fCh <- &types.FinalisationInfo{
- Header: &types.Header{
+ Header: types.Header{
ParentHash: common.EmptyHash,
Number: big.NewInt(0),
StateRoot: common.EmptyHash,
ExtrinsicsRoot: common.EmptyHash,
- Digest: types.NewDigest(types.NewBABEPreRuntimeDigest([]byte{0xff})),
+ Digest: digest,
},
}
@@ -362,13 +365,17 @@ func TestSubscribeAllHeads(t *testing.T) {
require.NoError(t, err)
require.Equal(t, expected+"\n", string(msg))
+ digest = types.NewDigest()
+ err = digest.Add(*types.NewBABEPreRuntimeDigest([]byte{0xff}))
+ require.NoError(t, err)
+
iCh <- &types.Block{
- Header: &types.Header{
+ Header: types.Header{
ParentHash: common.EmptyHash,
Number: big.NewInt(0),
StateRoot: common.EmptyHash,
ExtrinsicsRoot: common.EmptyHash,
- Digest: types.NewDigest(types.NewBABEPreRuntimeDigest([]byte{0xff})),
+ Digest: digest,
},
}
time.Sleep(time.Millisecond * 500)
diff --git a/dot/state/block.go b/dot/state/block.go
index 26a5114d5c..1be58b1cb8 100644
--- a/dot/state/block.go
+++ b/dot/state/block.go
@@ -22,7 +22,6 @@ import (
"errors"
"fmt"
"math/big"
- "reflect"
"sync"
"time"
@@ -31,6 +30,8 @@ import (
"github.com/ChainSafe/gossamer/lib/blocktree"
"github.com/ChainSafe/gossamer/lib/common"
"github.com/ChainSafe/gossamer/lib/runtime"
+ "github.com/ChainSafe/gossamer/pkg/scale"
+
rtstorage "github.com/ChainSafe/gossamer/lib/runtime/storage"
"github.com/ChainSafe/gossamer/lib/runtime/wasmer"
)
@@ -236,7 +237,7 @@ func (bs *BlockState) HasHeader(hash common.Hash) (bool, error) {
// GetHeader returns a BlockHeader for a given hash
func (bs *BlockState) GetHeader(hash common.Hash) (*types.Header, error) {
- result := new(types.Header)
+ result := types.NewEmptyHeader()
if bs.db == nil {
return nil, fmt.Errorf("database is nil")
@@ -251,18 +252,12 @@ func (bs *BlockState) GetHeader(hash common.Hash) (*types.Header, error) {
return nil, err
}
- rw := &bytes.Buffer{}
- _, err = rw.Write(data)
- if err != nil {
- return nil, err
- }
-
- _, err = result.Decode(rw)
+ err = scale.Unmarshal(data, result)
if err != nil {
return nil, err
}
- if reflect.DeepEqual(result, new(types.Header)) {
+ if result.Empty() {
return nil, chaindb.ErrKeyNotFound
}
@@ -302,7 +297,7 @@ func (bs *BlockState) GetBlockByHash(hash common.Hash) (*types.Block, error) {
if err != nil {
return nil, err
}
- return &types.Block{Header: header, Body: blockBody}, nil
+ return &types.Block{Header: *header, Body: *blockBody}, nil
}
// GetBlockByNumber returns a block for a given blockNumber
@@ -336,9 +331,8 @@ func (bs *BlockState) GetBlockHash(blockNumber *big.Int) (common.Hash, error) {
// SetHeader will set the header into DB
func (bs *BlockState) SetHeader(header *types.Header) error {
hash := header.Hash()
-
// Write the encoded header
- bh, err := header.Encode()
+ bh, err := scale.Marshal(*header)
if err != nil {
return err
}
@@ -374,16 +368,16 @@ func (bs *BlockState) SetBlockBody(hash common.Hash, body *types.Body) error {
// CompareAndSetBlockData will compare empty fields and set all elements in a block data to db
func (bs *BlockState) CompareAndSetBlockData(bd *types.BlockData) error {
hasReceipt, _ := bs.HasReceipt(bd.Hash)
- if bd.Receipt != nil && bd.Receipt.Exists() && !hasReceipt {
- err := bs.SetReceipt(bd.Hash, bd.Receipt.Value())
+ if bd.Receipt != nil && !hasReceipt {
+ err := bs.SetReceipt(bd.Hash, *bd.Receipt)
if err != nil {
return err
}
}
hasMessageQueue, _ := bs.HasMessageQueue(bd.Hash)
- if bd.MessageQueue != nil && bd.MessageQueue.Exists() && !hasMessageQueue {
- err := bs.SetMessageQueue(bd.Hash, bd.MessageQueue.Value())
+ if bd.MessageQueue != nil && !hasMessageQueue {
+ err := bs.SetMessageQueue(bd.Hash, *bd.MessageQueue)
if err != nil {
return err
}
@@ -409,13 +403,13 @@ func (bs *BlockState) AddBlockWithArrivalTime(block *types.Block, arrivalTime ti
prevHead := bs.bt.DeepestBlockHash()
// add block to blocktree
- err = bs.bt.AddBlock(block.Header, uint64(arrivalTime.UnixNano()))
+ err = bs.bt.AddBlock(&block.Header, uint64(arrivalTime.UnixNano()))
if err != nil {
return err
}
// add the header to the DB
- err = bs.SetHeader(block.Header)
+ err = bs.SetHeader(&block.Header)
if err != nil {
return err
}
@@ -431,14 +425,14 @@ func (bs *BlockState) AddBlockWithArrivalTime(block *types.Block, arrivalTime ti
// only set number->hash mapping for our current chain
var onChain bool
- if onChain, err = bs.isBlockOnCurrentChain(block.Header); onChain && err == nil {
+ if onChain, err = bs.isBlockOnCurrentChain(&block.Header); onChain && err == nil {
err = bs.db.Put(headerHashKey(block.Header.Number.Uint64()), hash.ToBytes())
if err != nil {
return err
}
}
- err = bs.SetBlockBody(block.Header.Hash(), types.NewBody(block.Body.AsOptional().Value()))
+ err = bs.SetBlockBody(block.Header.Hash(), types.NewBody(block.Body))
if err != nil {
return err
}
diff --git a/dot/state/block_data_test.go b/dot/state/block_data_test.go
index 6a8109aa16..73fc146374 100644
--- a/dot/state/block_data_test.go
+++ b/dot/state/block_data_test.go
@@ -22,7 +22,6 @@ import (
"github.com/ChainSafe/gossamer/dot/types"
"github.com/ChainSafe/gossamer/lib/common"
- "github.com/ChainSafe/gossamer/lib/common/optional"
"github.com/ChainSafe/gossamer/lib/trie"
"github.com/stretchr/testify/require"
@@ -35,10 +34,11 @@ func TestGetSet_ReceiptMessageQueue_Justification(t *testing.T) {
var genesisHeader = &types.Header{
Number: big.NewInt(0),
StateRoot: trie.EmptyHash,
+ Digest: types.NewDigest(),
}
hash := common.NewHash([]byte{0})
- body := optional.CoreBody{0xa, 0xb, 0xc, 0xd}
+ body := types.NewBody([]byte{0xa, 0xb, 0xc, 0xd})
parentHash := genesisHeader.Hash()
@@ -53,23 +53,26 @@ func TestGetSet_ReceiptMessageQueue_Justification(t *testing.T) {
Number: big.NewInt(1),
StateRoot: stateRoot,
ExtrinsicsRoot: extrinsicsRoot,
- Digest: types.Digest{},
+ Digest: types.NewDigest(),
}
+ a := []byte("asdf")
+ b := []byte("ghjkl")
+ c := []byte("qwerty")
bds := []*types.BlockData{{
Hash: header.Hash(),
- Header: header.AsOptional(),
- Body: types.NewBody([]byte{}).AsOptional(),
- Receipt: optional.NewBytes(false, nil),
- MessageQueue: optional.NewBytes(false, nil),
- Justification: optional.NewBytes(false, nil),
+ Header: header,
+ Body: types.NewBody([]byte{}),
+ Receipt: nil,
+ MessageQueue: nil,
+ Justification: nil,
}, {
Hash: hash,
- Header: optional.NewHeader(false, nil),
- Body: optional.NewBody(true, body),
- Receipt: optional.NewBytes(true, []byte("asdf")),
- MessageQueue: optional.NewBytes(true, []byte("ghjkl")),
- Justification: optional.NewBytes(true, []byte("qwerty")),
+ Header: nil,
+ Body: body,
+ Receipt: &a,
+ MessageQueue: &b,
+ Justification: &c,
}}
for _, blockdata := range bds {
@@ -78,17 +81,17 @@ func TestGetSet_ReceiptMessageQueue_Justification(t *testing.T) {
require.Nil(t, err)
// test Receipt
- if blockdata.Receipt.Exists() {
+ if blockdata.Receipt != nil {
receipt, err := s.GetReceipt(blockdata.Hash)
require.Nil(t, err)
- require.Equal(t, blockdata.Receipt.Value(), receipt)
+ require.Equal(t, *blockdata.Receipt, receipt)
}
// test MessageQueue
- if blockdata.MessageQueue.Exists() {
+ if blockdata.MessageQueue != nil {
messageQueue, err := s.GetMessageQueue(blockdata.Hash)
require.Nil(t, err)
- require.Equal(t, blockdata.MessageQueue.Value(), messageQueue)
+ require.Equal(t, *blockdata.MessageQueue, messageQueue)
}
}
}
diff --git a/dot/state/block_notify.go b/dot/state/block_notify.go
index fc9828d41b..34e674c200 100644
--- a/dot/state/block_notify.go
+++ b/dot/state/block_notify.go
@@ -123,7 +123,7 @@ func (bs *BlockState) notifyFinalized(hash common.Hash, round, setID uint64) {
logger.Debug("notifying finalised block chans...", "chans", bs.finalised)
info := &types.FinalisationInfo{
- Header: header,
+ Header: *header,
Round: round,
SetID: setID,
}
diff --git a/dot/state/block_race_test.go b/dot/state/block_race_test.go
index 5cfbd8afc9..567ea67743 100644
--- a/dot/state/block_race_test.go
+++ b/dot/state/block_race_test.go
@@ -49,7 +49,7 @@ func TestConcurrencySetHeader(t *testing.T) {
header := &types.Header{
Number: big.NewInt(0),
StateRoot: trie.EmptyHash,
- Digest: types.Digest{},
+ Digest: types.NewDigest(),
}
err := bs.SetHeader(header)
diff --git a/dot/state/block_test.go b/dot/state/block_test.go
index 05e372aa35..e36714387e 100644
--- a/dot/state/block_test.go
+++ b/dot/state/block_test.go
@@ -32,6 +32,7 @@ import (
var testGenesisHeader = &types.Header{
Number: big.NewInt(0),
StateRoot: trie.EmptyHash,
+ Digest: types.NewDigest(),
}
func newTestBlockState(t *testing.T, header *types.Header) *BlockState {
@@ -53,7 +54,7 @@ func TestSetAndGetHeader(t *testing.T) {
header := &types.Header{
Number: big.NewInt(0),
StateRoot: trie.EmptyHash,
- Digest: types.Digest{},
+ Digest: types.NewDigest(),
}
err := bs.SetHeader(header)
@@ -70,7 +71,7 @@ func TestHasHeader(t *testing.T) {
header := &types.Header{
Number: big.NewInt(0),
StateRoot: trie.EmptyHash,
- Digest: types.Digest{},
+ Digest: types.NewDigest(),
}
err := bs.SetHeader(header)
@@ -87,12 +88,12 @@ func TestGetBlockByNumber(t *testing.T) {
blockHeader := &types.Header{
ParentHash: testGenesisHeader.Hash(),
Number: big.NewInt(1),
- Digest: types.Digest{},
+ Digest: types.NewDigest(),
}
block := &types.Block{
- Header: blockHeader,
- Body: &types.Body{0, 1, 2, 3, 4, 5, 6, 7, 8, 9},
+ Header: *blockHeader,
+ Body: types.Body{0, 1, 2, 3, 4, 5, 6, 7, 8, 9},
}
// AddBlock also sets mapping [blockNumber : hash] in DB
@@ -110,7 +111,7 @@ func TestAddBlock(t *testing.T) {
// Create header
header0 := &types.Header{
Number: big.NewInt(0),
- Digest: types.Digest{},
+ Digest: types.NewDigest(),
ParentHash: testGenesisHeader.Hash(),
}
// Create blockHash
@@ -119,8 +120,8 @@ func TestAddBlock(t *testing.T) {
blockBody0 := types.Body{0, 1, 2, 3, 4, 5, 6, 7, 8, 9}
block0 := &types.Block{
- Header: header0,
- Body: &blockBody0,
+ Header: *header0,
+ Body: blockBody0,
}
// Add the block0 to the DB
@@ -130,7 +131,7 @@ func TestAddBlock(t *testing.T) {
// Create header & blockData for block 1
header1 := &types.Header{
Number: big.NewInt(1),
- Digest: types.Digest{},
+ Digest: types.NewDigest(),
ParentHash: blockHash0,
}
blockHash1 := header1.Hash()
@@ -139,8 +140,8 @@ func TestAddBlock(t *testing.T) {
blockBody1 := types.Body{0, 1, 2, 3, 4, 5, 6, 7, 8, 9}
block1 := &types.Block{
- Header: header1,
- Body: &blockBody1,
+ Header: *header1,
+ Body: blockBody1,
}
// Add the block1 to the DB
@@ -181,16 +182,19 @@ func TestGetSlotForBlock(t *testing.T) {
data := babeHeader.Encode()
preDigest := types.NewBABEPreRuntimeDigest(data)
+ digest := types.NewDigest()
+ err := digest.Add(*preDigest)
+ require.NoError(t, err)
block := &types.Block{
- Header: &types.Header{
+ Header: types.Header{
ParentHash: testGenesisHeader.Hash(),
Number: big.NewInt(int64(1)),
- Digest: types.Digest{preDigest},
+ Digest: digest,
},
- Body: &types.Body{},
+ Body: types.Body{},
}
- err := bs.AddBlock(block)
+ err = bs.AddBlock(block)
require.NoError(t, err)
res, err := bs.GetSlotForBlock(block.Header.Hash())
@@ -249,11 +253,11 @@ func TestAddBlock_BlockNumberToHash(t *testing.T) {
}
newBlock := &types.Block{
- Header: &types.Header{
+ Header: types.Header{
ParentHash: bestHash,
Number: big.NewInt(0).Add(bestHeader.Number, big.NewInt(1)),
},
- Body: &types.Body{},
+ Body: types.Body{},
}
err = bs.AddBlock(newBlock)
@@ -273,12 +277,13 @@ func TestFinalizedHash(t *testing.T) {
require.NoError(t, err)
require.Equal(t, testGenesisHeader.Hash(), h)
+ digest := types.NewDigest()
+ err = digest.Add(*types.NewBabeSecondaryPlainPreDigest(0, 1).ToPreRuntimeDigest())
+ require.NoError(t, err)
header := &types.Header{
ParentHash: testGenesisHeader.Hash(),
Number: big.NewInt(1),
- Digest: types.Digest{
- types.NewBabeSecondaryPlainPreDigest(0, 1).ToPreRuntimeDigest(),
- },
+ Digest: digest,
}
testhash := header.Hash()
@@ -286,8 +291,8 @@ func TestFinalizedHash(t *testing.T) {
require.NoError(t, err)
err = bs.AddBlock(&types.Block{
- Header: header,
- Body: &types.Body{},
+ Header: *header,
+ Body: types.Body{},
})
require.NoError(t, err)
@@ -380,13 +385,13 @@ func TestGetHashByNumber(t *testing.T) {
header := &types.Header{
Number: big.NewInt(1),
- Digest: types.Digest{},
+ Digest: types.NewDigest(),
ParentHash: testGenesisHeader.Hash(),
}
block := &types.Block{
- Header: header,
- Body: &types.Body{0, 1, 2, 3, 4, 5, 6, 7, 8, 9},
+ Header: *header,
+ Body: types.Body{0, 1, 2, 3, 4, 5, 6, 7, 8, 9},
}
err = bs.AddBlock(block)
@@ -402,13 +407,13 @@ func TestAddBlock_WithReOrg(t *testing.T) {
header1a := &types.Header{
Number: big.NewInt(1),
- Digest: types.Digest{},
+ Digest: types.NewDigest(),
ParentHash: testGenesisHeader.Hash(),
}
block1a := &types.Block{
- Header: header1a,
- Body: &types.Body{0, 1, 2, 3, 4, 5, 6, 7, 8, 9},
+ Header: *header1a,
+ Body: types.Body{0, 1, 2, 3, 4, 5, 6, 7, 8, 9},
}
err := bs.AddBlock(block1a)
@@ -420,14 +425,14 @@ func TestAddBlock_WithReOrg(t *testing.T) {
header1b := &types.Header{
Number: big.NewInt(1),
- Digest: types.Digest{},
+ Digest: types.NewDigest(),
ParentHash: testGenesisHeader.Hash(),
ExtrinsicsRoot: common.Hash{99},
}
block1b := &types.Block{
- Header: header1b,
- Body: &types.Body{0, 1, 2, 3, 4, 5, 6, 7, 8, 9},
+ Header: *header1b,
+ Body: types.Body{0, 1, 2, 3, 4, 5, 6, 7, 8, 9},
}
err = bs.AddBlock(block1b)
@@ -440,14 +445,14 @@ func TestAddBlock_WithReOrg(t *testing.T) {
header2b := &types.Header{
Number: big.NewInt(2),
- Digest: types.Digest{},
+ Digest: types.NewDigest(),
ParentHash: header1b.Hash(),
ExtrinsicsRoot: common.Hash{99},
}
block2b := &types.Block{
- Header: header2b,
- Body: &types.Body{0, 1, 2, 3, 4, 5, 6, 7, 8, 9},
+ Header: *header2b,
+ Body: types.Body{0, 1, 2, 3, 4, 5, 6, 7, 8, 9},
}
err = bs.AddBlock(block2b)
@@ -464,13 +469,13 @@ func TestAddBlock_WithReOrg(t *testing.T) {
header2a := &types.Header{
Number: big.NewInt(2),
- Digest: types.Digest{},
+ Digest: types.NewDigest(),
ParentHash: header1a.Hash(),
}
block2a := &types.Block{
- Header: header2a,
- Body: &types.Body{0, 1, 2, 3, 4, 5, 6, 7, 8, 9},
+ Header: *header2a,
+ Body: types.Body{0, 1, 2, 3, 4, 5, 6, 7, 8, 9},
}
err = bs.AddBlock(block2a)
@@ -478,13 +483,13 @@ func TestAddBlock_WithReOrg(t *testing.T) {
header3a := &types.Header{
Number: big.NewInt(3),
- Digest: types.Digest{},
+ Digest: types.NewDigest(),
ParentHash: header2a.Hash(),
}
block3a := &types.Block{
- Header: header3a,
- Body: &types.Body{0, 1, 2, 3, 4, 5, 6, 7, 8, 9},
+ Header: *header3a,
+ Body: types.Body{0, 1, 2, 3, 4, 5, 6, 7, 8, 9},
}
err = bs.AddBlock(block3a)
@@ -510,7 +515,7 @@ func TestAddBlockToBlockTree(t *testing.T) {
header := &types.Header{
Number: big.NewInt(1),
- Digest: types.Digest{},
+ Digest: types.NewDigest(),
ParentHash: testGenesisHeader.Hash(),
}
@@ -532,19 +537,23 @@ func TestNumberIsFinalised(t *testing.T) {
require.NoError(t, err)
require.False(t, fin)
+ digest := types.NewDigest()
+ err = digest.Add(*types.NewBabeSecondaryPlainPreDigest(0, 1).ToPreRuntimeDigest())
+ require.NoError(t, err)
+
+ digest2 := types.NewDigest()
+ err = digest2.Add(*types.NewBabeSecondaryPlainPreDigest(0, 100).ToPreRuntimeDigest())
+ require.NoError(t, err)
+
header1 := &types.Header{
- Number: big.NewInt(1),
- Digest: types.Digest{
- types.NewBabeSecondaryPlainPreDigest(0, 1).ToPreRuntimeDigest(),
- },
+ Number: big.NewInt(1),
+ Digest: digest,
ParentHash: testGenesisHeader.Hash(),
}
header100 := &types.Header{
- Number: big.NewInt(100),
- Digest: types.Digest{
- types.NewBabeSecondaryPlainPreDigest(0, 100).ToPreRuntimeDigest(),
- },
+ Number: big.NewInt(100),
+ Digest: digest2,
ParentHash: testGenesisHeader.Hash(),
}
@@ -575,23 +584,26 @@ func TestSetFinalisedHash_setFirstSlotOnFinalisation(t *testing.T) {
bs := newTestBlockState(t, testGenesisHeader)
firstSlot := uint64(42069)
+ digest := types.NewDigest()
+ err := digest.Add(*types.NewBabeSecondaryPlainPreDigest(0, firstSlot).ToPreRuntimeDigest())
+ require.NoError(t, err)
+ digest2 := types.NewDigest()
+ err = digest2.Add(*types.NewBabeSecondaryPlainPreDigest(0, firstSlot+100).ToPreRuntimeDigest())
+ require.NoError(t, err)
+
header1 := &types.Header{
- Number: big.NewInt(1),
- Digest: types.Digest{
- types.NewBabeSecondaryPlainPreDigest(0, firstSlot).ToPreRuntimeDigest(),
- },
+ Number: big.NewInt(1),
+ Digest: digest,
ParentHash: testGenesisHeader.Hash(),
}
header100 := &types.Header{
- Number: big.NewInt(100),
- Digest: types.Digest{
- types.NewBabeSecondaryPlainPreDigest(0, firstSlot+100).ToPreRuntimeDigest(),
- },
+ Number: big.NewInt(100),
+ Digest: digest2,
ParentHash: testGenesisHeader.Hash(),
}
- err := bs.SetHeader(header1)
+ err = bs.SetHeader(header1)
require.NoError(t, err)
err = bs.db.Put(headerHashKey(header1.Number.Uint64()), header1.Hash().ToBytes())
require.NoError(t, err)
diff --git a/dot/state/epoch.go b/dot/state/epoch.go
index 0b13e6bf49..edbe6c3530 100644
--- a/dot/state/epoch.go
+++ b/dot/state/epoch.go
@@ -193,13 +193,12 @@ func (s *EpochState) GetEpochForBlock(header *types.Header) (uint64, error) {
return 0, err
}
- for _, d := range header.Digest {
- if d.Type() != types.PreRuntimeDigestType {
+ for _, d := range header.Digest.Types {
+ predigest, ok := d.Value().(types.PreRuntimeDigest)
+ if !ok {
continue
}
- predigest := d.(*types.PreRuntimeDigest)
-
r := &bytes.Buffer{}
_, _ = r.Write(predigest.Data)
digest, err := types.DecodeBabePreDigest(r)
diff --git a/dot/state/epoch_test.go b/dot/state/epoch_test.go
index 1ace4ceb37..a83bc43ca2 100644
--- a/dot/state/epoch_test.go
+++ b/dot/state/epoch_test.go
@@ -32,7 +32,7 @@ var genesisBABEConfig = &types.BabeConfiguration{
EpochLength: 200,
C1: 1,
C2: 4,
- GenesisAuthorities: []*types.AuthorityRaw{},
+ GenesisAuthorities: []types.AuthorityRaw{},
Randomness: [32]byte{},
SecondarySlots: 0,
}
@@ -70,13 +70,13 @@ func TestEpochState_EpochData(t *testing.T) {
keyring, err := keystore.NewSr25519Keyring()
require.NoError(t, err)
- auth := &types.Authority{
+ auth := types.Authority{
Key: keyring.Alice().Public().(*sr25519.PublicKey),
Weight: 1,
}
info := &types.EpochData{
- Authorities: []*types.Authority{auth},
+ Authorities: []types.Authority{auth},
Randomness: [32]byte{77},
}
@@ -151,10 +151,12 @@ func TestEpochState_GetEpochForBlock(t *testing.T) {
babeHeader := types.NewBabePrimaryPreDigest(0, s.epochLength+2, [32]byte{}, [64]byte{})
enc := babeHeader.Encode()
- digest := types.NewBABEPreRuntimeDigest(enc)
+ d := types.NewBABEPreRuntimeDigest(enc)
+ digest := types.NewDigest()
+	require.NoError(t, digest.Add(*d))
header := &types.Header{
- Digest: types.Digest{digest},
+ Digest: digest,
}
epoch, err := s.GetEpochForBlock(header)
@@ -163,10 +165,12 @@ func TestEpochState_GetEpochForBlock(t *testing.T) {
babeHeader = types.NewBabePrimaryPreDigest(0, s.epochLength*2+3, [32]byte{}, [64]byte{})
enc = babeHeader.Encode()
- digest = types.NewBABEPreRuntimeDigest(enc)
+ d = types.NewBABEPreRuntimeDigest(enc)
+ digest2 := types.NewDigest()
+	require.NoError(t, digest2.Add(*d))
header = &types.Header{
- Digest: types.Digest{digest},
+ Digest: digest2,
}
epoch, err = s.GetEpochForBlock(header)
diff --git a/dot/state/grandpa.go b/dot/state/grandpa.go
index af1585f538..818409c7ac 100644
--- a/dot/state/grandpa.go
+++ b/dot/state/grandpa.go
@@ -44,7 +44,7 @@ type GrandpaState struct {
}
// NewGrandpaStateFromGenesis returns a new GrandpaState given the grandpa genesis authorities
-func NewGrandpaStateFromGenesis(db chaindb.Database, genesisAuthorities []*types.GrandpaVoter) (*GrandpaState, error) {
+func NewGrandpaStateFromGenesis(db chaindb.Database, genesisAuthorities []types.GrandpaVoter) (*GrandpaState, error) {
grandpaDB := chaindb.NewTable(db, grandpaPrefix)
s := &GrandpaState{
db: grandpaDB,
@@ -89,7 +89,7 @@ func setIDChangeKey(setID uint64) []byte {
}
// setAuthorities sets the authorities for a given setID
-func (s *GrandpaState) setAuthorities(setID uint64, authorities []*types.GrandpaVoter) error {
+func (s *GrandpaState) setAuthorities(setID uint64, authorities []types.GrandpaVoter) error {
enc, err := scale.Encode(authorities)
if err != nil {
return err
@@ -99,7 +99,7 @@ func (s *GrandpaState) setAuthorities(setID uint64, authorities []*types.Grandpa
}
// GetAuthorities returns the authorities for the given setID
-func (s *GrandpaState) GetAuthorities(setID uint64) ([]*types.GrandpaVoter, error) {
+func (s *GrandpaState) GetAuthorities(setID uint64) ([]types.GrandpaVoter, error) {
enc, err := s.db.Get(authoritiesKey(setID))
if err != nil {
return nil, err
@@ -159,7 +159,7 @@ func (s *GrandpaState) GetLatestRound() (uint64, error) {
}
// SetNextChange sets the next authority change
-func (s *GrandpaState) SetNextChange(authorities []*types.GrandpaVoter, number *big.Int) error {
+func (s *GrandpaState) SetNextChange(authorities []types.GrandpaVoter, number *big.Int) error {
currSetID, err := s.GetCurrentSetID()
if err != nil {
return err
diff --git a/dot/state/grandpa_test.go b/dot/state/grandpa_test.go
index be89fbfeed..7231688295 100644
--- a/dot/state/grandpa_test.go
+++ b/dot/state/grandpa_test.go
@@ -29,7 +29,7 @@ import (
var (
kr, _ = keystore.NewEd25519Keyring()
- testAuths = []*types.GrandpaVoter{
+ testAuths = []types.GrandpaVoter{
{Key: kr.Alice().Public().(*ed25519.PublicKey), ID: 0},
}
)
@@ -57,7 +57,7 @@ func TestGrandpaState_SetNextChange(t *testing.T) {
gs, err := NewGrandpaStateFromGenesis(db, testAuths)
require.NoError(t, err)
- testAuths2 := []*types.GrandpaVoter{
+ testAuths2 := []types.GrandpaVoter{
{Key: kr.Bob().Public().(*ed25519.PublicKey), ID: 0},
}
@@ -91,7 +91,7 @@ func TestGrandpaState_GetSetIDByBlockNumber(t *testing.T) {
gs, err := NewGrandpaStateFromGenesis(db, testAuths)
require.NoError(t, err)
- testAuths2 := []*types.GrandpaVoter{
+ testAuths2 := []types.GrandpaVoter{
{Key: kr.Bob().Public().(*ed25519.PublicKey), ID: 0},
}
diff --git a/dot/state/initialize.go b/dot/state/initialize.go
index c0af7de590..79f3483e4a 100644
--- a/dot/state/initialize.go
+++ b/dot/state/initialize.go
@@ -141,10 +141,10 @@ func (s *Service) loadBabeConfigurationFromRuntime(r runtime.Instance) (*types.B
return babeCfg, nil
}
-func loadGrandpaAuthorities(t *trie.Trie) ([]*types.GrandpaVoter, error) {
+func loadGrandpaAuthorities(t *trie.Trie) ([]types.GrandpaVoter, error) {
authsRaw := t.Get(runtime.GrandpaAuthoritiesKey)
if authsRaw == nil {
- return []*types.GrandpaVoter{}, nil
+ return []types.GrandpaVoter{}, nil
}
r := &bytes.Buffer{}
diff --git a/dot/state/service.go b/dot/state/service.go
index c4ed84029a..7aa180cc47 100644
--- a/dot/state/service.go
+++ b/dot/state/service.go
@@ -216,7 +216,7 @@ func (s *Service) Rewind(toBlock int64) error {
return err
}
- s.Block.bt = blocktree.NewBlockTreeFromRoot(root.Header, s.db)
+ s.Block.bt = blocktree.NewBlockTreeFromRoot(&root.Header, s.db)
newHead := s.Block.BestBlockHash()
header, _ := s.Block.BestBlockHeader()
diff --git a/dot/state/service_test.go b/dot/state/service_test.go
index 8f06b3f781..b7c995e293 100644
--- a/dot/state/service_test.go
+++ b/dot/state/service_test.go
@@ -84,7 +84,7 @@ func TestService_Initialise(t *testing.T) {
err := state.Initialise(genData, genesisHeader, genTrie)
require.NoError(t, err)
- genesisHeader, err = types.NewHeader(common.NewHash([]byte{77}), genTrie.MustHash(), trie.EmptyHash, big.NewInt(0), types.Digest{})
+ genesisHeader, err = types.NewHeader(common.NewHash([]byte{77}), genTrie.MustHash(), trie.EmptyHash, big.NewInt(0), types.NewDigest())
require.NoError(t, err)
err = state.Initialise(genData, genesisHeader, genTrie)
@@ -180,7 +180,7 @@ func TestService_StorageTriePruning(t *testing.T) {
err = serv.Storage.blockState.AddBlock(block)
require.NoError(t, err)
- err = serv.Storage.StoreTrie(trieState, block.Header)
+ err = serv.Storage.StoreTrie(trieState, &block.Header)
require.NoError(t, err)
blocks = append(blocks, block)
@@ -225,9 +225,9 @@ func TestService_PruneStorage(t *testing.T) {
var toFinalize common.Hash
for i := 0; i < 3; i++ {
block, trieState := generateBlockWithRandomTrie(t, serv, nil, int64(i+1))
- block.Header.Digest = types.Digest{
- types.NewBabeSecondaryPlainPreDigest(0, uint64(i+1)).ToPreRuntimeDigest(),
- }
+ digest := types.NewDigest()
+		require.NoError(t, digest.Add(*types.NewBabeSecondaryPlainPreDigest(0, uint64(i+1)).ToPreRuntimeDigest()))
+ block.Header.Digest = digest
err = serv.Storage.blockState.AddBlock(block)
require.NoError(t, err)
@@ -359,10 +359,12 @@ func TestService_Import(t *testing.T) {
tr.Put([]byte(tc), []byte(tc))
}
+ digest := types.NewDigest()
+	require.NoError(t, digest.Add(*types.NewBabeSecondaryPlainPreDigest(0, 177).ToPreRuntimeDigest()))
header := &types.Header{
Number: big.NewInt(77),
StateRoot: tr.MustHash(),
- Digest: types.Digest{types.NewBabeSecondaryPlainPreDigest(0, 177).ToPreRuntimeDigest()},
+ Digest: digest,
}
firstSlot := uint64(100)
diff --git a/dot/state/storage_test.go b/dot/state/storage_test.go
index 2c21c73213..b33cb88c4f 100644
--- a/dot/state/storage_test.go
+++ b/dot/state/storage_test.go
@@ -58,12 +58,12 @@ func TestStorage_GetStorageByBlockHash(t *testing.T) {
require.NoError(t, err)
block := &types.Block{
- Header: &types.Header{
+ Header: types.Header{
ParentHash: testGenesisHeader.Hash(),
Number: big.NewInt(1),
StateRoot: root,
},
- Body: types.NewBody([]byte{}),
+ Body: *types.NewBody([]byte{}),
}
err = storage.blockState.AddBlock(block)
require.NoError(t, err)
diff --git a/dot/state/test_helpers.go b/dot/state/test_helpers.go
index bd6f936f5d..1ec2500a07 100644
--- a/dot/state/test_helpers.go
+++ b/dot/state/test_helpers.go
@@ -73,17 +73,20 @@ func AddBlocksToState(t *testing.T, blockState *BlockState, depth int) ([]*types
startNum := int(head.Number.Int64())
for i := startNum + 1; i <= depth; i++ {
d := types.NewBabePrimaryPreDigest(0, uint64(i), [32]byte{}, [64]byte{})
+ digest := types.NewDigest()
+ _ = digest.Add(*d.ToPreRuntimeDigest())
+
block := &types.Block{
- Header: &types.Header{
+ Header: types.Header{
ParentHash: previousHash,
Number: big.NewInt(int64(i)),
StateRoot: trie.EmptyHash,
- Digest: types.Digest{d.ToPreRuntimeDigest()},
+ Digest: digest,
},
- Body: &types.Body{},
+ Body: types.Body{},
}
- currentChain = append(currentChain, block.Header)
+ currentChain = append(currentChain, &block.Header)
hash := block.Header.Hash()
err := blockState.AddBlockWithArrivalTime(block, arrivalTime)
@@ -108,21 +111,22 @@ func AddBlocksToState(t *testing.T, blockState *BlockState, depth int) ([]*types
previousHash = branch.hash
for i := branch.depth; i < depth; i++ {
+ digest := types.NewDigest()
+ _ = digest.Add(types.PreRuntimeDigest{
+ Data: []byte{byte(i)},
+ })
+
block := &types.Block{
- Header: &types.Header{
+ Header: types.Header{
ParentHash: previousHash,
Number: big.NewInt(int64(i) + 1),
StateRoot: trie.EmptyHash,
- Digest: types.Digest{
- &types.PreRuntimeDigest{
- Data: []byte{byte(i)},
- },
- },
+ Digest: digest,
},
- Body: &types.Body{},
+ Body: types.Body{},
}
- branchChains = append(branchChains, block.Header)
+ branchChains = append(branchChains, &block.Header)
hash := block.Header.Hash()
err := blockState.AddBlockWithArrivalTime(block, arrivalTime)
@@ -153,12 +157,12 @@ func AddBlocksToStateWithFixedBranches(t *testing.T, blockState *BlockState, dep
startNum := int(head.Number.Int64())
for i := startNum + 1; i <= depth; i++ {
block := &types.Block{
- Header: &types.Header{
+ Header: types.Header{
ParentHash: previousHash,
Number: big.NewInt(int64(i)),
StateRoot: trie.EmptyHash,
},
- Body: &types.Body{},
+ Body: types.Body{},
}
hash := block.Header.Hash()
@@ -187,18 +191,19 @@ func AddBlocksToStateWithFixedBranches(t *testing.T, blockState *BlockState, dep
previousHash = branch.hash
for i := branch.depth; i < depth; i++ {
+ digest := types.NewDigest()
+ _ = digest.Add(types.PreRuntimeDigest{
+ Data: []byte{byte(i), byte(j), r},
+ })
+
block := &types.Block{
- Header: &types.Header{
+ Header: types.Header{
ParentHash: previousHash,
Number: big.NewInt(int64(i)),
StateRoot: trie.EmptyHash,
- Digest: types.Digest{
- &types.PreRuntimeDigest{
- Data: []byte{byte(i), byte(j), r},
- },
- },
+ Digest: digest,
},
- Body: &types.Body{},
+ Body: types.Body{},
}
hash := block.Header.Hash()
@@ -232,12 +237,12 @@ func generateBlockWithRandomTrie(t *testing.T, serv *Service, parent *common.Has
}
block := &types.Block{
- Header: &types.Header{
+ Header: types.Header{
ParentHash: *parent,
Number: big.NewInt(bNum),
StateRoot: trieStateRoot,
},
- Body: types.NewBody([]byte{}),
+ Body: *types.NewBody([]byte{}),
}
return block, trieState
}
diff --git a/dot/sync/message.go b/dot/sync/message.go
index 9031301a5d..fe7d2268e7 100644
--- a/dot/sync/message.go
+++ b/dot/sync/message.go
@@ -23,7 +23,6 @@ import (
"github.com/ChainSafe/gossamer/dot/network"
"github.com/ChainSafe/gossamer/dot/types"
"github.com/ChainSafe/gossamer/lib/common"
- "github.com/ChainSafe/gossamer/lib/common/optional"
)
var maxResponseSize uint32 = 128 // maximum number of block datas to reply with in a BlockResponse message.
@@ -37,12 +36,8 @@ func (s *Service) CreateBlockResponse(blockRequest *network.BlockRequestMessage)
respSize uint32
)
- if blockRequest.StartingBlock == nil {
- return nil, ErrInvalidBlockRequest
- }
-
- if blockRequest.Max != nil && blockRequest.Max.Exists() {
- respSize = blockRequest.Max.Value()
+ if blockRequest.Max != nil {
+ respSize = *blockRequest.Max
if respSize > maxResponseSize {
respSize = maxResponseSize
}
@@ -61,7 +56,7 @@ func (s *Service) CreateBlockResponse(blockRequest *network.BlockRequestMessage)
return nil, err
}
- startHeader = block.Header
+ startHeader = &block.Header
startHash = block.Header.Hash()
case common.Hash:
startHash = startBlock
@@ -69,10 +64,12 @@ func (s *Service) CreateBlockResponse(blockRequest *network.BlockRequestMessage)
if err != nil {
return nil, err
}
+ default:
+ return nil, ErrInvalidBlockRequest
}
- if blockRequest.EndBlockHash != nil && blockRequest.EndBlockHash.Exists() {
- endHash = blockRequest.EndBlockHash.Value()
+ if blockRequest.EndBlockHash != nil {
+ endHash = *blockRequest.EndBlockHash
endHeader, err = s.blockState.GetHeader(endHash)
if err != nil {
return nil, err
@@ -92,7 +89,7 @@ func (s *Service) CreateBlockResponse(blockRequest *network.BlockRequestMessage)
if err != nil {
return nil, err
}
- endHeader = endBlock.Header
+ endHeader = &endBlock.Header
endHash = endHeader.Hash()
}
@@ -134,12 +131,7 @@ func (s *Service) getBlockData(num *big.Int, requestedData byte) (*types.BlockDa
}
blockData := &types.BlockData{
- Hash: hash,
- Header: optional.NewHeader(false, nil),
- Body: optional.NewBody(false, nil),
- Receipt: optional.NewBytes(false, nil),
- MessageQueue: optional.NewBytes(false, nil),
- Justification: optional.NewBytes(false, nil),
+ Hash: hash,
}
if requestedData == 0 {
@@ -149,35 +141,35 @@ func (s *Service) getBlockData(num *big.Int, requestedData byte) (*types.BlockDa
if (requestedData & network.RequestedDataHeader) == 1 {
retData, err := s.blockState.GetHeader(hash)
if err == nil && retData != nil {
- blockData.Header = retData.AsOptional()
+ blockData.Header = retData
}
}
if (requestedData&network.RequestedDataBody)>>1 == 1 {
retData, err := s.blockState.GetBlockBody(hash)
if err == nil && retData != nil {
- blockData.Body = retData.AsOptional()
+ blockData.Body = retData
}
}
if (requestedData&network.RequestedDataReceipt)>>2 == 1 {
retData, err := s.blockState.GetReceipt(hash)
if err == nil && retData != nil {
- blockData.Receipt = optional.NewBytes(true, retData)
+ blockData.Receipt = &retData
}
}
if (requestedData&network.RequestedDataMessageQueue)>>3 == 1 {
retData, err := s.blockState.GetMessageQueue(hash)
if err == nil && retData != nil {
- blockData.MessageQueue = optional.NewBytes(true, retData)
+ blockData.MessageQueue = &retData
}
}
if (requestedData&network.RequestedDataJustification)>>4 == 1 {
retData, err := s.blockState.GetJustification(hash)
if err == nil && retData != nil {
- blockData.Justification = optional.NewBytes(true, retData)
+ blockData.Justification = &retData
}
}
diff --git a/dot/sync/message_test.go b/dot/sync/message_test.go
index 0d38a43223..a2f37ca6b6 100644
--- a/dot/sync/message_test.go
+++ b/dot/sync/message_test.go
@@ -6,7 +6,6 @@ import (
"github.com/ChainSafe/gossamer/dot/network"
"github.com/ChainSafe/gossamer/dot/types"
- "github.com/ChainSafe/gossamer/lib/common"
"github.com/ChainSafe/gossamer/lib/common/optional"
"github.com/ChainSafe/gossamer/lib/common/variadic"
"github.com/ChainSafe/gossamer/lib/trie"
@@ -21,13 +20,13 @@ func addTestBlocksToState(t *testing.T, depth int, blockState BlockState) {
for i := 1; i <= depth; i++ {
block := &types.Block{
- Header: &types.Header{
+ Header: types.Header{
ParentHash: previousHash,
Number: big.NewInt(int64(i)).Add(previousNum, big.NewInt(int64(i))),
StateRoot: trie.EmptyHash,
- Digest: types.Digest{},
+ Digest: types.NewDigest(),
},
- Body: &types.Body{},
+ Body: types.Body{},
}
previousHash = block.Header.Hash()
@@ -46,10 +45,10 @@ func TestService_CreateBlockResponse_MaxSize(t *testing.T) {
req := &network.BlockRequestMessage{
RequestedData: 3,
- StartingBlock: start,
- EndBlockHash: optional.NewHash(false, common.Hash{}),
+ StartingBlock: *start,
+ EndBlockHash: nil,
Direction: 0,
- Max: optional.NewUint32(false, 0),
+ Max: nil,
}
resp, err := s.CreateBlockResponse(req)
@@ -58,12 +57,13 @@ func TestService_CreateBlockResponse_MaxSize(t *testing.T) {
require.Equal(t, big.NewInt(1), resp.BlockData[0].Number())
require.Equal(t, big.NewInt(128), resp.BlockData[127].Number())
+ max := maxResponseSize + 100
req = &network.BlockRequestMessage{
RequestedData: 3,
- StartingBlock: start,
- EndBlockHash: optional.NewHash(false, common.Hash{}),
+ StartingBlock: *start,
+ EndBlockHash: nil,
Direction: 0,
- Max: optional.NewUint32(true, maxResponseSize+100),
+ Max: &max,
}
resp, err = s.CreateBlockResponse(req)
@@ -85,10 +85,10 @@ func TestService_CreateBlockResponse_StartHash(t *testing.T) {
req := &network.BlockRequestMessage{
RequestedData: 3,
- StartingBlock: start,
- EndBlockHash: optional.NewHash(false, common.Hash{}),
+ StartingBlock: *start,
+ EndBlockHash: nil,
Direction: 0,
- Max: optional.NewUint32(false, 0),
+ Max: nil,
}
resp, err := s.CreateBlockResponse(req)
@@ -110,10 +110,10 @@ func TestService_CreateBlockResponse_Descending(t *testing.T) {
req := &network.BlockRequestMessage{
RequestedData: 3,
- StartingBlock: start,
- EndBlockHash: optional.NewHash(false, common.Hash{}),
+ StartingBlock: *start,
+ EndBlockHash: nil,
Direction: 1,
- Max: optional.NewUint32(false, 0),
+ Max: nil,
}
resp, err := s.CreateBlockResponse(req)
@@ -144,11 +144,14 @@ func TestService_CreateBlockResponse(t *testing.T) {
require.NoError(t, err)
// set receipt message and justification
+ a := []byte("asdf")
+ b := []byte("ghjkl")
+ c := []byte("qwerty")
bds = &types.BlockData{
Hash: bestHash,
- Receipt: optional.NewBytes(true, []byte("asdf")),
- MessageQueue: optional.NewBytes(true, []byte("ghjkl")),
- Justification: optional.NewBytes(true, []byte("qwerty")),
+ Receipt: &a,
+ MessageQueue: &b,
+ Justification: &c,
}
endHash := s.blockState.BestBlockHash()
@@ -167,17 +170,17 @@ func TestService_CreateBlockResponse(t *testing.T) {
description: "test get Header and Body",
value: &network.BlockRequestMessage{
RequestedData: 3,
- StartingBlock: start,
- EndBlockHash: optional.NewHash(true, endHash),
+ StartingBlock: *start,
+ EndBlockHash: &endHash,
Direction: 0,
- Max: optional.NewUint32(false, 0),
+ Max: nil,
},
expectedMsgValue: &network.BlockResponseMessage{
BlockData: []*types.BlockData{
{
Hash: optional.NewHash(true, bestHash).Value(),
- Header: bestBlock.Header.AsOptional(),
- Body: bestBlock.Body.AsOptional(),
+ Header: &bestBlock.Header,
+ Body: &bestBlock.Body,
},
},
},
@@ -186,17 +189,17 @@ func TestService_CreateBlockResponse(t *testing.T) {
description: "test get Header",
value: &network.BlockRequestMessage{
RequestedData: 1,
- StartingBlock: start,
- EndBlockHash: optional.NewHash(true, endHash),
+ StartingBlock: *start,
+ EndBlockHash: &endHash,
Direction: 0,
- Max: optional.NewUint32(false, 0),
+ Max: nil,
},
expectedMsgValue: &network.BlockResponseMessage{
BlockData: []*types.BlockData{
{
Hash: optional.NewHash(true, bestHash).Value(),
- Header: bestBlock.Header.AsOptional(),
- Body: optional.NewBody(false, nil),
+ Header: &bestBlock.Header,
+ Body: nil,
},
},
},
@@ -205,17 +208,17 @@ func TestService_CreateBlockResponse(t *testing.T) {
description: "test get Receipt",
value: &network.BlockRequestMessage{
RequestedData: 4,
- StartingBlock: start,
- EndBlockHash: optional.NewHash(true, endHash),
+ StartingBlock: *start,
+ EndBlockHash: &endHash,
Direction: 0,
- Max: optional.NewUint32(false, 0),
+ Max: nil,
},
expectedMsgValue: &network.BlockResponseMessage{
BlockData: []*types.BlockData{
{
Hash: optional.NewHash(true, bestHash).Value(),
- Header: optional.NewHeader(false, nil),
- Body: optional.NewBody(false, nil),
+ Header: nil,
+ Body: nil,
Receipt: bds.Receipt,
},
},
@@ -225,17 +228,17 @@ func TestService_CreateBlockResponse(t *testing.T) {
description: "test get MessageQueue",
value: &network.BlockRequestMessage{
RequestedData: 8,
- StartingBlock: start,
- EndBlockHash: optional.NewHash(true, endHash),
+ StartingBlock: *start,
+ EndBlockHash: &endHash,
Direction: 0,
- Max: optional.NewUint32(false, 0),
+ Max: nil,
},
expectedMsgValue: &network.BlockResponseMessage{
BlockData: []*types.BlockData{
{
Hash: optional.NewHash(true, bestHash).Value(),
- Header: optional.NewHeader(false, nil),
- Body: optional.NewBody(false, nil),
+ Header: nil,
+ Body: nil,
MessageQueue: bds.MessageQueue,
},
},
diff --git a/dot/sync/syncer.go b/dot/sync/syncer.go
index 5c4de5135a..1123248272 100644
--- a/dot/sync/syncer.go
+++ b/dot/sync/syncer.go
@@ -144,9 +144,9 @@ func (s *Service) ProcessJustification(data []*types.BlockData) (int, error) {
return i, err
}
- if bd.Justification != nil && bd.Justification.Exists() {
+ if bd.Justification != nil {
logger.Debug("handling Justification...", "number", header.Number, "hash", bd.Hash)
- s.handleJustification(header, bd.Justification.Value())
+ s.handleJustification(header, *bd.Justification)
}
}
@@ -170,6 +170,7 @@ func (s *Service) ProcessBlockData(data []*types.BlockData) (int, error) {
hasHeader, _ := s.blockState.HasHeader(bd.Hash)
hasBody, _ := s.blockState.HasBlockBody(bd.Hash)
+
if hasHeader && hasBody {
// TODO: fix this; sometimes when the node shuts down the "best block" isn't stored properly,
// so when the node restarts it has blocks higher than what it thinks is the best, causing it not to sync
@@ -181,15 +182,15 @@ func (s *Service) ProcessBlockData(data []*types.BlockData) (int, error) {
return i, err
}
- err = s.blockState.AddBlockToBlockTree(block.Header)
+ err = s.blockState.AddBlockToBlockTree(&block.Header)
if err != nil && !errors.Is(err, blocktree.ErrBlockExists) {
logger.Warn("failed to add block to blocktree", "hash", bd.Hash, "error", err)
return i, err
}
- if bd.Justification != nil && bd.Justification.Exists() {
+ if bd.Justification != nil {
logger.Debug("handling Justification...", "number", block.Header.Number, "hash", bd.Hash)
- s.handleJustification(block.Header, bd.Justification.Value())
+ s.handleJustification(&block.Header, *bd.Justification)
}
// TODO: this is probably unnecessary, since the state is already in the database
@@ -210,11 +211,8 @@ func (s *Service) ProcessBlockData(data []*types.BlockData) (int, error) {
var header *types.Header
- if bd.Header.Exists() && !hasHeader {
- header, err = types.NewHeaderFromOptional(bd.Header)
- if err != nil {
- return i, err
- }
+ if bd.Header != nil && !hasHeader {
+ header = bd.Header
logger.Trace("processing header", "hash", header.Hash(), "number", header.Number)
@@ -226,11 +224,8 @@ func (s *Service) ProcessBlockData(data []*types.BlockData) (int, error) {
logger.Trace("header processed", "hash", bd.Hash)
}
- if bd.Body.Exists() && !hasBody {
- body, err := types.NewBodyFromOptional(bd.Body) //nolint
- if err != nil {
- return i, err
- }
+ if bd.Body != nil && !hasBody {
+ body := bd.Body //nolint
logger.Trace("processing body", "hash", bd.Hash)
@@ -242,20 +237,13 @@ func (s *Service) ProcessBlockData(data []*types.BlockData) (int, error) {
logger.Trace("body processed", "hash", bd.Hash)
}
- if bd.Header.Exists() && bd.Body.Exists() {
- header, err = types.NewHeaderFromOptional(bd.Header)
- if err != nil {
- return i, err
- }
-
- body, err := types.NewBodyFromOptional(bd.Body)
- if err != nil {
- return i, err
- }
+ if bd.Header != nil && bd.Body != nil {
+ header = bd.Header
+ body := bd.Body
block := &types.Block{
- Header: header,
- Body: body,
+ Header: *header,
+ Body: *body,
}
logger.Debug("processing block", "hash", bd.Hash)
@@ -269,9 +257,9 @@ func (s *Service) ProcessBlockData(data []*types.BlockData) (int, error) {
logger.Debug("block processed", "hash", bd.Hash)
}
- if bd.Justification != nil && bd.Justification.Exists() && header != nil {
+ if bd.Justification != nil && header != nil {
logger.Debug("handling Justification...", "number", bd.Number(), "hash", bd.Hash)
- s.handleJustification(header, bd.Justification.Value())
+ s.handleJustification(header, *bd.Justification)
}
}
@@ -306,7 +294,7 @@ func (s *Service) handleBody(body *types.Body) error {
// handleHeader handles blocks (header+body) included in BlockResponses
func (s *Service) handleBlock(block *types.Block) error {
- if block == nil || block.Header == nil || block.Body == nil {
+ if block == nil || block.Empty() || block.Header.Empty() {
return errors.New("block, header, or body is nil")
}
diff --git a/dot/sync/syncer_test.go b/dot/sync/syncer_test.go
index 8a668cd0f2..7747635bb3 100644
--- a/dot/sync/syncer_test.go
+++ b/dot/sync/syncer_test.go
@@ -26,10 +26,10 @@ import (
"github.com/ChainSafe/gossamer/dot/network"
"github.com/ChainSafe/gossamer/dot/state"
"github.com/ChainSafe/gossamer/dot/types"
- "github.com/ChainSafe/gossamer/lib/common/optional"
"github.com/ChainSafe/gossamer/lib/common/variadic"
"github.com/ChainSafe/gossamer/lib/runtime"
"github.com/ChainSafe/gossamer/lib/transaction"
+ "github.com/ChainSafe/gossamer/pkg/scale"
log "github.com/ChainSafe/log15"
"github.com/stretchr/testify/require"
@@ -68,7 +68,7 @@ func TestHandleBlockResponse(t *testing.T) {
block := BuildBlock(t, rt, parent, nil)
err = responder.blockState.AddBlock(block)
require.NoError(t, err)
- parent = block.Header
+ parent = &block.Header
}
startNum := 1
@@ -77,7 +77,7 @@ func TestHandleBlockResponse(t *testing.T) {
req := &network.BlockRequestMessage{
RequestedData: 3,
- StartingBlock: start,
+ StartingBlock: *start,
}
resp, err := responder.CreateBlockResponse(req)
@@ -108,7 +108,7 @@ func TestHandleBlockResponse_MissingBlocks(t *testing.T) {
block := BuildBlock(t, rt, parent, nil)
err = syncer.blockState.AddBlock(block)
require.NoError(t, err)
- parent = block.Header
+ parent = &block.Header
}
responder := NewTestSyncer(t, false)
@@ -123,7 +123,7 @@ func TestHandleBlockResponse_MissingBlocks(t *testing.T) {
block := BuildBlock(t, rt, parent, nil)
err = responder.blockState.AddBlock(block)
require.NoError(t, err)
- parent = block.Header
+ parent = &block.Header
}
startNum := 15
@@ -132,7 +132,7 @@ func TestHandleBlockResponse_MissingBlocks(t *testing.T) {
req := &network.BlockRequestMessage{
RequestedData: 3,
- StartingBlock: start,
+ StartingBlock: *start,
}
// resp contains blocks 16 + (16 + maxResponseSize)
@@ -162,7 +162,7 @@ func TestRemoveIncludedExtrinsics(t *testing.T) {
require.NoError(t, err)
bd := &types.BlockData{
- Body: body.AsOptional(),
+ Body: body,
}
msg := &network.BlockResponseMessage{
@@ -185,30 +185,50 @@ func TestHandleBlockResponse_NoBlockData(t *testing.T) {
func TestHandleBlockResponse_BlockData(t *testing.T) {
syncer := NewTestSyncer(t, false)
+ rt, err := syncer.blockState.GetRuntime(nil)
+ require.NoError(t, err)
+
parent, err := syncer.blockState.(*state.BlockState).BestBlockHeader()
require.NoError(t, err)
- rt, err := syncer.blockState.GetRuntime(nil)
+ _, err = scale.Marshal(*parent)
require.NoError(t, err)
block := BuildBlock(t, rt, parent, nil)
bd := []*types.BlockData{{
Hash: block.Header.Hash(),
- Header: block.Header.AsOptional(),
- Body: block.Body.AsOptional(),
+ Header: &block.Header,
+ Body: &block.Body,
Receipt: nil,
MessageQueue: nil,
Justification: nil,
}}
- msg := &network.BlockResponseMessage{
- BlockData: bd,
- }
- _, err = syncer.ProcessBlockData(msg.BlockData)
+ _, err = syncer.ProcessBlockData(bd)
require.Nil(t, err)
}
+func TestSyncer_ExecuteBlock_Prev(t *testing.T) {
+ syncer := NewTestSyncer(t, false)
+
+ parent, err := syncer.blockState.(*state.BlockState).BestBlockHeader()
+ require.NoError(t, err)
+
+ rt, err := syncer.blockState.GetRuntime(nil)
+ require.NoError(t, err)
+
+ block := BuildBlock(t, rt, parent, nil)
+
+ // reset parentState
+ parentState, err := syncer.storageState.TrieState(&parent.StateRoot)
+ require.NoError(t, err)
+ rt.SetContextStorage(parentState)
+
+ _, err = rt.ExecuteBlock(block)
+ require.NoError(t, err)
+}
+
func TestSyncer_ExecuteBlock(t *testing.T) {
syncer := NewTestSyncer(t, false)
@@ -233,17 +253,20 @@ func TestSyncer_HandleJustification(t *testing.T) {
syncer := NewTestSyncer(t, false)
d := types.NewBabeSecondaryPlainPreDigest(0, 1).ToPreRuntimeDigest()
+ digest := types.NewDigest()
+ err := digest.Add(*d)
+ require.NoError(t, err)
header := &types.Header{
ParentHash: syncer.blockState.(*state.BlockState).GenesisHash(),
Number: big.NewInt(1),
- Digest: types.Digest{d},
+ Digest: digest,
}
just := []byte("testjustification")
- err := syncer.blockState.AddBlock(&types.Block{
- Header: header,
- Body: &types.Body{},
+ err = syncer.blockState.AddBlock(&types.Block{
+ Header: *header,
+ Body: types.Body{},
})
require.NoError(t, err)
@@ -264,9 +287,10 @@ func TestSyncer_ProcessJustification(t *testing.T) {
require.NoError(t, err)
block := BuildBlock(t, rt, parent, nil)
- block.Header.Digest = types.Digest{
- types.NewBabeSecondaryPlainPreDigest(0, 1).ToPreRuntimeDigest(),
- }
+ digest := types.NewDigest()
+ err = digest.Add(*types.NewBabeSecondaryPlainPreDigest(0, 1).ToPreRuntimeDigest())
+ require.NoError(t, err)
+ block.Header.Digest = digest
err = syncer.blockState.(*state.BlockState).AddBlock(block)
require.NoError(t, err)
@@ -276,7 +300,7 @@ func TestSyncer_ProcessJustification(t *testing.T) {
data := []*types.BlockData{
{
Hash: syncer.blockState.BestBlockHash(),
- Justification: optional.NewBytes(true, just),
+ Justification: &just,
},
}
diff --git a/dot/sync/test_helpers.go b/dot/sync/test_helpers.go
index ae8eacf7fc..7833b55343 100644
--- a/dot/sync/test_helpers.go
+++ b/dot/sync/test_helpers.go
@@ -140,22 +140,23 @@ func newTestGenesisWithTrieAndHeader(t *testing.T, usePolkadotGenesis bool) (*ge
genTrie, err := genesis.NewTrieFromGenesis(gen)
require.NoError(t, err)
- genesisHeader, err := types.NewHeader(common.NewHash([]byte{0}), genTrie.MustHash(), trie.EmptyHash, big.NewInt(0), types.Digest{})
+ genesisHeader, err := types.NewHeader(common.NewHash([]byte{0}), genTrie.MustHash(), trie.EmptyHash, big.NewInt(0), types.NewDigest())
require.NoError(t, err)
return gen, genTrie, genesisHeader
}
// BuildBlock ...
func BuildBlock(t *testing.T, instance runtime.Instance, parent *types.Header, ext types.Extrinsic) *types.Block {
+ digest := types.NewDigest()
+ err := digest.Add(*types.NewBabeSecondaryPlainPreDigest(0, 1).ToPreRuntimeDigest())
+ require.NoError(t, err)
header := &types.Header{
ParentHash: parent.Hash(),
Number: big.NewInt(0).Add(parent.Number, big.NewInt(1)),
- Digest: types.Digest{
- types.NewBabeSecondaryPlainPreDigest(0, 1).ToPreRuntimeDigest(),
- },
+ Digest: digest,
}
- err := instance.InitializeBlock(header)
+ err = instance.InitializeBlock(header)
require.NoError(t, err)
idata := types.NewInherentsData()
@@ -212,7 +213,7 @@ func BuildBlock(t *testing.T, instance runtime.Instance, parent *types.Header, e
res.Number = header.Number
return &types.Block{
- Header: res,
- Body: body,
+ Header: *res,
+ Body: *body,
}
}
diff --git a/dot/types/authority.go b/dot/types/authority.go
index 1b0170fc36..fb7c8894f7 100644
--- a/dot/types/authority.go
+++ b/dot/types/authority.go
@@ -125,10 +125,10 @@ func (a *AuthorityRaw) Decode(r io.Reader) (*AuthorityRaw, error) {
}
// AuthoritiesToRaw converts an array of Authority in an array of AuthorityRaw
-func AuthoritiesToRaw(auths []*Authority) []*AuthorityRaw {
- raw := make([]*AuthorityRaw, len(auths))
+func AuthoritiesToRaw(auths []Authority) []AuthorityRaw {
+ raw := make([]AuthorityRaw, len(auths))
for i, auth := range auths {
- raw[i] = auth.ToRaw()
+ raw[i] = *auth.ToRaw()
}
return raw
}
diff --git a/dot/types/babe.go b/dot/types/babe.go
index a3eaafc674..176b5e25bc 100644
--- a/dot/types/babe.go
+++ b/dot/types/babe.go
@@ -30,17 +30,17 @@ type BabeConfiguration struct {
EpochLength uint64 // duration of epoch in slots
C1 uint64 // (1-(c1/c2)) is the probability of a slot being empty
C2 uint64
- GenesisAuthorities []*AuthorityRaw
+ GenesisAuthorities []AuthorityRaw
Randomness [RandomnessLength]byte
SecondarySlots byte
}
// BABEAuthorityRawToAuthority turns a slice of BABE AuthorityRaw into a slice of Authority
-func BABEAuthorityRawToAuthority(adr []*AuthorityRaw) ([]*Authority, error) {
- ad := make([]*Authority, len(adr))
+func BABEAuthorityRawToAuthority(adr []AuthorityRaw) ([]Authority, error) {
+ ad := make([]Authority, len(adr))
for i, r := range adr {
- ad[i] = new(Authority)
- err := ad[i].FromRawSr25519(r)
+ ad[i] = Authority{}
+ err := ad[i].FromRawSr25519(&r) //nolint
if err != nil {
return nil, err
}
@@ -51,7 +51,7 @@ func BABEAuthorityRawToAuthority(adr []*AuthorityRaw) ([]*Authority, error) {
// EpochData is the data provided for a BABE epoch
type EpochData struct {
- Authorities []*Authority
+ Authorities []Authority
Randomness [RandomnessLength]byte
}
@@ -61,9 +61,9 @@ func (d *EpochData) ToEpochDataRaw() *EpochDataRaw {
Randomness: d.Randomness,
}
- rawAuths := make([]*AuthorityRaw, len(d.Authorities))
+ rawAuths := make([]AuthorityRaw, len(d.Authorities))
for i, auth := range d.Authorities {
- rawAuths[i] = auth.ToRaw()
+ rawAuths[i] = *auth.ToRaw()
}
raw.Authorities = rawAuths
@@ -72,7 +72,7 @@ func (d *EpochData) ToEpochDataRaw() *EpochDataRaw {
// EpochDataRaw is the data provided for an epoch, with Authority as AuthorityRaw
type EpochDataRaw struct {
- Authorities []*AuthorityRaw
+ Authorities []AuthorityRaw
Randomness [RandomnessLength]byte
}
@@ -100,12 +100,11 @@ type ConfigData struct {
// GetSlotFromHeader returns the BABE slot from the given header
func GetSlotFromHeader(header *Header) (uint64, error) {
- if len(header.Digest) == 0 {
+ if len(header.Digest.Types) == 0 {
return 0, fmt.Errorf("chain head missing digest")
}
- digestItem := header.Digest[0]
- preDigest, ok := digestItem.(*PreRuntimeDigest)
+ preDigest, ok := header.Digest.Types[0].Value().(PreRuntimeDigest)
if !ok {
return 0, fmt.Errorf("first digest item is not pre-digest")
}
diff --git a/dot/types/babe_test.go b/dot/types/babe_test.go
index df9701a789..8dc8cdcc39 100644
--- a/dot/types/babe_test.go
+++ b/dot/types/babe_test.go
@@ -55,13 +55,13 @@ func TestEpochData(t *testing.T) {
kr, err := keystore.NewSr25519Keyring()
require.NoError(t, err)
- auth := &Authority{
+ auth := Authority{
Key: kr.Alice().Public().(*sr25519.PublicKey),
Weight: 1,
}
data := &EpochData{
- Authorities: []*Authority{auth},
+ Authorities: []Authority{auth},
Randomness: [32]byte{77},
}
diff --git a/dot/types/block.go b/dot/types/block.go
index 7f77c50438..1a5e5a1cc5 100644
--- a/dot/types/block.go
+++ b/dot/types/block.go
@@ -17,42 +17,45 @@
package types
import (
- "io"
-
- "github.com/ChainSafe/gossamer/lib/scale"
+ "github.com/ChainSafe/gossamer/pkg/scale"
)
// Block defines a state block
type Block struct {
- Header *Header
- Body *Body
+ Header Header
+ Body Body
}
// NewBlock returns a new Block
-func NewBlock(header *Header, body *Body) *Block {
- return &Block{
+func NewBlock(header Header, body Body) Block {
+ return Block{
Header: header,
Body: body,
}
}
-// NewEmptyBlock returns a new Block with an initialised but empty Header and Body
-func NewEmptyBlock() *Block {
- return &Block{
- Header: new(Header),
- Body: new(Body),
+// NewEmptyBlock returns a new empty Block
+func NewEmptyBlock() Block {
+ return Block{
+ Header: *NewEmptyHeader(),
+ Body: *NewBody(nil),
}
}
+// Empty returns a boolean indicating whether the Block is empty
+func (b *Block) Empty() bool {
+ return b.Header.Empty() && len(b.Body) == 0
+}
+
// Encode returns the SCALE encoding of a block
func (b *Block) Encode() ([]byte, error) {
- enc, err := scale.Encode(b.Header)
+ enc, err := scale.Marshal(b.Header)
if err != nil {
return nil, err
}
// block body is already SCALE encoded
- return append(enc, []byte(*b.Body)...), nil
+ return append(enc, []byte(b.Body)...), nil
}
// MustEncode returns the SCALE encoded block and panics if it fails to encode
@@ -64,19 +67,16 @@ func (b *Block) MustEncode() []byte {
return enc
}
-// Decode decodes the SCALE encoded input into this block
-func (b *Block) Decode(r io.Reader) error {
- sd := scale.Decoder{Reader: r}
- _, err := sd.Decode(b)
- return err
-}
-
// DeepCopy returns a copy of the block
-func (b *Block) DeepCopy() *Block {
- bc := make([]byte, len(*b.Body))
- copy(bc, *b.Body)
- return &Block{
- Header: b.Header.DeepCopy(),
- Body: NewBody(bc),
+func (b *Block) DeepCopy() (Block, error) {
+ bc := make([]byte, len(b.Body))
+ copy(bc, b.Body)
+ head, err := b.Header.DeepCopy()
+ if err != nil {
+ return Block{}, err
}
+ return Block{
+ Header: *head,
+ Body: *NewBody(bc),
+ }, nil
}
diff --git a/dot/types/block_data.go b/dot/types/block_data.go
index a85c67df71..50f9138c42 100644
--- a/dot/types/block_data.go
+++ b/dot/types/block_data.go
@@ -18,196 +18,58 @@ package types
import (
"fmt"
- "io"
"math/big"
"github.com/ChainSafe/gossamer/lib/common"
- "github.com/ChainSafe/gossamer/lib/common/optional"
- "github.com/ChainSafe/gossamer/lib/scale"
)
// BlockData is stored within the BlockDB
+// The BlockData fields are optional and thus are represented as pointers to ensure correct encoding
type BlockData struct {
Hash common.Hash
- Header *optional.Header
- Body *optional.Body
- Receipt *optional.Bytes
- MessageQueue *optional.Bytes
- Justification *optional.Bytes
+ Header *Header
+ Body *Body
+ Receipt *[]byte
+ MessageQueue *[]byte
+ Justification *[]byte
+}
+
+// NewEmptyBlockData creates an empty BlockData struct
+func NewEmptyBlockData() *BlockData {
+ return &BlockData{}
}
// Number returns the BlockNumber of the BlockData's header, nil if it doesn't exist
func (bd *BlockData) Number() *big.Int {
- if bd == nil || bd.Header == nil || !bd.Header.Exists() {
+ if bd == nil || bd.Header == nil {
return nil
}
- return bd.Header.Value().Number
+ return bd.Header.Number
}
func (bd *BlockData) String() string {
str := fmt.Sprintf("Hash=%s ", bd.Hash)
- if bd.Header != nil && bd.Header.Exists() {
+ if bd.Header != nil {
str = str + fmt.Sprintf("Header=%s ", bd.Header)
}
- if bd.Body != nil && bd.Body.Exists() {
- str = str + fmt.Sprintf("Body=%s ", bd.Body)
+ if bd.Body != nil {
+ str = str + fmt.Sprintf("Body=%s ", *bd.Body)
}
- if bd.Receipt != nil && bd.Receipt.Exists() {
+ if bd.Receipt != nil {
str = str + fmt.Sprintf("Receipt=0x%x ", bd.Receipt)
}
- if bd.MessageQueue != nil && bd.MessageQueue.Exists() {
+ if bd.MessageQueue != nil {
str = str + fmt.Sprintf("MessageQueue=0x%x ", bd.MessageQueue)
}
- if bd.Justification != nil && bd.Justification.Exists() {
+ if bd.Justification != nil {
str = str + fmt.Sprintf("Justification=0x%x ", bd.Justification)
}
return str
}
-
-// Encode performs SCALE encoding of the BlockData
-func (bd *BlockData) Encode() ([]byte, error) {
- enc := bd.Hash[:]
-
- if bd.Header.Exists() {
- venc, err := scale.Encode(bd.Header.Value())
- if err != nil {
- return nil, err
- }
- enc = append(enc, byte(1)) // Some
- enc = append(enc, venc...)
- } else {
- enc = append(enc, byte(0)) // None
- }
-
- if bd.Body.Exists() {
- venc, err := scale.Encode(bd.Body.Value())
- if err != nil {
- return nil, err
- }
- enc = append(enc, byte(1)) // Some
- enc = append(enc, venc...)
- } else {
- enc = append(enc, byte(0)) // None
- }
-
- if bd.Receipt != nil && bd.Receipt.Exists() {
- venc, err := scale.Encode(bd.Receipt.Value())
- if err != nil {
- return nil, err
- }
- enc = append(enc, byte(1)) // Some
- enc = append(enc, venc...)
- } else {
- enc = append(enc, byte(0)) // None
- }
-
- if bd.MessageQueue != nil && bd.MessageQueue.Exists() {
- venc, err := scale.Encode(bd.MessageQueue.Value())
- if err != nil {
- return nil, err
- }
- enc = append(enc, byte(1)) // Some
- enc = append(enc, venc...)
- } else {
- enc = append(enc, byte(0)) // None
- }
-
- if bd.Justification != nil && bd.Justification.Exists() {
- venc, err := scale.Encode(bd.Justification.Value())
- if err != nil {
- return nil, err
- }
- enc = append(enc, byte(1)) // Some
- enc = append(enc, venc...)
- } else {
- enc = append(enc, byte(0)) // None
- }
-
- return enc, nil
-}
-
-// Decode decodes the SCALE encoded input to BlockData
-func (bd *BlockData) Decode(r io.Reader) error {
- hash, err := common.ReadHash(r)
- if err != nil {
- return err
- }
- bd.Hash = hash
-
- bd.Header, err = decodeOptionalHeader(r)
- if err != nil {
- return err
- }
-
- bd.Body, err = decodeOptionalBody(r)
- if err != nil {
- return err
- }
-
- bd.Receipt, err = decodeOptionalBytes(r)
- if err != nil {
- return err
- }
-
- bd.MessageQueue, err = decodeOptionalBytes(r)
- if err != nil {
- return err
- }
-
- bd.Justification, err = decodeOptionalBytes(r)
- if err != nil {
- return err
- }
-
- return nil
-}
-
-// EncodeBlockDataArray encodes an array of BlockData using SCALE
-func EncodeBlockDataArray(bds []*BlockData) ([]byte, error) {
- enc, err := scale.Encode(int32(len(bds)))
- if err != nil {
- return nil, err
- }
-
- for _, bd := range bds {
- benc, err := bd.Encode()
- if err != nil {
- return nil, err
- }
- enc = append(enc, benc...)
- }
-
- return enc, nil
-}
-
-// DecodeBlockDataArray decodes a SCALE encoded BlockData array
-func DecodeBlockDataArray(r io.Reader) ([]*BlockData, error) {
- sd := scale.Decoder{Reader: r}
-
- l, err := sd.Decode(int32(0))
- if err != nil {
- return nil, err
- }
-
- length := int(l.(int32))
- bds := make([]*BlockData, length)
-
- for i := 0; i < length; i++ {
- bd := new(BlockData)
- err = bd.Decode(r)
- if err != nil {
- return bds, err
- }
-
- bds[i] = bd
- }
-
- return bds, err
-}
diff --git a/dot/types/block_data_test.go b/dot/types/block_data_test.go
index 0d90b6ae86..bd4d0b6c8d 100644
--- a/dot/types/block_data_test.go
+++ b/dot/types/block_data_test.go
@@ -17,292 +17,177 @@
package types
import (
- "bytes"
"math/big"
- "reflect"
"testing"
"github.com/ChainSafe/gossamer/lib/common"
- "github.com/ChainSafe/gossamer/lib/common/optional"
-
+ "github.com/ChainSafe/gossamer/pkg/scale"
"github.com/stretchr/testify/require"
)
-var testDigest = &Digest{
- &PreRuntimeDigest{
+var (
+ digestItem = scale.MustNewVaryingDataType(ChangesTrieRootDigest{}, PreRuntimeDigest{}, ConsensusDigest{}, SealDigest{})
+ digest = scale.NewVaryingDataTypeSlice(digestItem)
+ testDigest = digest
+)
+var _ = testDigest.Add(
+ PreRuntimeDigest{
ConsensusEngineID: BabeEngineID,
Data: []byte{1, 2, 3},
},
- &SealDigest{
+ SealDigest{
ConsensusEngineID: BabeEngineID,
Data: []byte{4, 5, 6, 7},
},
-}
-
-func TestBlockDataEncodeEmpty(t *testing.T) {
- hash := common.NewHash([]byte{0})
+)
- bd := &BlockData{
- Hash: hash,
- Header: optional.NewHeader(false, nil),
- Body: optional.NewBody(false, nil),
- Receipt: optional.NewBytes(false, nil),
- MessageQueue: optional.NewBytes(false, nil),
- Justification: optional.NewBytes(false, nil),
- }
+func TestNumber(t *testing.T) {
+ testHash := common.NewHash([]byte{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0xa, 0xb, 0xc, 0xd, 0xe, 0xf, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0xa, 0xb, 0xc, 0xd, 0xe, 0xf})
- expected := append([]byte{0}, hash[:]...)
- expected = append(expected, []byte{0, 0, 0, 0}...)
+ headerVdt, err := NewHeader(testHash, testHash, testHash, big.NewInt(5), testDigest)
+ require.NoError(t, err)
- enc, err := bd.Encode()
- if err != nil {
- t.Fatal(err)
+ bd := BlockData{
+ Hash: common.NewHash([]byte{0}),
+ Header: headerVdt,
+ Body: nil,
+ Receipt: nil,
+ MessageQueue: nil,
+ Justification: nil,
}
- if !bytes.Equal(expected, enc) {
- t.Fatalf("Fail: got %x expected %x", enc, expected)
- }
+ num := bd.Number()
+ require.Equal(t, big.NewInt(5), num)
}
-func TestBlockDataEncodeHeader(t *testing.T) {
- hash := common.NewHash([]byte{0})
- testHash := common.NewHash([]byte{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0xa, 0xb, 0xc, 0xd, 0xe, 0xf, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0xa, 0xb, 0xc, 0xd, 0xe, 0xf})
-
- header := &optional.CoreHeader{
- ParentHash: testHash,
- Number: big.NewInt(1),
- StateRoot: testHash,
- ExtrinsicsRoot: testHash,
- Digest: testDigest,
- }
-
- bd := &BlockData{
- Hash: hash,
- Header: optional.NewHeader(true, header),
- Body: optional.NewBody(false, nil),
- Receipt: optional.NewBytes(false, nil),
- MessageQueue: optional.NewBytes(false, nil),
- Justification: optional.NewBytes(false, nil),
- }
-
- enc, err := bd.Encode()
+func TestBlockDataEncodeAndDecodeEmpty(t *testing.T) {
+ expected, err := common.HexToBytes("0x00000000000000000000000000000000000000000000000000000000000000000000000000")
require.NoError(t, err)
- r := &bytes.Buffer{}
- _, _ = r.Write(enc)
+ bd := BlockData{
+ Hash: common.NewHash([]byte{0}),
+ Header: nil,
+ Body: nil,
+ Receipt: nil,
+ MessageQueue: nil,
+ Justification: nil,
+ }
- res := new(BlockData)
- err = res.Decode(r)
+ enc, err := scale.Marshal(bd)
require.NoError(t, err)
- require.Equal(t, bd, res)
-}
-func TestBlockDataEncodeBody(t *testing.T) {
- hash := common.NewHash([]byte{0})
- body := optional.CoreBody{0xa, 0xb, 0xc, 0xd}
+ require.Equal(t, expected, enc)
- bd := &BlockData{
- Hash: hash,
- Header: optional.NewHeader(false, nil),
- Body: optional.NewBody(true, body),
- Receipt: optional.NewBytes(false, nil),
- MessageQueue: optional.NewBytes(false, nil),
- Justification: optional.NewBytes(false, nil),
- }
-
- expected, err := common.HexToBytes("0x00000000000000000000000000000000000000000000000000000000000000000001100a0b0c0d000000")
- if err != nil {
- t.Fatal(err)
+ var block BlockData
+ if bd.Header != nil {
+ block.Header = NewEmptyHeader()
}
-
- enc, err := bd.Encode()
- if err != nil {
- t.Fatal(err)
- }
-
- if !bytes.Equal(expected, enc) {
- t.Fatalf("Fail: got %x expected %x", enc, expected)
+ err = scale.Unmarshal(enc, &block)
+ require.NoError(t, err)
+ if block.Header != nil {
+ _ = block.Header.Hash()
}
+ require.Equal(t, bd, block)
}
-func TestBlockDataEncodeAll(t *testing.T) {
- hash := common.NewHash([]byte{0})
- body := optional.CoreBody{0xa, 0xb, 0xc, 0xd}
-
- bd := &BlockData{
- Hash: hash,
- Header: optional.NewHeader(false, nil),
- Body: optional.NewBody(true, body),
- Receipt: optional.NewBytes(true, []byte("asdf")),
- MessageQueue: optional.NewBytes(true, []byte("ghjkl")),
- Justification: optional.NewBytes(true, []byte("qwerty")),
- }
-
- expected, err := common.HexToBytes("0x00000000000000000000000000000000000000000000000000000000000000000001100a0b0c0d011061736466011467686a6b6c0118717765727479")
- if err != nil {
- t.Fatal(err)
- }
-
- enc, err := bd.Encode()
- if err != nil {
- t.Fatal(err)
- }
-
- if !bytes.Equal(expected, enc) {
- t.Fatalf("Fail: got %x expected %x", enc, expected)
- }
-}
+func TestBlockDataEncodeAndDecodeHeader(t *testing.T) {
+ expected, err := common.HexToBytes("0x000000000000000000000000000000000000000000000000000000000000000001000102030405060708090a0b0c0d0e0f000102030405060708090a0b0c0d0e0f04000102030405060708090a0b0c0d0e0f000102030405060708090a0b0c0d0e0f000102030405060708090a0b0c0d0e0f000102030405060708090a0b0c0d0e0f0806424142450c0102030542414245100405060700000000")
+ require.NoError(t, err)
-func TestBlockDataDecodeHeader(t *testing.T) {
- hash := common.NewHash([]byte{0})
testHash := common.NewHash([]byte{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0xa, 0xb, 0xc, 0xd, 0xe, 0xf, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0xa, 0xb, 0xc, 0xd, 0xe, 0xf})
- header := &optional.CoreHeader{
- ParentHash: testHash,
- Number: big.NewInt(1),
- StateRoot: testHash,
- ExtrinsicsRoot: testHash,
- Digest: testDigest,
- }
+ headerVdt, err := NewHeader(testHash, testHash, testHash, big.NewInt(1), testDigest)
+ require.NoError(t, err)
- expected := &BlockData{
- Hash: hash,
- Header: optional.NewHeader(true, header),
- Body: optional.NewBody(false, nil),
- Receipt: optional.NewBytes(false, nil),
- MessageQueue: optional.NewBytes(false, nil),
- Justification: optional.NewBytes(false, nil),
+ bd := BlockData{
+ Hash: common.NewHash([]byte{0}),
+ Header: headerVdt,
+ Body: nil,
+ Receipt: nil,
+ MessageQueue: nil,
+ Justification: nil,
}
- enc, err := expected.Encode()
+ enc, err := scale.Marshal(bd)
require.NoError(t, err)
- res := new(BlockData)
- r := &bytes.Buffer{}
- r.Write(enc)
+ require.Equal(t, expected, enc)
- err = res.Decode(r)
+ var block BlockData
+ if bd.Header != nil {
+ block.Header = NewEmptyHeader()
+ }
+ err = scale.Unmarshal(enc, &block)
require.NoError(t, err)
-
- if !reflect.DeepEqual(res, expected) {
- t.Fatalf("Fail: got %v expected %v", res, expected)
+ if block.Header != nil {
+ _ = block.Header.Hash()
}
+ require.Equal(t, bd, block)
}
-func TestBlockDataDecodeBody(t *testing.T) {
- hash := common.NewHash([]byte{0})
- body := optional.CoreBody{0xa, 0xb, 0xc, 0xd}
+func TestBlockDataEncodeAndDecodeBody(t *testing.T) {
+ expected, err := common.HexToBytes("0x00000000000000000000000000000000000000000000000000000000000000000001100a0b0c0d000000")
+ require.NoError(t, err)
- expected := &BlockData{
- Hash: hash,
- Header: optional.NewHeader(false, nil),
- Body: optional.NewBody(true, body),
- Receipt: optional.NewBytes(false, nil),
- MessageQueue: optional.NewBytes(false, nil),
- Justification: optional.NewBytes(false, nil),
+ bd := BlockData{
+ Hash: common.NewHash([]byte{0}),
+ Header: nil,
+ Body: NewBody([]byte{0xa, 0xb, 0xc, 0xd}),
+ Receipt: nil,
+ MessageQueue: nil,
+ Justification: nil,
}
- enc, err := common.HexToBytes("0x00000000000000000000000000000000000000000000000000000000000000000001100a0b0c0d000000")
- if err != nil {
- t.Fatal(err)
- }
+ enc, err := scale.Marshal(bd)
+ require.NoError(t, err)
- res := new(BlockData)
- r := &bytes.Buffer{}
- r.Write(enc)
+ require.Equal(t, expected, enc)
- err = res.Decode(r)
- if err != nil {
- t.Fatal(err)
+ var block BlockData
+ if bd.Header != nil {
+ block.Header = NewEmptyHeader()
}
-
- if !reflect.DeepEqual(res, expected) {
- t.Fatalf("Fail: got %v expected %v", res, expected)
+ err = scale.Unmarshal(enc, &block)
+ require.NoError(t, err)
+ if block.Header != nil {
+ _ = block.Header.Hash()
}
+ require.Equal(t, bd, block)
}
-func TestBlockDataDecodeAll(t *testing.T) {
- hash := common.NewHash([]byte{0})
- body := optional.CoreBody{0xa, 0xb, 0xc, 0xd}
-
- expected := &BlockData{
- Hash: hash,
- Header: optional.NewHeader(false, nil),
- Body: optional.NewBody(true, body),
- Receipt: optional.NewBytes(true, []byte("asdf")),
- MessageQueue: optional.NewBytes(true, []byte("ghjkl")),
- Justification: optional.NewBytes(true, []byte("qwerty")),
- }
-
- enc, err := common.HexToBytes("0x00000000000000000000000000000000000000000000000000000000000000000001100a0b0c0d011061736466011467686a6b6c0118717765727479")
- if err != nil {
- t.Fatal(err)
- }
-
- res := new(BlockData)
- r := &bytes.Buffer{}
- r.Write(enc)
-
- err = res.Decode(r)
- if err != nil {
- t.Fatal(err)
- }
-
- if !reflect.DeepEqual(res, expected) {
- t.Fatalf("Fail: got %v expected %v", res, expected)
- }
-}
+func TestBlockDataEncodeAndDecodeAll(t *testing.T) {
+ expected, err := common.HexToBytes("0x7d0000000000000000000000000000000000000000000000000000000000000001000102030405060708090a0b0c0d0e0f000102030405060708090a0b0c0d0e0f04000102030405060708090a0b0c0d0e0f000102030405060708090a0b0c0d0e0f000102030405060708090a0b0c0d0e0f000102030405060708090a0b0c0d0e0f0806424142450c0102030542414245100405060701100a0b0c0d010401010402010403")
+ require.NoError(t, err)
-func TestBlockDataArrayEncodeAndDecode(t *testing.T) {
- hash := common.NewHash([]byte{0})
+ hash := common.NewHash([]byte{125})
testHash := common.NewHash([]byte{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0xa, 0xb, 0xc, 0xd, 0xe, 0xf, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0xa, 0xb, 0xc, 0xd, 0xe, 0xf})
- body := optional.CoreBody{0xa, 0xb, 0xc, 0xd}
-
- header := &optional.CoreHeader{
- ParentHash: testHash,
- Number: big.NewInt(1),
- StateRoot: testHash,
- ExtrinsicsRoot: testHash,
- Digest: testDigest,
- }
+ body := NewBody([]byte{0xa, 0xb, 0xc, 0xd})
- expected := []*BlockData{{
- Hash: hash,
- Header: optional.NewHeader(true, header),
- Body: optional.NewBody(false, nil),
- Receipt: optional.NewBytes(false, nil),
- MessageQueue: optional.NewBytes(false, nil),
- Justification: optional.NewBytes(false, nil),
- }, {
- Hash: hash,
- Header: optional.NewHeader(false, nil),
- Body: optional.NewBody(true, body),
- Receipt: optional.NewBytes(true, []byte("asdf")),
- MessageQueue: optional.NewBytes(true, []byte("ghjkl")),
- Justification: optional.NewBytes(true, []byte("qwerty")),
- }, {
+ headerVdt, err := NewHeader(testHash, testHash, testHash, big.NewInt(1), testDigest)
+ require.NoError(t, err)
+
+ bd := BlockData{
Hash: hash,
- Header: optional.NewHeader(false, nil),
- Body: optional.NewBody(true, body),
- Receipt: optional.NewBytes(false, nil),
- MessageQueue: optional.NewBytes(false, nil),
- Justification: optional.NewBytes(false, nil),
- }}
-
- enc, err := EncodeBlockDataArray(expected)
- if err != nil {
- t.Fatal(err)
+ Header: headerVdt,
+ Body: body,
+ Receipt: &[]byte{1},
+ MessageQueue: &[]byte{2},
+ Justification: &[]byte{3},
}
- r := &bytes.Buffer{}
- r.Write(enc)
+ enc, err := scale.Marshal(bd)
+ require.NoError(t, err)
- res, err := DecodeBlockDataArray(r)
- if err != nil {
- t.Fatal(err)
- }
+ require.Equal(t, expected, enc)
- if !reflect.DeepEqual(res[1], expected[1]) {
- t.Fatalf("Fail: got %v expected %v", res[1], expected[1])
+ var block BlockData
+ if bd.Header != nil {
+ block.Header = NewEmptyHeader()
+ }
+ err = scale.Unmarshal(enc, &block)
+ require.NoError(t, err)
+ if block.Header != nil {
+ _ = block.Header.Hash()
}
+ require.Equal(t, bd, block)
}
diff --git a/dot/types/block_test.go b/dot/types/block_test.go
index f7c4e0e175..157f3ad3c4 100644
--- a/dot/types/block_test.go
+++ b/dot/types/block_test.go
@@ -19,122 +19,105 @@ package types
import (
"bytes"
"math/big"
- "reflect"
"testing"
"github.com/ChainSafe/gossamer/lib/common"
+ "github.com/ChainSafe/gossamer/pkg/scale"
"github.com/stretchr/testify/require"
)
-func TestEncodeBlock(t *testing.T) {
- // see https://github.com/paritytech/substrate/blob/master/test-utils/runtime/src/system.rs#L376
- expected := []byte{69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69,
- 4, 39, 71, 171, 124, 13, 195, 139, 127, 42, 251, 168, 43, 213, 226, 214, 172, 239, 140, 49, 224, 152, 0,
- 246, 96, 183, 94, 200, 74, 112, 5, 9, 159, 3, 23, 10, 46, 117, 151, 183, 183, 227, 216, 76, 5, 57, 29, 19,
- 154, 98, 177, 87, 231, 135, 134, 216, 192, 130, 242, 157, 207, 76, 17, 19, 20, 0, 4, 1}
+func TestEmptyBlock(t *testing.T) {
+ block := NewEmptyBlock()
+ isEmpty := block.Empty()
+ require.True(t, isEmpty)
+
+ block = NewBlock(*NewEmptyHeader(), Body{})
+ isEmpty = block.Empty()
+ require.True(t, isEmpty)
parentHash, err := common.HexToHash("0x4545454545454545454545454545454545454545454545454545454545454545")
- if err != nil {
- t.Fatal(err)
- }
+ require.NoError(t, err)
stateRoot, err := common.HexToHash("0x2747ab7c0dc38b7f2afba82bd5e2d6acef8c31e09800f660b75ec84a7005099f")
- if err != nil {
- t.Fatal(err)
- }
+ require.NoError(t, err)
extrinsicsRoot, err := common.HexToHash("0x03170a2e7597b7b7e3d84c05391d139a62b157e78786d8c082f29dcf4c111314")
- if err != nil {
- t.Fatal(err)
- }
+ require.NoError(t, err)
- header := &Header{
- ParentHash: parentHash,
- Number: big.NewInt(1),
- StateRoot: stateRoot,
- ExtrinsicsRoot: extrinsicsRoot,
- Digest: Digest{},
- }
+ header, err := NewHeader(parentHash, stateRoot, extrinsicsRoot, big.NewInt(1), NewDigest())
+ require.NoError(t, err)
- block := NewBlock(header, NewBody([]byte{4, 1}))
- enc, err := block.Encode()
- if err != nil {
- t.Fatal(err)
- }
+ block = NewBlock(*header, Body{})
+ isEmpty = block.Empty()
+ require.False(t, isEmpty)
- if !bytes.Equal(enc, expected) {
- t.Fatalf("Fail: got %x expected %x", enc, expected)
- }
+ block = NewBlock(*NewEmptyHeader(), *NewBody([]byte{4, 1}))
+ isEmpty = block.Empty()
+ require.False(t, isEmpty)
}
-func TestDecodeBlock(t *testing.T) {
- // see https://github.com/paritytech/substrate/blob/master/test-utils/runtime/src/system.rs#L376
- data := []byte{69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 4, 39, 71, 171, 124, 13, 195, 139, 127, 42, 251, 168, 43, 213, 226, 214, 172, 239, 140, 49, 224, 152, 0, 246, 96, 183, 94, 200, 74, 112, 5, 9, 159, 3, 23, 10, 46, 117, 151, 183, 183, 227, 216, 76, 5, 57, 29, 19, 154, 98, 177, 87, 231, 135, 134, 216, 192, 130, 242, 157, 207, 76, 17, 19, 20, 0, 0}
- bh := NewEmptyBlock()
-
- rw := &bytes.Buffer{}
- rw.Write(data)
- err := bh.Decode(rw)
- if err != nil {
- t.Fatal(err)
- }
+func TestEncodeAndDecodeBlock(t *testing.T) {
+ expected := []byte{69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69,
+ 4, 39, 71, 171, 124, 13, 195, 139, 127, 42, 251, 168, 43, 213, 226, 214, 172, 239, 140, 49, 224, 152, 0,
+ 246, 96, 183, 94, 200, 74, 112, 5, 9, 159, 3, 23, 10, 46, 117, 151, 183, 183, 227, 216, 76, 5, 57, 29, 19,
+ 154, 98, 177, 87, 231, 135, 134, 216, 192, 130, 242, 157, 207, 76, 17, 19, 20, 0, 8, 4, 1}
parentHash, err := common.HexToHash("0x4545454545454545454545454545454545454545454545454545454545454545")
- if err != nil {
- t.Fatal(err)
- }
+ require.NoError(t, err)
stateRoot, err := common.HexToHash("0x2747ab7c0dc38b7f2afba82bd5e2d6acef8c31e09800f660b75ec84a7005099f")
- if err != nil {
- t.Fatal(err)
- }
+ require.NoError(t, err)
extrinsicsRoot, err := common.HexToHash("0x03170a2e7597b7b7e3d84c05391d139a62b157e78786d8c082f29dcf4c111314")
- if err != nil {
- t.Fatal(err)
- }
+ require.NoError(t, err)
- header := &Header{
- ParentHash: parentHash,
- Number: big.NewInt(1),
- StateRoot: stateRoot,
- ExtrinsicsRoot: extrinsicsRoot,
- Digest: Digest{},
- }
- expected := NewBlock(header, NewBody(nil))
+ header, err := NewHeader(parentHash, stateRoot, extrinsicsRoot, big.NewInt(1), NewDigest())
+ require.NoError(t, err)
+
+ body := NewBody([]byte{4, 1})
+
+ block := NewBlock(*header, *body)
- if !reflect.DeepEqual(bh, expected) {
- t.Fatalf("Fail: got %v, %v expected %v, %v", bh.Header, bh.Body, expected.Header, expected.Body)
+ enc, err := scale.Marshal(block)
+ require.NoError(t, err)
+
+ require.Equal(t, expected, enc)
+
+ dec := NewBlock(*NewEmptyHeader(), *new(Body))
+ err = scale.Unmarshal(enc, &dec)
+ require.NoError(t, err)
+ if dec.Header.Number != nil {
+ dec.Header.Hash()
}
+ require.Equal(t, block, dec)
}
func TestDeepCopyBlock(t *testing.T) {
data := []byte{69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 4, 39, 71, 171, 124, 13, 195, 139, 127, 42, 251, 168, 43, 213, 226, 214, 172, 239, 140, 49, 224, 152, 0, 246, 96, 183, 94, 200, 74, 112, 5, 9, 159, 3, 23, 10, 46, 117, 151, 183, 183, 227, 216, 76, 5, 57, 29, 19, 154, 98, 177, 87, 231, 135, 134, 216, 192, 130, 242, 157, 207, 76, 17, 19, 20, 0, 0}
- block := NewEmptyBlock()
+ block := NewBlock(*NewEmptyHeader(), *new(Body))
- rw := &bytes.Buffer{}
- rw.Write(data)
- err := block.Decode(rw)
+ err := scale.Unmarshal(data, &block)
if err != nil {
t.Fatal(err)
}
- bc := block.DeepCopy()
+ bc, err := block.DeepCopy()
+ require.NoError(t, err)
bc.Header.ParentHash = common.Hash{}
require.NotEqual(t, block.Header.ParentHash, bc.Header.ParentHash)
}
func TestMustEncodeBlock(t *testing.T) {
- h1, err := NewHeader(common.Hash{}, common.Hash{}, common.Hash{}, big.NewInt(0), Digest{})
+ h1, err := NewHeader(common.Hash{}, common.Hash{}, common.Hash{}, big.NewInt(0), NewDigest())
require.NoError(t, err)
- b1 := NewBlock(h1, NewBody([]byte{}))
+ b1 := NewBlock(*h1, *NewBody([]byte{}))
enc, err := b1.Encode()
require.NoError(t, err)
- h2, err := NewHeader(common.Hash{0x1, 0x2}, common.Hash{}, common.Hash{}, big.NewInt(0), Digest{})
+ h2, err := NewHeader(common.Hash{0x1, 0x2}, common.Hash{}, common.Hash{}, big.NewInt(0), NewDigest())
require.NoError(t, err)
- b2 := NewBlock(h2, NewBody([]byte{0xa, 0xb}))
+ b2 := NewBlock(*h2, *NewBody([]byte{0xa, 0xb}))
enc2, err := b2.Encode()
require.NoError(t, err)
@@ -145,12 +128,12 @@ func TestMustEncodeBlock(t *testing.T) {
}{
{
name: "correct",
- take: b1,
+ take: &b1,
want: enc,
},
{
name: "correct2",
- take: b2,
+ take: &b2,
want: enc2,
},
}
diff --git a/dot/types/body.go b/dot/types/body.go
index 52524fb29e..3b6eb38dde 100644
--- a/dot/types/body.go
+++ b/dot/types/body.go
@@ -18,14 +18,13 @@ package types
import (
"bytes"
- "errors"
"fmt"
- "io"
"math/big"
"github.com/ChainSafe/gossamer/lib/common"
"github.com/ChainSafe/gossamer/lib/common/optional"
"github.com/ChainSafe/gossamer/lib/scale"
+ scale2 "github.com/ChainSafe/gossamer/pkg/scale"
)
// Body is the encoded extrinsics inside a state block
@@ -120,16 +119,16 @@ func (b *Body) AsEncodedExtrinsics() ([]Extrinsic, error) {
return []Extrinsic{}, nil
}
- dec, err := scale.Decode(*b, exts)
+ err := scale2.Unmarshal(*b, &exts)
if err != nil {
return nil, err
}
- decodedExts := dec.([][]byte)
+ decodedExts := exts
ret := make([][]byte, len(decodedExts))
for i, ext := range decodedExts {
- ret[i], err = scale.Encode(ext)
+ ret[i], err = scale2.Marshal(ext)
if err != nil {
return nil, err
}
@@ -138,44 +137,12 @@ func (b *Body) AsEncodedExtrinsics() ([]Extrinsic, error) {
return BytesArrayToExtrinsics(ret), nil
}
-// NewBodyFromOptional returns a Body given an optional.Body. If the optional.Body is None, an error is returned.
-func NewBodyFromOptional(ob *optional.Body) (*Body, error) {
- if !ob.Exists() {
- return nil, errors.New("body is None")
- }
-
- res := Body(ob.Value())
- return &res, nil
-}
-
// AsOptional returns the Body as an optional.Body
func (b *Body) AsOptional() *optional.Body {
ob := optional.CoreBody([]byte(*b))
return optional.NewBody(true, ob)
}
-// decodeOptionalBody decodes a SCALE encoded optional Body into an *optional.Body
-func decodeOptionalBody(r io.Reader) (*optional.Body, error) {
- sd := scale.Decoder{Reader: r}
-
- exists, err := common.ReadByte(r)
- if err != nil {
- return nil, err
- }
-
- if exists == 1 {
- b, err := sd.Decode([]byte{})
- if err != nil {
- return nil, err
- }
-
- body := Body(b.([]byte))
- return body.AsOptional(), nil
- }
-
- return optional.NewBody(false, nil), nil
-}
-
// HasExtrinsic returns true if body contains target Extrisic
// returns error when fails to encode decoded extrinsic on body
func (b *Body) HasExtrinsic(target Extrinsic) (bool, error) {
diff --git a/dot/types/bytes.go b/dot/types/bytes.go
deleted file mode 100644
index c8597231d5..0000000000
--- a/dot/types/bytes.go
+++ /dev/null
@@ -1,46 +0,0 @@
-// Copyright 2019 ChainSafe Systems (ON) Corp.
-// This file is part of gossamer.
-//
-// The gossamer library is free software: you can redistribute it and/or modify
-// it under the terms of the GNU Lesser General Public License as published by
-// the Free Software Foundation, either version 3 of the License, or
-// (at your option) any later version.
-//
-// The gossamer library is distributed in the hope that it will be useful,
-// but WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-// GNU Lesser General Public License for more details.
-//
-// You should have received a copy of the GNU Lesser General Public License
-// along with the gossamer library. If not, see .
-
-package types
-
-import (
- "io"
-
- "github.com/ChainSafe/gossamer/lib/common"
- "github.com/ChainSafe/gossamer/lib/common/optional"
- "github.com/ChainSafe/gossamer/lib/scale"
-)
-
-// decodeOptionalBytes decodes SCALE encoded optional bytes into an *optional.Bytes
-func decodeOptionalBytes(r io.Reader) (*optional.Bytes, error) {
- sd := scale.Decoder{Reader: r}
-
- exists, err := common.ReadByte(r)
- if err != nil {
- return nil, err
- }
-
- if exists == 1 {
- b, err := sd.Decode([]byte{})
- if err != nil {
- return nil, err
- }
-
- return optional.NewBytes(true, b.([]byte)), nil
- }
-
- return optional.NewBytes(false, nil), nil
-}
diff --git a/dot/types/bytes_test.go b/dot/types/bytes_test.go
deleted file mode 100644
index 4c8dd6869d..0000000000
--- a/dot/types/bytes_test.go
+++ /dev/null
@@ -1,19 +0,0 @@
-// Copyright 2019 ChainSafe Systems (ON) Corp.
-// This file is part of gossamer.
-//
-// The gossamer library is free software: you can redistribute it and/or modify
-// it under the terms of the GNU Lesser General Public License as published by
-// the Free Software Foundation, either version 3 of the License, or
-// (at your option) any later version.
-//
-// The gossamer library is distributed in the hope that it will be useful,
-// but WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-// GNU Lesser General Public License for more details.
-//
-// You should have received a copy of the GNU Lesser General Public License
-// along with the gossamer library. If not, see .
-
-package types
-
-// TODO: improve dot tests #687
diff --git a/dot/types/consensus_digest.go b/dot/types/consensus_digest.go
index fcda02a719..bfdfeeadb1 100644
--- a/dot/types/consensus_digest.go
+++ b/dot/types/consensus_digest.go
@@ -1,118 +1,70 @@
package types
import (
- "github.com/ChainSafe/gossamer/lib/scale"
+ "github.com/ChainSafe/gossamer/pkg/scale"
)
-// The follow are the consensus digest types for grandpa
-var (
- GrandpaScheduledChangeType = byte(1)
- GrandpaForcedChangeType = byte(2)
- GrandpaOnDisabledType = byte(3)
- GrandpaPauseType = byte(4)
- GrandpaResumeType = byte(5)
-)
+// NewBabeConsensusDigest constructs a VaryingDataType representing a BABE consensus digest
+func NewBabeConsensusDigest() scale.VaryingDataType {
+ return scale.MustNewVaryingDataType(NextEpochData{}, BABEOnDisabled{}, NextConfigData{})
+}
-// The follow are the consensus digest types for BABE
-var (
- NextEpochDataType = byte(1)
- BABEOnDisabledType = byte(2)
- NextConfigDataType = byte(3)
-)
+// NewGrandpaConsensusDigest constructs a VaryingDataType representing a GRANDPA consensus digest
+func NewGrandpaConsensusDigest() scale.VaryingDataType {
+ return scale.MustNewVaryingDataType(GrandpaScheduledChange{}, GrandpaForcedChange{}, GrandpaOnDisabled{}, GrandpaPause{}, GrandpaResume{})
+}
// GrandpaScheduledChange represents a GRANDPA scheduled authority change
type GrandpaScheduledChange struct {
- Auths []*GrandpaAuthoritiesRaw
+ Auths []GrandpaAuthoritiesRaw
Delay uint32
}
-// Encode returns a SCALE encoded GrandpaScheduledChange with first type byte
-func (sc *GrandpaScheduledChange) Encode() ([]byte, error) {
- d, err := scale.Encode(sc)
- if err != nil {
- return nil, err
- }
-
- return append([]byte{GrandpaScheduledChangeType}, d...), nil
-}
+// Index returns the VDT index
+func (sc GrandpaScheduledChange) Index() uint { return 1 }
// GrandpaForcedChange represents a GRANDPA forced authority change
type GrandpaForcedChange struct {
- Auths []*GrandpaAuthoritiesRaw
+ Auths []GrandpaAuthoritiesRaw
Delay uint32
}
-// Encode returns a SCALE encoded GrandpaForcedChange with first type byte
-func (fc *GrandpaForcedChange) Encode() ([]byte, error) {
- d, err := scale.Encode(fc)
- if err != nil {
- return nil, err
- }
-
- return append([]byte{GrandpaForcedChangeType}, d...), nil
-}
+// Index returns the VDT index
+func (fc GrandpaForcedChange) Index() uint { return 2 }
// GrandpaOnDisabled represents a GRANDPA authority being disabled
type GrandpaOnDisabled struct {
ID uint64
}
-// Encode returns a SCALE encoded GrandpaOnDisabled with first type byte
-func (od *GrandpaOnDisabled) Encode() ([]byte, error) {
- d, err := scale.Encode(od)
- if err != nil {
- return nil, err
- }
-
- return append([]byte{GrandpaOnDisabledType}, d...), nil
-}
+// Index returns the VDT index
+func (od GrandpaOnDisabled) Index() uint { return 3 }
// GrandpaPause represents an authority set pause
type GrandpaPause struct {
Delay uint32
}
-// Encode returns a SCALE encoded GrandpaPause with first type byte
-func (p *GrandpaPause) Encode() ([]byte, error) {
- d, err := scale.Encode(p)
- if err != nil {
- return nil, err
- }
-
- return append([]byte{GrandpaPauseType}, d...), nil
-}
+// Index returns the VDT index
+func (p GrandpaPause) Index() uint { return 4 }
// GrandpaResume represents an authority set resume
type GrandpaResume struct {
Delay uint32
}
-// Encode returns a SCALE encoded GrandpaResume with first type byte
-func (r *GrandpaResume) Encode() ([]byte, error) {
- d, err := scale.Encode(r)
- if err != nil {
- return nil, err
- }
-
- return append([]byte{GrandpaResumeType}, d...), nil
-}
+// Index returns the VDT index
+func (r GrandpaResume) Index() uint { return 5 }
// NextEpochData is the digest that contains the data for the upcoming BABE epoch.
// It is included in the first block of every epoch to describe the next epoch.
type NextEpochData struct {
- Authorities []*AuthorityRaw
+ Authorities []AuthorityRaw
Randomness [RandomnessLength]byte
}
-// Encode returns a SCALE encoded NextEpochData with first type byte
-func (d *NextEpochData) Encode() ([]byte, error) {
- enc, err := scale.Encode(d)
- if err != nil {
- return nil, err
- }
-
- return append([]byte{NextEpochDataType}, enc...), nil
-}
+// Index returns the VDT index
+func (d NextEpochData) Index() uint { return 1 }
// ToEpochData returns the NextEpochData as EpochData
func (d *NextEpochData) ToEpochData() (*EpochData, error) {
@@ -132,15 +84,8 @@ type BABEOnDisabled struct {
ID uint32
}
-// Encode returns a SCALE encoded BABEOnDisabled with first type byte
-func (od *BABEOnDisabled) Encode() ([]byte, error) {
- d, err := scale.Encode(od)
- if err != nil {
- return nil, err
- }
-
- return append([]byte{BABEOnDisabledType}, d...), nil
-}
+// Index returns the VDT index
+func (od BABEOnDisabled) Index() uint { return 2 }
// NextConfigData is the digest that contains changes to the BABE configuration.
// It is potentially included in the first block of an epoch to describe the next epoch.
@@ -150,15 +95,8 @@ type NextConfigData struct {
SecondarySlots byte
}
-// Encode returns a SCALE encoded NextConfigData with first type byte
-func (d *NextConfigData) Encode() ([]byte, error) {
- enc, err := scale.Encode(d)
- if err != nil {
- return nil, err
- }
-
- return append([]byte{NextConfigDataType}, enc...), nil
-}
+// Index returns the VDT index
+func (d NextConfigData) Index() uint { return 3 }
// ToConfigData returns the NextConfigData as ConfigData
func (d *NextConfigData) ToConfigData() *ConfigData {
diff --git a/dot/types/consensus_digest_test.go b/dot/types/consensus_digest_test.go
new file mode 100644
index 0000000000..9579f3d3e3
--- /dev/null
+++ b/dot/types/consensus_digest_test.go
@@ -0,0 +1,61 @@
+// Copyright 2019 ChainSafe Systems (ON) Corp.
+// This file is part of gossamer.
+//
+// The gossamer library is free software: you can redistribute it and/or modify
+// it under the terms of the GNU Lesser General Public License as published by
+// the Free Software Foundation, either version 3 of the License, or
+// (at your option) any later version.
+//
+// The gossamer library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+// GNU Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public License
+// along with the gossamer library. If not, see <http://www.gnu.org/licenses/>.
+
+package types
+
+import (
+ "testing"
+
+ "github.com/ChainSafe/gossamer/lib/common"
+ "github.com/ChainSafe/gossamer/lib/crypto/sr25519"
+ "github.com/ChainSafe/gossamer/lib/keystore"
+ "github.com/ChainSafe/gossamer/pkg/scale"
+
+ "github.com/stretchr/testify/require"
+)
+
+func TestBabeEncodeAndDecode(t *testing.T) {
+ expData := common.MustHexToBytes("0x0108d43593c715fdd31c61141abd04a99fd6822c8558854ccde39a5684e7a56da27d01000000000000008eaf04151687736326c9fea17e25fc5287613693c912909cb226aa4794f26a4801000000000000004d58630000000000000000000000000000000000000000000000000000000000")
+
+ keyring, err := keystore.NewSr25519Keyring()
+ require.NoError(t, err)
+
+ authA := AuthorityRaw{
+ Key: keyring.Alice().Public().(*sr25519.PublicKey).AsBytes(),
+ Weight: 1,
+ }
+
+ authB := AuthorityRaw{
+ Key: keyring.Bob().Public().(*sr25519.PublicKey).AsBytes(),
+ Weight: 1,
+ }
+
+ var d = NewBabeConsensusDigest()
+ err = d.Set(NextEpochData{
+ Authorities: []AuthorityRaw{authA, authB},
+ Randomness: [32]byte{77, 88, 99},
+ })
+ require.NoError(t, err)
+
+ enc, err := scale.Marshal(d)
+ require.NoError(t, err)
+ require.Equal(t, expData, enc)
+
+ var dec = NewBabeConsensusDigest()
+ err = scale.Unmarshal(enc, &dec)
+ require.NoError(t, err)
+ require.Equal(t, d, dec)
+}
diff --git a/dot/types/digest.go b/dot/types/digest.go
index b2e66f716a..d26b6eb2f5 100644
--- a/dot/types/digest.go
+++ b/dot/types/digest.go
@@ -17,56 +17,20 @@
package types
import (
- "errors"
"fmt"
- "io"
- "math/big"
"github.com/ChainSafe/gossamer/lib/common"
- "github.com/ChainSafe/gossamer/lib/scale"
+ "github.com/ChainSafe/gossamer/pkg/scale"
)
-// Digest represents the block digest. It consists of digest items.
-type Digest []DigestItem
-
-// NewEmptyDigest returns an empty digest
-func NewEmptyDigest() Digest {
- return []DigestItem{}
+// NewDigestItem returns a new VaryingDataType to represent a DigestItem
+func NewDigestItem() scale.VaryingDataType {
+ return scale.MustNewVaryingDataType(ChangesTrieRootDigest{}, PreRuntimeDigest{}, ConsensusDigest{}, SealDigest{})
}
// NewDigest returns a new Digest from the given DigestItems
-func NewDigest(items ...DigestItem) Digest {
- return items
-}
-
-// Encode returns the SCALE encoded digest
-func (d *Digest) Encode() ([]byte, error) {
- enc, err := scale.Encode(big.NewInt(int64(len(*d))))
- if err != nil {
- return nil, err
- }
-
- for _, item := range *d {
- encItem, err := item.Encode()
- if err != nil {
- return nil, err
- }
-
- enc = append(enc, encItem...)
- }
-
- return enc, nil
-}
-
-// Decode decodes a SCALE encoded digest and appends it to the given Digest
-func (d *Digest) Decode(r io.Reader) error {
- var err error
- digest, err := DecodeDigest(r)
- if err != nil {
- return err
- }
- *d = digest
- return nil
+func NewDigest() scale.VaryingDataTypeSlice {
+ return scale.NewVaryingDataTypeSlice(NewDigestItem())
}
// ConsensusEngineID is a 4-character identifier of the consensus engine that produced the digest.
@@ -92,114 +56,28 @@ var BabeEngineID = ConsensusEngineID{'B', 'A', 'B', 'E'}
// GrandpaEngineID is the hard-coded grandpa ID
var GrandpaEngineID = ConsensusEngineID{'F', 'R', 'N', 'K'}
-// ChangesTrieRootDigestType is the byte representation of ChangesTrieRootDigest
-var ChangesTrieRootDigestType = byte(2)
-
-// PreRuntimeDigestType is the byte representation of PreRuntimeDigest
-var PreRuntimeDigestType = byte(6)
-
-// ConsensusDigestType is the byte representation of ConsensusDigest
-var ConsensusDigestType = byte(4)
-
-// SealDigestType is the byte representation of SealDigest
-var SealDigestType = byte(5)
-
-// DecodeDigest decodes the input into a Digest
-func DecodeDigest(r io.Reader) (Digest, error) {
- sd := scale.Decoder{Reader: r}
-
- num, err := sd.Decode(big.NewInt(0))
- if err != nil {
- return nil, fmt.Errorf("could not decode length of digest items: %w", err)
- }
-
- digest := make([]DigestItem, num.(*big.Int).Uint64())
-
- for i := 0; i < len(digest); i++ {
- digest[i], err = DecodeDigestItem(r)
- if err != nil {
- return nil, fmt.Errorf("could not decode digest item %d: %w", i, err)
- }
- }
-
- return digest, nil
-}
-
-// DecodeDigestItem will decode byte array to DigestItem
-func DecodeDigestItem(r io.Reader) (DigestItem, error) {
- typ, err := common.ReadByte(r)
- if err != nil {
- return nil, err
- }
-
- switch typ {
- case ChangesTrieRootDigestType:
- d := new(ChangesTrieRootDigest)
- err := d.Decode(r)
- return d, err
- case PreRuntimeDigestType:
- d := new(PreRuntimeDigest)
- err := d.Decode(r)
- return d, err
- case ConsensusDigestType:
- d := new(ConsensusDigest)
- err := d.Decode(r)
- return d, err
- case SealDigestType:
- d := new(SealDigest)
- err := d.Decode(r)
- return d, err
- }
-
- return nil, errors.New("invalid digest item type")
-}
-
-// DigestItem can be of one of four types of digest: ChangesTrieRootDigest, PreRuntimeDigest, ConsensusDigest, or SealDigest.
-// see https://github.com/paritytech/substrate/blob/f548309478da3935f72567c2abc2eceec3978e9f/primitives/runtime/src/generic/digest.rs#L77
-type DigestItem interface {
- String() string
- Type() byte
- Encode() ([]byte, error)
- Decode(io.Reader) error // Decode assumes the type byte (first byte) has been removed from the encoding.
-}
-
// ChangesTrieRootDigest contains the root of the changes trie at a given block, if the runtime supports it.
type ChangesTrieRootDigest struct {
Hash common.Hash
}
+// Index returns the VDT index
+func (d ChangesTrieRootDigest) Index() uint { return 2 }
+
// String returns the digest as a string
func (d *ChangesTrieRootDigest) String() string {
return fmt.Sprintf("ChangesTrieRootDigest Hash=%s", d.Hash)
}
-// Type returns the type
-func (d *ChangesTrieRootDigest) Type() byte {
- return ChangesTrieRootDigestType
-}
-
-// Encode will encode the ChangesTrieRootDigestType into byte array
-func (d *ChangesTrieRootDigest) Encode() ([]byte, error) {
- return append([]byte{ChangesTrieRootDigestType}, d.Hash[:]...), nil
-}
-
-// Decode will decode into ChangesTrieRootDigest Hash
-func (d *ChangesTrieRootDigest) Decode(r io.Reader) error {
- hash, err := common.ReadHash(r)
- if err != nil {
- return err
- }
-
- copy(d.Hash[:], hash[:])
- return nil
-}
-
// PreRuntimeDigest contains messages from the consensus engine to the runtime.
type PreRuntimeDigest struct {
ConsensusEngineID ConsensusEngineID
Data []byte
}
+// Index returns the VDT index
+func (d PreRuntimeDigest) Index() uint { return 6 }
+
// NewBABEPreRuntimeDigest returns a PreRuntimeDigest with the BABE consensus ID
func NewBABEPreRuntimeDigest(data []byte) *PreRuntimeDigest {
return &PreRuntimeDigest{
@@ -213,142 +91,30 @@ func (d *PreRuntimeDigest) String() string {
return fmt.Sprintf("PreRuntimeDigest ConsensusEngineID=%s Data=0x%x", d.ConsensusEngineID.ToBytes(), d.Data)
}
-// Type will return PreRuntimeDigestType
-func (d *PreRuntimeDigest) Type() byte {
- return PreRuntimeDigestType
-}
-
-// Encode will encode PreRuntimeDigest ConsensusEngineID and Data
-func (d *PreRuntimeDigest) Encode() ([]byte, error) {
- enc := []byte{PreRuntimeDigestType}
- enc = append(enc, d.ConsensusEngineID[:]...)
-
- // encode data
- output, err := scale.Encode(d.Data)
- if err != nil {
- return nil, err
- }
-
- return append(enc, output...), nil
-}
-
-// Decode will decode PreRuntimeDigest ConsensusEngineID and Data
-func (d *PreRuntimeDigest) Decode(r io.Reader) error {
- id, err := common.Read4Bytes(r)
- if err != nil {
- return err
- }
-
- copy(d.ConsensusEngineID[:], id)
-
- sd := scale.Decoder{Reader: r}
- output, err := sd.Decode([]byte{})
- if err != nil {
- return err
- }
-
- d.Data = output.([]byte)
- return nil
-}
-
// ConsensusDigest contains messages from the runtime to the consensus engine.
type ConsensusDigest struct {
ConsensusEngineID ConsensusEngineID
Data []byte
}
+// Index returns the VDT index
+func (d ConsensusDigest) Index() uint { return 4 }
+
// String returns the digest as a string
func (d *ConsensusDigest) String() string {
return fmt.Sprintf("ConsensusDigest ConsensusEngineID=%s Data=0x%x", d.ConsensusEngineID.ToBytes(), d.Data)
}
-// Type returns the ConsensusDigest type
-func (d *ConsensusDigest) Type() byte {
- return ConsensusDigestType
-}
-
-// Encode will encode ConsensusDigest ConsensusEngineID and Data
-func (d *ConsensusDigest) Encode() ([]byte, error) {
- enc := []byte{ConsensusDigestType}
- enc = append(enc, d.ConsensusEngineID[:]...)
- // encode data
- output, err := scale.Encode(d.Data)
- if err != nil {
- return nil, err
- }
-
- return append(enc, output...), nil
-}
-
-// Decode will decode into ConsensusEngineID and Data
-func (d *ConsensusDigest) Decode(r io.Reader) error {
- id, err := common.Read4Bytes(r)
- if err != nil {
- return err
- }
-
- copy(d.ConsensusEngineID[:], id)
-
- sd := scale.Decoder{Reader: r}
- output, err := sd.Decode([]byte{})
- if err != nil {
- return err
- }
-
- d.Data = output.([]byte)
- return nil
-}
-
-// DataType returns the data type of the runtime-to-consensus engine message
-func (d *ConsensusDigest) DataType() byte {
- return d.Data[0]
-}
-
// SealDigest contains the seal or signature. This is only used by native code.
type SealDigest struct {
ConsensusEngineID ConsensusEngineID
Data []byte
}
+// Index returns the VDT index
+func (d SealDigest) Index() uint { return 5 }
+
// String returns the digest as a string
func (d *SealDigest) String() string {
return fmt.Sprintf("SealDigest ConsensusEngineID=%s Data=0x%x", d.ConsensusEngineID.ToBytes(), d.Data)
}
-
-// Type will return SealDigest type
-func (d *SealDigest) Type() byte {
- return SealDigestType
-}
-
-// Encode will encode SealDigest ConsensusEngineID and Data
-func (d *SealDigest) Encode() ([]byte, error) {
- enc := []byte{SealDigestType}
- enc = append(enc, d.ConsensusEngineID[:]...)
- // encode data
- output, err := scale.Encode(d.Data)
- if err != nil {
- return nil, err
- }
- return append(enc, output...), nil
-}
-
-// Decode will decode into SealDigest ConsensusEngineID and Data
-func (d *SealDigest) Decode(r io.Reader) error {
- id, err := common.Read4Bytes(r)
- if err != nil {
- return err
- }
-
- copy(d.ConsensusEngineID[:], id)
-
- // decode data
- sd := scale.Decoder{Reader: r}
-
- output, err := sd.Decode([]byte{})
- if err != nil {
- return err
- }
-
- d.Data = output.([]byte)
- return nil
-}
diff --git a/dot/types/digest_test.go b/dot/types/digest_test.go
index a5f11bb6b2..55d90fc49e 100644
--- a/dot/types/digest_test.go
+++ b/dot/types/digest_test.go
@@ -21,86 +21,168 @@ import (
"testing"
"github.com/ChainSafe/gossamer/lib/common"
+ "github.com/ChainSafe/gossamer/pkg/scale"
"github.com/stretchr/testify/require"
)
-func TestDecodeDigest(t *testing.T) {
+func TestEncode(t *testing.T) {
d := common.MustHexToBytes("0x0c0642414245340201000000ef55a50f00000000044241424549040118ca239392960473fe1bc65f94ee27d890a49c1b200c006ff5dcc525330ecc16770100000000000000b46f01874ce7abbb5220e8fd89bede0adad14c73039d91e28e881823433e723f0100000000000000d684d9176d6eb69887540c9a89fa6097adea82fc4b0ff26d1062b488f352e179010000000000000068195a71bdde49117a616424bdc60a1733e96acb1da5aeab5d268cf2a572e94101000000000000001a0575ef4ae24bdfd31f4cb5bd61239ae67c12d4e64ae51ac756044aa6ad8200010000000000000018168f2aad0081a25728961ee00627cfe35e39833c805016632bf7c14da5800901000000000000000000000000000000000000000000000000000000000000000000000000000000054241424501014625284883e564bc1e4063f5ea2b49846cdddaa3761d04f543b698c1c3ee935c40d25b869247c36c6b8a8cbbd7bb2768f560ab7c276df3c62df357a7e3b1ec8d")
r := &bytes.Buffer{}
_, _ = r.Write(d)
- digest := &Digest{}
- err := digest.Decode(r)
+
+ vdts := NewDigest()
+ err := vdts.Add(
+ PreRuntimeDigest{
+ ConsensusEngineID: BabeEngineID,
+ Data: common.MustHexToBytes("0x0201000000ef55a50f00000000"),
+ },
+ ConsensusDigest{
+ ConsensusEngineID: BabeEngineID,
+ Data: common.MustHexToBytes("0x0118ca239392960473fe1bc65f94ee27d890a49c1b200c006ff5dcc525330ecc16770100000000000000b46f01874ce7abbb5220e8fd89bede0adad14c73039d91e28e881823433e723f0100000000000000d684d9176d6eb69887540c9a89fa6097adea82fc4b0ff26d1062b488f352e179010000000000000068195a71bdde49117a616424bdc60a1733e96acb1da5aeab5d268cf2a572e94101000000000000001a0575ef4ae24bdfd31f4cb5bd61239ae67c12d4e64ae51ac756044aa6ad8200010000000000000018168f2aad0081a25728961ee00627cfe35e39833c805016632bf7c14da5800901000000000000000000000000000000000000000000000000000000000000000000000000000000"),
+ },
+ SealDigest{
+ ConsensusEngineID: BabeEngineID,
+ Data: common.MustHexToBytes("0x4625284883e564bc1e4063f5ea2b49846cdddaa3761d04f543b698c1c3ee935c40d25b869247c36c6b8a8cbbd7bb2768f560ab7c276df3c62df357a7e3b1ec8d"),
+ },
+ )
+ require.NoError(t, err)
+
+ b, err := scale.Marshal(vdts)
+ require.NoError(t, err)
+ require.Equal(t, d, b)
+
+ v := NewDigest()
+ err = scale.Unmarshal(b, &v)
+ require.NoError(t, err)
+
+ encV, err := scale.Marshal(v)
+ require.NoError(t, err)
+ require.Equal(t, d, encV)
+}
+
+func TestDecodeSingleDigest(t *testing.T) {
+ exp := common.MustHexToBytes("0x06424142451001030507")
+ d := PreRuntimeDigest{
+ ConsensusEngineID: BabeEngineID,
+ Data: []byte{1, 3, 5, 7},
+ }
+
+ di := NewDigestItem()
+ err := di.Set(d)
require.NoError(t, err)
- require.Equal(t, 3, len(*digest))
- enc, err := digest.Encode()
+ enc, err := scale.Marshal(di)
+ require.NoError(t, err)
+
+ require.Equal(t, exp, enc)
+
+ v := NewDigestItem()
+ err = scale.Unmarshal(enc, &v)
+ require.NoError(t, err)
+
+ require.Equal(t, di.Value(), v.Value())
+}
+
+func TestDecodeDigest(t *testing.T) {
+ d := common.MustHexToBytes("0x0c0642414245340201000000ef55a50f00000000044241424549040118ca239392960473fe1bc65f94ee27d890a49c1b200c006ff5dcc525330ecc16770100000000000000b46f01874ce7abbb5220e8fd89bede0adad14c73039d91e28e881823433e723f0100000000000000d684d9176d6eb69887540c9a89fa6097adea82fc4b0ff26d1062b488f352e179010000000000000068195a71bdde49117a616424bdc60a1733e96acb1da5aeab5d268cf2a572e94101000000000000001a0575ef4ae24bdfd31f4cb5bd61239ae67c12d4e64ae51ac756044aa6ad8200010000000000000018168f2aad0081a25728961ee00627cfe35e39833c805016632bf7c14da5800901000000000000000000000000000000000000000000000000000000000000000000000000000000054241424501014625284883e564bc1e4063f5ea2b49846cdddaa3761d04f543b698c1c3ee935c40d25b869247c36c6b8a8cbbd7bb2768f560ab7c276df3c62df357a7e3b1ec8d")
+
+ v := NewDigest()
+ err := scale.Unmarshal(d, &v)
+ require.NoError(t, err)
+ require.Equal(t, 3, len(v.Types))
+
+ enc, err := scale.Marshal(v)
require.NoError(t, err)
require.Equal(t, d, enc)
}
func TestChangesTrieRootDigest(t *testing.T) {
- d := &ChangesTrieRootDigest{
+ exp := common.MustHexToBytes("0x02005b3219d65e772447d8219855b822783da1a4df4c3528f64c26ebcc2b1fb31c")
+ d := ChangesTrieRootDigest{
Hash: common.Hash{0, 91, 50, 25, 214, 94, 119, 36, 71, 216, 33, 152, 85, 184, 34, 120, 61, 161, 164, 223, 76, 53, 40, 246, 76, 38, 235, 204, 43, 31, 179, 28},
}
- enc, err := d.Encode()
+ di := NewDigestItem()
+ err := di.Set(d)
require.NoError(t, err)
- r := &bytes.Buffer{}
- _, _ = r.Write(enc)
+ enc, err := scale.Marshal(di)
+ require.NoError(t, err)
+
+ require.Equal(t, exp, enc)
- d2, err := DecodeDigestItem(r)
+ v := NewDigestItem()
+ err = scale.Unmarshal(enc, &v)
require.NoError(t, err)
- require.Equal(t, d, d2)
+
+ require.Equal(t, di.Value(), v.Value())
}
func TestPreRuntimeDigest(t *testing.T) {
- d := &PreRuntimeDigest{
+ exp := common.MustHexToBytes("0x06424142451001030507")
+ d := PreRuntimeDigest{
ConsensusEngineID: BabeEngineID,
Data: []byte{1, 3, 5, 7},
}
- enc, err := d.Encode()
+ di := NewDigestItem()
+ err := di.Set(d)
require.NoError(t, err)
- r := &bytes.Buffer{}
- _, _ = r.Write(enc)
+ enc, err := scale.Marshal(di)
+ require.NoError(t, err)
- d2, err := DecodeDigestItem(r)
+ require.Equal(t, exp, enc)
+
+ v := NewDigestItem()
+ err = scale.Unmarshal(enc, &v)
require.NoError(t, err)
- require.Equal(t, d, d2)
+
+ require.Equal(t, di.Value(), v.Value())
}
func TestConsensusDigest(t *testing.T) {
- d := &ConsensusDigest{
+ exp := common.MustHexToBytes("0x04424142451001030507")
+ d := ConsensusDigest{
ConsensusEngineID: BabeEngineID,
Data: []byte{1, 3, 5, 7},
}
- enc, err := d.Encode()
+ di := NewDigestItem()
+ err := di.Set(d)
require.NoError(t, err)
- r := &bytes.Buffer{}
- _, _ = r.Write(enc)
+ enc, err := scale.Marshal(di)
+ require.NoError(t, err)
+
+ require.Equal(t, exp, enc)
- d2, err := DecodeDigestItem(r)
+ v := NewDigestItem()
+ err = scale.Unmarshal(enc, &v)
require.NoError(t, err)
- require.Equal(t, d, d2)
+
+ require.Equal(t, di.Value(), v.Value())
}
func TestSealDigest(t *testing.T) {
- d := &SealDigest{
+ exp := common.MustHexToBytes("0x05424142451001030507")
+ d := SealDigest{
ConsensusEngineID: BabeEngineID,
Data: []byte{1, 3, 5, 7},
}
- enc, err := d.Encode()
+ di := NewDigestItem()
+ err := di.Set(d)
require.NoError(t, err)
- r := &bytes.Buffer{}
- _, _ = r.Write(enc)
+ enc, err := scale.Marshal(di)
+ require.NoError(t, err)
- d2, err := DecodeDigestItem(r)
+ require.Equal(t, exp, enc)
+
+ v := NewDigestItem()
+ err = scale.Unmarshal(enc, &v)
require.NoError(t, err)
- require.Equal(t, d, d2)
+
+ require.Equal(t, di.Value(), v.Value())
}
diff --git a/dot/types/extrinsic.go b/dot/types/extrinsic.go
index 524ffbc73b..d575766fde 100644
--- a/dot/types/extrinsic.go
+++ b/dot/types/extrinsic.go
@@ -75,7 +75,7 @@ func ExtrinsicsArrayToBytesArray(exts []Extrinsic) [][]byte {
func BytesArrayToExtrinsics(b [][]byte) []Extrinsic {
exts := make([]Extrinsic, len(b))
for i, be := range b {
- exts[i] = Extrinsic(be)
+ exts[i] = be
}
return exts
}
diff --git a/dot/types/grandpa.go b/dot/types/grandpa.go
index 0a17f0bbb0..26cfc555e5 100644
--- a/dot/types/grandpa.go
+++ b/dot/types/grandpa.go
@@ -53,7 +53,7 @@ func (a *GrandpaAuthoritiesRaw) Decode(r io.Reader) (*GrandpaAuthoritiesRaw, err
// FromRawEd25519 sets the Authority given GrandpaAuthoritiesRaw. It converts the byte representations of
// the authority public keys into a ed25519.PublicKey.
-func (a *Authority) FromRawEd25519(raw *GrandpaAuthoritiesRaw) error {
+func (a *Authority) FromRawEd25519(raw GrandpaAuthoritiesRaw) error {
key, err := ed25519.NewPublicKey(raw.Key[:])
if err != nil {
return err
@@ -65,10 +65,10 @@ func (a *Authority) FromRawEd25519(raw *GrandpaAuthoritiesRaw) error {
}
// GrandpaAuthoritiesRawToAuthorities turns a slice of GrandpaAuthoritiesRaw into a slice of Authority
-func GrandpaAuthoritiesRawToAuthorities(adr []*GrandpaAuthoritiesRaw) ([]*Authority, error) {
- ad := make([]*Authority, len(adr))
+func GrandpaAuthoritiesRawToAuthorities(adr []GrandpaAuthoritiesRaw) ([]Authority, error) {
+ ad := make([]Authority, len(adr))
for i, r := range adr {
- ad[i] = new(Authority)
+ ad[i] = Authority{}
err := ad[i].FromRawEd25519(r)
if err != nil {
return nil, err
@@ -117,12 +117,12 @@ func (v *GrandpaVoter) Decode(r io.Reader) error {
}
// NewGrandpaVotersFromAuthorities returns an array of GrandpaVoters given an array of GrandpaAuthorities
-func NewGrandpaVotersFromAuthorities(ad []*Authority) []*GrandpaVoter {
- v := make([]*GrandpaVoter, len(ad))
+func NewGrandpaVotersFromAuthorities(ad []Authority) []GrandpaVoter {
+ v := make([]GrandpaVoter, len(ad))
for i, d := range ad {
if pk, ok := d.Key.(*ed25519.PublicKey); ok {
- v[i] = &GrandpaVoter{
+ v[i] = GrandpaVoter{
Key: pk,
ID: d.Weight,
}
@@ -133,8 +133,8 @@ func NewGrandpaVotersFromAuthorities(ad []*Authority) []*GrandpaVoter {
}
// NewGrandpaVotersFromAuthoritiesRaw returns an array of GrandpaVoters given an array of GrandpaAuthoritiesRaw
-func NewGrandpaVotersFromAuthoritiesRaw(ad []*GrandpaAuthoritiesRaw) ([]*GrandpaVoter, error) {
- v := make([]*GrandpaVoter, len(ad))
+func NewGrandpaVotersFromAuthoritiesRaw(ad []GrandpaAuthoritiesRaw) ([]GrandpaVoter, error) {
+ v := make([]GrandpaVoter, len(ad))
for i, d := range ad {
key, err := ed25519.NewPublicKey(d.Key[:])
@@ -142,7 +142,7 @@ func NewGrandpaVotersFromAuthoritiesRaw(ad []*GrandpaAuthoritiesRaw) ([]*Grandpa
return nil, err
}
- v[i] = &GrandpaVoter{
+ v[i] = GrandpaVoter{
Key: key,
ID: d.ID,
}
@@ -152,7 +152,7 @@ func NewGrandpaVotersFromAuthoritiesRaw(ad []*GrandpaAuthoritiesRaw) ([]*Grandpa
}
// GrandpaVoters represents []*GrandpaVoter
-type GrandpaVoters []*GrandpaVoter
+type GrandpaVoters []GrandpaVoter
// String returns a formatted Voters string
func (v GrandpaVoters) String() string {
@@ -171,9 +171,9 @@ func DecodeGrandpaVoters(r io.Reader) (GrandpaVoters, error) {
return nil, err
}
- voters := make([]*GrandpaVoter, length)
+ voters := make([]GrandpaVoter, length)
for i := range voters {
- voters[i] = new(GrandpaVoter)
+ voters[i] = GrandpaVoter{}
err = voters[i].Decode(r)
if err != nil {
return nil, err
@@ -185,7 +185,7 @@ func DecodeGrandpaVoters(r io.Reader) (GrandpaVoters, error) {
// FinalisationInfo represents information about what block was finalised in what round and setID
type FinalisationInfo struct {
- Header *Header
+ Header Header
Round uint64
SetID uint64
}
diff --git a/dot/types/header.go b/dot/types/header.go
index 1cb3da31b8..3cdf9dbaa7 100644
--- a/dot/types/header.go
+++ b/dot/types/header.go
@@ -19,26 +19,24 @@ package types
import (
"errors"
"fmt"
- "io"
"math/big"
"github.com/ChainSafe/gossamer/lib/common"
- "github.com/ChainSafe/gossamer/lib/common/optional"
- "github.com/ChainSafe/gossamer/lib/scale"
+ "github.com/ChainSafe/gossamer/pkg/scale"
)
// Header is a state block header
type Header struct {
- ParentHash common.Hash `json:"parentHash"`
- Number *big.Int `json:"number"`
- StateRoot common.Hash `json:"stateRoot"`
- ExtrinsicsRoot common.Hash `json:"extrinsicsRoot"`
- Digest Digest `json:"digest"`
+ ParentHash common.Hash `json:"parentHash"`
+ Number *big.Int `json:"number"`
+ StateRoot common.Hash `json:"stateRoot"`
+ ExtrinsicsRoot common.Hash `json:"extrinsicsRoot"`
+ Digest scale.VaryingDataTypeSlice `json:"digest"`
hash common.Hash
}
// NewHeader creates a new block header and sets its hash field
-func NewHeader(parentHash, stateRoot, extrinsicsRoot common.Hash, number *big.Int, digest []DigestItem) (*Header, error) {
+func NewHeader(parentHash, stateRoot, extrinsicsRoot common.Hash, number *big.Int, digest scale.VaryingDataTypeSlice) (*Header, error) {
if number == nil {
// Hash() will panic if number is nil
return nil, errors.New("cannot have nil block number")
@@ -60,12 +58,26 @@ func NewHeader(parentHash, stateRoot, extrinsicsRoot common.Hash, number *big.In
func NewEmptyHeader() *Header {
return &Header{
Number: big.NewInt(0),
- Digest: []DigestItem{},
+ Digest: NewDigest(),
}
}
+// Exists returns a boolean indicating if the header exists
+func (bh *Header) Exists() bool {
+ exists := bh != nil
+ return exists
+}
+
+// Empty returns a boolean indicating if the header is empty
+func (bh *Header) Empty() bool {
+ if !bh.StateRoot.Equal(common.Hash{}) || !bh.ExtrinsicsRoot.Equal(common.Hash{}) || !bh.ParentHash.Equal(common.Hash{}) {
+ return false
+ }
+	return (bh.Number == nil || bh.Number.Cmp(big.NewInt(0)) == 0) && len(bh.Digest.Types) == 0
+}
+
// DeepCopy returns a deep copy of the header to prevent side effects down the road
-func (bh *Header) DeepCopy() *Header {
+func (bh *Header) DeepCopy() (*Header, error) {
cp := NewEmptyHeader()
copy(cp.ParentHash[:], bh.ParentHash[:])
copy(cp.StateRoot[:], bh.StateRoot[:])
@@ -75,12 +87,17 @@ func (bh *Header) DeepCopy() *Header {
cp.Number = new(big.Int).Set(bh.Number)
}
- if len(bh.Digest) > 0 {
- cp.Digest = make([]DigestItem, len(bh.Digest))
- copy(cp.Digest[:], bh.Digest[:])
+ if len(bh.Digest.Types) > 0 {
+ cp.Digest = NewDigest()
+ for _, d := range bh.Digest.Types {
+ err := cp.Digest.Add(d.Value())
+ if err != nil {
+ return nil, err
+ }
+ }
}
- return cp
+ return cp, nil
}
// String returns the formatted header as a string
@@ -94,7 +111,7 @@ func (bh *Header) String() string {
// If hashing the header errors, this will panic.
func (bh *Header) Hash() common.Hash {
if bh.hash == [32]byte{} {
- enc, err := scale.Encode(bh)
+ enc, err := scale.Marshal(*bh)
if err != nil {
panic(err)
}
@@ -109,119 +126,3 @@ func (bh *Header) Hash() common.Hash {
return bh.hash
}
-
-// Encode returns the SCALE encoding of a header
-func (bh *Header) Encode() ([]byte, error) {
- return scale.Encode(bh)
-}
-
-// MustEncode returns the SCALE encoded header and panics if it fails to encode
-func (bh *Header) MustEncode() []byte {
- enc, err := bh.Encode()
- if err != nil {
- panic(err)
- }
- return enc
-}
-
-// Decode decodes the SCALE encoded input into this header
-func (bh *Header) Decode(r io.Reader) (*Header, error) {
- sd := scale.Decoder{Reader: r}
-
- ph, err := sd.Decode(common.Hash{})
- if err != nil {
- return nil, err
- }
-
- num, err := sd.Decode(big.NewInt(0))
- if err != nil {
- return nil, err
- }
-
- sr, err := sd.Decode(common.Hash{})
- if err != nil {
- return nil, err
- }
-
- er, err := sd.Decode(common.Hash{})
- if err != nil {
- return nil, err
- }
-
- d, err := DecodeDigest(r)
- if err != nil {
- return nil, err
- }
-
- bh.ParentHash = ph.(common.Hash)
- bh.Number = num.(*big.Int)
- bh.StateRoot = sr.(common.Hash)
- bh.ExtrinsicsRoot = er.(common.Hash)
- bh.Digest = d
- return bh, nil
-}
-
-// AsOptional returns the Header as an optional.Header
-func (bh *Header) AsOptional() *optional.Header {
- return optional.NewHeader(true, &optional.CoreHeader{
- ParentHash: bh.ParentHash,
- Number: bh.Number,
- StateRoot: bh.StateRoot,
- ExtrinsicsRoot: bh.ExtrinsicsRoot,
- Digest: &bh.Digest,
- })
-}
-
-// NewHeaderFromOptional returns a Header given an optional.Header. If the optional.Header is None, an error is returned.
-func NewHeaderFromOptional(oh *optional.Header) (*Header, error) {
- if !oh.Exists() {
- return nil, errors.New("header is None")
- }
-
- h := oh.Value()
-
- if h.Number == nil {
- // Hash() will panic if number is nil
- return nil, errors.New("cannot have nil block number")
- }
-
- bh := &Header{
- ParentHash: h.ParentHash,
- Number: h.Number,
- StateRoot: h.StateRoot,
- ExtrinsicsRoot: h.ExtrinsicsRoot,
- Digest: *(h.Digest.(*Digest)),
- }
-
- bh.Hash()
- return bh, nil
-}
-
-// decodeOptionalHeader decodes a SCALE encoded optional Header into an *optional.Header
-func decodeOptionalHeader(r io.Reader) (*optional.Header, error) {
- sd := scale.Decoder{Reader: r}
-
- exists, err := common.ReadByte(r)
- if err != nil {
- return nil, err
- }
-
- if exists == 1 {
- header := &Header{
- ParentHash: common.Hash{},
- Number: big.NewInt(0),
- StateRoot: common.Hash{},
- ExtrinsicsRoot: common.Hash{},
- Digest: Digest{},
- }
- _, err = sd.Decode(header)
- if err != nil {
- return nil, err
- }
-
- header.Hash()
- return header.AsOptional(), nil
- }
-
- return optional.NewHeader(false, nil), nil
-}
diff --git a/dot/types/header_test.go b/dot/types/header_test.go
index baf1909c17..adaa2cdd18 100644
--- a/dot/types/header_test.go
+++ b/dot/types/header_test.go
@@ -17,74 +17,105 @@
package types
import (
- "bytes"
"math/big"
"testing"
"github.com/ChainSafe/gossamer/lib/common"
+ "github.com/ChainSafe/gossamer/pkg/scale"
"github.com/stretchr/testify/require"
)
-func TestDecodeHeader(t *testing.T) {
- header, err := NewHeader(common.Hash{}, common.Hash{}, common.Hash{}, big.NewInt(0), Digest{})
+func TestEmptyHeader(t *testing.T) {
+ head := NewEmptyHeader()
+ isEmpty := head.Empty()
+ require.True(t, isEmpty)
+
+ head.Number = big.NewInt(21)
+ isEmpty = head.Empty()
+ require.False(t, isEmpty)
+
+ vdts := NewDigest()
+ err := vdts.Add(
+ PreRuntimeDigest{
+ ConsensusEngineID: BabeEngineID,
+ Data: common.MustHexToBytes("0x0201000000ef55a50f00000000"),
+ },
+ )
require.NoError(t, err)
- enc, err := header.Encode()
+ head2, err := NewHeader(common.Hash{}, common.Hash{}, common.Hash{}, big.NewInt(0), vdts)
require.NoError(t, err)
- rw := &bytes.Buffer{}
- rw.Write(enc)
- dec, err := new(Header).Decode(rw)
+ isEmpty = head2.Empty()
+ require.False(t, isEmpty)
+
+ head3, err := NewHeader(common.Hash{}, common.Hash{}, common.Hash{}, big.NewInt(21), vdts)
require.NoError(t, err)
- dec.Hash()
- require.Equal(t, header, dec)
+
+ isEmpty = head3.Empty()
+ require.False(t, isEmpty)
}
-func TestMustEncodeHeader(t *testing.T) {
- bh1, err := NewHeader(common.Hash{}, common.Hash{}, common.Hash{}, big.NewInt(0), Digest{})
- require.NoError(t, err)
- enc, err := bh1.Encode()
+func TestEncodeAndDecodeHeader(t *testing.T) {
+ expected, err := common.HexToBytes("0x000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000c0642414245340201000000ef55a50f00000000044241424549040118ca239392960473fe1bc65f94ee27d890a49c1b200c006ff5dcc525330ecc16770100000000000000b46f01874ce7abbb5220e8fd89bede0adad14c73039d91e28e881823433e723f0100000000000000d684d9176d6eb69887540c9a89fa6097adea82fc4b0ff26d1062b488f352e179010000000000000068195a71bdde49117a616424bdc60a1733e96acb1da5aeab5d268cf2a572e94101000000000000001a0575ef4ae24bdfd31f4cb5bd61239ae67c12d4e64ae51ac756044aa6ad8200010000000000000018168f2aad0081a25728961ee00627cfe35e39833c805016632bf7c14da5800901000000000000000000000000000000000000000000000000000000000000000000000000000000054241424501014625284883e564bc1e4063f5ea2b49846cdddaa3761d04f543b698c1c3ee935c40d25b869247c36c6b8a8cbbd7bb2768f560ab7c276df3c62df357a7e3b1ec8d")
require.NoError(t, err)
- testDigest := Digest{
- &PreRuntimeDigest{
+ vdts := NewDigest()
+ err = vdts.Add(
+ PreRuntimeDigest{
+ ConsensusEngineID: BabeEngineID,
+ Data: common.MustHexToBytes("0x0201000000ef55a50f00000000"),
+ },
+ ConsensusDigest{
ConsensusEngineID: BabeEngineID,
- Data: []byte{1, 2, 3},
+ Data: common.MustHexToBytes("0x0118ca239392960473fe1bc65f94ee27d890a49c1b200c006ff5dcc525330ecc16770100000000000000b46f01874ce7abbb5220e8fd89bede0adad14c73039d91e28e881823433e723f0100000000000000d684d9176d6eb69887540c9a89fa6097adea82fc4b0ff26d1062b488f352e179010000000000000068195a71bdde49117a616424bdc60a1733e96acb1da5aeab5d268cf2a572e94101000000000000001a0575ef4ae24bdfd31f4cb5bd61239ae67c12d4e64ae51ac756044aa6ad8200010000000000000018168f2aad0081a25728961ee00627cfe35e39833c805016632bf7c14da5800901000000000000000000000000000000000000000000000000000000000000000000000000000000"),
},
- &SealDigest{
+ SealDigest{
ConsensusEngineID: BabeEngineID,
- Data: []byte{4, 5, 6, 7},
+ Data: common.MustHexToBytes("0x4625284883e564bc1e4063f5ea2b49846cdddaa3761d04f543b698c1c3ee935c40d25b869247c36c6b8a8cbbd7bb2768f560ab7c276df3c62df357a7e3b1ec8d"),
},
- }
+ )
+ require.NoError(t, err)
+
+ headerVdt, err := NewHeader(common.Hash{}, common.Hash{}, common.Hash{}, big.NewInt(0), vdts)
+ require.NoError(t, err)
- bh2, err := NewHeader(common.Hash{}, common.Hash{}, common.Hash{}, big.NewInt(0), testDigest)
+ encVdt, err := scale.Marshal(*headerVdt)
require.NoError(t, err)
- enc2, err := bh2.Encode()
+
+ require.Equal(t, expected, encVdt)
+
+ var decVdt = NewEmptyHeader()
+ err = scale.Unmarshal(encVdt, decVdt)
require.NoError(t, err)
+ decVdt.Hash()
+ require.Equal(t, headerVdt, decVdt)
+}
- tests := []struct {
- name string
- take *Header
- want []byte
- }{
- {
- name: "correct",
- take: bh1,
- want: enc,
+func TestHeaderDeepCopy(t *testing.T) {
+ vdts := NewDigest()
+ err := vdts.Add(
+ PreRuntimeDigest{
+ ConsensusEngineID: BabeEngineID,
+ Data: common.MustHexToBytes("0x0201000000ef55a50f00000000"),
+ },
+ ConsensusDigest{
+ ConsensusEngineID: BabeEngineID,
+ Data: common.MustHexToBytes("0x0118ca239392960473fe1bc65f94ee27d890a49c1b200c006ff5dcc525330ecc16770100000000000000b46f01874ce7abbb5220e8fd89bede0adad14c73039d91e28e881823433e723f0100000000000000d684d9176d6eb69887540c9a89fa6097adea82fc4b0ff26d1062b488f352e179010000000000000068195a71bdde49117a616424bdc60a1733e96acb1da5aeab5d268cf2a572e94101000000000000001a0575ef4ae24bdfd31f4cb5bd61239ae67c12d4e64ae51ac756044aa6ad8200010000000000000018168f2aad0081a25728961ee00627cfe35e39833c805016632bf7c14da5800901000000000000000000000000000000000000000000000000000000000000000000000000000000"),
},
- {
- name: "correct2",
- take: bh2,
- want: enc2,
+ SealDigest{
+ ConsensusEngineID: BabeEngineID,
+ Data: common.MustHexToBytes("0x4625284883e564bc1e4063f5ea2b49846cdddaa3761d04f543b698c1c3ee935c40d25b869247c36c6b8a8cbbd7bb2768f560ab7c276df3c62df357a7e3b1ec8d"),
},
- }
-
- for _, tt := range tests {
- t.Run(tt.name, func(t *testing.T) {
- if got := tt.take.MustEncode(); !bytes.Equal(got, tt.want) {
- t.Errorf("MustEncode() = %v, want %v", got, tt.want)
- }
- })
- }
+ )
+ require.NoError(t, err)
+
+ header, err := NewHeader(common.Hash{}, common.Hash{}, common.Hash{}, big.NewInt(1), vdts)
+ require.NoError(t, err)
+
+ dc, err := header.DeepCopy()
+ require.NoError(t, err)
+ dc.Hash()
+ require.Equal(t, header, dc)
}
diff --git a/dot/types/mocks/digest_item.go b/dot/types/mocks/digest_item.go
deleted file mode 100644
index 66d1d04a10..0000000000
--- a/dot/types/mocks/digest_item.go
+++ /dev/null
@@ -1,79 +0,0 @@
-// Code generated by mockery v2.8.0. DO NOT EDIT.
-
-package types
-
-import (
- io "io"
-
- mock "github.com/stretchr/testify/mock"
-)
-
-// MockDigestItem is an autogenerated mock type for the DigestItem type
-type MockDigestItem struct {
- mock.Mock
-}
-
-// Decode provides a mock function with given fields: _a0
-func (_m *MockDigestItem) Decode(_a0 io.Reader) error {
- ret := _m.Called(_a0)
-
- var r0 error
- if rf, ok := ret.Get(0).(func(io.Reader) error); ok {
- r0 = rf(_a0)
- } else {
- r0 = ret.Error(0)
- }
-
- return r0
-}
-
-// Encode provides a mock function with given fields:
-func (_m *MockDigestItem) Encode() ([]byte, error) {
- ret := _m.Called()
-
- var r0 []byte
- if rf, ok := ret.Get(0).(func() []byte); ok {
- r0 = rf()
- } else {
- if ret.Get(0) != nil {
- r0 = ret.Get(0).([]byte)
- }
- }
-
- var r1 error
- if rf, ok := ret.Get(1).(func() error); ok {
- r1 = rf()
- } else {
- r1 = ret.Error(1)
- }
-
- return r0, r1
-}
-
-// String provides a mock function with given fields:
-func (_m *MockDigestItem) String() string {
- ret := _m.Called()
-
- var r0 string
- if rf, ok := ret.Get(0).(func() string); ok {
- r0 = rf()
- } else {
- r0 = ret.Get(0).(string)
- }
-
- return r0
-}
-
-// Type provides a mock function with given fields:
-func (_m *MockDigestItem) Type() byte {
- ret := _m.Called()
-
- var r0 byte
- if rf, ok := ret.Get(0).(func() byte); ok {
- r0 = rf()
- } else {
- r0 = ret.Get(0).(byte)
- }
-
- return r0
-}
diff --git a/lib/babe/babe.go b/lib/babe/babe.go
index c8327b00c2..ab5c8331ad 100644
--- a/lib/babe/babe.go
+++ b/lib/babe/babe.go
@@ -76,7 +76,7 @@ type ServiceConfig struct {
BlockImportHandler BlockImportHandler
Keypair *sr25519.Keypair
Runtime runtime.Instance
- AuthData []*types.Authority
+ AuthData []types.Authority
IsDev bool
ThresholdNumerator uint64 // for development purposes
ThresholdDenominator uint64 // for development purposes
@@ -303,7 +303,7 @@ func (b *Service) Stop() error {
}
// Authorities returns the current BABE authorities
-func (b *Service) Authorities() []*types.Authority {
+func (b *Service) Authorities() []types.Authority {
return b.epochData.authorities
}
@@ -312,7 +312,7 @@ func (b *Service) IsStopped() bool {
return b.ctx.Err() != nil
}
-func (b *Service) getAuthorityIndex(Authorities []*types.Authority) (uint32, error) {
+func (b *Service) getAuthorityIndex(Authorities []types.Authority) (uint32, error) {
if !b.authority {
return 0, ErrNotAuthority
}
@@ -484,7 +484,10 @@ func (b *Service) handleSlot(epoch, slotNum uint64) error {
// there is a chance that the best block header may change in the course of building the block,
// so let's copy it first.
- parent := parentHeader.DeepCopy()
+ parent, err := parentHeader.DeepCopy()
+ if err != nil {
+ return err
+ }
currentSlot := Slot{
start: time.Now(),
diff --git a/lib/babe/babe_test.go b/lib/babe/babe_test.go
index bacab21429..a8649eaf8e 100644
--- a/lib/babe/babe_test.go
+++ b/lib/babe/babe_test.go
@@ -52,6 +52,7 @@ var (
genesisHeader *types.Header
emptyHeader = &types.Header{
Number: big.NewInt(0),
+ Digest: types.NewDigest(),
}
genesisBABEConfig = &types.BabeConfiguration{
@@ -59,7 +60,7 @@ var (
EpochLength: 200,
C1: 1,
C2: 4,
- GenesisAuthorities: []*types.AuthorityRaw{},
+ GenesisAuthorities: []types.AuthorityRaw{},
Randomness: [32]byte{},
SecondarySlots: 0,
}
@@ -70,6 +71,7 @@ func createTestService(t *testing.T, cfg *ServiceConfig) *Service {
gen, genTrie, genHeader := genesis.NewTestGenesisWithTrieAndHeader(t)
genesisHeader = genHeader
+
var err error
if cfg == nil {
@@ -87,11 +89,11 @@ func createTestService(t *testing.T, cfg *ServiceConfig) *Service {
}
if cfg.AuthData == nil {
- auth := &types.Authority{
+ auth := types.Authority{
Key: cfg.Keypair.Public().(*sr25519.PublicKey),
Weight: 1,
}
- cfg.AuthData = []*types.Authority{auth}
+ cfg.AuthData = []types.Authority{auth}
}
if cfg.TransactionState == nil {
@@ -321,7 +323,7 @@ func TestService_ProducesBlocks(t *testing.T) {
babeService := createTestService(t, nil)
babeService.epochData.authorityIndex = 0
- babeService.epochData.authorities = []*types.Authority{
+ babeService.epochData.authorities = []types.Authority{
{Key: nil, Weight: 1},
{Key: nil, Weight: 1},
{Key: nil, Weight: 1},
@@ -349,7 +351,7 @@ func TestService_GetAuthorityIndex(t *testing.T) {
pubA := kpA.Public().(*sr25519.PublicKey)
pubB := kpB.Public().(*sr25519.PublicKey)
- authData := []*types.Authority{
+ authData := []types.Authority{
{Key: pubA, Weight: 1},
{Key: pubB, Weight: 1},
}
diff --git a/lib/babe/build.go b/lib/babe/build.go
index b118501edb..5110665957 100644
--- a/lib/babe/build.go
+++ b/lib/babe/build.go
@@ -113,7 +113,12 @@ func (b *BlockBuilder) buildBlock(parent *types.Header, slot Slot, rt runtime.In
// create new block header
number := big.NewInt(0).Add(parent.Number, big.NewInt(1))
- header, err := types.NewHeader(parent.Hash(), common.Hash{}, common.Hash{}, number, types.NewDigest(preDigest))
+ digest := types.NewDigest()
+ err = digest.Add(*preDigest)
+ if err != nil {
+ return nil, err
+ }
+ header, err := types.NewHeader(parent.Hash(), common.Hash{}, common.Hash{}, number, digest)
if err != nil {
return nil, err
}
@@ -154,7 +159,10 @@ func (b *BlockBuilder) buildBlock(parent *types.Header, slot Slot, rt runtime.In
return nil, err
}
- header.Digest = append(header.Digest, seal)
+ err = header.Digest.Add(*seal)
+ if err != nil {
+ return nil, err
+ }
logger.Trace("built block seal")
@@ -164,8 +172,8 @@ func (b *BlockBuilder) buildBlock(parent *types.Header, slot Slot, rt runtime.In
}
block := &types.Block{
- Header: header,
- Body: body,
+ Header: *header,
+ Body: *body,
}
return block, nil
@@ -174,7 +182,7 @@ func (b *BlockBuilder) buildBlock(parent *types.Header, slot Slot, rt runtime.In
// buildBlockSeal creates the seal for the block header.
// the seal consists of the ConsensusEngineID and a signature of the encoded block header.
func (b *BlockBuilder) buildBlockSeal(header *types.Header) (*types.SealDigest, error) {
- encHeader, err := header.Encode()
+ encHeader, err := scale.Marshal(*header)
if err != nil {
return nil, err
}
diff --git a/lib/babe/build_test.go b/lib/babe/build_test.go
index 3affc2bc92..7977ddc081 100644
--- a/lib/babe/build_test.go
+++ b/lib/babe/build_test.go
@@ -59,10 +59,10 @@ func TestSeal(t *testing.T) {
zeroHash, err := common.HexToHash("0x00")
require.NoError(t, err)
- header, err := types.NewHeader(zeroHash, zeroHash, zeroHash, big.NewInt(0), types.Digest{})
+ header, err := types.NewHeader(zeroHash, zeroHash, zeroHash, big.NewInt(0), types.NewDigest())
require.NoError(t, err)
- encHeader, err := header.Encode()
+ encHeader, err := scale.Marshal(*header)
require.NoError(t, err)
hash, err := common.Blake2bHash(encHeader)
@@ -89,7 +89,6 @@ func createTestExtrinsic(t *testing.T, rt runtime.Instance, genHash common.Hash,
rawMeta, err := rt.Metadata()
require.NoError(t, err)
- //decoded, err := scale.Decode(rawMeta, []byte{})
var decoded []byte
err = scale.Unmarshal(rawMeta, &decoded)
require.NoError(t, err)
@@ -189,22 +188,22 @@ func TestBuildBlock_ok(t *testing.T) {
preDigest, err := builder.buildBlockPreDigest(slot)
require.NoError(t, err)
+ digest := types.NewDigest()
+ err = digest.Add(*preDigest)
+ require.NoError(t, err)
+
expectedBlockHeader := &types.Header{
ParentHash: emptyHeader.Hash(),
Number: big.NewInt(1),
- Digest: types.Digest{preDigest},
+ Digest: digest,
}
require.Equal(t, expectedBlockHeader.ParentHash, block.Header.ParentHash)
require.Equal(t, expectedBlockHeader.Number, block.Header.Number)
require.NotEqual(t, block.Header.StateRoot, emptyHash)
require.NotEqual(t, block.Header.ExtrinsicsRoot, emptyHash)
- require.Equal(t, 3, len(block.Header.Digest))
- require.Equal(t, preDigest, block.Header.Digest[0])
- require.Equal(t, types.PreRuntimeDigestType, block.Header.Digest[0].Type())
- require.Equal(t, types.ConsensusDigestType, block.Header.Digest[1].Type())
- require.Equal(t, types.SealDigestType, block.Header.Digest[2].Type())
- require.Equal(t, types.NextEpochDataType, block.Header.Digest[1].(*types.ConsensusDigest).DataType())
+ require.Equal(t, 3, len(block.Header.Digest.Types))
+ require.Equal(t, *preDigest, block.Header.Digest.Types[0].Value())
// confirm block body is correct
extsRes, err := block.Body.AsExtrinsics()
@@ -265,7 +264,11 @@ func TestApplyExtrinsic(t *testing.T) {
preDigest, err := builder.buildBlockPreDigest(slot)
require.NoError(t, err)
- header, err := types.NewHeader(parentHash, common.Hash{}, common.Hash{}, big.NewInt(1), types.NewDigest(preDigest))
+ digest := types.NewDigest()
+ err = digest.Add(*preDigest)
+ require.NoError(t, err)
+
+ header, err := types.NewHeader(parentHash, common.Hash{}, common.Hash{}, big.NewInt(1), digest)
require.NoError(t, err)
//initialise block header
@@ -282,7 +285,10 @@ func TestApplyExtrinsic(t *testing.T) {
_, err = rt.ValidateTransaction(append([]byte{byte(types.TxnExternal)}, ext...))
require.NoError(t, err)
- header2, err := types.NewHeader(header1.Hash(), common.Hash{}, common.Hash{}, big.NewInt(2), types.NewDigest(preDigest2))
+ digest2 := types.NewDigest()
+ err = digest2.Add(*preDigest2)
+ require.NoError(t, err)
+ header2, err := types.NewHeader(header1.Hash(), common.Hash{}, common.Hash{}, big.NewInt(2), digest2)
require.NoError(t, err)
err = rt.InitializeBlock(header2)
require.NoError(t, err)
@@ -311,7 +317,7 @@ func TestBuildAndApplyExtrinsic(t *testing.T) {
babeService.epochData.threshold = maxThreshold
parentHash := common.MustHexToHash("0x35a28a7dbaf0ba07d1485b0f3da7757e3880509edc8c31d0850cb6dd6219361d")
- header, err := types.NewHeader(parentHash, common.Hash{}, common.Hash{}, big.NewInt(1), types.NewEmptyDigest())
+ header, err := types.NewHeader(parentHash, common.Hash{}, common.Hash{}, big.NewInt(1), types.NewDigest())
require.NoError(t, err)
rt, err := babeService.blockState.GetRuntime(nil)
@@ -386,7 +392,7 @@ func TestBuildBlock_failing(t *testing.T) {
var err error
babeService := createTestService(t, cfg)
- babeService.epochData.authorities = []*types.Authority{
+ babeService.epochData.authorities = []types.Authority{
{Key: nil, Weight: 1},
}
diff --git a/lib/babe/epoch_test.go b/lib/babe/epoch_test.go
index a0ba5e091b..45cb9313e8 100644
--- a/lib/babe/epoch_test.go
+++ b/lib/babe/epoch_test.go
@@ -62,14 +62,14 @@ func TestInitiateEpoch_Epoch1(t *testing.T) {
// epoch 1, check that genesis EpochData and ConfigData was properly set
threshold := bs.epochData.threshold
- auth := &types.Authority{
+ auth := types.Authority{
Key: bs.keypair.Public().(*sr25519.PublicKey),
Weight: 1,
}
data, err := bs.epochState.GetEpochData(0)
require.NoError(t, err)
- data.Authorities = []*types.Authority{auth}
+ data.Authorities = []types.Authority{auth}
err = bs.epochState.SetEpochData(1, data)
require.NoError(t, err)
@@ -78,7 +78,7 @@ func TestInitiateEpoch_Epoch1(t *testing.T) {
expected := &epochData{
randomness: genesisBABEConfig.Randomness,
- authorities: []*types.Authority{auth},
+ authorities: []types.Authority{auth},
authorityIndex: 0,
threshold: threshold,
}
diff --git a/lib/babe/median_test.go b/lib/babe/median_test.go
index 7075cdfa79..2a1c51f021 100644
--- a/lib/babe/median_test.go
+++ b/lib/babe/median_test.go
@@ -98,13 +98,16 @@ func addBlocksToState(t *testing.T, babeService *Service, depth int, blockState
predigest, err := builder.buildBlockPreDigest(slot)
require.NoError(t, err)
+ digest := types.NewDigest()
+ err = digest.Add(*predigest)
+ require.NoError(t, err)
block := &types.Block{
- Header: &types.Header{
+ Header: types.Header{
ParentHash: previousHash,
Number: big.NewInt(int64(i)),
- Digest: types.Digest{predigest},
+ Digest: digest,
},
- Body: &types.Body{},
+ Body: types.Body{},
}
arrivalTime := previousAT.Add(duration)
@@ -164,13 +167,16 @@ func TestEstimateCurrentSlot(t *testing.T) {
predigest, err := builder.buildBlockPreDigest(slot)
require.NoError(t, err)
+ digest := types.NewDigest()
+	err = digest.Add(*predigest)
+ require.NoError(t, err)
block := &types.Block{
- Header: &types.Header{
+ Header: types.Header{
ParentHash: genesisHeader.Hash(),
Number: big.NewInt(int64(1)),
- Digest: types.Digest{predigest},
+ Digest: digest,
},
- Body: &types.Body{},
+ Body: types.Body{},
}
arrivalTime := time.Now().UnixNano() - slot.duration.Nanoseconds()
diff --git a/lib/babe/types.go b/lib/babe/types.go
index 077f4fd079..a63c28b733 100644
--- a/lib/babe/types.go
+++ b/lib/babe/types.go
@@ -52,12 +52,12 @@ func NewSlot(start time.Time, duration time.Duration, number uint64) *Slot {
}
// Authorities is an alias for []*types.Authority
-type Authorities []*types.Authority
+type Authorities []types.Authority
// String returns the Authorities as a formatted string
func (d Authorities) String() string {
str := ""
- for _, di := range []*types.Authority(d) {
+ for _, di := range []types.Authority(d) {
str = str + fmt.Sprintf("[key=0x%x weight=%d] ", di.Key.Encode(), di.Weight)
}
return str
@@ -67,7 +67,7 @@ func (d Authorities) String() string {
type epochData struct {
randomness Randomness
authorityIndex uint32
- authorities []*types.Authority
+ authorities []types.Authority
threshold *common.Uint128
}
diff --git a/lib/babe/verify.go b/lib/babe/verify.go
index 7431a042df..a02b7954e3 100644
--- a/lib/babe/verify.go
+++ b/lib/babe/verify.go
@@ -26,12 +26,13 @@ import (
"github.com/ChainSafe/gossamer/dot/types"
"github.com/ChainSafe/gossamer/lib/common"
"github.com/ChainSafe/gossamer/lib/crypto/sr25519"
+ "github.com/ChainSafe/gossamer/pkg/scale"
)
// verifierInfo contains the information needed to verify blocks
// it remains the same for an epoch
type verifierInfo struct {
- authorities []*types.Authority
+ authorities []types.Authority
randomness Randomness
threshold *common.Uint128
}
@@ -249,7 +250,7 @@ func (v *VerificationManager) getConfigData(epoch uint64) (*types.ConfigData, er
type verifier struct {
blockState BlockState
epoch uint64
- authorities []*types.Authority
+ authorities []types.Authority
randomness Randomness
threshold *common.Uint128
}
@@ -273,27 +274,27 @@ func newVerifier(blockState BlockState, epoch uint64, info *verifierInfo) (*veri
func (b *verifier) verifyAuthorshipRight(header *types.Header) error {
// header should have 2 digest items (possibly more in the future)
// first item should be pre-digest, second should be seal
- if len(header.Digest) < 2 {
+ if len(header.Digest.Types) < 2 {
return fmt.Errorf("block header is missing digest items")
}
logger.Trace("beginning BABE authorship right verification", "block", header.Hash())
// check for valid seal by verifying signature
- preDigestItem := header.Digest[0]
- sealItem := header.Digest[len(header.Digest)-1]
+ preDigestItem := header.Digest.Types[0]
+ sealItem := header.Digest.Types[len(header.Digest.Types)-1]
- preDigest, ok := preDigestItem.(*types.PreRuntimeDigest)
+ preDigest, ok := preDigestItem.Value().(types.PreRuntimeDigest)
if !ok {
return fmt.Errorf("first digest item is not pre-digest")
}
- seal, ok := sealItem.(*types.SealDigest)
+ seal, ok := sealItem.Value().(types.SealDigest)
if !ok {
- return fmt.Errorf("last digest item is not seal")
+ return fmt.Errorf("last digest item is not seal")
}
- babePreDigest, err := b.verifyPreRuntimeDigest(preDigest)
+ babePreDigest, err := b.verifyPreRuntimeDigest(&preDigest)
if err != nil {
return fmt.Errorf("failed to verify pre-runtime digest: %w", err)
}
@@ -303,12 +304,21 @@ func (b *verifier) verifyAuthorshipRight(header *types.Header) error {
authorPub := b.authorities[babePreDigest.AuthorityIndex()].Key
// remove seal before verifying signature
- header.Digest = header.Digest[:len(header.Digest)-1]
+ h := types.NewDigest()
+ for _, val := range header.Digest.Types[:len(header.Digest.Types)-1] {
+ err = h.Add(val.Value())
+ if err != nil {
+ return err
+ }
+ }
+
+ header.Digest = h
defer func() {
- header.Digest = append(header.Digest, sealItem)
+ err = header.Digest.Add(sealItem.Value())
+ if err != nil { logger.Error("Error adding item to digest", "error", err) }
}()
- encHeader, err := header.Encode()
+ encHeader, err := scale.Marshal(*header)
if err != nil {
return err
}
@@ -436,13 +446,11 @@ func (b *verifier) verifyPrimarySlotWinner(authorityIndex uint32, slot uint64, v
}
func getAuthorityIndex(header *types.Header) (uint32, error) {
- if len(header.Digest) == 0 {
+ if len(header.Digest.Types) == 0 {
return 0, fmt.Errorf("no digest provided")
}
- digestItem := header.Digest[0]
-
- preDigest, ok := digestItem.(*types.PreRuntimeDigest)
+ preDigest, ok := header.Digest.Types[0].Value().(types.PreRuntimeDigest)
if !ok {
return 0, fmt.Errorf("first digest item is not pre-runtime digest")
}
diff --git a/lib/babe/verify_test.go b/lib/babe/verify_test.go
index b0f284384a..ce66f35871 100644
--- a/lib/babe/verify_test.go
+++ b/lib/babe/verify_test.go
@@ -18,6 +18,7 @@ package babe
import (
"errors"
+ "fmt"
"io/ioutil"
"os"
"testing"
@@ -75,7 +76,7 @@ func TestVerificationManager_OnDisabled_InvalidIndex(t *testing.T) {
ThresholdDenominator: 1,
})
block, _ := createTestBlock(t, babeService, genesisHeader, [][]byte{}, 1, testEpochIndex)
- err := vm.SetOnDisabled(1, block.Header)
+ err := vm.SetOnDisabled(1, &block.Header)
require.Equal(t, err, ErrInvalidBlockProducerIndex)
}
@@ -91,6 +92,8 @@ func TestVerificationManager_OnDisabled_NewDigest(t *testing.T) {
babeService := createTestService(t, cfg)
+ fmt.Println("Finished creating test service")
+
vm := newTestVerificationManager(t, nil)
vm.epochInfo[testEpochIndex] = &verifierInfo{
authorities: babeService.epochData.authorities,
@@ -102,7 +105,7 @@ func TestVerificationManager_OnDisabled_NewDigest(t *testing.T) {
err = vm.blockState.AddBlock(block)
require.NoError(t, err)
- err = vm.SetOnDisabled(0, block.Header)
+ err = vm.SetOnDisabled(0, &block.Header)
require.NoError(t, err)
// create an OnDisabled change on a different branch
@@ -110,7 +113,7 @@ func TestVerificationManager_OnDisabled_NewDigest(t *testing.T) {
err = vm.blockState.AddBlock(block)
require.NoError(t, err)
- err = vm.SetOnDisabled(0, block.Header)
+ err = vm.SetOnDisabled(0, &block.Header)
require.NoError(t, err)
}
@@ -137,15 +140,15 @@ func TestVerificationManager_OnDisabled_DuplicateDigest(t *testing.T) {
err = vm.blockState.AddBlock(block)
require.NoError(t, err)
- err = vm.SetOnDisabled(0, block.Header)
+ err = vm.SetOnDisabled(0, &block.Header)
require.NoError(t, err)
// create an OnDisabled change on a different branch
- block2, _ := createTestBlock(t, babeService, block.Header, [][]byte{}, 2, testEpochIndex)
+ block2, _ := createTestBlock(t, babeService, &block.Header, [][]byte{}, 2, testEpochIndex)
err = vm.blockState.AddBlock(block2)
require.NoError(t, err)
- err = vm.SetOnDisabled(0, block2.Header)
+ err = vm.SetOnDisabled(0, &block2.Header)
require.Equal(t, ErrAuthorityAlreadyDisabled, err)
}
@@ -173,19 +176,19 @@ func TestVerificationManager_VerifyBlock_IsDisabled(t *testing.T) {
err = vm.blockState.AddBlock(block)
require.NoError(t, err)
- err = vm.SetOnDisabled(0, block.Header)
+ err = vm.SetOnDisabled(0, &block.Header)
require.NoError(t, err)
// a block that we created, that disables ourselves, should still be accepted
- err = vm.VerifyBlock(block.Header)
+ err = vm.VerifyBlock(&block.Header)
require.NoError(t, err)
- block, _ = createTestBlock(t, babeService, block.Header, [][]byte{}, 2, testEpochIndex)
+ block, _ = createTestBlock(t, babeService, &block.Header, [][]byte{}, 2, testEpochIndex)
err = vm.blockState.AddBlock(block)
require.NoError(t, err)
// any blocks following the one where we are disabled should reject
- err = vm.VerifyBlock(block.Header)
+ err = vm.VerifyBlock(&block.Header)
require.Equal(t, ErrAuthorityDisabled, err)
// let's try a block on a different chain, it shouldn't reject
@@ -194,10 +197,10 @@ func TestVerificationManager_VerifyBlock_IsDisabled(t *testing.T) {
block, _ = createTestBlock(t, babeService, parentHeader, [][]byte{}, uint64(slot), testEpochIndex)
err = vm.blockState.AddBlock(block)
require.NoError(t, err)
- parentHeader = block.Header
+ parentHeader = &block.Header
}
- err = vm.VerifyBlock(block.Header)
+ err = vm.VerifyBlock(&block.Header)
require.NoError(t, err)
}
@@ -221,7 +224,7 @@ func TestVerificationManager_VerifyBlock_Ok(t *testing.T) {
block, _ := createTestBlock(t, babeService, genesisHeader, [][]byte{}, 1, testEpochIndex)
- err = vm.VerifyBlock(block.Header)
+ err = vm.VerifyBlock(&block.Header)
require.NoError(t, err)
}
@@ -253,15 +256,15 @@ func TestVerificationManager_VerifyBlock_MultipleEpochs(t *testing.T) {
// create block in future epoch
block1, _ := createTestBlock(t, babeService, genesisHeader, [][]byte{}, cfg.EpochLength*futureEpoch+1, futureEpoch)
- block2, _ := createTestBlock(t, babeService, block1.Header, [][]byte{}, cfg.EpochLength*futureEpoch+2, futureEpoch)
+ block2, _ := createTestBlock(t, babeService, &block1.Header, [][]byte{}, cfg.EpochLength*futureEpoch+2, futureEpoch)
- err = vm.VerifyBlock(block2.Header)
+ err = vm.VerifyBlock(&block2.Header)
require.NoError(t, err)
// create block in epoch 1
block, _ := createTestBlock(t, babeService, genesisHeader, [][]byte{}, cfg.EpochLength-10, testEpochIndex)
- err = vm.VerifyBlock(block.Header)
+ err = vm.VerifyBlock(&block.Header)
require.NoError(t, err)
}
@@ -286,7 +289,7 @@ func TestVerificationManager_VerifyBlock_InvalidBlockOverThreshold(t *testing.T)
block, _ := createTestBlock(t, babeService, genesisHeader, [][]byte{}, 1, testEpochIndex)
- err = vm.VerifyBlock(block.Header)
+ err = vm.VerifyBlock(&block.Header)
require.Equal(t, ErrVRFOutputOverThreshold, errors.Unwrap(err))
}
@@ -304,13 +307,13 @@ func TestVerificationManager_VerifyBlock_InvalidBlockAuthority(t *testing.T) {
cfg.C1 = 1
cfg.C2 = 1
- cfg.GenesisAuthorities = []*types.AuthorityRaw{}
+ cfg.GenesisAuthorities = []types.AuthorityRaw{}
vm := newTestVerificationManager(t, cfg)
block, _ := createTestBlock(t, babeService, genesisHeader, [][]byte{}, 1, testEpochIndex)
- err = vm.VerifyBlock(block.Header)
+ err = vm.VerifyBlock(&block.Header)
require.Equal(t, ErrInvalidBlockProducerIndex, errors.Unwrap(err))
}
@@ -352,8 +355,8 @@ func TestVerifyPimarySlotWinner(t *testing.T) {
babeHeader, err := builder.buildBlockBABEPrimaryPreDigest(slot)
require.NoError(t, err)
- Authorities := make([]*types.Authority, 1)
- Authorities[0] = &types.Authority{
+ Authorities := make([]types.Authority, 1)
+ Authorities[0] = types.Authority{
Key: kp.Public().(*sr25519.PublicKey),
}
babeService.epochData.authorities = Authorities
@@ -383,7 +386,7 @@ func TestVerifyAuthorshipRight(t *testing.T) {
})
require.NoError(t, err)
- err = verifier.verifyAuthorshipRight(block.Header)
+ err = verifier.verifyAuthorshipRight(&block.Header)
require.NoError(t, err)
}
@@ -398,8 +401,8 @@ func TestVerifyAuthorshipRight_Equivocation(t *testing.T) {
babeService := createTestService(t, cfg)
babeService.epochData.threshold = maxThreshold
- babeService.epochData.authorities = make([]*types.Authority, 1)
- babeService.epochData.authorities[0] = &types.Authority{
+ babeService.epochData.authorities = make([]types.Authority, 1)
+ babeService.epochData.authorities[0] = types.Authority{
Key: kp.Public().(*sr25519.PublicKey),
}
@@ -417,7 +420,7 @@ func TestVerifyAuthorshipRight_Equivocation(t *testing.T) {
})
require.NoError(t, err)
- err = verifier.verifyAuthorshipRight(block.Header)
+ err = verifier.verifyAuthorshipRight(&block.Header)
require.NoError(t, err)
// create new block
@@ -427,6 +430,6 @@ func TestVerifyAuthorshipRight_Equivocation(t *testing.T) {
err = babeService.blockState.AddBlock(block2)
require.NoError(t, err)
- err = verifier.verifyAuthorshipRight(block2.Header)
+ err = verifier.verifyAuthorshipRight(&block2.Header)
require.Equal(t, ErrProducerEquivocated, err)
}
diff --git a/lib/blocktree/blocktree_test.go b/lib/blocktree/blocktree_test.go
index 434a955928..692e5834d1 100644
--- a/lib/blocktree/blocktree_test.go
+++ b/lib/blocktree/blocktree_test.go
@@ -25,8 +25,6 @@ import (
database "github.com/ChainSafe/chaindb"
"github.com/ChainSafe/gossamer/dot/types"
"github.com/ChainSafe/gossamer/lib/common"
- "github.com/ChainSafe/gossamer/lib/utils"
-
"github.com/stretchr/testify/require"
)
@@ -34,6 +32,7 @@ var zeroHash, _ = common.HexToHash("0x00")
var testHeader = &types.Header{
ParentHash: zeroHash,
Number: big.NewInt(0),
+ Digest: types.NewDigest(),
}
func newBlockTreeFromNode(head *node, db database.Database) *BlockTree {
@@ -55,6 +54,7 @@ func createFlatTree(t *testing.T, depth int) (*BlockTree, []common.Hash) {
header := &types.Header{
ParentHash: previousHash,
Number: big.NewInt(int64(i)),
+ Digest: types.NewDigest(),
}
hash := header.Hash()
@@ -171,6 +171,7 @@ func TestBlockTree_Subchain(t *testing.T) {
extraBlock := &types.Header{
ParentHash: hashes[0],
Number: big.NewInt(1),
+ Digest: types.NewDigest(),
}
extraBlock.Hash()
@@ -275,10 +276,16 @@ func TestBlockTree_GetAllBlocksAtDepth(t *testing.T) {
previousHash := btHashes[4]
for i := 4; i <= btDepth; i++ {
+ digest := types.NewDigest()
+ err := digest.Add(types.ConsensusDigest{
+ ConsensusEngineID: types.BabeEngineID,
+ Data: common.MustHexToBytes("0x0118ca239392960473fe1bc65f94ee27d890a49c1b200c006ff5dcc525330ecc16770100000000000000b46f01874ce7abbb5220e8fd89bede0adad14c73039d91e28e881823433e723f0100000000000000d684d9176d6eb69887540c9a89fa6097adea82fc4b0ff26d1062b488f352e179010000000000000068195a71bdde49117a616424bdc60a1733e96acb1da5aeab5d268cf2a572e94101000000000000001a0575ef4ae24bdfd31f4cb5bd61239ae67c12d4e64ae51ac756044aa6ad8200010000000000000018168f2aad0081a25728961ee00627cfe35e39833c805016632bf7c14da5800901000000000000000000000000000000000000000000000000000000000000000000000000000000"),
+ })
+ require.NoError(t, err)
header := &types.Header{
ParentHash: previousHash,
Number: big.NewInt(int64(i)),
- Digest: types.Digest{utils.NewMockDigestItem(9)},
+ Digest: digest,
}
hash := header.Hash()
@@ -294,10 +301,16 @@ func TestBlockTree_GetAllBlocksAtDepth(t *testing.T) {
previousHash = btHashes[2]
for i := 2; i <= btDepth; i++ {
+ digest := types.NewDigest()
+ err := digest.Add(types.SealDigest{
+ ConsensusEngineID: types.BabeEngineID,
+ Data: common.MustHexToBytes("0x4625284883e564bc1e4063f5ea2b49846cdddaa3761d04f543b698c1c3ee935c40d25b869247c36c6b8a8cbbd7bb2768f560ab7c276df3c62df357a7e3b1ec8d"),
+ })
+ require.NoError(t, err)
header := &types.Header{
ParentHash: previousHash,
Number: big.NewInt(int64(i)),
- Digest: types.Digest{utils.NewMockDigestItem(7)},
+ Digest: digest,
}
hash := header.Hash()
diff --git a/lib/blocktree/database_test.go b/lib/blocktree/database_test.go
index 0f7804e14f..7072a68813 100644
--- a/lib/blocktree/database_test.go
+++ b/lib/blocktree/database_test.go
@@ -9,6 +9,7 @@ import (
"github.com/ChainSafe/chaindb"
"github.com/ChainSafe/gossamer/dot/types"
+ "github.com/ChainSafe/gossamer/lib/common"
"github.com/ChainSafe/gossamer/lib/utils"
"github.com/stretchr/testify/require"
@@ -32,6 +33,7 @@ func createTestBlockTree(header *types.Header, depth int, db chaindb.Database) (
header := &types.Header{
ParentHash: previousHash,
Number: big.NewInt(int64(i)),
+ Digest: types.NewDigest(),
}
hash := header.Hash()
@@ -52,10 +54,18 @@ func createTestBlockTree(header *types.Header, depth int, db chaindb.Database) (
previousHash = branch.hash
for i := int(branch.depth.Uint64()); i <= depth; i++ {
+ digest := types.NewDigest()
+ err := digest.Add(types.ConsensusDigest{
+ ConsensusEngineID: types.BabeEngineID,
+ Data: common.MustHexToBytes("0x0118ca239392960473fe1bc65f94ee27d890a49c1b200c006ff5dcc525330ecc16770100000000000000b46f01874ce7abbb5220e8fd89bede0adad14c73039d91e28e881823433e723f0100000000000000d684d9176d6eb69887540c9a89fa6097adea82fc4b0ff26d1062b488f352e179010000000000000068195a71bdde49117a616424bdc60a1733e96acb1da5aeab5d268cf2a572e94101000000000000001a0575ef4ae24bdfd31f4cb5bd61239ae67c12d4e64ae51ac756044aa6ad8200010000000000000018168f2aad0081a25728961ee00627cfe35e39833c805016632bf7c14da5800901000000000000000000000000000000000000000000000000000000000000000000000000000000"),
+ })
+ if err != nil {
+ return nil, nil
+ }
header := &types.Header{
ParentHash: previousHash,
Number: big.NewInt(int64(i)),
- Digest: types.Digest{utils.NewMockDigestItem(rand.Intn(256))},
+ Digest: digest,
}
hash := header.Hash()
diff --git a/lib/common/optional/types.go b/lib/common/optional/types.go
index 4d373eb6e0..3628d8bdb1 100644
--- a/lib/common/optional/types.go
+++ b/lib/common/optional/types.go
@@ -17,6 +17,7 @@
package optional
import (
+ "bytes"
"encoding/binary"
"fmt"
"io"
@@ -382,7 +383,7 @@ func (x *Hash) Set(exists bool, value common.Hash) {
// Digest is the interface implemented by the block digest
type Digest interface {
Encode() ([]byte, error)
- Decode(io.Reader) error // Decode assumes the type byte (first byte) has been removed from the encoding.
+ Decode(buf *bytes.Buffer) error // Decode assumes the type byte (first byte) has been removed from the encoding.
}
// CoreHeader is a state block header
@@ -406,14 +407,6 @@ type Header struct {
value *CoreHeader
}
-// NewHeader returns a new optional.Header
-func NewHeader(exists bool, value *CoreHeader) *Header {
- return &Header{
- exists: exists,
- value: value,
- }
-}
-
// Exists returns true if the value is Some, false if it is None.
func (x *Header) Exists() bool {
if x == nil {
diff --git a/lib/genesis/helpers.go b/lib/genesis/helpers.go
index abb6c46d38..8a681efa2f 100644
--- a/lib/genesis/helpers.go
+++ b/lib/genesis/helpers.go
@@ -96,7 +96,7 @@ func NewGenesisBlockFromTrie(t *trie.Trie) (*types.Header, error) {
}
// create genesis block header
- header, err := types.NewHeader(common.NewHash([]byte{0}), stateRoot, trie.EmptyHash, big.NewInt(0), types.Digest{})
+ header, err := types.NewHeader(common.NewHash([]byte{0}), stateRoot, trie.EmptyHash, big.NewInt(0), types.NewDigest())
if err != nil {
return nil, fmt.Errorf("failed to create genesis block header: %s", err)
}
diff --git a/lib/genesis/test_utils.go b/lib/genesis/test_utils.go
index 3c759f60d5..3b7a1f0a5b 100644
--- a/lib/genesis/test_utils.go
+++ b/lib/genesis/test_utils.go
@@ -128,7 +128,7 @@ func NewTestGenesisWithTrieAndHeader(t *testing.T) (*Genesis, *trie.Trie, *types
genTrie, err := NewTrieFromGenesis(gen)
require.NoError(t, err)
- genesisHeader, err := types.NewHeader(common.NewHash([]byte{0}), genTrie.MustHash(), trie.EmptyHash, big.NewInt(0), types.Digest{})
+ genesisHeader, err := types.NewHeader(common.NewHash([]byte{0}), genTrie.MustHash(), trie.EmptyHash, big.NewInt(0), types.NewDigest())
require.NoError(t, err)
return gen, genTrie, genesisHeader
}
diff --git a/lib/grandpa/grandpa.go b/lib/grandpa/grandpa.go
index 65adfa0dde..f144919732 100644
--- a/lib/grandpa/grandpa.go
+++ b/lib/grandpa/grandpa.go
@@ -89,7 +89,7 @@ type Config struct {
GrandpaState GrandpaState
DigestHandler DigestHandler
Network Network
- Voters []*Voter
+ Voters []Voter
Keypair *ed25519.Keypair
Authority bool
}
@@ -390,7 +390,7 @@ func (s *Service) waitForFirstBlock() error {
select {
case block := <-ch:
- if block != nil && block.Header != nil && block.Header.Number.Int64() > 0 {
+ if block != nil && block.Header.Number.Int64() > 0 {
done = true
}
case <-s.ctx.Done():
@@ -674,7 +674,8 @@ func (s *Service) determinePreVote() (*Vote, error) {
// if we receive a vote message from the primary with a block that's greater than or equal to the current pre-voted block
// and greater than the best final candidate from the last round, we choose that.
// otherwise, we simply choose the head of our chain.
- prm, has := s.loadVote(s.derivePrimary().PublicKeyBytes(), prevote)
+ primary := s.derivePrimary()
+ prm, has := s.loadVote(primary.PublicKeyBytes(), prevote)
if has && prm.Vote.Number >= uint32(s.head.Number.Int64()) {
vote = prm.Vote
} else {
@@ -877,7 +878,7 @@ func (s *Service) createJustification(bfc common.Hash, stage subround) ([]*Signe
}
// derivePrimary returns the primary for the current round
-func (s *Service) derivePrimary() *Voter {
+func (s *Service) derivePrimary() Voter {
return s.state.voters[s.state.round%uint64(len(s.state.voters))]
}
diff --git a/lib/grandpa/grandpa_test.go b/lib/grandpa/grandpa_test.go
index 4dc0213882..bef3e05eb7 100644
--- a/lib/grandpa/grandpa_test.go
+++ b/lib/grandpa/grandpa_test.go
@@ -48,6 +48,7 @@ import (
var testGenesisHeader = &types.Header{
Number: big.NewInt(0),
StateRoot: trie.EmptyHash,
+ Digest: types.NewDigest(),
}
var (
@@ -92,10 +93,10 @@ func newTestState(t *testing.T) *state.Service {
}
}
-func newTestVoters() []*Voter {
- vs := []*Voter{}
+func newTestVoters() []Voter {
+ vs := []Voter{}
for i, k := range kr.Keys {
- vs = append(vs, &Voter{
+ vs = append(vs, Voter{
Key: k.Public().(*ed25519.PublicKey),
ID: uint64(i),
})
@@ -129,7 +130,7 @@ func TestUpdateAuthorities(t *testing.T) {
require.NoError(t, err)
require.Equal(t, uint64(0), gs.state.setID)
- next := []*Voter{
+ next := []Voter{
{Key: kr.Alice().Public().(*ed25519.PublicKey), ID: 0},
}
@@ -1068,7 +1069,8 @@ func TestDeterminePreVote_WithPrimaryPreVote(t *testing.T) {
require.NoError(t, err)
state.AddBlocksToState(t, st.Block, 1)
- primary := gs.derivePrimary().PublicKeyBytes()
+ derivePrimary := gs.derivePrimary()
+ primary := derivePrimary.PublicKeyBytes()
gs.prevotes.Store(primary, &SignedVote{
Vote: NewVoteFromHeader(header),
})
@@ -1087,7 +1089,8 @@ func TestDeterminePreVote_WithInvalidPrimaryPreVote(t *testing.T) {
header, err := st.Block.BestBlockHeader()
require.NoError(t, err)
- primary := gs.derivePrimary().PublicKeyBytes()
+ derivePrimary := gs.derivePrimary()
+ primary := derivePrimary.PublicKeyBytes()
gs.prevotes.Store(primary, &SignedVote{
Vote: NewVoteFromHeader(header),
})
diff --git a/lib/grandpa/message_handler.go b/lib/grandpa/message_handler.go
index d4a5a10317..76f66d0d70 100644
--- a/lib/grandpa/message_handler.go
+++ b/lib/grandpa/message_handler.go
@@ -473,7 +473,7 @@ func (s *Service) VerifyBlockJustification(hash common.Hash, justification []byt
return nil
}
-func isInAuthSet(auth *ed25519.PublicKey, set []*types.GrandpaVoter) bool {
+func isInAuthSet(auth *ed25519.PublicKey, set []types.GrandpaVoter) bool {
for _, a := range set {
if bytes.Equal(a.Key.Encode(), auth.Encode()) {
return true
diff --git a/lib/grandpa/message_handler_test.go b/lib/grandpa/message_handler_test.go
index a3c595cbd4..8dce03d9c5 100644
--- a/lib/grandpa/message_handler_test.go
+++ b/lib/grandpa/message_handler_test.go
@@ -27,6 +27,7 @@ import (
"github.com/ChainSafe/gossamer/lib/crypto/ed25519"
"github.com/ChainSafe/gossamer/lib/keystore"
"github.com/ChainSafe/gossamer/lib/scale"
+ scale2 "github.com/ChainSafe/gossamer/pkg/scale"
"github.com/stretchr/testify/require"
)
@@ -34,13 +35,17 @@ import (
var testHeader = &types.Header{
ParentHash: testGenesisHeader.Hash(),
Number: big.NewInt(1),
- Digest: types.Digest{
- types.NewBabeSecondaryPlainPreDigest(0, 1).ToPreRuntimeDigest(),
- },
+ Digest: newTestDigest(),
}
var testHash = testHeader.Hash()
+func newTestDigest() scale2.VaryingDataTypeSlice {
+ digest := types.NewDigest()
+ digest.Add(*types.NewBabeSecondaryPlainPreDigest(0, 1).ToPreRuntimeDigest())
+ return digest
+}
+
func buildTestJustification(t *testing.T, qty int, round, setID uint64, kr *keystore.Ed25519Keyring, subround subround) []*SignedVote {
var just []*SignedVote
for i := 0; i < qty; i++ {
@@ -196,15 +201,16 @@ func TestMessageHandler_NeighbourMessage(t *testing.T) {
_, err := h.handleMessage("", msg)
require.NoError(t, err)
+ digest := types.NewDigest()
+ err = digest.Add(*types.NewBabeSecondaryPlainPreDigest(0, 1).ToPreRuntimeDigest())
+ require.NoError(t, err)
block := &types.Block{
- Header: &types.Header{
+ Header: types.Header{
Number: big.NewInt(2),
ParentHash: st.Block.GenesisHash(),
- Digest: types.Digest{
- types.NewBabeSecondaryPlainPreDigest(0, 1).ToPreRuntimeDigest(),
- },
+ Digest: digest,
},
- Body: &types.Body{0},
+ Body: types.Body{0},
}
err = st.Block.AddBlock(block)
@@ -250,15 +256,16 @@ func TestMessageHandler_CommitMessage_NoCatchUpRequest_ValidSig(t *testing.T) {
require.NoError(t, err)
fm.Vote = NewVote(testHash, uint32(round))
+ digest := types.NewDigest()
+ err = digest.Add(*types.NewBabeSecondaryPlainPreDigest(0, 1).ToPreRuntimeDigest())
+ require.NoError(t, err)
block := &types.Block{
- Header: &types.Header{
+ Header: types.Header{
ParentHash: testGenesisHeader.Hash(),
Number: big.NewInt(1),
- Digest: types.Digest{
- types.NewBabeSecondaryPlainPreDigest(0, 1).ToPreRuntimeDigest(),
- },
+ Digest: digest,
},
- Body: &types.Body{},
+ Body: types.Body{},
}
err = st.Block.AddBlock(block)
@@ -342,23 +349,24 @@ func TestMessageHandler_CatchUpRequest_WithResponse(t *testing.T) {
setID := uint64(0)
gs.state.round = round + 1
+ digest := types.NewDigest()
+ err := digest.Add(*types.NewBabeSecondaryPlainPreDigest(0, 1).ToPreRuntimeDigest())
+ require.NoError(t, err)
block := &types.Block{
- Header: &types.Header{
+ Header: types.Header{
ParentHash: testGenesisHeader.Hash(),
Number: big.NewInt(2),
- Digest: types.Digest{
- types.NewBabeSecondaryPlainPreDigest(0, 1).ToPreRuntimeDigest(),
- },
+ Digest: digest,
},
- Body: &types.Body{},
+ Body: types.Body{},
}
- err := st.Block.AddBlock(block)
+ err = st.Block.AddBlock(block)
require.NoError(t, err)
err = gs.blockState.SetFinalisedHash(testGenesisHeader.Hash(), round, setID)
require.NoError(t, err)
- err = gs.blockState.(*state.BlockState).SetHeader(block.Header)
+ err = gs.blockState.(*state.BlockState).SetHeader(&block.Header)
require.NoError(t, err)
pvj := []*SignedVote{
@@ -509,7 +517,7 @@ func TestMessageHandler_HandleCatchUpResponse(t *testing.T) {
}
func TestMessageHandler_VerifyBlockJustification(t *testing.T) {
- auths := []*types.GrandpaVoter{
+ auths := []types.GrandpaVoter{
{
Key: kr.Alice().Public().(*ed25519.PublicKey),
},
@@ -526,8 +534,8 @@ func TestMessageHandler_VerifyBlockJustification(t *testing.T) {
require.NoError(t, err)
block := &types.Block{
- Header: testHeader,
- Body: &types.Body{0},
+ Header: *testHeader,
+ Body: types.Body{0},
}
err = st.Block.AddBlock(block)
diff --git a/lib/grandpa/message_test.go b/lib/grandpa/message_test.go
index 6dc280fd96..6e9824715c 100644
--- a/lib/grandpa/message_test.go
+++ b/lib/grandpa/message_test.go
@@ -104,15 +104,16 @@ func TestNewCatchUpResponse(t *testing.T) {
round := uint64(1)
setID := uint64(1)
+ digest := types.NewDigest()
+ err := digest.Add(*types.NewBabeSecondaryPlainPreDigest(0, 1).ToPreRuntimeDigest())
+ require.NoError(t, err)
block := &types.Block{
- Header: &types.Header{
+ Header: types.Header{
ParentHash: testGenesisHeader.Hash(),
Number: big.NewInt(1),
- Digest: types.Digest{
- types.NewBabeSecondaryPlainPreDigest(0, 1).ToPreRuntimeDigest(),
- },
+ Digest: digest,
},
- Body: &types.Body{},
+ Body: types.Body{},
}
hash := block.Header.Hash()
@@ -121,7 +122,7 @@ func TestNewCatchUpResponse(t *testing.T) {
Number: 1,
}
- err := st.Block.AddBlock(block)
+ err = st.Block.AddBlock(block)
require.NoError(t, err)
err = gs.blockState.SetFinalisedHash(hash, round, setID)
diff --git a/lib/grandpa/message_tracker_test.go b/lib/grandpa/message_tracker_test.go
index d1215db4c5..9d1e3644c3 100644
--- a/lib/grandpa/message_tracker_test.go
+++ b/lib/grandpa/message_tracker_test.go
@@ -89,8 +89,8 @@ func TestMessageTracker_SendMessage(t *testing.T) {
require.Equal(t, expected, gs.tracker.voteMessages[next.Hash()][kr.Alice().Public().(*ed25519.PublicKey).AsBytes()])
err = gs.blockState.(*state.BlockState).AddBlock(&types.Block{
- Header: next,
- Body: &types.Body{},
+ Header: *next,
+ Body: types.Body{},
})
require.NoError(t, err)
@@ -135,8 +135,8 @@ func TestMessageTracker_ProcessMessage(t *testing.T) {
require.Equal(t, expected, gs.tracker.voteMessages[next.Hash()][kr.Alice().Public().(*ed25519.PublicKey).AsBytes()])
err = gs.blockState.(*state.BlockState).AddBlock(&types.Block{
- Header: next,
- Body: &types.Body{},
+ Header: *next,
+ Body: types.Body{},
})
require.NoError(t, err)
diff --git a/lib/grandpa/network_test.go b/lib/grandpa/network_test.go
index abe523fc33..98a9f5ed90 100644
--- a/lib/grandpa/network_test.go
+++ b/lib/grandpa/network_test.go
@@ -90,18 +90,19 @@ func TestSendNeighbourMessage(t *testing.T) {
}()
go gs.sendNeighbourMessage()
+ digest := types.NewDigest()
+ err := digest.Add(*types.NewBabeSecondaryPlainPreDigest(0, 1).ToPreRuntimeDigest())
+ require.NoError(t, err)
block := &types.Block{
- Header: &types.Header{
+ Header: types.Header{
ParentHash: st.Block.GenesisHash(),
Number: big.NewInt(1),
- Digest: types.Digest{
- types.NewBabeSecondaryPlainPreDigest(0, 1).ToPreRuntimeDigest(),
- },
+ Digest: digest,
},
- Body: &types.Body{},
+ Body: types.Body{},
}
- err := st.Block.AddBlock(block)
+ err = st.Block.AddBlock(block)
require.NoError(t, err)
hash := block.Header.Hash()
diff --git a/lib/grandpa/round_test.go b/lib/grandpa/round_test.go
index d96410d28c..3666770489 100644
--- a/lib/grandpa/round_test.go
+++ b/lib/grandpa/round_test.go
@@ -387,8 +387,8 @@ func TestPlayGrandpaRound_VaryingChain(t *testing.T) {
for _, h := range headers {
time.Sleep(time.Millisecond * 10)
block := &types.Block{
- Header: h,
- Body: &types.Body{},
+ Header: *h,
+ Body: types.Body{},
}
gs.blockState.(*state.BlockState).AddBlock(block)
}
diff --git a/lib/grandpa/state.go b/lib/grandpa/state.go
index cd6a24a357..65dc644273 100644
--- a/lib/grandpa/state.go
+++ b/lib/grandpa/state.go
@@ -56,7 +56,7 @@ type BlockState interface {
// GrandpaState is the interface required by grandpa into the grandpa state
type GrandpaState interface { //nolint
GetCurrentSetID() (uint64, error)
- GetAuthorities(setID uint64) ([]*types.GrandpaVoter, error)
+ GetAuthorities(setID uint64) ([]types.GrandpaVoter, error)
GetSetIDByBlockNumber(num *big.Int) (uint64, error)
SetLatestRound(round uint64) error
GetLatestRound() (uint64, error)
diff --git a/lib/grandpa/types.go b/lib/grandpa/types.go
index 40b837ee2c..519189bc4b 100644
--- a/lib/grandpa/types.go
+++ b/lib/grandpa/types.go
@@ -79,13 +79,13 @@ func (s subround) String() string {
// State represents a GRANDPA state
type State struct {
- voters []*Voter // set of voters
- setID uint64 // authority set ID
- round uint64 // voting round number
+ voters []Voter // set of voters
+ setID uint64 // authority set ID
+ round uint64 // voting round number
}
// NewState returns a new GRANDPA state
-func NewState(voters []*Voter, setID, round uint64) *State {
+func NewState(voters []Voter, setID, round uint64) *State {
return &State{
voters: voters,
setID: setID,
diff --git a/lib/grandpa/types_test.go b/lib/grandpa/types_test.go
index e1d882f07a..e49896fd58 100644
--- a/lib/grandpa/types_test.go
+++ b/lib/grandpa/types_test.go
@@ -35,7 +35,7 @@ func TestPubkeyToVoter(t *testing.T) {
state := NewState(voters, 0, 0)
voter, err := state.pubkeyToVoter(kr.Alice().Public().(*ed25519.PublicKey))
require.NoError(t, err)
- require.Equal(t, voters[0], voter)
+ require.Equal(t, voters[0], *voter)
}
func TestSignedVoteEncoding(t *testing.T) {
diff --git a/lib/grandpa/vote_message_test.go b/lib/grandpa/vote_message_test.go
index f9376b3e3b..44287da798 100644
--- a/lib/grandpa/vote_message_test.go
+++ b/lib/grandpa/vote_message_test.go
@@ -54,7 +54,7 @@ func TestCheckForEquivocation_NoEquivocation(t *testing.T) {
require.NoError(t, err)
for _, v := range voters {
- equivocated := gs.checkForEquivocation(v, &SignedVote{
+ equivocated := gs.checkForEquivocation(&v, &SignedVote{
Vote: vote,
}, prevote)
require.False(t, equivocated)
@@ -97,7 +97,7 @@ func TestCheckForEquivocation_WithEquivocation(t *testing.T) {
vote2, err := NewVoteFromHash(leaves[1], st.Block)
require.NoError(t, err)
- equivocated := gs.checkForEquivocation(voter, &SignedVote{
+ equivocated := gs.checkForEquivocation(&voter, &SignedVote{
Vote: vote2,
}, prevote)
require.True(t, equivocated)
@@ -149,7 +149,7 @@ func TestCheckForEquivocation_WithExistingEquivocation(t *testing.T) {
vote2 := NewVoteFromHeader(branches[0])
require.NoError(t, err)
- equivocated := gs.checkForEquivocation(voter, &SignedVote{
+ equivocated := gs.checkForEquivocation(&voter, &SignedVote{
Vote: vote2,
}, prevote)
require.True(t, equivocated)
@@ -160,7 +160,7 @@ func TestCheckForEquivocation_WithExistingEquivocation(t *testing.T) {
vote3 := NewVoteFromHeader(branches[1])
require.NoError(t, err)
- equivocated = gs.checkForEquivocation(voter, &SignedVote{
+ equivocated = gs.checkForEquivocation(&voter, &SignedVote{
Vote: vote3,
}, prevote)
require.True(t, equivocated)
diff --git a/lib/runtime/interface.go b/lib/runtime/interface.go
index 8ace2d1e9a..1448505524 100644
--- a/lib/runtime/interface.go
+++ b/lib/runtime/interface.go
@@ -40,7 +40,7 @@ type Instance interface {
Version() (Version, error)
Metadata() ([]byte, error)
BabeConfiguration() (*types.BabeConfiguration, error)
- GrandpaAuthorities() ([]*types.Authority, error)
+ GrandpaAuthorities() ([]types.Authority, error)
ValidateTransaction(e types.Extrinsic) (*transaction.Validity, error)
InitializeBlock(header *types.Header) error
InherentExtrinsics(data []byte) ([]byte, error)
diff --git a/lib/runtime/life/exports.go b/lib/runtime/life/exports.go
index b0ef1b38f3..6d0e3728b6 100644
--- a/lib/runtime/life/exports.go
+++ b/lib/runtime/life/exports.go
@@ -9,6 +9,7 @@ import (
"github.com/ChainSafe/gossamer/lib/runtime"
"github.com/ChainSafe/gossamer/lib/scale"
"github.com/ChainSafe/gossamer/lib/transaction"
+ scale2 "github.com/ChainSafe/gossamer/pkg/scale"
)
// ValidateTransaction runs the extrinsic through runtime function TaggedTransactionQueue_validate_transaction and returns *Validity
@@ -71,23 +72,23 @@ func (in *Instance) BabeConfiguration() (*types.BabeConfiguration, error) {
}
// GrandpaAuthorities returns the genesis authorities from the runtime
-func (in *Instance) GrandpaAuthorities() ([]*types.Authority, error) {
+func (in *Instance) GrandpaAuthorities() ([]types.Authority, error) {
ret, err := in.Exec(runtime.GrandpaAuthorities, []byte{})
if err != nil {
return nil, err
}
- adr, err := scale.Decode(ret, []*types.GrandpaAuthoritiesRaw{})
+ adr, err := scale.Decode(ret, []types.GrandpaAuthoritiesRaw{})
if err != nil {
return nil, err
}
- return types.GrandpaAuthoritiesRawToAuthorities(adr.([]*types.GrandpaAuthoritiesRaw))
+ return types.GrandpaAuthoritiesRawToAuthorities(adr.([]types.GrandpaAuthoritiesRaw))
}
// InitializeBlock calls runtime API function Core_initialise_block
func (in *Instance) InitializeBlock(header *types.Header) error {
- encodedHeader, err := scale.Encode(header)
+ encodedHeader, err := scale2.Marshal(*header)
if err != nil {
return fmt.Errorf("cannot encode header: %w", err)
}
@@ -114,8 +115,8 @@ func (in *Instance) FinalizeBlock() (*types.Header, error) {
return nil, err
}
- bh := new(types.Header)
- _, err = scale.Decode(data, bh)
+ bh := types.NewEmptyHeader()
+ err = scale2.Unmarshal(data, bh)
if err != nil {
return nil, err
}
@@ -126,19 +127,26 @@ func (in *Instance) FinalizeBlock() (*types.Header, error) {
// ExecuteBlock calls runtime function Core_execute_block
func (in *Instance) ExecuteBlock(block *types.Block) ([]byte, error) {
// copy block since we're going to modify it
- b := block.DeepCopy()
- b.Header.Digest = types.NewEmptyDigest()
+ b, err := block.DeepCopy()
+ if err != nil {
+ return nil, err
+ }
+ b.Header.Digest = types.NewDigest()
// TODO: hack since substrate node_runtime can't seem to handle BABE pre-runtime digests
// with type prefix (ie Primary, Secondary...)
if bytes.Equal(in.version.SpecName(), []byte("kusama")) {
// remove seal digest only
- for _, d := range block.Header.Digest {
- if d.Type() == types.SealDigestType {
+ for _, d := range block.Header.Digest.Types {
+ switch d.Value().(type) {
+ case types.SealDigest:
continue
+ default:
+ err = b.Header.Digest.Add(d.Value())
+ if err != nil {
+ return nil, err
+ }
}
-
- b.Header.Digest = append(b.Header.Digest, d)
}
}
diff --git a/lib/runtime/life/exports_test.go b/lib/runtime/life/exports_test.go
index 2ac6fd611c..a3f3df9685 100644
--- a/lib/runtime/life/exports_test.go
+++ b/lib/runtime/life/exports_test.go
@@ -1,7 +1,6 @@
package life
import (
- "bytes"
"math/big"
"testing"
"time"
@@ -14,6 +13,7 @@ import (
"github.com/ChainSafe/gossamer/lib/runtime/storage"
"github.com/ChainSafe/gossamer/lib/scale"
"github.com/ChainSafe/gossamer/lib/trie"
+ scale2 "github.com/ChainSafe/gossamer/pkg/scale"
"github.com/stretchr/testify/require"
)
@@ -77,12 +77,12 @@ func TestInstance_BabeConfiguration_NodeRuntime_WithAuthorities(t *testing.T) {
kr, _ := keystore.NewSr25519Keyring()
- expectedAuthData := []*types.AuthorityRaw{}
+ expectedAuthData := []types.AuthorityRaw{}
for _, kp := range kr.Keys {
kb := [32]byte{}
copy(kb[:], kp.Public().Encode())
- expectedAuthData = append(expectedAuthData, &types.AuthorityRaw{
+ expectedAuthData = append(expectedAuthData, types.AuthorityRaw{
Key: kb,
Weight: 1,
})
@@ -108,10 +108,10 @@ func TestInstance_GrandpaAuthorities_NodeRuntime(t *testing.T) {
kr, _ := keystore.NewEd25519Keyring()
- expected := []*types.Authority{}
+ var expected []types.Authority
for _, kp := range kr.Keys {
- expected = append(expected, &types.Authority{
+ expected = append(expected, types.Authority{
Key: kp.Public(),
Weight: 1,
})
@@ -124,7 +124,7 @@ func buildBlock(t *testing.T, instance runtime.Instance) *types.Block {
header := &types.Header{
ParentHash: trie.EmptyHash,
Number: big.NewInt(1),
- Digest: types.Digest{},
+ Digest: types.NewDigest(),
}
err := instance.InitializeBlock(header)
@@ -144,13 +144,14 @@ func buildBlock(t *testing.T, instance runtime.Instance) *types.Block {
inherentExts, err := instance.InherentExtrinsics(ienc)
require.NoError(t, err)
- // decode inherent extrinsics
- exts, err := scale.Decode(inherentExts, [][]byte{})
+ // decode inherent extrinsics
+ var exts [][]byte
+ err = scale2.Unmarshal(inherentExts, &exts)
require.NoError(t, err)
// apply each inherent extrinsic
- for _, ext := range exts.([][]byte) {
- in, err := scale.Encode(ext) //nolint
+ for _, ext := range exts {
+ in, err := scale2.Marshal(ext) //nolint
require.NoError(t, err)
ret, err := instance.ApplyExtrinsic(append([]byte{1}, in...))
@@ -166,12 +167,16 @@ func buildBlock(t *testing.T, instance runtime.Instance) *types.Block {
babeDigest := types.NewBabePrimaryPreDigest(0, 1, [32]byte{}, [64]byte{})
data := babeDigest.Encode()
preDigest := types.NewBABEPreRuntimeDigest(data)
- res.Digest = types.Digest{preDigest}
+
+ digest := types.NewDigest()
+ err = digest.Add(*preDigest)
+ require.NoError(t, err)
+ res.Digest = digest
expected := &types.Header{
ParentHash: header.ParentHash,
Number: big.NewInt(1),
- Digest: types.Digest{preDigest},
+ Digest: digest,
}
require.Equal(t, expected.ParentHash, res.ParentHash)
@@ -182,8 +187,8 @@ func buildBlock(t *testing.T, instance runtime.Instance) *types.Block {
require.NotEqual(t, trie.EmptyHash, res.StateRoot)
return &types.Block{
- Header: res,
- Body: types.NewBody(inherentExts),
+ Header: *res,
+ Body: *types.NewBody(inherentExts),
}
}
@@ -238,21 +243,20 @@ func TestInstance_ExecuteBlock_KusamaRuntime_KusamaBlock1(t *testing.T) {
// digest from polkadot.js
digestBytes := common.MustHexToBytes("0x0c0642414245340201000000ef55a50f00000000044241424549040118ca239392960473fe1bc65f94ee27d890a49c1b200c006ff5dcc525330ecc16770100000000000000b46f01874ce7abbb5220e8fd89bede0adad14c73039d91e28e881823433e723f0100000000000000d684d9176d6eb69887540c9a89fa6097adea82fc4b0ff26d1062b488f352e179010000000000000068195a71bdde49117a616424bdc60a1733e96acb1da5aeab5d268cf2a572e94101000000000000001a0575ef4ae24bdfd31f4cb5bd61239ae67c12d4e64ae51ac756044aa6ad8200010000000000000018168f2aad0081a25728961ee00627cfe35e39833c805016632bf7c14da5800901000000000000000000000000000000000000000000000000000000000000000000000000000000054241424501014625284883e564bc1e4063f5ea2b49846cdddaa3761d04f543b698c1c3ee935c40d25b869247c36c6b8a8cbbd7bb2768f560ab7c276df3c62df357a7e3b1ec8d")
- r := &bytes.Buffer{}
- _, _ = r.Write(digestBytes)
- digest, err := types.DecodeDigest(r)
+ digest := types.NewDigest()
+ err = scale2.Unmarshal(digestBytes, &digest)
require.NoError(t, err)
// kusama block 1, from polkadot.js
block := &types.Block{
- Header: &types.Header{
+ Header: types.Header{
ParentHash: common.MustHexToHash("0xb0a8d493285c2df73290dfb7e61f870f17b41801197a149ca93654499ea3dafe"),
Number: big.NewInt(1),
StateRoot: common.MustHexToHash("0xfabb0c6e92d29e8bb2167f3c6fb0ddeb956a4278a3cf853661af74a076fc9cb7"),
ExtrinsicsRoot: common.MustHexToHash("0xa35fb7f7616f5c979d48222b3d2fa7cb2331ef73954726714d91ca945cc34fd8"),
Digest: digest,
},
- Body: types.NewBody(body),
+ Body: *types.NewBody(body),
}
_, err = instance.ExecuteBlock(block)
@@ -288,21 +292,20 @@ func TestInstance_ExecuteBlock_PolkadotRuntime_PolkadotBlock1(t *testing.T) {
// digest data received from querying polkadot node
digestBytes := common.MustHexToBytes("0x0c0642414245b501010000000093decc0f00000000362ed8d6055645487fe42e9c8640be651f70a3a2a03658046b2b43f021665704501af9b1ca6e974c257e3d26609b5f68b5b0a1da53f7f252bbe5d94948c39705c98ffa4b869dd44ac29528e3723d619cc7edf1d3f7b7a57a957f6a7e9bdb270a044241424549040118fa3437b10f6e7af8f31362df3a179b991a8c56313d1bcd6307a4d0c734c1ae310100000000000000d2419bc8835493ac89eb09d5985281f5dff4bc6c7a7ea988fd23af05f301580a0100000000000000ccb6bef60defc30724545d57440394ed1c71ea7ee6d880ed0e79871a05b5e40601000000000000005e67b64cf07d4d258a47df63835121423551712844f5b67de68e36bb9a21e12701000000000000006236877b05370265640c133fec07e64d7ca823db1dc56f2d3584b3d7c0f1615801000000000000006c52d02d95c30aa567fda284acf25025ca7470f0b0c516ddf94475a1807c4d250100000000000000000000000000000000000000000000000000000000000000000000000000000005424142450101d468680c844b19194d4dfbdc6697a35bf2b494bda2c5a6961d4d4eacfbf74574379ba0d97b5bb650c2e8670a63791a727943bcb699dc7a228bdb9e0a98c9d089")
- r := &bytes.Buffer{}
- _, _ = r.Write(digestBytes)
- digest, err := types.DecodeDigest(r)
+ digest := types.NewDigest()
+ err = scale2.Unmarshal(digestBytes, &digest)
require.NoError(t, err)
// polkadot block 1, from polkadot.js
block := &types.Block{
- Header: &types.Header{
+ Header: types.Header{
ParentHash: common.MustHexToHash("0x91b171bb158e2d3848fa23a9f1c25182fb8e20313b2c1eb49219da7a70ce90c3"),
Number: big.NewInt(1),
StateRoot: common.MustHexToHash("0xc56fcd6e7a757926ace3e1ecff9b4010fc78b90d459202a339266a7f6360002f"),
ExtrinsicsRoot: common.MustHexToHash("0x9a87f6af64ef97aff2d31bebfdd59f8fe2ef6019278b634b2515a38f1c4c2420"),
Digest: digest,
},
- Body: types.NewBody(body),
+ Body: *types.NewBody(body),
}
_, _ = instance.ExecuteBlock(block) // TODO: fix
diff --git a/lib/runtime/mocks/instance.go b/lib/runtime/mocks/instance.go
index 43f873ce82..056286b1f4 100644
--- a/lib/runtime/mocks/instance.go
+++ b/lib/runtime/mocks/instance.go
@@ -27,10 +27,8 @@ func (_m *MockInstance) ApplyExtrinsic(data types.Extrinsic) ([]byte, error) {
var r0 []byte
if rf, ok := ret.Get(0).(func(types.Extrinsic) []byte); ok {
r0 = rf(data)
- } else {
- if ret.Get(0) != nil {
- r0 = ret.Get(0).([]byte)
- }
+ } else if ret.Get(0) != nil {
+ r0 = ret.Get(0).([]byte)
}
var r1 error
@@ -50,10 +48,9 @@ func (_m *MockInstance) BabeConfiguration() (*types.BabeConfiguration, error) {
var r0 *types.BabeConfiguration
if rf, ok := ret.Get(0).(func() *types.BabeConfiguration); ok {
r0 = rf()
- } else {
- if ret.Get(0) != nil {
- r0 = ret.Get(0).(*types.BabeConfiguration)
- }
+ } else if ret.Get(0) != nil {
+ r0 = ret.Get(0).(*types.BabeConfiguration)
+
}
var r1 error
@@ -78,10 +75,8 @@ func (_m *MockInstance) CheckRuntimeVersion(_a0 []byte) (runtime.Version, error)
var r0 runtime.Version
if rf, ok := ret.Get(0).(func([]byte) runtime.Version); ok {
r0 = rf(_a0)
- } else {
- if ret.Get(0) != nil {
- r0 = ret.Get(0).(runtime.Version)
- }
+ } else if ret.Get(0) != nil {
+ r0 = ret.Get(0).(runtime.Version)
}
var r1 error
@@ -101,10 +96,8 @@ func (_m *MockInstance) DecodeSessionKeys(enc []byte) ([]byte, error) {
var r0 []byte
if rf, ok := ret.Get(0).(func([]byte) []byte); ok {
r0 = rf(enc)
- } else {
- if ret.Get(0) != nil {
- r0 = ret.Get(0).([]byte)
- }
+ } else if ret.Get(0) != nil {
+ r0 = ret.Get(0).([]byte)
}
var r1 error
@@ -124,10 +117,8 @@ func (_m *MockInstance) Exec(function string, data []byte) ([]byte, error) {
var r0 []byte
if rf, ok := ret.Get(0).(func(string, []byte) []byte); ok {
r0 = rf(function, data)
- } else {
- if ret.Get(0) != nil {
- r0 = ret.Get(0).([]byte)
- }
+ } else if ret.Get(0) != nil {
+ r0 = ret.Get(0).([]byte)
}
var r1 error
@@ -147,10 +138,8 @@ func (_m *MockInstance) ExecuteBlock(block *types.Block) ([]byte, error) {
var r0 []byte
if rf, ok := ret.Get(0).(func(*types.Block) []byte); ok {
r0 = rf(block)
- } else {
- if ret.Get(0) != nil {
- r0 = ret.Get(0).([]byte)
- }
+ } else if ret.Get(0) != nil {
+ r0 = ret.Get(0).([]byte)
}
var r1 error
@@ -170,10 +159,8 @@ func (_m *MockInstance) FinalizeBlock() (*types.Header, error) {
var r0 *types.Header
if rf, ok := ret.Get(0).(func() *types.Header); ok {
r0 = rf()
- } else {
- if ret.Get(0) != nil {
- r0 = ret.Get(0).(*types.Header)
- }
+ } else if ret.Get(0) != nil {
+ r0 = ret.Get(0).(*types.Header)
}
var r1 error
@@ -198,26 +185,22 @@ func (_m *MockInstance) GetCodeHash() common.Hash {
var r0 common.Hash
if rf, ok := ret.Get(0).(func() common.Hash); ok {
r0 = rf()
- } else {
- if ret.Get(0) != nil {
- r0 = ret.Get(0).(common.Hash)
- }
+ } else if ret.Get(0) != nil {
+ r0 = ret.Get(0).(common.Hash)
}
return r0
}
// GrandpaAuthorities provides a mock function with given fields:
-func (_m *MockInstance) GrandpaAuthorities() ([]*types.Authority, error) {
+func (_m *MockInstance) GrandpaAuthorities() ([]types.Authority, error) {
ret := _m.Called()
- var r0 []*types.Authority
- if rf, ok := ret.Get(0).(func() []*types.Authority); ok {
+ var r0 []types.Authority
+ if rf, ok := ret.Get(0).(func() []types.Authority); ok {
r0 = rf()
- } else {
- if ret.Get(0) != nil {
- r0 = ret.Get(0).([]*types.Authority)
- }
+ } else if ret.Get(0) != nil {
+ r0 = ret.Get(0).([]types.Authority)
}
var r1 error
@@ -237,10 +220,8 @@ func (_m *MockInstance) InherentExtrinsics(data []byte) ([]byte, error) {
var r0 []byte
if rf, ok := ret.Get(0).(func([]byte) []byte); ok {
r0 = rf(data)
- } else {
- if ret.Get(0) != nil {
- r0 = ret.Get(0).([]byte)
- }
+ } else if ret.Get(0) != nil {
+ r0 = ret.Get(0).([]byte)
}
var r1 error
@@ -274,10 +255,8 @@ func (_m *MockInstance) Keystore() *keystore.GlobalKeystore {
var r0 *keystore.GlobalKeystore
if rf, ok := ret.Get(0).(func() *keystore.GlobalKeystore); ok {
r0 = rf()
- } else {
- if ret.Get(0) != nil {
- r0 = ret.Get(0).(*keystore.GlobalKeystore)
- }
+ } else if ret.Get(0) != nil {
+ r0 = ret.Get(0).(*keystore.GlobalKeystore)
}
return r0
@@ -290,10 +269,8 @@ func (_m *MockInstance) Metadata() ([]byte, error) {
var r0 []byte
if rf, ok := ret.Get(0).(func() []byte); ok {
r0 = rf()
- } else {
- if ret.Get(0) != nil {
- r0 = ret.Get(0).([]byte)
- }
+ } else if ret.Get(0) != nil {
+ r0 = ret.Get(0).([]byte)
}
var r1 error
@@ -313,10 +290,8 @@ func (_m *MockInstance) NetworkService() runtime.BasicNetwork {
var r0 runtime.BasicNetwork
if rf, ok := ret.Get(0).(func() runtime.BasicNetwork); ok {
r0 = rf()
- } else {
- if ret.Get(0) != nil {
- r0 = ret.Get(0).(runtime.BasicNetwork)
- }
+ } else if ret.Get(0) != nil {
+ r0 = ret.Get(0).(runtime.BasicNetwork)
}
return r0
@@ -377,10 +352,8 @@ func (_m *MockInstance) ValidateTransaction(e types.Extrinsic) (*transaction.Val
var r0 *transaction.Validity
if rf, ok := ret.Get(0).(func(types.Extrinsic) *transaction.Validity); ok {
r0 = rf(e)
- } else {
- if ret.Get(0) != nil {
- r0 = ret.Get(0).(*transaction.Validity)
- }
+ } else if ret.Get(0) != nil {
+ r0 = ret.Get(0).(*transaction.Validity)
}
var r1 error
@@ -414,10 +387,8 @@ func (_m *MockInstance) Version() (runtime.Version, error) {
var r0 runtime.Version
if rf, ok := ret.Get(0).(func() runtime.Version); ok {
r0 = rf()
- } else {
- if ret.Get(0) != nil {
- r0 = ret.Get(0).(runtime.Version)
- }
+ } else if ret.Get(0) != nil {
+ r0 = ret.Get(0).(runtime.Version)
}
var r1 error
diff --git a/lib/runtime/wasmer/exports.go b/lib/runtime/wasmer/exports.go
index 48368eafe9..eff5b2c987 100644
--- a/lib/runtime/wasmer/exports.go
+++ b/lib/runtime/wasmer/exports.go
@@ -24,6 +24,7 @@ import (
"github.com/ChainSafe/gossamer/lib/runtime"
"github.com/ChainSafe/gossamer/lib/scale"
"github.com/ChainSafe/gossamer/lib/transaction"
+ scale2 "github.com/ChainSafe/gossamer/pkg/scale"
)
// ValidateTransaction runs the extrinsic through runtime function TaggedTransactionQueue_validate_transaction and returns *Validity
@@ -91,23 +92,23 @@ func (in *Instance) BabeConfiguration() (*types.BabeConfiguration, error) {
}
// GrandpaAuthorities returns the genesis authorities from the runtime
-func (in *Instance) GrandpaAuthorities() ([]*types.Authority, error) {
+func (in *Instance) GrandpaAuthorities() ([]types.Authority, error) {
ret, err := in.exec(runtime.GrandpaAuthorities, []byte{})
if err != nil {
return nil, err
}
- adr, err := scale.Decode(ret, []*types.GrandpaAuthoritiesRaw{})
+ adr, err := scale.Decode(ret, []types.GrandpaAuthoritiesRaw{})
if err != nil {
return nil, err
}
- return types.GrandpaAuthoritiesRawToAuthorities(adr.([]*types.GrandpaAuthoritiesRaw))
+ return types.GrandpaAuthoritiesRawToAuthorities(adr.([]types.GrandpaAuthoritiesRaw))
}
// InitializeBlock calls runtime API function Core_initialise_block
func (in *Instance) InitializeBlock(header *types.Header) error {
- encodedHeader, err := scale.Encode(header)
+ encodedHeader, err := scale2.Marshal(*header)
if err != nil {
return fmt.Errorf("cannot encode header: %w", err)
}
@@ -134,8 +135,8 @@ func (in *Instance) FinalizeBlock() (*types.Header, error) {
return nil, err
}
- bh := new(types.Header)
- _, err = scale.Decode(data, bh)
+ bh := types.NewEmptyHeader()
+ err = scale2.Unmarshal(data, bh)
if err != nil {
return nil, err
}
@@ -146,24 +147,32 @@ func (in *Instance) FinalizeBlock() (*types.Header, error) {
// ExecuteBlock calls runtime function Core_execute_block
func (in *Instance) ExecuteBlock(block *types.Block) ([]byte, error) {
// copy block since we're going to modify it
- b := block.DeepCopy()
+ b, err := block.DeepCopy()
+ if err != nil {
+ return nil, err
+ }
if in.version == nil {
- var err error
in.version, err = in.Version()
if err != nil {
return nil, err
}
}
+ b.Header.Digest = types.NewDigest()
+
// remove seal digest only
- b.Header.Digest = types.NewEmptyDigest()
- for _, d := range block.Header.Digest {
- if d.Type() == types.SealDigestType {
+ for _, d := range block.Header.Digest.Types {
+ switch d.Value().(type) {
+ case types.SealDigest:
continue
+ default:
+ err = b.Header.Digest.Add(d.Value())
+ if err != nil {
+ return nil, err
+ }
}
- b.Header.Digest = append(b.Header.Digest, d)
}
bdEnc, err := b.Encode()
@@ -171,7 +180,7 @@ func (in *Instance) ExecuteBlock(block *types.Block) ([]byte, error) {
return nil, err
}
- return in.exec(runtime.CoreExecuteBlock, bdEnc)
+ return in.Exec(runtime.CoreExecuteBlock, bdEnc)
}
// DecodeSessionKeys decodes the given public session keys. Returns a list of raw public keys including their key type.
diff --git a/lib/runtime/wasmer/exports_test.go b/lib/runtime/wasmer/exports_test.go
index 96e2198b83..853d77b249 100644
--- a/lib/runtime/wasmer/exports_test.go
+++ b/lib/runtime/wasmer/exports_test.go
@@ -1,7 +1,6 @@
package wasmer
import (
- "bytes"
"encoding/json"
"io/ioutil"
"math/big"
@@ -304,7 +303,7 @@ func TestNodeRuntime_ValidateTransaction(t *testing.T) {
ext := createTestExtrinsic(t, rt, genesisHeader.Hash(), 0)
ext = append([]byte{byte(types.TxnExternal)}, ext...)
- _ = buildBlock(t, rt, genesisHeader.Hash())
+ _ = buildBlockVdt(t, rt, genesisHeader.Hash())
_, err = rt.ValidateTransaction(ext)
require.NoError(t, err)
}
@@ -328,7 +327,7 @@ func TestInstance_GrandpaAuthorities_NodeRuntime(t *testing.T) {
authA, _ := ed25519.NewPublicKey(authABytes)
authB, _ := ed25519.NewPublicKey(authBBytes)
- expected := []*types.Authority{
+ expected := []types.Authority{
{Key: authA, Weight: 1},
{Key: authB, Weight: 1},
}
@@ -355,7 +354,7 @@ func TestInstance_GrandpaAuthorities_PolkadotRuntime(t *testing.T) {
authA, _ := ed25519.NewPublicKey(authABytes)
authB, _ := ed25519.NewPublicKey(authBBytes)
- expected := []*types.Authority{
+ expected := []types.Authority{
{Key: authA, Weight: 1},
{Key: authB, Weight: 1},
}
@@ -419,7 +418,7 @@ func TestInstance_BabeConfiguration_NodeRuntime_WithAuthorities(t *testing.T) {
authA, _ := common.HexToHash("0xeea1eabcac7d2c8a6459b7322cf997874482bfc3d2ec7a80888a3a7d71410364")
authB, _ := common.HexToHash("0xb64994460e59b30364cad3c92e3df6052f9b0ebbb8f88460c194dc5794d6d717")
- expectedAuthData := []*types.AuthorityRaw{
+ expectedAuthData := []types.AuthorityRaw{
{Key: authA, Weight: 1},
{Key: authB, Weight: 1},
}
@@ -442,7 +441,7 @@ func TestInstance_InitializeBlock_NodeRuntime(t *testing.T) {
header := &types.Header{
Number: big.NewInt(1),
- Digest: types.Digest{},
+ Digest: types.NewDigest(),
}
err := rt.InitializeBlock(header)
@@ -454,18 +453,18 @@ func TestInstance_InitializeBlock_PolkadotRuntime(t *testing.T) {
header := &types.Header{
Number: big.NewInt(1),
- Digest: types.Digest{},
+ Digest: types.NewDigest(),
}
err := rt.InitializeBlock(header)
require.NoError(t, err)
}
-func buildBlock(t *testing.T, instance runtime.Instance, parentHash common.Hash) *types.Block {
+func buildBlockVdt(t *testing.T, instance runtime.Instance, parentHash common.Hash) *types.Block {
header := &types.Header{
ParentHash: parentHash,
Number: big.NewInt(1),
- Digest: types.Digest{},
+ Digest: types.NewDigest(),
}
err := instance.InitializeBlock(header)
@@ -509,12 +508,16 @@ func buildBlock(t *testing.T, instance runtime.Instance, parentHash common.Hash)
babeDigest := types.NewBabePrimaryPreDigest(0, 1, [32]byte{}, [64]byte{})
data := babeDigest.Encode()
preDigest := types.NewBABEPreRuntimeDigest(data)
- res.Digest = types.Digest{preDigest}
+
+ digest := types.NewDigest()
+ err = digest.Add(preDigest)
+ require.NoError(t, err)
+ res.Digest = digest
expected := &types.Header{
ParentHash: header.ParentHash,
Number: big.NewInt(1),
- Digest: types.Digest{preDigest},
+ Digest: digest,
}
require.Equal(t, expected.ParentHash, res.ParentHash)
@@ -525,26 +528,26 @@ func buildBlock(t *testing.T, instance runtime.Instance, parentHash common.Hash)
require.NotEqual(t, trie.EmptyHash, res.StateRoot)
return &types.Block{
- Header: res,
- Body: types.NewBody(inherentExts),
+ Header: *res,
+ Body: *types.NewBody(inherentExts),
}
}
func TestInstance_FinalizeBlock_NodeRuntime(t *testing.T) {
instance := NewTestInstance(t, runtime.NODE_RUNTIME)
- buildBlock(t, instance, common.Hash{})
+ buildBlockVdt(t, instance, common.Hash{})
}
func TestInstance_ExecuteBlock_NodeRuntime(t *testing.T) {
instance := NewTestInstance(t, runtime.NODE_RUNTIME)
- block := buildBlock(t, instance, common.Hash{})
+ block := buildBlockVdt(t, instance, common.Hash{})
// reset state back to parent state before executing
parentState, err := storage.NewTrieState(nil)
require.NoError(t, err)
instance.SetContextStorage(parentState)
- block.Header.Digest = types.NewEmptyDigest()
+ block.Header.Digest = types.NewDigest()
_, err = instance.ExecuteBlock(block)
require.NoError(t, err)
}
@@ -567,7 +570,7 @@ func TestInstance_ExecuteBlock_GossamerRuntime(t *testing.T) {
instance, err := NewRuntimeFromGenesis(gen, cfg)
require.NoError(t, err)
- block := buildBlock(t, instance, common.Hash{})
+ block := buildBlockVdt(t, instance, common.Hash{})
// reset state back to parent state before executing
parentState, err := storage.NewTrieState(genTrie)
@@ -604,7 +607,7 @@ func TestInstance_ApplyExtrinsic_GossamerRuntime(t *testing.T) {
// TODO: where did this hash come from??
parentHash := common.MustHexToHash("0x35a28a7dbaf0ba07d1485b0f3da7757e3880509edc8c31d0850cb6dd6219361d")
- header, err := types.NewHeader(parentHash, common.Hash{}, common.Hash{}, big.NewInt(1), types.NewEmptyDigest())
+ header, err := types.NewHeader(parentHash, common.Hash{}, common.Hash{}, big.NewInt(1), types.NewDigest())
require.NoError(t, err)
err = instance.InitializeBlock(header)
require.NoError(t, err)
@@ -622,14 +625,14 @@ func TestInstance_ExecuteBlock_PolkadotRuntime(t *testing.T) {
DefaultTestLogLvl = 0
instance := NewTestInstance(t, runtime.POLKADOT_RUNTIME)
- block := buildBlock(t, instance, common.Hash{})
+ block := buildBlockVdt(t, instance, common.Hash{})
// reset state back to parent state before executing
parentState, err := storage.NewTrieState(nil)
require.NoError(t, err)
instance.SetContextStorage(parentState)
- block.Header.Digest = types.Digest{}
+ block.Header.Digest = types.NewDigest()
_, err = instance.ExecuteBlock(block)
require.NoError(t, err)
}
@@ -664,21 +667,21 @@ func TestInstance_ExecuteBlock_PolkadotRuntime_PolkadotBlock1(t *testing.T) {
// digest data received from querying polkadot node
digestBytes := common.MustHexToBytes("0x0c0642414245b501010000000093decc0f00000000362ed8d6055645487fe42e9c8640be651f70a3a2a03658046b2b43f021665704501af9b1ca6e974c257e3d26609b5f68b5b0a1da53f7f252bbe5d94948c39705c98ffa4b869dd44ac29528e3723d619cc7edf1d3f7b7a57a957f6a7e9bdb270a044241424549040118fa3437b10f6e7af8f31362df3a179b991a8c56313d1bcd6307a4d0c734c1ae310100000000000000d2419bc8835493ac89eb09d5985281f5dff4bc6c7a7ea988fd23af05f301580a0100000000000000ccb6bef60defc30724545d57440394ed1c71ea7ee6d880ed0e79871a05b5e40601000000000000005e67b64cf07d4d258a47df63835121423551712844f5b67de68e36bb9a21e12701000000000000006236877b05370265640c133fec07e64d7ca823db1dc56f2d3584b3d7c0f1615801000000000000006c52d02d95c30aa567fda284acf25025ca7470f0b0c516ddf94475a1807c4d250100000000000000000000000000000000000000000000000000000000000000000000000000000005424142450101d468680c844b19194d4dfbdc6697a35bf2b494bda2c5a6961d4d4eacfbf74574379ba0d97b5bb650c2e8670a63791a727943bcb699dc7a228bdb9e0a98c9d089")
- r := &bytes.Buffer{}
- _, _ = r.Write(digestBytes)
- digest, err := types.DecodeDigest(r)
+
+ digest := types.NewDigest()
+ err = scale.Unmarshal(digestBytes, &digest)
require.NoError(t, err)
// polkadot block 1, from polkadot.js
block := &types.Block{
- Header: &types.Header{
+ Header: types.Header{
ParentHash: common.MustHexToHash("0x91b171bb158e2d3848fa23a9f1c25182fb8e20313b2c1eb49219da7a70ce90c3"),
Number: big.NewInt(1),
StateRoot: common.MustHexToHash("0xc56fcd6e7a757926ace3e1ecff9b4010fc78b90d459202a339266a7f6360002f"),
ExtrinsicsRoot: common.MustHexToHash("0x9a87f6af64ef97aff2d31bebfdd59f8fe2ef6019278b634b2515a38f1c4c2420"),
Digest: digest,
},
- Body: types.NewBody(body),
+ Body: *types.NewBody(body),
}
_, err = instance.ExecuteBlock(block)
@@ -715,21 +718,21 @@ func TestInstance_ExecuteBlock_KusamaRuntime_KusamaBlock1(t *testing.T) {
// digest from polkadot.js
digestBytes := common.MustHexToBytes("0x0c0642414245340201000000ef55a50f00000000044241424549040118ca239392960473fe1bc65f94ee27d890a49c1b200c006ff5dcc525330ecc16770100000000000000b46f01874ce7abbb5220e8fd89bede0adad14c73039d91e28e881823433e723f0100000000000000d684d9176d6eb69887540c9a89fa6097adea82fc4b0ff26d1062b488f352e179010000000000000068195a71bdde49117a616424bdc60a1733e96acb1da5aeab5d268cf2a572e94101000000000000001a0575ef4ae24bdfd31f4cb5bd61239ae67c12d4e64ae51ac756044aa6ad8200010000000000000018168f2aad0081a25728961ee00627cfe35e39833c805016632bf7c14da5800901000000000000000000000000000000000000000000000000000000000000000000000000000000054241424501014625284883e564bc1e4063f5ea2b49846cdddaa3761d04f543b698c1c3ee935c40d25b869247c36c6b8a8cbbd7bb2768f560ab7c276df3c62df357a7e3b1ec8d")
- r := &bytes.Buffer{}
- _, _ = r.Write(digestBytes)
- digest, err := types.DecodeDigest(r)
+
+ digest := types.NewDigest()
+ err = scale.Unmarshal(digestBytes, &digest)
require.NoError(t, err)
// kusama block 1, from polkadot.js
block := &types.Block{
- Header: &types.Header{
+ Header: types.Header{
ParentHash: common.MustHexToHash("0xb0a8d493285c2df73290dfb7e61f870f17b41801197a149ca93654499ea3dafe"),
Number: big.NewInt(1),
StateRoot: common.MustHexToHash("0xfabb0c6e92d29e8bb2167f3c6fb0ddeb956a4278a3cf853661af74a076fc9cb7"),
ExtrinsicsRoot: common.MustHexToHash("0xa35fb7f7616f5c979d48222b3d2fa7cb2331ef73954726714d91ca945cc34fd8"),
Digest: digest,
},
- Body: types.NewBody(body),
+ Body: *types.NewBody(body),
}
_, err = instance.ExecuteBlock(block)
@@ -761,21 +764,21 @@ func TestInstance_ExecuteBlock_KusamaRuntime_KusamaBlock3784(t *testing.T) {
// digest from polkadot.js
digestBytes := common.MustHexToBytes("0x080642414245340203000000bd64a50f0000000005424142450101bc0d6850dba8d32ea1dbe26cb4ac56da6cca662c7cc642dc8eed32d2bddd65029f0721436eafeebdf9b4f17d1673c6bc6c3c51fe3dda3121a5fc60c657a5808b")
- r := &bytes.Buffer{}
- _, _ = r.Write(digestBytes)
- digest, err := types.DecodeDigest(r)
+
+ digest := types.NewDigest()
+ err = scale.Unmarshal(digestBytes, &digest)
require.NoError(t, err)
// kusama block 3784, from polkadot.js
block := &types.Block{
- Header: &types.Header{
+ Header: types.Header{
ParentHash: common.MustHexToHash("0x4843b4aa38cf2e3e2f6fae401b98dd705bed668a82dd3751dc38f1601c814ca8"),
Number: big.NewInt(3784),
StateRoot: common.MustHexToHash("0xac44cc18ec22f0f3fca39dfe8725c0383af1c982a833e081fbb2540e46eb09a5"),
ExtrinsicsRoot: common.MustHexToHash("0x52b7d4852fc648cb8f908901e1e36269593c25050c31718454bca74b69115d12"),
Digest: digest,
},
- Body: types.NewBody(body),
+ Body: *types.NewBody(body),
}
_, err = instance.ExecuteBlock(block)
@@ -806,22 +809,22 @@ func TestInstance_ExecuteBlock_KusamaRuntime_KusamaBlock901442(t *testing.T) {
// digest from polkadot.js
digestBytes := common.MustHexToBytes("0x080642414245340244000000aeffb30f00000000054241424501011cbef2a084a774c34d9990c7bfc6b4d2d5e9f5b59feca792cd2bb89a890c2a6f09668b5e8224879f007f49f299d25fbb3c0f30d94fb8055e07fa8a4ed10f8083")
- r := &bytes.Buffer{}
- _, _ = r.Write(digestBytes)
- digest, err := types.DecodeDigest(r)
+
+ digest := types.NewDigest()
+ err = scale.Unmarshal(digestBytes, &digest)
require.NoError(t, err)
- require.Equal(t, 2, len(digest))
+ require.Equal(t, 2, len(digest.Types))
// kusama block 901442, from polkadot.js
block := &types.Block{
- Header: &types.Header{
+ Header: types.Header{
ParentHash: common.MustHexToHash("0x68d9c5f75225f09d7ce493eff8aabac7bae8b65cb81a2fd532a99fbb8c663931"),
Number: big.NewInt(901442),
StateRoot: common.MustHexToHash("0x6ea065f850894c5b58cb1a73ec887e56842851943641149c57cea357cae4f596"),
ExtrinsicsRoot: common.MustHexToHash("0x13483a4c148fff5f072e86b5af52bf031556514e9c87ea19f9e31e7b13c0c414"),
Digest: digest,
},
- Body: types.NewBody(body),
+ Body: *types.NewBody(body),
}
_, err = instance.ExecuteBlock(block)
@@ -852,22 +855,22 @@ func TestInstance_ExecuteBlock_KusamaRuntime_KusamaBlock1377831(t *testing.T) {
// digest from polkadot.js
digestBytes := common.MustHexToBytes("0x080642414245b50101020000008abebb0f00000000045553c32a949242580161bcc35d7c3e492e66defdcf4525d7a338039590012f42660acabf1952a2d5d01725601705404d6ac671507a6aa2cf09840afbdfbb006f48062dae16c56b8dc5c6ea6ffba854b7e8f46e153e98c238cbe7bbb1556f0b0542414245010136914c6832dd5ba811a975a3b654d76a1ec81684f4b03d115ce2e694feadc96411930438fde4beb008c5f8e26cfa2f5b554fa3814b5b73d31f348446fd4fd688")
- r := &bytes.Buffer{}
- _, _ = r.Write(digestBytes)
- digest, err := types.DecodeDigest(r)
+
+ digest := types.NewDigest()
+ err = scale.Unmarshal(digestBytes, &digest)
require.NoError(t, err)
- require.Equal(t, 2, len(digest))
+ require.Equal(t, 2, len(digest.Types))
// kusama block 1377831, from polkadot.js
block := &types.Block{
- Header: &types.Header{
+ Header: types.Header{
ParentHash: common.MustHexToHash("0xca387b3cc045e8848277069d8794cbf077b08218c0b55f74d81dd750b14e768c"),
Number: big.NewInt(1377831),
StateRoot: common.MustHexToHash("0x7e5569e652c4b1a3cecfcf5e5e64a97fe55071d34bab51e25626ec20cae05a02"),
ExtrinsicsRoot: common.MustHexToHash("0x7f3ea0ed63b4053d9b75e7ee3e5b3f6ce916e8f59b7b6c5e966b7a56ea0a563a"),
Digest: digest,
},
- Body: types.NewBody(body),
+ Body: *types.NewBody(body),
}
_, err = instance.ExecuteBlock(block)
@@ -898,22 +901,23 @@ func TestInstance_ExecuteBlock_KusamaRuntime_KusamaBlock1482003(t *testing.T) {
// digest from polkadot.js
digestBytes := common.MustHexToBytes("0x100642414245b50101320000009759bd0f000000009e210440b679b326821b2fca4245b00fbdf8805d3aaf61cf1bf503394effef513cba364ecbebaa6529ac9660ab4c063bea175e9d6ca0685d5df4418fef6d8f07c27a0f957275efc0ba4e50bfbd6e1384fe84dcfda5785e80341213d23fa3600104424142450d7101d10204b57800309efc8132f8a177637557c9c2d9be6a4ba1e31e1b8c32e1e699ee170100000000000000964bbef8761a9505e0cd68956f96929cc6fb56937903d1681e73aed2a9659619010000000000000002b987ef285b8918c77eb19d35859a92b6074f595119246a95f05b5aed5b3a5f0100000000000000bcea83362dcd04d5b701459ac0a9cb9ed9ffbb6199bcce4022129dce2442ba4e0100000000000000162257d27926f8d958a9b763e36899d0efd245e218ea72c29b9233094d8a4f25010000000000000000dc83b2e0ebf20defdc1feefe506cf1d72c17022f318b033ff5889756ccd962010000000000000042e274ddcb6310dc1ba6196dacc48aa484fbf8d1229277255f24c331ad8ab6200100000000000000fa110af82bc8810ef86db861c0590432dd987293f7b8d237706b9a765bcc5e6301000000000000003ce145a3a3cf58cb62b7ee01b3c25cc52a6b648590ff66c5e90bc1b0e64a04380100000000000000da6380d552c6fce2effa16055b935d40b000f42e9a7a448a50ba273b7a67977e010000000000000038f1ca1d81d8566cbc6d6aa6caa86032f144799c0c121aa8cbb6a2cbc1c4da4501000000000000008ea2f21d319cd3306bfc46590c6d06498e6884d90f42682e4f7a104ba1b668020100000000000000eac271b2814c397e8bdcac88c9a364355ba86119de6f1d221d6558bd2c68fc0f0100000000000000c2e8b87c8ee12b9c2f412824e560d9757a6df8b94e9958dde2a03a4455804d7701000000000000004a15c3e9ad2addb360bccc771dac1498848b9d4c99229616d0aeb70d4c8d965c010000000000000060d180c03896ded51c26326c10ff7d550e2f7ebca27b80980ef5305245caa84e0100000000000000d6402926eff84f7793792b280b612246e9ec5ea12cf9341af2d6426ecf884a51010000000000000012fd99c560cdceef73f67478e8fdf160daf585603f229ca34d619d303e55503501000000000000002e1ad1c806e59126783cc0ffb2634c247296ae93e18381c543f97388e264c77c01000000000000005c68595ebd57820549b8e3437d02c9f4a7a59973ec568c4a018615de33e387570100000000000000aaca301bac17ded6fc8b7b18756214888367786ea19313015ef8142300803e1d0100000000000000880f8d97fcdc7021b3c31b5108
33a02ae0b3cfab521e858527128bb41b69737c01000000000000005c027eb5abd497132af46fa7906a2bd019482c0b9579382cf99a796f42d10e710100000000000000842bc40308e126f84e37d2548ae59e117678b301b9cbb81825f76b51eff3353c01000000000000007a2b08afc3ae8dd6142454c92f89da44b59d48b77a4cb2c1f70b802d81a5616501000000000000008eb08114174368dbcc919de7a24c9ff92fc687e7460f5081f2913de6bc55a02601000000000000001accbd204cce7d8a5d5af6bef8e16867f8c8117e90f6de50ad0a43032144c80e0100000000000000deddb694fb8d803ab5d187226dc8af702e268413e02f11b7bce61b54ba49c37c01000000000000000c1af511431545658f228841176c169c225b0fd31cb837bb6874507e2be1667e01000000000000004435a1e26a4307623315c9ae2517542e2cab93573882b9193de8f45c4a35e12501000000000000008ed8b2c7248c7145ae63f0f007f1f5d10e13bf77dd751e694c1bba068b7200520100000000000000648cba1b55dc09e1d0aafafa0a48c7d1f59914335da50c439aa76912ae33f028010000000000000064756a866505ec05ee12cbde1c62812a3f8bf74358a110116d3026b9d422500b010000000000000058c724a9c349fde3719f36a85ab9777c77804c6f136beb400597afbb340b0e4a0100000000000000187726f667f54f5f86407f33dd7ebcf5f3cd6989d513f3489c419c1742a0981f0100000000000000c0be316574806d2b59d5ea77c2b00ba9f473d7111ce31afd9ea35e1a8c2dd7360100000000000000f678ab316ca11894b3ee1955fbcc110bce84f7c673ca29b5cdd5869b880a5e68010000000000000052d3be9bb543ed32f4311acc7b3ff24b02de1b38f369169a5c70d7eb8f95935c0100000000000000d8fa2576e4329e51e2468031a612506121a7178c1085840b0d55192e72aa304c0100000000000000e0b214ed0fd7aa0f4f7eb9704584494273cbbddf5e0a7c8e822a4b41b6a0991c010000000000000046a66f7d163b96dbeb44324a9f2557091527dbf83e1d36a2740fb8f549372f5b01000000000000008e1b17c7c4d08fe7f956609521f4bffe7f941f5067bc6fbdc835a2d8ac294527010000000000000082c8f9dfb03c214735c492afd7792029e55cdcfddb6c0b4a3be1eedb4793b81a01000000000000008447eb61c24a4403d3191397bb12d7080907666a1701111f9f7be5654a52063d0100000000000000f89b7d34e30056a8fb7429462065acb315fcea481b84a9c20f8a1125eee0106001000000000000004c3b7b04d9714ccb6024a358f3e41e93d82682567a894a0fd030afe03f0b4a62010000000000000056c613fd149197bc382db5aa69
ca4f1349454a9fc5e4aa22f1e9c3d8852c84650100000000000000767ac80a7d18e6862e1053d9f496b434a15bf7f7d4a68e27ea23c1bbeb5cb24d01000000000000001072db214c06b269fa0ffd213780ec3996114b0596f25d9bdb01e7a7bed9bb2c0100000000000000fa62539bb1779616fc593111aac86c4840f6da936fb4091d6b3d13e9760d9044010000000000000046694ae4b8e82c078e2580b0a38a8877c838ac3ea2144f7fb9bfefe6642fca4d0100000000000000e6594a327ac9dd82bbfd0513ed672675167c55177a17273467bbd220469ee5350100000000000000a826b82af0d922c80243de032d68f9b289e141257dbe8de0dbfec11416a4570c01000000000000007e071d68ff497493bd4fb4defec748df5a92d22d05a658d34ba041c1882ae7100100000000000000b87d6ffd38143c176320d877c3a35b13c35279a86687e154b58c0ba527932a3c01000000000000006096e853ce12c7ffd178bb26b044ae14ab0e64dd47b8c956574f0d4d2e8bfa68010000000000000038d8ae2e30bdf01914d1a945649a8ed71641507d9560694ee9816f5f3e3b201c01000000000000004a22a2403587a47cfb1f62228ca880c0dd6d075e39a3a3e95dac79f9ca91c95901000000000000006aadefd8bce06224db3c08058b697d1c5667c8b051deca1e568e98f8636e002d0100000000000000e8d8a3ab46970ffadcab32b165cefab5ae9a330fff302b215ec0d72ad73aec090100000000000000aaa07c84b5690de59dc360397351cd7a04ff14c4a7acfbf94f06a11a35b9dc500100000000000000688af6f5926b70a73d000d6de3f11a58bbcc7ed12f491a9a664a7d34b293c20b0100000000000000ce995e13776450fc419200f344645dc5ec2ccad95da141a839adcb93784912520100000000000000a0e844d2b4a21ca429f4ffbb8ce70f34851220bfdebf29554aea2d4bc5fb9b440100000000000000f08014d7ecf7f84e4cc405b920998baa444e0201faffd2b8470825c467d7235d0100000000000000640a3a2794bd7e47899cd1d7e0ac1daabe4c5a7d8860a962637a6ee193af08170100000000000000401bb8d2fe09d8b6d04e9323a673cefa4d6052bd65286919c68fe8531321a64c0100000000000000148798c41d796e1b561e1ef82c6111e143d70beb79039bbabc3709b5ff957d520100000000000000a612c3e9b9d981933f4f490a7b499383ad1ec1e31c394da8d9f50098a8cd2d6d01000000000000004a0501294b8029e24a88b4d386e7948cc0fd3a5bd099bcb4f295f15d51a5857d0100000000000000585b918383075118154616fedd07fdb5b8087b64579f79975584889c4e38e8060100000000000000e4d11597912f30bad9257a859c
aeadeb48a9064fcefe0ad8ce9bc5960fca0d100100000000000000d6b5a996919814cd3a438850c8655388c4c454e1d0d4c892e1a8d23a092b3a5f01000000000000002cf89e6d048e7759e19c72d50ad444b1e961b333ad2f848d2791e059c4caa73b010000000000000058ab66b2ec837ff9106bbbdcd91d42129bcd204b4f10d27432b5f52717e32c5e0100000000000000228fb2a647a6c627a6cbad1ed196b56462d1aa24d35be38f4b27c852e487c2250100000000000000d0fff1f8dd08368ef87606ea228435de58edeccf5639e88623bb6b8d6ab066610100000000000000a6adb2b070db495aaa8b42273146603fef4bb3439d89c223e7678a8730ba35070100000000000000ea05c190c48078fe0359256b9801749bcf94542dae3a300f433654b8de64231d0100000000000000268f481d9116197d487ae4955791849a1d5f9c68c853c4bd47915033277bdc7a0100000000000000089a6ace8a07d359554a589baafcc9f8b4a61abdb1e311a50242fcc7e87413520100000000000000e0955ff956b6ad5b1a8a0cc7341d9c17829bd3f79d1e523f6f76f1a01673024e0100000000000000702eb01695e26f92f0ffc885ee0103c13571a9a4cb1c383722aaf3e38f6fc8380100000000000000c6a297a97d28483000abb87cd5309b4c316dd007cf3957a0f7e6e10b3867bc6e01000000000000001233162e12deeef969f4f7a3835724e676e94a262fe52985547ccf525ba19d1a0100000000000000e01777e43c6707d113461e7f3c13931f6e743215f1a873862705fca4b8785e25010000000000000040866ee02fb29e38c2fc4e1ea1a3cbf80061e48d9653ccfe705d04292d7da62201000000000000003e859f6b7e34f035889d6a77de24bc76dddd9c21fd2943bd98b9cad3128d2e520100000000000000c86646ae19521395cdc1b68799095c29dd44a9e6659724dc5ba7347875c0696001000000000000002650bc867719e90c43b97dd6680a9c05b3dbcd5f99db413e7c8fbf3070f4975d0100000000000000b61f34196133d7c9add42fafd835e77516553cb2bc453362a68589197e6d7702010000000000000052e018ab8de9f83f117c25e55e1fafedf2658d2b13adb1287f53dbc96634335c0100000000000000903d6ae813c70e1b8a123c44bf757f5efbefb1a15a3f3cee133cfcacdb21c5490100000000000000c89f811a11e9804c1c4030b05e93e2120e46d78d52d2c9eb1bf137efd241c61c0100000000000000dccb837dedb52b997835f88a40f3323886328389004e89c9df128f4e2f99df000100000000000000bcf7e0c0ec0581a6bff67f33fb40a078ecbc4bc61567a50af211a1a9d80dfb520100000000000000a42b2f9e12f211dcd50b9a1117
9b0dca805943af2c934488e9a9231eea058d7401000000000000002438d2c7aefdceae24436b7c67e5e94e0f5647dbf053b99ab8836c498a7f277a01000000000000002e87e5b7c59c45c4da2c6c76f194372257bb0d942e971a6b38c0363ba935c640010000000000000030673798468c2fb61816c9588b220625d24943830de0fd9b15438b348060f4750100000000000000889d752ae5f531fd6559effa5c77a4ab30805a74a85881fb22f690215f297e050100000000000000544e1fad3dcba3a935652342e6cef05d7e5f7b67eb6a523f52dff7872683d21601000000000000004aecea2529a1e04901d14672828a7a244248546de6c7a4347f00298bd7cd58000100000000000000aeb888e3fa12ef194d187e32c4438314633987243b4be1ad75b8a4ab312e1103010000000000000076168fd4cb72943677f41c883d5950a0262e633b0695576a2e7fab85162534390100000000000000482b4426073f194d49705dffaaee9aacef27fd4fe5cb33c25855d0368fdb1a4f01000000000000008889361702f5cb1061a697985ba1fe460ab9180dc973307d36235db478b9722c01000000000000001c08ea2b20b0943a0f4af1574ceafe4211bf4f1ba3708f0ed56723f51ed6ab2b010000000000000058445556f8f65ffb279e075a9c235cb70121620eb51b0ef6779fdb1536b0bd64010000000000000030a09710ccfc8f18db0ba7b673daf98e19fedb189f844b7127eb5a348b3ae2130100000000000000ae9da6ab58910bd5842c103a816b2b483072e2f5a71c73b38f725565d9be0b0b01000000000000006a2d4ee2416aad2e14fa8322d189a9680253e40cc5fbcdd7c5ea94e9f791e635010000000000000056c6e52b6ca6350392a56c4ff42931ec2c6873ccd7964e1dbc8b5eb4c225e81c010000000000000052222f72eab42511f1bc3c5b4271cdd3bbbf84a755183abdd02f56930aa9fd10010000000000000032753e56e472dea44aecb76aa1ebf1b41859b14fb935a990977eeabf64032a360100000000000000c8c11088e72d076a8df738a24f3b63677495377c675b8e1377d3f8ce9a7fc6640100000000000000608fec4564befd5867b54a37311cc1be2292825c55eef6bf5a651fcd8191250501000000000000004a3672c6ca9fd22c28139bdd742b21dc736487994a9e8adbe1fc9d838b65476501000000000000001c61da7c83d91b12ad017159c5161de07b82cd4ce88696e2a3835c87db94b50e010000000000000072a07ea9ceecd162f8029cc58e0217169206ec69bfd4d5d45a85ac3fe64d9e580100000000000000a46dc30d881365a57c356d2b2c730449abb16e87a233d986204f03071aa13b7201000000000000009e2fcf735cc82fbbde1c14d1c6
b6385a7f40b1b85717d8ae06c6ca1bb3ceaa3a010000000000000024175533fba299cc842442773f2f043f3bbaf67ed9e1ed2e44740ab4317c87550100000000000000ca3a62f97737cad7f7b0ba76e29bfc3546e99115235436427087bacd7ef6726201000000000000009a424f103bbcb2212b61205d216823dce73b807f27f5c24b8e0fcf460554696901000000000000003c62b94c8588e64334f5238f1fe39bb8665d6d2396c614fb1f0094acd94fdf1401000000000000001c01d34a3af1a67c0011564426df4aa7afe0d4f270c83ec4f1d7a54ecf057e2f0100000000000000c8d5277f99c6d34149e5e08ddca8edb2cb012b173fb96ab66ebd1e43879de62c0100000000000000fea159528386ca237ea93deb9dcb910e16272b24d351fa0d065cc29ae6a8a63801000000000000007ca2ca060d3c746afbaa98d610fc05ece6e15470506172a844138b1e34d38e04010000000000000080a96ace5196a51b6b17b7b4976282df1e9a9b62e98d18a83a0f0c71f0dad8400100000000000000f29515bedca3dfdd9f4b0dd5573fb5365ecb64afe2614beb4daed99d90ca314901000000000000008ec48eb902823704af3070777bb462785166b2b2d24e18e8b5dcd30d5d4856340100000000000000e6483b29203441853526bdf735ca30edab2fe0365fe84e29c51bd437ebee7c330100000000000000a84c137cabd458211c20c7ebd6ab69499618e090cead62991eb23c372f7a1e74010000000000000082028b177d8811df7cb1fb3b7138b58a767bf0058a36e5b949d9de08063fc96f0100000000000000902250f658fec89bf1e514f991f92fc55b29efd76b435bbff13947a8dbec1d1201000000000000005c64f23ff990859550aa77f3a86068c6da68171586efa4867cf9a4ac09be051a01000000000000008a468fee7d82ff5c7fe3799aad72d96e794a4d88b365a3280825644d25d9f92b010000000000000042dc3e1416a1f7bf0719ecac2789c08ba4595dae10d7b0f3adc5fc6435f0d5330100000000000000960830d9513a0e2518880595395503ec90dcf7c9956bb8f4f12424288f50031d0100000000000000e63332c8237e79e8147b3152db55f98ed7a2b746b1d4cf2295ddedb853abf510010000000000000058d1efe485e09865b483c403a1d47813eab134d11d8c92db7e66b7b19117505a0100000000000000aced024c01687658713b3dbfc8d41063fbf7db90fe5a47c83886bb51ca7ca2260100000000000000d86d05fff521e4d46009a2e0df9abaf5d9f63a9a2bf2e783a59b5f29a9e15c36010000000000000056b1aafb1160013fabe4577211a764230292e5cbe59fb1c7b0d3c0dcace622560100000000000000cabbd685cab2c594a2b4273896
8d56201e590683870027b3ad6be3617d966b750100000000000000fae9a33b3de208d85559cd533861c5c37863333bb852631ad46be3165c8ab03201000000000000004e20c133f7e99fcb94951822bbd58107022dcd50c48c03708fbfc79a54c86d510100000000000000b2e41c634e5e673a5ea21dd806717466fa2ceb279a55a8e774868399488c996a01000000000000006a3398a599f2f1cb856b772319ea571aec7e988abba58991b5fb2dffd54cde4a010000000000000034c0505fb88a037bb69fb1a4debfae9bb73817556347a624a135d72144b8b95c01000000000000004804977adb1b062fb72a1d17f8c6d42368860d1ef48fccdde0391518c3423e240100000000000000603e917a6977f283d18a0da400ec87d0f16f6a511cc68092124bf2dca9acad1301000000000000004c41aae61cdb1f2456e7faaa005e85365a5ac657d8f9b7abe6456ac3dbd49c590100000000000000ba6ef6af1bfa68003c889598792f299f79b73ed58a5c584feae0168ad4c3ff6f0100000000000000a48135caded700203e1953d41ab7418d3a0ea4be6c5ad9643a099615ff58292a01000000000000005420027f4a8035711041cd1a39230e80895e39906f34ffde192df9aa3c1c810c010000000000000032be102a24c2ecf582b9e593dac8f417924651f3bab14adab907e833b1eb8953010000000000000038ac7dbc87fc3d12c2d93343d55ac364d15df3abcf997750bd242188fba3b43601000000000000002442758774e775068c74220866b2cd27bcac9cf5f84da054a391a4f263ebf329010000000000000010295f1846d4891b952226b23b02d34e0440e78dc52b054ef2d16e963228e40301000000000000003ae275a0b203a05bc07e96ab102645a968474f2953f7c360c19939f9509bb367010000000000000014c4fd324955a25b44784231523351a170b791b927da2718a9bef9e1b84c70120100000000000000d031cc5b1aaed244e6fc31552c3da8c4828fad90ed258a07f194250f71240578010000000000000040c126a7025961a41bf05b53af9798aa2aa3c7bb3d98e82659b5f04563c1fa5f0100000000000000ac2166d5634e631b96cdf44c4676c208ab4aeeefcb6b17fe1765e184b7135b70010000000000000094ca3fe24a4e1c62e189e436bd952077091dc33046bcf9c2b1fc61077a754d2c01000000000000004040782fd0860c362c58f30bc8b4c5442794eea0d8eef07eb967494c10a2f2500100000000000000eafaed0fe1545946768a9716ff1fb375116f68a7d529df45445b3e5d69cf23580100000000000000903ea8a34a7166a64e18a80c109bbd5a0b0aed20796e0f790b833e4851a084420100000000000000f8038cb26a6700be115e9cf5d1
8b4e1ebe3035b423b8a3fede633ee6a76b5b0201000000000000001451c142367a76b17c8f2f3a2e43b671d165ef793033b1394f06abf4d7d4501101000000000000005a378b6b9203df494f791e4544c0bdbf257244fcd262b5dd1322bc679a2f992d01000000000000002803a214e2b60b9a1a7062115c44c03c1bafdb9595f6beab8d68f103ef65a07a0100000000000000e2a67e80c18ddbc02ed0c1dd94de28d3ae64545510bab8441de7f9619807800301000000000000009463ba6873947fd43d601bfaf816b8f2856cd8164406079b302d898300c2e51401000000000000008661b275126f190b8b03a015c482b6aea9f7a8a520669495f9ad36382a9a416a0100000000000000068796a0a56f351bb079dc8c6ee8fe05d0c22868aba30883b7c92a771b7bda5c010000000000000036a2563ac9161109d3ca6dc1943f75e2caee529b099b6ce8d38f4da089f268070100000000000000c386d5e8612b45057bae741e105afcde06af5b2248a3944fc5f8c2c1f49c4b3b0446524e4b9d7001d102be3176ba563578a27e5d499afb69c3f1272f8f33fd8fd28c800625fce8b053d50100000000000000d1cbb104eb5f010bf887b8d3ae2ecd77572bb17c74b2688bfa2cc175101f33b1010000000000000036c3ba9b05f6814e0de9a3ab1e108141416c95d784122b76ee52e3b0e8c86aba010000000000000047348abc24fc6f4797ec0ddc9c55f25a54bedc2ce7b2da9ee119d33f09f0d1e50100000000000000ddffb4a5a93a9e724db26abeecfc5fe2e89cbb9a5050d44d0c3a219cdc932ac401000000000000007af1312fa10df57db7a897af5faaa7d14a66dee1c20972c13c8e36fd6f5412270100000000000000afe37da24aa41def0065c9ed84db03f4cdd2fbc34f675b0942a135ce881fe47201000000000000001c29d7ce15b7fb3d22341fed44715d8f34c356a32251e6441825006a62b72041010000000000000090bad22c20a221271b1796a03386d2ea1e4f1707abd25a77f802d4144c3b3a2b0100000000000000b4295abb37df4a3bd35b74e4fd436154812c12f136945c6a6e745d438c22598d01000000000000008b1405c139148dfaa0a4d9cabd6b6f005dce1848df7fd30bd7eb0ae8e00fa5880100000000000000c98bacb2b0b5e88da24060475191f4ed37af1401b2bb037ce499e3243dc3c9cc0100000000000000b74f28c43f03e75b603d48d965006da8f539ed1b905e1e2460ca562a17ef8db301000000000000001797dc668e97eec57830acc88c1736599a1d1eb8175c63224fe6db9d4b43f9850100000000000000381bb2f0ef2a9384e93fe56774bd6417277dc3a5a76d15ac55bfcfdd0f907787010000000000000032056774822ed8ebdfb83d
794aada5199e601b7ac327698e1550ae7a0f6758620100000000000000bd5a26ac70fa1eb1eaead625f4d29f53eeb18f45019407cc116bb1ce52d1558101000000000000007f6ceb4f00bd5c6fbeec01831f900b83fd7b4e0d13005db6c1c3e8cbddb957c10100000000000000e1f6b1b1f31e7617c76c91fa702ea40b34b20ce5669f33d66811e5f245f1da7d0100000000000000694d99913da55b6ee2b72224469ce328a615db6479826df186ac516d0435959f010000000000000023e2c6cd29c9287050211d30ebd5af88680678d40c841e7797709a6f899a26840100000000000000a130eb8f61f41ca057c7ee5ade07b74ded056b6cb7d8470805888a269cc508360100000000000000f1e59c4c3d8bbf74609dc42e2678f28d3e79bef0c057cc2c15cdc448bea17855010000000000000036cd1eebeeaea926936837b65de98409c469d2438b9cd13f9aecd9828f2320f80100000000000000a6e890135689a8333ca6c55999d8558ca4ff8ae7f2f389378d2efd98d9a7012d010000000000000017b50d72a7b3109874ce67d2ea7e22abf891b5faffa3ef4a8245d54878cb021301000000000000005e7a5c5c33ae82aa725c36f84cb900a3dc5f29fc0331554d95203a345a31a9c40100000000000000c02c56fec10f424876ceace65c21c231214c1ced594725a0d6f5abbf34716de60100000000000000e39ae6044c2a41c024df12bad4484af6b824a4d1da2ddeb99f6751a2d8f41b6b0100000000000000480eb3fcbf3c885bb57d9dc1ef1397117f69db1b7ea14483029e28106273b4850100000000000000a5ce03e4e99d5af6147bfed01871a1de10459f121b205faf52c52ec6eecc9a210100000000000000f45066c35babc6ba27b7ca056df70f04dc877dbcc0bd9ac8c9d82d56856b1500010000000000000098cd7c69c861ef3ec32eca898456b9bbca10f8b2b5589db075885416e80544ef01000000000000000bbf8f4a346f74964fc07987fe00053d72f1f132d4964e7e5012bc9fa460a5030100000000000000d5703af4abd74aea1ced12395b3557dfbddf41384d20a15e3c3d70e6f696877f010000000000000052d43d31d31eba86ce8ca4a89182bd3a2963a2e5b0224a80780cce6f2cdf6c330100000000000000a8331fd9318a6f27d2a3da1dd8b84dab2ec7dda2a464c648e8b8a7454a75579e01000000000000001bf458138d654c66325b2b7d3aff0c41109f3f874b11bed59f7c4ffd702fee210100000000000000a50f9e7484ff6a8ba136ee3842d0d5b9db4e82bf55ebb1f7e20ec903035b5596010000000000000087fa0fe1dd2374248be1aeaef7b0bde64f08e55aec77d6b0b8ddcf1615e01fe70100000000000000596d81e0039b9ef6266345
878349fde921c94c20d16b7692ffc1b089807cbd9201000000000000002868883051b17c28d5cc53dde5a69a210208402bcead13086cd43d19aaf820d4010000000000000086299a4880451a9f5793f9b74d1e680e6deea0bbe84c0730dab7a164e23df8a50100000000000000c1f633c7f4c65a703def23d07e85e650e7a124383b9f4e71dadf6e938f2cc4290100000000000000182da31370a90a76902f45222b29432a72474fe3aafa7c62d368111f6214205c01000000000000008fe7f46a1b1e0dd662da1eef2413c4a05f8bbbeb03ef7c71d1a8f08bdb53ffb50100000000000000ec189cd23dfbcd51661cae853e6dc2733391c4375b75fec40b37ab56bb1a781c0100000000000000edeca0f369da1d06ca877e340a26edeb9b0ac6d55c49a3d9a39401ec27e4a8dc01000000000000008b6095191469b75cbb2c9af9c72d6b4365e53c06e928b15580a725edf2793e5301000000000000006a98fcba174345cbef21cf2390349028093f1c485c559afa25e73b4c7585dd4b010000000000000020a94668bd0318e90ef37861e37af261812f4dfee97669ea48d02871cabdcd9801000000000000007a20bf7c9973bddaad18c1e04483b18dba1574089effee6765b4aab03db6d23501000000000000009267d47d901b7bba2dfe9d9faa6a68e6ff2c217bbe1de2a4522cf94604c49651010000000000000055eb142abb22faa5c26afab0b76b180590f159b688641d9db6f89486b484d65a0100000000000000a71fffea17e3043f4e36c2004cdd7c7f8308e238efb2801cf6607f5e0c5348d20100000000000000382259ea2c100680295e728b078969f0ef7e077f858171c66059d002cad771d30100000000000000e3dd7d97be697c1066d4d3f3bd4e3ac21d935b34bb7bd4c7ddf87c7d7673881701000000000000002f0c50f427c7772dd9c4011fe2e34278efce6aeb716c173a5f4ba00b70f5a73a0100000000000000f4906ffd03b2fa8aaca2c04f6d6a489d9476c3e7ee8a161630b1a8fddcccbbc301000000000000000caa2b70176fc6f9fad111cfbf9a1a8fe7b2fbb92211c36fec2a8102d169cd9e0100000000000000f26c8b40fe23a64d3bf0fbad27ea08edfe070db5f65fb28ec26fa743ad6df6f60100000000000000a606b66b8466c1d7085e0ed1d6146fc322e257a55aae164f3a2e3812e212bd190100000000000000bbfcfcf1c9a6a576d76559e3ef69d5a13a2751f32661c0798d9b32ab017aad5201000000000000007fe53b7d4f2a836b30650cd5569af304c47fbd28d8c7fe39f002a000fd464e7101000000000000006f9e97d7e766c7f369e931933a724ee8860bdcc31dc81108c8a48dfdeb8646ca01000000000000004f1e8a61e5c40431e0d1d9
1cac91735a0cb2a30862d99a3214fbe0d79662cd0c01000000000000006832d483010e7c234350719bec24e9e7af9304ac3280ab24b1c6d7875d750b81010000000000000014903a1932f729eec51c64c8652f5a7bdf17dfa359799e550f1083e16457ff270100000000000000acc1556550579ad3aad1d51c84f355088bc39c586c9ff897dbea3d2ee11466d501000000000000007db403c44e5a3477c87352cf349111957a3771fc0e151aeb2dc46b3be272ab800100000000000000df8dce4bce99d3a6c7c2dca5a5a216ffd023888562026cce394e4d5e06464c9d0100000000000000f2d9e6989cc7f249cb708da9a9c4a2f460532c39985fa30c9f23d8bf30b2a0cd0100000000000000f7f82054b1489f15dfe4506d9f88f579f782d35b8379e497a3708355cc4033750100000000000000c4d1330d166175441af8f807eecc6bc9d196b7ee7199207ffeae982ff2ae8bc201000000000000005e7b52eec0c156214b2bbfba7c059cebd6db3c45835a665e0f6aa35621d7788c01000000000000004c8bd6887657bb9de51fb9d21cb73b4f85d210df5fe526f35bb6a20ef5e570b20100000000000000dd165feb16d970b3225b3854f57a1313ad6f454df614f8279a7f9670bc2e13d201000000000000003bd44e2e82498b2f4f7df63a496a79832d792d30706469b0cffdeccfe10383600100000000000000bc2bb43d3b59fe071edd53a1ff82229754099fbe4866ab14eabebea28acd121c010000000000000081975e9bce27b6c069616c3cbf163ff2a26b0002dd0f7845008c57059bfd0298010000000000000057202418c6101d0a0f8d03b611b768c4b9b8d2254b6c12c614afe86fb3879c680100000000000000f3d58d0f267316d55c08caf4638bfdf4e781a44ba0165a6589a6102370490d89010000000000000032649d7f5f5d7a5480c9988d0e10cf099674760c1799d1d9d286c65b6a089cb201000000000000002c435333bf117e42eb577e7da4817eb42b50cc5569e9636a6ec49a8231ee9c210100000000000000994df0e11b294c602e0b227c41902fe0fb7f9442e0dde56f5e5b48825b90a5fe0100000000000000d99439653859671b34738bcf6e584e65e996e9085b08b3cdd44951e17ac8f3b001000000000000008a3849c6cabe5082e90ec44d3b8d9816b01bd577d5fecfb109a547a5cb10d033010000000000000001dba1dc0533585985bc9a3a1176cd3ef1efc419eab6c7bec10917f348a467e601000000000000002e821f1558745b04cb87be3f276a7d9ecc388756d64fffc17301594f6ce88f10010000000000000081152139c7ffdbd5dcb1a57ea9cc893d3faec3e3c6a71624bc31828897bae9820100000000000000653c227d39cb3e05f2dfcd
02c1efa499ba9f9c866e4116892ef3bdd27789c53b0100000000000000ed452e8c9de0a22c714eb0dad77c86d602bb19cf270c2ef0f043adfcb7b78a730100000000000000a2ce3196fc0345f86c6107b812d9a4c8bb556da96f45534977efc66c40f11e750100000000000000d28052dffc44331f1c4cab46b139a5cd74debc43e753b72076ea53d64ee0f97801000000000000002d20751aa6147a65fc4369ded68e83cd379a4d7b9bcd9b0c08cba4b70abd0c06010000000000000044c2491c0f07ed47038b9fe0715df4880efe2dc0021ab79baa16bdf8801b9e570100000000000000be81a8172aeb78bf198d11cfe9f0469ce948442f29c00f367d036750fc264faf0100000000000000e827303f26bcbbb420974d760094760e6c2be0bc4e95e6bfbaa203fc593dd2430100000000000000b877182d649bf6a3cbcf83d23cc7b36995f6bd15701ac9700b06d0aa753d6d8201000000000000006403c64325daefe5f12ddb9196053f91c80a65b233e9ebbc1750ea9da4e5762101000000000000003b08fe08e2666a1cdeeed6e06de040de1d5be68f4f7fc3f3f5544edd3bf2fe280100000000000000baff92b53651ee2bade8d835e16b9285175db0f478c73c4f15167941ba3ea635010000000000000021981f96f9177d61e09526c18868b549902fe948b3f4939cf0b6364ecfdfa3dc01000000000000004fac5246491c037b0a9e51616990f7f6a9c2e06bd6626b853f3449664aa1d9a30100000000000000ac8f78625e4fbb787873ce0f674a39a0dfd637e507430b776a2740d5cf06317901000000000000003d99caffab70949d7f61c7d413765aacdbcdb91f16f5466f19f632c1482a917b01000000000000003f1d6c381dca5539bb59d5acf90a2df4a75faa3388da0b0121681da1d2e161b20100000000000000826f98f9b44c4d5db76a7a0589bfef47bb56bace0d7da80f09b54d362a8149d30100000000000000920c6f0c681aebc27488fc9bba305c2192b4a158167ac8058ac05d4bac71766d0100000000000000d5af4d6c0ad7a8030ead1d12882e465d6b44b5fcfa381d1b516ee24e525973d901000000000000000b7d96129a974413809d38b63765381cf71a450879526ea81f85279ac62a6c61010000000000000096f8f9a5388bd2d5264095c5bf4bd5eeb2fc3cf0f3c9463d0bd2c5994025e8f00100000000000000399d0657551bc8c81d0600413d6227cb21e3fdad2c48436ecd68e4e1010a07cb01000000000000000cdd32be1023b699cd5367d0c13ed249b92f68f21e95627dd391ee06aa04c72a0100000000000000cb9a71fb5dc862cc5ae578f7a0c972417f29fd76b193e1117ced2abdcb5c5dc70100000000000000ed7ea587204974fd1254bb
7dc1e7090922f2e38b152f4dc08646c00d1e6547680100000000000000fe67d8036100e86613063a48fc47ee960077c2dee57bb7a9ce55957ab4e1a1da0100000000000000f5e73f61a88d305f9f88d3efe06e964140f87505ebd5f644a4b92ae52e5797130100000000000000d6014f78638d11506b5df8d4533b2e3f0cb8ced5f174bb6673b33a5ac5a35f290100000000000000384ea72b9abb03ad899b91058b56d446e9ecdcbb7bf0c833b6553d3723bf85cc0100000000000000699b7afae579652ffa2e23a261220dec2d90d7570d2dec51b871f313574c2cc301000000000000008765598dbfe72fb93c2e1d148ad00f7266cdd4cbdffcec86711a9664501321c2010000000000000052fccafc1c664f8792f23a1bc468fd777fd9da29621f98c64e67f2794aa5e64f0100000000000000e1c4e93102228ef8281e734ebe1fcea1b05a68b69fd583cd5eefb7ac4b86dae00100000000000000feaa69db2241b65fa0e332a77f86f9dfc50c163353b4a5ccc058cf3c6f7725dd01000000000000005f8936dc6ea6bc0e914a77310d6ecd5f75baa11f981e9d6c18d78433d82899e601000000000000000acc7024c10d76dcc968a5f2482d7c5be47abe879e95a2fae7887206fd1e681f0100000000000000ee37ff73d973d47c403f77ae91d1811d82b683bd1d1b4af7d27a822121517ff201000000000000002ae51fb0c979ee950011794994e0d66d4c120d9429cc604dbcfbca94548033540100000000000000ae5ec6c8875f197907e4d3d2278e35240d1912ef2e0464baeafcf01fccb37d9b0100000000000000b16e5080eab65e0a9c24f4c056f95de843518ef3ec91b5dd4b07e2d6cfc49fac010000000000000064c99d806bec03038aa17580cf1437e43db68727821747b89ac0510a60699ba70100000000000000522314443f35f8665f15ba4e61123cc75f707470d1bd9e7a31e4c01acce249340100000000000000a2e8a586a6465f5f0f1f59397e3583e5ce4d04a72244f05031b0780501a8145701000000000000002d46ef1367dee8ac1227ef5f82d5aa6555f6ea215b1a3cc27c3414de760c842501000000000000008ac9ed615769af924b1083f091d560729519d17662c21d3182bf197319c2aa9e0100000000000000104854d7e85ff9a6825dc4de7e668d5345432b747522f73d8c2613ec8c82808d0100000000000000feecb15246516594f044600df85d1654bd36e5353d936945dc16496b77a15f93010000000000000064684cba9d4dd50d31124e12f9c476980c0ffc1f64ce9d33afa675db1bb201040100000000000000c29e560494d8b77f06eb95ec3e3f41b33dc5241be230b76c32b8b6d17c63680e010000000000000032c1db84cfdadc5790e1ec
356cd4c2f5251dc2d8304c64039220ee81e13d45e501000000000000009ed7c59bcd539604aa5ddaccd8c0400cedb6d9cbde7666201dd953ec90cf877e0100000000000000f46f6a6697a3770cac385d1743a437d66ede2537c892ad6fbbc9a5145bc27ce7010000000000000025f2c599f64098c77484791ba096fdea0c041075d4bf37e2556c639f1ea52be00100000000000000fa54144438903fa56cc2f23596fd161cc127d29b933a3c6d23bd66fd15c45f4c010000000000000023b53b773dbbd1d980ba24f8931e5758484070f4b0cbc5347b9acaa0380d514d01000000000000001f81141a8c46fe442e4555cc9d2da4e61c4412c27e6938683eec4bbd43e8879c010000000000000082eb3046dd03a320330c9af754d88c238bb2e6525465b57a042b28e755492e3801000000000000004e49af86a14b8ceeb39bc7aaa567f5c4088df40476ff3e833c5fd298136ba9e20100000000000000d399d14bb9c069b89f60501e66caee61961f9466d5d45488770eea7161e527b4010000000000000035a9b95981c7cef53adcb5a45bfd6b9eac7063145c9c30bcac2998b78c68f53f01000000000000005737f7e7de716804af080f9ddb03352b5ac71cf51c65b7b87c7b3370e3d1613b01000000000000005e00e22fa583881d898d6414bb3c202aa74e3cad4ea531cb0053bb4823098f100100000000000000bfb4faa776bd4cec6ea19566475857efae2cd2afc009d3d84c9c6ce68e2d166401000000000000001bf3029a3c9caf3cb5bfcbb7dec6fa419b3cb42d85970a305e3f76c01afadd7e01000000000000007814f049c3eeb469a0241e7f91188660d954028d612d1730aae41a788861ed6f01000000000000002d670f6ddc4dbe590bc6890ff5d40eb93afece47c5d4897f8d5403e348ec261f0100000000000000fc43ea5f56363696ac1de3106b38bb7c7efbb1ced6597de27b34b64d6b7c46c20100000000000000f472db5155597dda08b4e8d82ae9e224cc7ad60798ce146eea51897b277939170100000000000000ff185b43021488634315977358d816c5da8520702359b6f1d2b6fdb8c0a754ca0100000000000000539c6a9cf39788feef973f48f54c2dc4863d3b28b5af4e08579471056a262c600100000000000000be53158c79e951df2b9ba1565182bf27cdecca36c4a28acd5576a909c989d86501000000000000009af27f2eb469d7d1db97d8836dc18d5e1414c47a9da27c6e4d21e21e33edb26201000000000000003f8640dbb084347eb3623a97c1304cff9c8f98516b990280e163a5ce35228a8d0100000000000000bdd403ef085a334fabd948e159eccdaecdbf24ecb53b984f440f3e074e96d57901000000000000002daa75813c73ca5543def4
d0891dbc76e450b9fd4802a0bf80811db845f927640100000000000000d8936d6b05ca589a7400ce46c0876e028b383083c3c909e7434ab1a1228044690100000000000000a9a8c1d21714101306499e029eb6472378cb2d5ff5d42d05032a204738bc23f80100000000000000b8b23064d88162748adc202060ec705772b975e62938a3f2ca5b80f4e9db942901000000000000005d93cb9ed6b2d69d022f2e3d3d0ded29a29dba0982b6dc583664bb16408b545e0100000000000000d5e192a1d8666467c694a30fa1ba6420355bf0663983e14df991b9f97e25b91d01000000000000007be9a15295a120bd330f07e9295800e06cbc2165decc736f9d5f8ec580efad150100000000000000b13857cd183fee7fc5eac82874d253091f006d1a2863c9ba5d65b1367c68184a0100000000000000338eb7fff4c556816ca861e0c1f7111eebabc6efd4bdda9252b77ef12678bde00100000000000000663bc85589190c716bf3a13d579e703994a887136896a5c33d79469f9cc793e001000000000000002a977454e0036aeb0662937af4ca409dde3188c27b49c7d2a4e722c5dbfdd95301000000000000005d99312941c8cf08c1e4b8536ba520efe321862aea306a013f6e72df904aa99e010000000000000055017cbfe6279168629e1d902ab9368382ce6942699a646c97399b96919597300100000000000000b303f9c26f683847adba82de29bd2328a06c66c994e0737532f0978f8c64b17d0100000000000000f3b90e1e57d2ab26c6c1d0abe4ef8e306dd65f6325722da00655b160b27a5e8a010000000000000000000000054241424501011220a3a94a9199bccf411c3355d1e323ddadb33f7fde6d460f9aa0fd057fb161d41051581242634641a22de40fa9b30d6ffdfe8b1cfac5b7209a217196f25b85")
- r := &bytes.Buffer{}
- _, _ = r.Write(digestBytes)
- digest, err := types.DecodeDigest(r)
+
+ digest := types.NewDigest()
+ err = scale.Unmarshal(digestBytes, &digest)
require.NoError(t, err)
- require.Equal(t, 4, len(digest))
+
+ require.Equal(t, 4, len(digest.Types))
// kusama block 1482003, from polkadot.js
block := &types.Block{
- Header: &types.Header{
+ Header: types.Header{
ParentHash: common.MustHexToHash("0x587f6da1bfa71a675f10dfa0f63edfcf168e8ece97eb5f526aaf0e8a8e82db3f"),
Number: big.NewInt(1482003),
StateRoot: common.MustHexToHash("0xd2de750002f33968437bdd54912dd4f55c3bddc5a391a8e0b8332568e1efea8d"),
ExtrinsicsRoot: common.MustHexToHash("0xdf5da95780b77e83ad0bf820d5838f07a0d5131aa95a75f8dfbd01fbccb300bd"),
Digest: digest,
},
- Body: types.NewBody(body),
+ Body: *types.NewBody(body),
}
_, err = instance.ExecuteBlock(block)
@@ -944,21 +948,20 @@ func TestInstance_ExecuteBlock_KusamaRuntime_KusamaBlock4939774(t *testing.T) {
require.Equal(t, 2, len(exts))
digestBytes := common.MustHexToBytes("0x080642414245b50101ef0100000815f30f000000004014ed1a99f017ea2c0d879d7317f51106938f879b296ff92c64319c0c70fe453d72035395da8d53e885def26e63cf90461ee549d0864f9691a4f401b31c1801730c014bc0641b307e8a30692e7d074b4656993b40d6f08698bc49dea40c11090542414245010192ed24972a8108b9bad1a8785b443efe72d4bc2069ab40eac65519fb01ff04250f44f6202d30ca88c30fee385bc8d7f51df15dddacf4e5d53788d260ce758c89")
- r := &bytes.Buffer{}
- _, _ = r.Write(digestBytes)
- digest, err := types.DecodeDigest(r)
+ digest := types.NewDigest()
+ err = scale.Unmarshal(digestBytes, &digest)
require.NoError(t, err)
- require.Equal(t, 2, len(digest))
+ require.Equal(t, 2, len(digest.Types))
block := &types.Block{
- Header: &types.Header{
+ Header: types.Header{
ParentHash: common.MustHexToHash("0xac08290f49cb9760a3a4c5a49351af76ba9432add29178e5cc27d4451f9126c9"),
Number: big.NewInt(4939774),
StateRoot: common.MustHexToHash("0x5d66f43cdbf1740b8ca41f0cd016602f1648fb08b74fe49f5f078845071d0a54"),
ExtrinsicsRoot: common.MustHexToHash("0x5d887e118ee6320aca38e49cbd98adc25472c6efbf77a695ab0d6c476a4ec6e9"),
Digest: digest,
},
- Body: types.NewBody(body),
+ Body: *types.NewBody(body),
}
_, err = instance.ExecuteBlock(block)
@@ -989,21 +992,21 @@ func TestInstance_ExecuteBlock_PolkadotBlock1089328(t *testing.T) {
// digest from polkadot.js
digestBytes := common.MustHexToBytes("0x080642414245b501017b000000428edd0f00000000c4fd75c7535d8eec375d70d21cc62262247b599aa67d8a9cf2f7d1b8cb93cd1f9539f04902c33d4c0fe47f723dfed8505d31de1c04d0036a9df233ff902fce0d70060908faa4b3f481e54cbd6a52dfc20c3faac82f746d84dc03c2f824a89a0d0542414245010122041949669a56c8f11b3e3e7c803e477ad24a71ed887bc81c956b59ea8f2b30122e6042494aab60a75e0db8fdff45951e456e6053bd64eb5722600e4a13038b")
- r := &bytes.Buffer{}
- _, _ = r.Write(digestBytes)
- digest, err := types.DecodeDigest(r)
+
+ digest := types.NewDigest()
+ err = scale.Unmarshal(digestBytes, &digest)
require.NoError(t, err)
- require.Equal(t, 2, len(digest))
+ require.Equal(t, 2, len(digest.Types))
block := &types.Block{
- Header: &types.Header{
+ Header: types.Header{
ParentHash: common.MustHexToHash("0x21dc35454805411be396debf3e1d5aad8d6e9d0d7679cce0cc632ba8a647d07c"),
Number: big.NewInt(1089328),
StateRoot: common.MustHexToHash("0x257b1a7f6bc0287fcbf50676dd29817f2f7ae193cb65b31962e351917406fa23"),
ExtrinsicsRoot: common.MustHexToHash("0x950173af1d9fdcd0be5428fc3eaf05d5f34376bd3882d9a61b348fa2dc641012"),
Digest: digest,
},
- Body: types.NewBody(body),
+ Body: *types.NewBody(body),
}
_, err = instance.ExecuteBlock(block)
diff --git a/lib/runtime/wasmtime/exports.go b/lib/runtime/wasmtime/exports.go
index dc6f3a6f61..ef52eacbf6 100644
--- a/lib/runtime/wasmtime/exports.go
+++ b/lib/runtime/wasmtime/exports.go
@@ -24,6 +24,7 @@ import (
"github.com/ChainSafe/gossamer/lib/runtime"
"github.com/ChainSafe/gossamer/lib/scale"
"github.com/ChainSafe/gossamer/lib/transaction"
+ scale2 "github.com/ChainSafe/gossamer/pkg/scale"
)
// Metadata calls runtime function Metadata_metadata
@@ -67,7 +68,7 @@ func (in *Instance) BabeConfiguration() (*types.BabeConfiguration, error) {
}
// GrandpaAuthorities returns the genesis authorities from the runtime
-func (in *Instance) GrandpaAuthorities() ([]*types.Authority, error) {
+func (in *Instance) GrandpaAuthorities() ([]types.Authority, error) {
ret, err := in.exec(runtime.GrandpaAuthorities, []byte{})
if err != nil {
return nil, err
@@ -78,7 +79,7 @@ func (in *Instance) GrandpaAuthorities() ([]*types.Authority, error) {
return nil, err
}
- return types.GrandpaAuthoritiesRawToAuthorities(adr.([]*types.GrandpaAuthoritiesRaw))
+ return types.GrandpaAuthoritiesRawToAuthorities(adr.([]types.GrandpaAuthoritiesRaw))
}
// ValidateTransaction runs the extrinsic through runtime function TaggedTransactionQueue_validate_transaction and returns *Validity
@@ -101,9 +102,9 @@ func (in *Instance) ValidateTransaction(e types.Extrinsic) (*transaction.Validit
//nolint
// InitializeBlock calls runtime API function Core_initialize_block
func (in *Instance) InitializeBlock(header *types.Header) error {
- encodedHeader, err := scale.Encode(header)
+ encodedHeader, err := scale2.Marshal(*header)
if err != nil {
- return fmt.Errorf("cannot encode header: %s", err)
+ return fmt.Errorf("cannot encode header: %w", err)
}
_, err = in.exec(runtime.CoreInitializeBlock, encodedHeader)
@@ -128,8 +129,8 @@ func (in *Instance) FinalizeBlock() (*types.Header, error) {
return nil, err
}
- bh := new(types.Header)
- _, err = scale.Decode(data, bh)
+ bh := types.NewEmptyHeader()
+ err = scale2.Unmarshal(data, bh)
if err != nil {
return nil, err
}
@@ -139,15 +140,18 @@ func (in *Instance) FinalizeBlock() (*types.Header, error) {
// ExecuteBlock calls runtime function Core_execute_block
func (in *Instance) ExecuteBlock(block *types.Block) ([]byte, error) {
- b := block.DeepCopy()
+ b, err := block.DeepCopy()
+ if err != nil {
+ return nil, err
+ }
+ b.Header.Digest = types.NewDigest()
- b.Header.Digest = types.Digest{}
bdEnc, err := b.Encode()
if err != nil {
return nil, err
}
- return in.exec(runtime.CoreExecuteBlock, bdEnc)
+ return in.Exec(runtime.CoreExecuteBlock, bdEnc)
}
// DecodeSessionKeys decodes the given public session keys. Returns a list of raw public keys including their key type.
diff --git a/lib/utils/test_utils.go b/lib/utils/test_utils.go
index a9a80e3f80..6e9f87ce3f 100644
--- a/lib/utils/test_utils.go
+++ b/lib/utils/test_utils.go
@@ -21,9 +21,6 @@ import (
"os"
"path"
"testing"
-
- typesmocks "github.com/ChainSafe/gossamer/dot/types/mocks"
- "github.com/stretchr/testify/mock"
)
// TestDir test data directory
@@ -77,14 +74,3 @@ func RemoveTestDir(t *testing.T) {
fmt.Println(fmt.Errorf("failed to remove test directory: %s", err))
}
}
-
-// NewMockDigestItem creates a mock digest item for testing purposes.
-func NewMockDigestItem(i int) *typesmocks.MockDigestItem {
- mockDigestItem := new(typesmocks.MockDigestItem)
- mockDigestItem.On("String").Return("")
- mockDigestItem.On("Type").Return(byte(i))
- mockDigestItem.On("Encode").Return([]byte{byte(i)}, nil)
- mockDigestItem.On("Decode", mock.AnythingOfType("io.Reader")).Return(nil)
-
- return mockDigestItem
-}
diff --git a/pkg/scale/decode.go b/pkg/scale/decode.go
index 68f43405b6..d345121e5e 100644
--- a/pkg/scale/decode.go
+++ b/pkg/scale/decode.go
@@ -397,6 +397,12 @@ func (ds *decodeState) decodeStruct(dstv reflect.Value) (err error) {
if !field.CanInterface() {
continue
}
+ // If the corresponding field of the input value is non-zero, pre-populate
+ // the destination field with it before unmarshaling. This is required for
+ // VaryingDataType and VaryingDataTypeSlice fields, whose decoding relies on
+ // the type registrations carried by the caller-supplied value.
+ inv := reflect.ValueOf(in)
+ if inv.Field(i.fieldIndex).IsValid() && !inv.Field(i.fieldIndex).IsZero() {
+ field.Set(inv.Field(i.fieldIndex))
+ }
err = ds.unmarshal(field)
if err != nil {
err = fmt.Errorf("%s, field: %+v", err, field)
diff --git a/tests/stress/stress_test.go b/tests/stress/stress_test.go
index 82684a6bc1..f3d8613e79 100644
--- a/tests/stress/stress_test.go
+++ b/tests/stress/stress_test.go
@@ -463,7 +463,7 @@ func TestSync_SubmitExtrinsic(t *testing.T) {
continue
}
- header = block.Header
+ header = &block.Header
logger.Debug("got block from node", "header", header, "body", block.Body, "node", nodes[idx].Key)
if block.Body != nil {
diff --git a/tests/utils/chain.go b/tests/utils/chain.go
index d43b6529a2..636d6690d2 100644
--- a/tests/utils/chain.go
+++ b/tests/utils/chain.go
@@ -141,7 +141,7 @@ func GetBlock(t *testing.T, node *Node, hash common.Hash) *types.Block {
require.NoError(t, err, fmt.Sprintf("%v", block.Block.Body))
return &types.Block{
- Header: h,
- Body: b,
+ Header: *h,
+ Body: *b,
}
}
diff --git a/tests/utils/request_utils.go b/tests/utils/request_utils.go
index 85136dbc9f..29c20b4de7 100644
--- a/tests/utils/request_utils.go
+++ b/tests/utils/request_utils.go
@@ -29,6 +29,7 @@ import (
"github.com/ChainSafe/gossamer/dot/types"
"github.com/ChainSafe/gossamer/lib/common"
+ "github.com/ChainSafe/gossamer/pkg/scale"
"github.com/stretchr/testify/require"
)
@@ -159,19 +160,19 @@ func NewEndpoint(port string) string {
return "http://" + HOSTNAME + ":" + port
}
-func rpcLogsToDigest(t *testing.T, logs []string) types.Digest {
- digest := types.Digest{}
+func rpcLogsToDigest(t *testing.T, logs []string) scale.VaryingDataTypeSlice {
+ digest := types.NewDigest()
for _, l := range logs {
itemBytes, err := common.HexToBytes(l)
require.NoError(t, err)
- r := &bytes.Buffer{}
- _, _ = r.Write(itemBytes)
- item, err := types.DecodeDigestItem(r)
+ var di = types.NewDigestItem()
+ err = scale.Unmarshal(itemBytes, &di)
require.NoError(t, err)
- digest = append(digest, item)
+ err = digest.Add(di.Value())
+ require.NoError(t, err)
}
return digest