diff --git a/dot/network/mock_warp_sync_provider_test.go b/dot/network/mock_warp_sync_provider_test.go index 2ec249711f..04280930e7 100644 --- a/dot/network/mock_warp_sync_provider_test.go +++ b/dot/network/mock_warp_sync_provider_test.go @@ -12,6 +12,7 @@ package network import ( reflect "reflect" + grandpa "github.com/ChainSafe/gossamer/internal/primitives/consensus/grandpa" common "github.com/ChainSafe/gossamer/lib/common" gomock "go.uber.org/mock/gomock" ) @@ -53,3 +54,18 @@ func (mr *MockWarpSyncProviderMockRecorder) Generate(arg0 any) *gomock.Call { mr.mock.ctrl.T.Helper() return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Generate", reflect.TypeOf((*MockWarpSyncProvider)(nil).Generate), arg0) } + +// Verify mocks base method. +func (m *MockWarpSyncProvider) Verify(arg0 []byte, arg1 grandpa.SetID, arg2 grandpa.AuthorityList) (*WarpSyncVerificationResult, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Verify", arg0, arg1, arg2) + ret0, _ := ret[0].(*WarpSyncVerificationResult) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// Verify indicates an expected call of Verify. +func (mr *MockWarpSyncProviderMockRecorder) Verify(arg0, arg1, arg2 any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Verify", reflect.TypeOf((*MockWarpSyncProvider)(nil).Verify), arg0, arg1, arg2) +} diff --git a/dot/network/warp_sync.go b/dot/network/warp_sync.go index 8c6978163e..5e5e7b85b5 100644 --- a/dot/network/warp_sync.go +++ b/dot/network/warp_sync.go @@ -8,6 +8,9 @@ import ( "fmt" "github.com/ChainSafe/gossamer/dot/network/messages" + "github.com/ChainSafe/gossamer/dot/types" + "github.com/ChainSafe/gossamer/internal/primitives/consensus/grandpa" + primitives "github.com/ChainSafe/gossamer/internal/primitives/consensus/grandpa" "github.com/ChainSafe/gossamer/lib/common" libp2pnetwork "github.com/libp2p/go-libp2p/core/network" "github.com/libp2p/go-libp2p/core/peer" @@ -15,11 +18,23 @@ import ( const MaxAllowedSameRequestPerPeer = 5 +type WarpSyncVerificationResult struct { + SetId grandpa.SetID + AuthorityList primitives.AuthorityList + Header types.Header + Completed bool +} + // WarpSyncProvider is an interface for generating warp sync proofs type WarpSyncProvider interface { // Generate proof starting at given block hash. The proof is accumulated until maximum proof // size is reached. 
 	Generate(start common.Hash) (encodedProof []byte, err error)
+	Verify(
+		encodedProof []byte,
+		setId grandpa.SetID,
+		authorities primitives.AuthorityList,
+	) (*WarpSyncVerificationResult, error)
 }
 
 func (s *Service) handleWarpSyncRequest(req messages.WarpProofRequest) ([]byte, error) {
diff --git a/dot/types/authority.go b/dot/types/authority.go
index 66a7ccdd54..2fa1b554ce 100644
--- a/dot/types/authority.go
+++ b/dot/types/authority.go
@@ -167,3 +167,5 @@ func AuthoritiesRawToAuthorityAsAddress(authsRaw []AuthorityRaw, kt crypto.KeyTy
 	}
 	return auths, nil
 }
+
+type AuthorityList []Authority
diff --git a/internal/client/consensus/grandpa/warp_sync.go b/internal/client/consensus/grandpa/warp_sync.go
index aa1a5e050b..3a4eda3960 100644
--- a/internal/client/consensus/grandpa/warp_sync.go
+++ b/internal/client/consensus/grandpa/warp_sync.go
@@ -4,12 +4,18 @@ package grandpa
 
 import (
+	"bytes"
 	"fmt"
 
+	"github.com/ChainSafe/gossamer/dot/network"
 	"github.com/ChainSafe/gossamer/dot/types"
+	"github.com/ChainSafe/gossamer/internal/primitives/consensus/grandpa"
+	primitives "github.com/ChainSafe/gossamer/internal/primitives/consensus/grandpa"
+	"github.com/ChainSafe/gossamer/internal/primitives/consensus/grandpa/app"
 	"github.com/ChainSafe/gossamer/internal/primitives/core/hash"
 	"github.com/ChainSafe/gossamer/internal/primitives/runtime"
 	"github.com/ChainSafe/gossamer/lib/common"
+	"github.com/ChainSafe/gossamer/lib/crypto/ed25519"
 	"github.com/ChainSafe/gossamer/pkg/scale"
 )
 
@@ -78,9 +84,58 @@ func (w *WarpSyncProof) lastProofBlockNumber() uint64 {
 	return w.Proofs[len(w.Proofs)-1].Justification.Justification.Commit.TargetNumber + 1
 }
 
+func (w *WarpSyncProof) verify(
+	setId grandpa.SetID,
+	authorities primitives.AuthorityList,
+	hardForks map[string]SetIdAuthorityList,
+) (*SetIdAuthorityList, error) {
+	currentSetId := setId
+	currentAuthorities := authorities
+
+	for fragmentNumber, proof := range w.Proofs {
+		headerHash := proof.Header.Hash()
+		number := proof.Header.Number
+
+		hardForkKey := fmt.Sprintf("%v-%v", headerHash, number)
+		if fork, ok := hardForks[hardForkKey]; ok {
+			currentSetId = fork.SetID
+			currentAuthorities = fork.AuthorityList
+		} else {
+			err := proof.Justification.Verify(uint64(currentSetId), currentAuthorities)
+			if err != nil {
+				return nil, err
+			}
+
+			if !bytes.Equal(proof.Justification.Target().Hash.Bytes(), headerHash.ToBytes()) {
+				return nil, fmt.Errorf("mismatch between header and justification")
+			}
+
+			scheduledChange, err := findScheduledChange(proof.Header)
+			if err != nil {
+				return nil, fmt.Errorf("finding scheduled change: %w", err)
+			}
+
+			if scheduledChange != nil {
+				auths, err := grandpaAuthoritiesRawToAuthorities(scheduledChange.Auths)
+				if err != nil {
+					return nil, fmt.Errorf("cannot parse GRANDPA raw authorities: %w", err)
+				}
+
+				currentSetId += 1
+				currentAuthorities = auths
+			} else if fragmentNumber != len(w.Proofs)-1 || !w.IsFinished {
+				return nil, fmt.Errorf("header is missing authority set change digest")
+			}
+		}
+	}
+
+	return &SetIdAuthorityList{currentSetId, currentAuthorities}, nil
+}
+
 type WarpSyncProofProvider struct {
 	blockState   BlockState
 	grandpaState GrandpaState
+	hardForks    map[string]SetIdAuthorityList
 }
 
 func NewWarpSyncProofProvider(blockState BlockState, grandpaState GrandpaState) *WarpSyncProofProvider {
@@ -90,15 +145,20 @@ func NewWarpSyncProofProvider(blockState BlockState, grandpaState GrandpaState)
 	}
 }
 
+type SetIdAuthorityList struct {
+	grandpa.SetID
+	primitives.AuthorityList
+}
+
 // Generate build a warp sync encoded proof starting from the given block hash
-func (np *WarpSyncProofProvider) Generate(start common.Hash) ([]byte, error) {
+func (p *WarpSyncProofProvider) Generate(start common.Hash) ([]byte, error) {
 	// Get and traverse all GRANDPA authorities changes from the given block hash
-	beginBlockHeader, err := np.blockState.GetHeader(start)
+	beginBlockHeader, err := p.blockState.GetHeader(start)
 	if err != nil {
 		return nil, fmt.Errorf("%w: %w", errMissingStartBlock, err)
 	}
 
-	lastFinalizedBlockHeader, err := np.blockState.GetHighestFinalisedHeader()
+	lastFinalizedBlockHeader, err := p.blockState.GetHighestFinalisedHeader()
 	if err != nil {
 		return nil, fmt.Errorf("getting best block header: %w", err)
 	}
@@ -107,7 +167,7 @@ func (np *WarpSyncProofProvider) Generate(start common.Hash) ([]byte, error) {
 		return nil, errStartBlockNotFinalized
 	}
 
-	authoritySetChanges, err := np.grandpaState.GetAuthoritiesChangesFromBlock(beginBlockHeader.Number)
+	authoritySetChanges, err := p.grandpaState.GetAuthoritiesChangesFromBlock(beginBlockHeader.Number)
 	if err != nil {
 		return nil, err
 	}
@@ -115,12 +175,26 @@ func (np *WarpSyncProofProvider) Generate(start common.Hash) ([]byte, error) {
 	limitReached := false
 	finalProof := NewWarpSyncProof()
 	for _, blockNumber := range authoritySetChanges {
-		header, err := np.blockState.GetHeaderByNumber(blockNumber)
+		header, err := p.blockState.GetHeaderByNumber(blockNumber)
 		if err != nil {
 			return nil, err
 		}
 
-		encJustification, err := np.blockState.GetJustification(header.Hash()) // get the justification of such block
+		scheduledChange, err := findScheduledChange(*header)
+		if err != nil {
+			return nil, fmt.Errorf("finding scheduled change: %w", err)
+		}
+
+		// the last block in a set is the one that triggers a change to the next set,
+		// therefore the block must have a digest that signals the authority set change
+		if scheduledChange == nil {
+			// if it doesn't contain a signal for a standard change then the set must have changed
+			// through a forced change, in which case we stop collecting proofs as the chain of
+			// trust in authority handoffs was broken.
+			break
+		}
+
+		encJustification, err := p.blockState.GetJustification(header.Hash()) // get the justification of such block
 		if err != nil {
 			return nil, err
 		}
@@ -150,11 +224,12 @@ func (np *WarpSyncProofProvider) Generate(start common.Hash) ([]byte, error) {
 	// last authority set change. if we didn't prove any authority set
 	// change then we fallback to make sure it's higher or equal to the
 	// initial warp sync block.
- lastFinalizedBlockHeader, err := np.blockState.GetHighestFinalisedHeader() + lastFinalizedBlockHeader, err := p.blockState.GetHighestFinalisedHeader() if err != nil { return nil, fmt.Errorf("getting best block header: %w", err) } - latestJustification, err := np.blockState.GetJustification(lastFinalizedBlockHeader.Hash()) + + latestJustification, err := p.blockState.GetJustification(lastFinalizedBlockHeader.Hash()) if err != nil { return nil, err } @@ -178,3 +253,89 @@ func (np *WarpSyncProofProvider) Generate(start common.Hash) ([]byte, error) { // Encode and return the proof return scale.Marshal(finalProof) } + +// Verify checks the validity of the given warp sync proof +func (p *WarpSyncProofProvider) Verify( + encodedProof []byte, + setId grandpa.SetID, + authorities primitives.AuthorityList, +) (*network.WarpSyncVerificationResult, error) { + var proof WarpSyncProof + err := scale.Unmarshal(encodedProof, &proof) + if err != nil { + return nil, fmt.Errorf("decoding warp sync proof: %w", err) + } + + if len(proof.Proofs) == 0 { + return nil, fmt.Errorf("empty warp sync proof") + } + + lastProof := proof.Proofs[len(proof.Proofs)-1] + lastHeader := lastProof.Header + + nextSetAndAuthorities, err := proof.verify(setId, authorities, p.hardForks) + if err != nil { + return nil, fmt.Errorf("verifying warp sync proof: %w", err) + } + + return &network.WarpSyncVerificationResult{ + SetId: nextSetAndAuthorities.SetID, + AuthorityList: nextSetAndAuthorities.AuthorityList, + Header: lastHeader, + Completed: proof.IsFinished, + }, nil +} + +func findScheduledChange( + header types.Header, +) (*types.GrandpaScheduledChange, error) { + for _, digestItem := range header.Digest { + digestValue, err := digestItem.Value() + if err != nil { + return nil, fmt.Errorf("getting digest value: %w", err) + } + + switch val := digestValue.(type) { + case types.ConsensusDigest: + consensusDigest := types.GrandpaConsensusDigest{} + if val.ConsensusEngineID == types.GrandpaEngineID { + err := scale.Unmarshal(val.Data, &consensusDigest) + if err != nil { + return nil, err + } + + scheduledChange, err := consensusDigest.Value() + if err != nil { + return nil, err + } + + parsedScheduledChange, _ := scheduledChange.(types.GrandpaScheduledChange) + return &parsedScheduledChange, nil + } + } + } + return nil, nil +} + +func grandpaAuthoritiesRawToAuthorities(adr []types.GrandpaAuthoritiesRaw) (primitives.AuthorityList, error) { + ad := make([]primitives.AuthorityIDWeight, len(adr)) + for i, r := range adr { + ad[i] = primitives.AuthorityIDWeight{} + + key, err := ed25519.NewPublicKey(r.Key[:]) + if err != nil { + return nil, err + } + + keyBytes := key.AsBytes() + pkey, err := app.NewPublic(keyBytes[:]) + if err != nil { + return nil, err + } + + ad[i].AuthorityID = pkey + ad[i].AuthorityWeight = primitives.AuthorityWeight(r.ID) + } + + return ad, nil +} diff --git a/internal/client/consensus/grandpa/warp_sync_test.go b/internal/client/consensus/grandpa/warp_sync_test.go index 3fe109397b..11ad44ee49 100644 --- a/internal/client/consensus/grandpa/warp_sync_test.go +++ b/internal/client/consensus/grandpa/warp_sync_test.go @@ -5,11 +5,21 @@ package grandpa import ( "errors" + "math/rand" + "slices" "testing" "github.com/ChainSafe/gossamer/dot/types" + primitives "github.com/ChainSafe/gossamer/internal/primitives/consensus/grandpa" + ced25519 "github.com/ChainSafe/gossamer/internal/primitives/core/ed25519" + "github.com/ChainSafe/gossamer/internal/primitives/core/hash" + 
"github.com/ChainSafe/gossamer/internal/primitives/keyring/ed25519" + "github.com/ChainSafe/gossamer/internal/primitives/runtime" + "github.com/ChainSafe/gossamer/internal/primitives/runtime/generic" "github.com/ChainSafe/gossamer/lib/common" - "github.com/stretchr/testify/assert" + grandpa "github.com/ChainSafe/gossamer/pkg/finality-grandpa" + "github.com/ChainSafe/gossamer/pkg/scale" + "github.com/stretchr/testify/require" "go.uber.org/mock/gomock" ) @@ -29,8 +39,8 @@ func TestGenerateWarpSyncProofBlockNotFound(t *testing.T) { // Check errMissingStartBlock returned by provider _, err := provider.Generate(common.EmptyHash) - assert.Error(t, err) - assert.ErrorIs(t, err, errMissingStartBlock) + require.Error(t, err) + require.ErrorIs(t, err, errMissingStartBlock) } func TestGenerateWarpSyncProofBlockNotFinalized(t *testing.T) { @@ -60,67 +70,248 @@ func TestGenerateWarpSyncProofBlockNotFinalized(t *testing.T) { // Check errMissingStartBlock returned by provider _, err := provider.Generate(notFinalizedBlockHeader.Hash()) - assert.Error(t, err) - assert.ErrorIs(t, err, errStartBlockNotFinalized) + require.Error(t, err) + require.ErrorIs(t, err, errStartBlockNotFinalized) } +// This test generates a small blockchain with authority set changes and expected +// justifications to create a warp sync proof and verify it. +// //nolint:lll -func TestGenerateWarpSyncProofOk(t *testing.T) { +func TestGenerateAndVerifyWarpSyncProofOk(t *testing.T) { + t.Parallel() + + type signedPrecommit = grandpa.SignedPrecommit[hash.H256, uint64, primitives.AuthoritySignature, primitives.AuthorityID] + type preCommit = grandpa.Precommit[hash.H256, uint64] + + // Initialize mocks ctrl := gomock.NewController(t) defer ctrl.Finish() - encodedJustification1 := []byte{42, 0, 0, 0, 0, 0, 0, 0, 236, 148, 26, 228, 225, 134, 98, 140, 150, 154, 23, 185, 43, 231, 172, 194, 69, 150, 27, 191, 202, 50, 108, 91, 220, 57, 214, 47, 202, 62, 70, 238, 10, 0, 0, 0, 0, 0, 0, 0, 4, 236, 148, 26, 228, 225, 134, 98, 140, 150, 154, 23, 185, 43, 231, 172, 194, 69, 150, 27, 191, 202, 50, 108, 91, 220, 57, 214, 47, 202, 62, 70, 238, 10, 0, 0, 0, 0, 0, 0, 0, 201, 232, 26, 136, 31, 77, 15, 194, 34, 200, 248, 43, 219, 148, 207, 56, 240, 171, 208, 221, 162, 202, 153, 209, 150, 27, 71, 207, 227, 102, 133, 32, 206, 74, 78, 26, 148, 166, 18, 67, 188, 76, 163, 200, 68, 249, 134, 28, 122, 74, 182, 69, 135, 90, 199, 52, 72, 109, 41, 12, 37, 18, 161, 4, 136, 220, 52, 23, 213, 5, 142, 196, 180, 80, 62, 12, 18, 234, 26, 10, 137, 190, 32, 15, 233, 137, 34, 66, 61, 67, 52, 1, 79, 166, 176, 238, 0} - encodedJustification2 := []byte{50, 0, 0, 0, 0, 0, 0, 0, 236, 148, 26, 228, 225, 134, 98, 140, 150, 154, 23, 185, 43, 231, 172, 194, 69, 150, 27, 191, 202, 50, 108, 91, 220, 57, 214, 47, 202, 62, 70, 238, 10, 0, 0, 0, 0, 0, 0, 0, 4, 236, 148, 26, 228, 225, 134, 98, 140, 150, 154, 23, 185, 43, 231, 172, 194, 69, 150, 27, 191, 202, 50, 108, 91, 220, 57, 214, 47, 202, 62, 70, 238, 10, 0, 0, 0, 0, 0, 0, 0, 201, 232, 26, 136, 31, 77, 15, 194, 34, 200, 248, 43, 219, 148, 207, 56, 240, 171, 208, 221, 162, 202, 153, 209, 150, 27, 71, 207, 227, 102, 133, 32, 206, 74, 78, 26, 148, 166, 18, 67, 188, 76, 163, 200, 68, 249, 134, 28, 122, 74, 182, 69, 135, 90, 199, 52, 72, 109, 41, 12, 37, 18, 161, 4, 136, 220, 52, 23, 213, 5, 142, 196, 180, 80, 62, 12, 18, 234, 26, 10, 137, 190, 32, 15, 233, 137, 34, 66, 61, 67, 52, 1, 79, 166, 176, 238, 0} - var blockHeaders []*types.Header - blockStateMock := NewMockBlockState(ctrl) grandpaStateMock := NewMockGrandpaState(ctrl) - for 
blockNumber := uint(1); blockNumber <= 10; blockNumber++ { - // Create block header - var header *types.Header - parentHash := common.Hash{0x00} - if blockNumber > 1 { - parentHash = blockHeaders[blockNumber-2].Hash() + // Set authorities + availableAuthorities := ed25519.AvailableAuthorities + genesisAuthorities := primitives.AuthorityList{ + primitives.AuthorityIDWeight{ + AuthorityID: ed25519.Alice.Pair().Public().(ced25519.Public), + AuthorityWeight: 1, + }, + } + currentAuthorities := []ed25519.Keyring{ed25519.Alice} + + // Set initial values for the scheduled changes + currentSetId := primitives.SetID(0) + authoritySetChanges := []uint{} + + // Genesis block + genesis := &types.Header{ + ParentHash: common.MustBlake2bHash([]byte("genesis")), + Number: 1, + } + + // All blocks headers + headers := []*types.Header{ + genesis, + } + + const maxBlocks = 100 + + // Create blocks with their scheduled changes and justifications + for n := uint(1); n <= maxBlocks; n++ { + lastBlockHeader := headers[len(headers)-1] + + newAuthorities := []ed25519.Keyring{} + + digest := types.NewDigest() + + // Authority set change happens every 10 blocks + if n != 0 && n%10 == 0 { + // Pick new random authorities + nAuthorities := rand.Intn(len(availableAuthorities)-1) + 1 + require.GreaterOrEqual(t, nAuthorities, 1) + + rand.Shuffle(len(availableAuthorities), func(i, j int) { + availableAuthorities[i], availableAuthorities[j] = availableAuthorities[j], availableAuthorities[i] + }) + + newAuthorities = availableAuthorities[:nAuthorities] + + // Map new authorities to GRANDPA raw authorities format + nextAuthorities := []types.GrandpaAuthoritiesRaw{} + for _, key := range newAuthorities { + nextAuthorities = append(nextAuthorities, + types.GrandpaAuthoritiesRaw{ + Key: [32]byte(key.Pair().Public().Bytes()), + ID: 1, + }, + ) + } + + // Create scheduled change + scheduledChange := createGRANDPAConsensusDigest(t, types.GrandpaScheduledChange{ + Auths: nextAuthorities, + Delay: 0, + }) + digest.Add(scheduledChange) } - header = types.NewHeader( - parentHash, - common.Hash{byte(blockNumber)}, - common.Hash{byte(blockNumber)}, - blockNumber, - types.Digest{}, - ) + // Create new block header + header := &types.Header{ + ParentHash: lastBlockHeader.Hash(), + Number: lastBlockHeader.Number + 1, + Digest: digest, + } - blockHeaders = append(blockHeaders, header) + headers = append(headers, header) - // Mock block state responses - blockStateMock.EXPECT().GetHeader(header.Hash()).Return(header, nil).AnyTimes() - blockStateMock.EXPECT().GetHeaderByNumber(blockNumber).Return(header, nil).AnyTimes() - - // authorities set changes happens only in block 5 - if blockNumber < 5 { - grandpaStateMock.EXPECT().GetAuthoritiesChangesFromBlock(blockNumber).Return([]uint{5}, nil).AnyTimes() - } else if blockNumber == 5 { - blockStateMock.EXPECT().GetJustification(header.Hash()).Return(encodedJustification1, nil).AnyTimes() - } else { - grandpaStateMock.EXPECT().GetAuthoritiesChangesFromBlock(blockNumber).Return([]uint{}, nil).AnyTimes() + // If we have an authority set change, create a justification + if len(newAuthorities) > 0 { + targetHash := hash.H256(string(header.Hash().ToBytes())) + targetNumber := uint64(header.Number) + + // Create precommits for current voters + precommits := []signedPrecommit{} + for _, voter := range currentAuthorities { + precommit := preCommit{ + TargetHash: targetHash, + TargetNumber: targetNumber, + } + + msg := grandpa.NewMessage[hash.H256, uint64, preCommit](precommit) + encoded := 
primitives.NewLocalizedPayload(1, currentSetId, msg) + signature := voter.Sign(encoded) + + signedPreCommit := signedPrecommit{ + Precommit: preCommit{ + TargetHash: targetHash, + TargetNumber: targetNumber, + }, + Signature: signature, + ID: voter.Pair().Public().(ced25519.Public), + } + + precommits = append(precommits, signedPreCommit) + } + + // Create justification + justification := primitives.GrandpaJustification[hash.H256, uint64]{ + Round: 1, + Commit: primitives.Commit[hash.H256, uint64]{ + TargetHash: targetHash, + TargetNumber: targetNumber, + Precommits: precommits, + }, + VoteAncestries: genericHeadersList(t, headers), + } + + encodedJustification, err := scale.Marshal(justification) + require.NoError(t, err) + + blockStateMock.EXPECT().GetJustification(header.Hash()).Return(encodedJustification, nil).AnyTimes() + blockStateMock.EXPECT().GetHighestFinalisedHeader().Return(header, nil).AnyTimes() + + // Update authorities and set id + authoritySetChanges = append(authoritySetChanges, header.Number) + currentAuthorities = slices.Clone(newAuthorities) + currentSetId++ } + } - blockStateMock.EXPECT().GetHighestFinalisedHeader().Return(blockHeaders[len(blockHeaders)-1], nil).AnyTimes() - blockStateMock.EXPECT().GetJustification(blockHeaders[len(blockHeaders)-1].Hash()).Return(encodedJustification2, nil).AnyTimes() + // Return expected authority changes for each block + authChanges := []uint{} + for n := uint(1); n <= maxBlocks; n++ { + for _, change := range authoritySetChanges { + if n <= change { + authChanges = append(authChanges, change) + } + } + grandpaStateMock.EXPECT().GetAuthoritiesChangesFromBlock(n).Return(authChanges, nil).AnyTimes() + } - provider := &WarpSyncProofProvider{ - blockState: blockStateMock, - grandpaState: grandpaStateMock, + // Mock responses + for _, header := range headers { + blockStateMock.EXPECT().GetHeaderByNumber(header.Number).Return(header, nil).AnyTimes() + blockStateMock.EXPECT().GetHeader(header.Hash()).Return(header, nil).AnyTimes() } - proof, err := provider.Generate(blockHeaders[0].Hash()) - assert.NoError(t, err) + // Initialize warp sync provider + provider := NewWarpSyncProofProvider(blockStateMock, grandpaStateMock) - expectedProof := []byte{ - 0x4, 0x1c, 0xa4, 0x2, 0x25, 0x71, 0x86, 0xee, 0x43, 0x46, 0xfd, 0x2c, 0x9, 0xfe, 0xeb, 0x91, 0x17, 0x10, 0xe5, 0x88, 0x41, 0x89, 0xc3, 0xc7, 0x5f, 0xb5, 0x1, 0x1a, 0x75, 0x21, 0x37, 0x2f, 0xf9, 0x14, 0x5, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x5, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2a, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0xec, 0x94, 0x1a, 0xe4, 0xe1, 0x86, 0x62, 0x8c, 0x96, 0x9a, 0x17, 0xb9, 0x2b, 0xe7, 0xac, 0xc2, 0x45, 0x96, 0x1b, 0xbf, 0xca, 0x32, 0x6c, 0x5b, 0xdc, 0x39, 0xd6, 0x2f, 0xca, 0x3e, 0x46, 0xee, 0xa, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x4, 0xec, 0x94, 0x1a, 0xe4, 0xe1, 0x86, 0x62, 0x8c, 0x96, 0x9a, 0x17, 0xb9, 0x2b, 0xe7, 0xac, 0xc2, 0x45, 0x96, 0x1b, 0xbf, 0xca, 0x32, 0x6c, 0x5b, 0xdc, 0x39, 0xd6, 0x2f, 0xca, 0x3e, 0x46, 0xee, 0xa, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0xc9, 0xe8, 0x1a, 0x88, 0x1f, 0x4d, 0xf, 0xc2, 0x22, 0xc8, 0xf8, 0x2b, 0xdb, 0x94, 0xcf, 0x38, 0xf0, 0xab, 0xd0, 0xdd, 0xa2, 0xca, 0x99, 0xd1, 0x96, 0x1b, 0x47, 0xcf, 0xe3, 0x66, 0x85, 0x20, 0xce, 0x4a, 0x4e, 0x1a, 0x94, 0xa6, 0x12, 0x43, 0xbc, 0x4c, 0xa3, 0xc8, 0x44, 0xf9, 0x86, 
0x1c, 0x7a, 0x4a, 0xb6, 0x45, 0x87, 0x5a, 0xc7, 0x34, 0x48, 0x6d, 0x29, 0xc, 0x25, 0x12, 0xa1, 0x4, 0x88, 0xdc, 0x34, 0x17, 0xd5, 0x5, 0x8e, 0xc4, 0xb4, 0x50, 0x3e, 0xc, 0x12, 0xea, 0x1a, 0xa, 0x89, 0xbe, 0x20, 0xf, 0xe9, 0x89, 0x22, 0x42, 0x3d, 0x43, 0x34, 0x1, 0x4f, 0xa6, 0xb0, 0xee, 0x0, 0x1, + // Generate proof + proof, err := provider.Generate(headers[0].Hash()) + require.NoError(t, err) + + // Verify proof + expectedAuthorities := primitives.AuthorityList{} + for _, key := range currentAuthorities { + expectedAuthorities = append(expectedAuthorities, + primitives.AuthorityIDWeight{ + AuthorityID: [32]byte(key.Pair().Public().Bytes()), + AuthorityWeight: 1, + }, + ) } - assert.Equal(t, expectedProof, proof) + + result, err := provider.Verify(proof, 0, genesisAuthorities) + require.NoError(t, err) + require.Equal(t, currentSetId, result.SetId) + require.Equal(t, expectedAuthorities, result.AuthorityList) +} + +func TestFindScheduledChange(t *testing.T) { + t.Parallel() + + scheduledChange := createGRANDPAConsensusDigest(t, types.GrandpaScheduledChange{ + Auths: []types.GrandpaAuthoritiesRaw{}, + Delay: 2, + }) + + digest := types.NewDigest() + digest.Add(scheduledChange) + + blockHeader := &types.Header{ + ParentHash: common.Hash{0x00}, + Number: 1, + Digest: digest, + } + + // Find scheduled change in block header + scheduledChangeDigest, err := findScheduledChange(*blockHeader) + require.NoError(t, err) + require.NotNil(t, scheduledChangeDigest) +} + +func createGRANDPAConsensusDigest(t *testing.T, digestData any) types.ConsensusDigest { + t.Helper() + + grandpaConsensusDigest := types.NewGrandpaConsensusDigest() + require.NoError(t, grandpaConsensusDigest.SetValue(digestData)) + + marshaledData, err := scale.Marshal(grandpaConsensusDigest) + require.NoError(t, err) + + return types.ConsensusDigest{ + ConsensusEngineID: types.GrandpaEngineID, + Data: marshaledData, + } +} + +func genericHeadersList(t *testing.T, headers []*types.Header) []runtime.Header[uint64, hash.H256] { + t.Helper() + + headerList := []runtime.Header[uint64, hash.H256]{} + for _, header := range headers { + if header == nil { + continue + } + newHeader := generic.Header[uint64, hash.H256, runtime.BlakeTwo256]{} + newHeader.SetParentHash(hash.H256(header.ParentHash.String())) + newHeader.SetNumber(uint64(header.Number)) + newHeader.DigestMut().Push(header.Digest) + } + + return headerList } diff --git a/internal/primitives/consensus/grandpa/grandpa.go b/internal/primitives/consensus/grandpa/grandpa.go index 43bc7cd95f..c04ded8fab 100644 --- a/internal/primitives/consensus/grandpa/grandpa.go +++ b/internal/primitives/consensus/grandpa/grandpa.go @@ -55,6 +55,11 @@ type SignedMessage[H, N any] grandpa.SignedMessage[H, N, AuthoritySignature, Aut // Commit is a commit message for this chain's block type. type Commit[H, N any] grandpa.Commit[H, N, AuthoritySignature, AuthorityID] +type ScheduledChange[N runtime.Number] struct { + NextAuthorities AuthorityList + Delay N +} + // GrandpaJustification is A GRANDPA justification for block finality, it includes // a commit message and an ancestry proof including all headers routing all // precommit target blocks to the commit target block. 
Due to the current voting diff --git a/internal/primitives/keyring/ed25519/ed25519.go b/internal/primitives/keyring/ed25519/ed25519.go index 556cfe66ec..76ec3c8c03 100644 --- a/internal/primitives/keyring/ed25519/ed25519.go +++ b/internal/primitives/keyring/ed25519/ed25519.go @@ -22,6 +22,17 @@ const ( Two ) +var AvailableAuthorities = []Keyring{ + Alice, + Bob, + Charlie, + Dave, + Eve, + Ferdie, + One, + Two, +} + func (k Keyring) Sign(msg []byte) ed25519.Signature { return k.Pair().Sign(msg) }
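Usage sketch (reviewer note, not part of this patch): the snippet below shows how a caller might chain the new Verify API across successive warp sync responses, feeding the returned set ID and authority list into the next round until a proof reports completion. The warpSync function, the requestProof transport helper, the example package and the warpsync import alias are hypothetical; WarpSyncProofProvider, WarpSyncVerificationResult and the parameter types come from the changes above.

// Hypothetical sketch of driving warp sync verification with the new API.
package example

import (
	"fmt"

	"github.com/ChainSafe/gossamer/dot/network"
	warpsync "github.com/ChainSafe/gossamer/internal/client/consensus/grandpa"
	primitives "github.com/ChainSafe/gossamer/internal/primitives/consensus/grandpa"
	"github.com/ChainSafe/gossamer/lib/common"
)

// warpSync repeatedly fetches and verifies warp sync proofs, carrying the
// verified set ID and authority list from one response into the next, until a
// proof reports Completed (it reaches the latest finalized authority handoff).
func warpSync(
	provider *warpsync.WarpSyncProofProvider,
	requestProof func(start common.Hash) ([]byte, error), // assumed transport helper
	start common.Hash,
	setID primitives.SetID,
	authorities primitives.AuthorityList,
) (*network.WarpSyncVerificationResult, error) {
	for {
		encoded, err := requestProof(start)
		if err != nil {
			return nil, fmt.Errorf("requesting warp sync proof: %w", err)
		}

		// Verify checks each fragment's justification against the authority set
		// active before it and returns the set active after the last fragment.
		result, err := provider.Verify(encoded, setID, authorities)
		if err != nil {
			return nil, fmt.Errorf("verifying warp sync proof: %w", err)
		}

		if result.Completed {
			return result, nil
		}

		// Resume from the last verified header using the handed-off authority set.
		start = result.Header.Hash()
		setID = result.SetId
		authorities = result.AuthorityList
	}
}

Each response is verified against the set that was active before it, and the returned SetId/AuthorityList become the inputs for the next request, mirroring the hand-off performed inside WarpSyncProof.verify.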