diff --git a/dot/mock_node_builder_test.go b/dot/mock_node_builder_test.go index b6a87fd5cc..20405a2018 100644 --- a/dot/mock_node_builder_test.go +++ b/dot/mock_node_builder_test.go @@ -229,7 +229,7 @@ func (mr *MocknodeBuilderIfaceMockRecorder) loadRuntime(config, ns, stateSrvc, k } // newSyncService mocks base method. -func (m *MocknodeBuilderIface) newSyncService(config *config.Config, st *state.Service, finalityGadget BlockJustificationVerifier, verifier *babe.VerificationManager, cs *core.Service, net *network.Service, telemetryMailer Telemetry) (*sync.Service, error) { +func (m *MocknodeBuilderIface) newSyncService(config *config.Config, st *state.Service, finalityGadget sync.FinalityGadget, verifier *babe.VerificationManager, cs *core.Service, net *network.Service, telemetryMailer Telemetry) (*sync.Service, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "newSyncService", config, st, finalityGadget, verifier, cs, net, telemetryMailer) ret0, _ := ret[0].(*sync.Service) diff --git a/dot/node.go b/dot/node.go index 8b0352eb03..fff57f8315 100644 --- a/dot/node.go +++ b/dot/node.go @@ -62,7 +62,7 @@ type nodeBuilderIface interface { ) (*core.Service, error) createGRANDPAService(config *cfg.Config, st *state.Service, ks KeyStore, net *network.Service, telemetryMailer Telemetry) (*grandpa.Service, error) - newSyncService(config *cfg.Config, st *state.Service, finalityGadget BlockJustificationVerifier, + newSyncService(config *cfg.Config, st *state.Service, finalityGadget dotsync.FinalityGadget, verifier *babe.VerificationManager, cs *core.Service, net *network.Service, telemetryMailer Telemetry) (*dotsync.Service, error) createBABEService(config *cfg.Config, st *state.Service, ks KeyStore, cs *core.Service, diff --git a/dot/services.go b/dot/services.go index 827465b240..574bb00689 100644 --- a/dot/services.go +++ b/dot/services.go @@ -497,7 +497,7 @@ func (nodeBuilder) createBlockVerifier(st *state.Service) *babe.VerificationMana return babe.NewVerificationManager(st.Block, st.Slot, st.Epoch) } -func (nodeBuilder) newSyncService(config *cfg.Config, st *state.Service, fg BlockJustificationVerifier, +func (nodeBuilder) newSyncService(config *cfg.Config, st *state.Service, fg sync.FinalityGadget, verifier *babe.VerificationManager, cs *core.Service, net *network.Service, telemetryMailer Telemetry) ( *sync.Service, error) { slotDuration, err := st.Epoch.GetSlotDuration() diff --git a/dot/services_integration_test.go b/dot/services_integration_test.go index 6b7261f52a..a922cc0298 100644 --- a/dot/services_integration_test.go +++ b/dot/services_integration_test.go @@ -17,6 +17,7 @@ import ( "github.com/ChainSafe/gossamer/dot/network" rpc "github.com/ChainSafe/gossamer/dot/rpc" "github.com/ChainSafe/gossamer/dot/state" + "github.com/ChainSafe/gossamer/dot/sync" "github.com/ChainSafe/gossamer/dot/telemetry" "github.com/ChainSafe/gossamer/dot/types" "github.com/ChainSafe/gossamer/internal/log" @@ -373,7 +374,7 @@ func Test_nodeBuilder_newSyncService(t *testing.T) { require.NoError(t, err) type args struct { - fg BlockJustificationVerifier + fg sync.FinalityGadget verifier *babe.VerificationManager cs *core.Service net *network.Service diff --git a/dot/sync/chain_sync.go b/dot/sync/chain_sync.go index 333f545a44..a37240138a 100644 --- a/dot/sync/chain_sync.go +++ b/dot/sync/chain_sync.go @@ -843,6 +843,21 @@ func (cs *chainSync) processBlockData(blockData types.BlockData, origin blockOri announceImportedBlock := cs.getSyncMode() == tip if blockData.Header != nil { + var ( + hasJustification = 
blockData.Justification != nil && len(*blockData.Justification) > 0 + round uint64 + setID uint64 + ) + + if hasJustification { + var err error + round, setID, err = cs.finalityGadget.VerifyBlockJustification( + blockData.Header.Hash(), blockData.Header.Number, *blockData.Justification) + if err != nil { + return fmt.Errorf("verifying justification: %w", err) + } + } + if blockData.Body != nil { err := cs.processBlockDataWithHeaderAndBody(blockData, origin, announceImportedBlock) if err != nil { @@ -850,11 +865,18 @@ func (cs *chainSync) processBlockData(blockData types.BlockData, origin blockOri } } - if blockData.Justification != nil && len(*blockData.Justification) > 0 { - err := cs.handleJustification(blockData.Header, *blockData.Justification) + if hasJustification { + header := blockData.Header + err := cs.blockState.SetFinalisedHash(header.Hash(), round, setID) if err != nil { - return fmt.Errorf("handling justification: %w", err) + return fmt.Errorf("setting finalised hash: %w", err) } + err = cs.blockState.SetJustification(header.Hash(), *blockData.Justification) + if err != nil { + return fmt.Errorf("setting justification for block number %d: %w", header.Number, err) + } + + return nil } } @@ -902,21 +924,6 @@ func (cs *chainSync) handleBody(body *types.Body) { blockSizeGauge.Set(float64(acc)) } -func (cs *chainSync) handleJustification(header *types.Header, justification []byte) (err error) { - headerHash := header.Hash() - err = cs.finalityGadget.VerifyBlockJustification(headerHash, justification) - if err != nil { - return fmt.Errorf("verifying block number %d justification: %w", header.Number, err) - } - - err = cs.blockState.SetJustification(headerHash, justification) - if err != nil { - return fmt.Errorf("setting justification for block number %d: %w", header.Number, err) - } - - return nil -} - // handleHeader handles blocks (header+body) included in BlockResponses func (cs *chainSync) handleBlock(block *types.Block, announceImportedBlock bool) error { parent, err := cs.blockState.GetHeader(block.Header.ParentHash) diff --git a/dot/sync/chain_sync_test.go b/dot/sync/chain_sync_test.go index 628e44d38d..4af6deac79 100644 --- a/dot/sync/chain_sync_test.go +++ b/dot/sync/chain_sync_test.go @@ -1851,6 +1851,7 @@ func TestChainSync_BootstrapSync_SuccessfulSync_WithInvalidJusticationBlock(t *t mockFinalityGadget.EXPECT(). VerifyBlockJustification( invalidJustificationBlock.Header.Hash(), + invalidJustificationBlock.Header.Number, *invalidJustification). 
Return(uint64(0), uint64(0), errVerifyBlockJustification) diff --git a/dot/sync/interfaces.go b/dot/sync/interfaces.go index 03820704a5..03a03cda8e 100644 --- a/dot/sync/interfaces.go +++ b/dot/sync/interfaces.go @@ -28,6 +28,7 @@ type BlockState interface { GetReceipt(common.Hash) ([]byte, error) GetMessageQueue(common.Hash) ([]byte, error) GetJustification(common.Hash) ([]byte, error) + SetFinalisedHash(hash common.Hash, round uint64, setID uint64) error SetJustification(hash common.Hash, data []byte) error GetHashByNumber(blockNumber uint) (common.Hash, error) GetBlockByHash(common.Hash) (*types.Block, error) @@ -61,7 +62,8 @@ type BabeVerifier interface { // FinalityGadget implements justification verification functionality type FinalityGadget interface { - VerifyBlockJustification(common.Hash, []byte) error + VerifyBlockJustification(finalizedHash common.Hash, finalizedNumber uint, encoded []byte) ( + round uint64, setID uint64, err error) } // BlockImportHandler is the interface for the handler of newly imported blocks diff --git a/dot/sync/mocks_test.go b/dot/sync/mocks_test.go index bb57e94a7d..7334207617 100644 --- a/dot/sync/mocks_test.go +++ b/dot/sync/mocks_test.go @@ -355,6 +355,20 @@ func (mr *MockBlockStateMockRecorder) RangeInMemory(arg0, arg1 any) *gomock.Call return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "RangeInMemory", reflect.TypeOf((*MockBlockState)(nil).RangeInMemory), arg0, arg1) } +// SetFinalisedHash mocks base method. +func (m *MockBlockState) SetFinalisedHash(arg0 common.Hash, arg1, arg2 uint64) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "SetFinalisedHash", arg0, arg1, arg2) + ret0, _ := ret[0].(error) + return ret0 +} + +// SetFinalisedHash indicates an expected call of SetFinalisedHash. +func (mr *MockBlockStateMockRecorder) SetFinalisedHash(arg0, arg1, arg2 any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SetFinalisedHash", reflect.TypeOf((*MockBlockState)(nil).SetFinalisedHash), arg0, arg1, arg2) +} + // SetJustification mocks base method. func (m *MockBlockState) SetJustification(arg0 common.Hash, arg1 []byte) error { m.ctrl.T.Helper() @@ -539,17 +553,19 @@ func (m *MockFinalityGadget) EXPECT() *MockFinalityGadgetMockRecorder { } // VerifyBlockJustification mocks base method. -func (m *MockFinalityGadget) VerifyBlockJustification(arg0 common.Hash, arg1 []byte) error { +func (m *MockFinalityGadget) VerifyBlockJustification(arg0 common.Hash, arg1 uint, arg2 []byte) (uint64, uint64, error) { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "VerifyBlockJustification", arg0, arg1) - ret0, _ := ret[0].(error) - return ret0 + ret := m.ctrl.Call(m, "VerifyBlockJustification", arg0, arg1, arg2) + ret0, _ := ret[0].(uint64) + ret1, _ := ret[1].(uint64) + ret2, _ := ret[2].(error) + return ret0, ret1, ret2 } // VerifyBlockJustification indicates an expected call of VerifyBlockJustification. 
-func (mr *MockFinalityGadgetMockRecorder) VerifyBlockJustification(arg0, arg1 any) *gomock.Call {
+func (mr *MockFinalityGadgetMockRecorder) VerifyBlockJustification(arg0, arg1, arg2 any) *gomock.Call {
 	mr.mock.ctrl.T.Helper()
-	return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "VerifyBlockJustification", reflect.TypeOf((*MockFinalityGadget)(nil).VerifyBlockJustification), arg0, arg1)
+	return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "VerifyBlockJustification", reflect.TypeOf((*MockFinalityGadget)(nil).VerifyBlockJustification), arg0, arg1, arg2)
 }
 
 // MockBlockImportHandler is a mock of BlockImportHandler interface.
diff --git a/dot/sync/syncer_integration_test.go b/dot/sync/syncer_integration_test.go
index e578ae2b12..7361a5280e 100644
--- a/dot/sync/syncer_integration_test.go
+++ b/dot/sync/syncer_integration_test.go
@@ -113,9 +113,10 @@ func newTestSyncer(t *testing.T) *Service {
 	cfg.LogLvl = log.Trace
 	mockFinalityGadget := NewMockFinalityGadget(ctrl)
 	mockFinalityGadget.EXPECT().VerifyBlockJustification(gomock.AssignableToTypeOf(common.Hash{}),
-		gomock.AssignableToTypeOf([]byte{})).DoAndReturn(func(hash common.Hash, justification []byte) error {
-		return nil
-	}).AnyTimes()
+		gomock.AssignableToTypeOf(uint(0)), gomock.AssignableToTypeOf([]byte{})).
+		DoAndReturn(func(hash common.Hash, blockNumber uint, justification []byte) (uint64, uint64, error) {
+			return 0, 0, nil
+		}).AnyTimes()
 	cfg.FinalityGadget = mockFinalityGadget
 
 	cfg.Network = NewMockNetwork(ctrl)
diff --git a/go.mod b/go.mod
index 7d59b785d4..f9228c261c 100644
--- a/go.mod
+++ b/go.mod
@@ -40,6 +40,7 @@ require (
 	github.com/stretchr/testify v1.9.0
 	github.com/tetratelabs/wazero v1.1.0
 	github.com/tidwall/btree v1.7.0
+	github.com/tyler-smith/go-bip39 v1.1.0
 	go.uber.org/mock v0.4.0
 	golang.org/x/crypto v0.26.0
 	golang.org/x/exp v0.0.0-20240719175910-8a7402abbf56
diff --git a/go.sum b/go.sum
index 1d94c1ebb9..03a698f26f 100644
--- a/go.sum
+++ b/go.sum
@@ -665,6 +665,8 @@ github.com/tklauser/numcpus v0.6.1 h1:ng9scYS7az0Bk4OZLvrNXNSAO2Pxr1XXRAPyjhIx+F
 github.com/tklauser/numcpus v0.6.1/go.mod h1:1XfjsgE2zo8GVw7POkMbHENHzVg3GzmoZ9fESEdAacY=
 github.com/tomasen/realip v0.0.0-20180522021738-f0c99a92ddce h1:fb190+cK2Xz/dvi9Hv8eCYJYvIGUTN2/KLq1pT6CjEc=
 github.com/tomasen/realip v0.0.0-20180522021738-f0c99a92ddce/go.mod h1:o8v6yHRoik09Xen7gje4m9ERNah1d1PPsVq1VEx9vE4=
+github.com/tyler-smith/go-bip39 v1.1.0 h1:5eUemwrMargf3BSLRRCalXT93Ns6pQJIjYQN2nyfOP8=
+github.com/tyler-smith/go-bip39 v1.1.0/go.mod h1:gUYDtqQw1JS3ZJ8UWVcGTGqqr6YIN3CWg+kkNaLt55U=
 github.com/ugorji/go/codec v0.0.0-20181204163529-d75b2dcb6bc8/go.mod h1:VFNgLljTbGfSG7qAOspJ7OScBnGdDN/yBr0sguwnwf0=
 github.com/urfave/cli v1.22.2/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0=
 github.com/urfave/cli v1.22.10/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0=
diff --git a/internal/client/consensus/grandpa/authorities.go b/internal/client/consensus/grandpa/authorities.go
new file mode 100644
index 0000000000..261042fe9a
--- /dev/null
+++ b/internal/client/consensus/grandpa/authorities.go
@@ -0,0 +1,10 @@
+// Copyright 2023 ChainSafe Systems (ON)
+// SPDX-License-Identifier: LGPL-3.0-only
+
+package grandpa
+
+// generic representation of hash and number tuple
+type HashNumber[H, N any] struct {
+	Hash   H
+	Number N
+}
diff --git a/internal/client/consensus/grandpa/justification.go b/internal/client/consensus/grandpa/justification.go
new file mode 100644
index 0000000000..94fc50bb92
--- /dev/null
+++ b/internal/client/consensus/grandpa/justification.go
@@ -0,0 +1,302 @@
+// Copyright 2023 ChainSafe
Systems (ON) +// SPDX-License-Identifier: LGPL-3.0-only + +package grandpa + +import ( + "errors" + "fmt" + "io" + "reflect" + + primitives "github.com/ChainSafe/gossamer/internal/primitives/consensus/grandpa" + "github.com/ChainSafe/gossamer/internal/primitives/runtime" + "github.com/ChainSafe/gossamer/internal/primitives/runtime/generic" + grandpa "github.com/ChainSafe/gossamer/pkg/finality-grandpa" + "github.com/ChainSafe/gossamer/pkg/scale" +) + +var ( + errInvalidAuthoritiesSet = errors.New("current state of blockchain has invalid authorities set") + errBadJustification = errors.New("bad justification for header") + errBlockNotDescendentOfBase = errors.New("block not descendent of base") +) + +// A GRANDPA justification for block finality, it includes a commit message and +// an ancestry proof including all headers routing all precommit target blocks +// to the commit target block. Due to the current voting strategy the precommit +// targets should be the same as the commit target, since honest voters don't +// vote past authority set change blocks. +// +// This is meant to be stored in the db and passed around the network to other +// nodes, and are used by syncing nodes to prove authority set handoffs. +type GrandpaJustification[Hash runtime.Hash, N runtime.Number] struct { + // The GRANDPA justification for block finality. + Justification primitives.GrandpaJustification[Hash, N] +} + +// Type used for decoding grandpa justifications (can pass in generic Header type) +type decodeGrandpaJustification[ + Hash runtime.Hash, + N runtime.Number, + Hasher runtime.Hasher[Hash], +] GrandpaJustification[Hash, N] + +func decodeJustification[ + Hash runtime.Hash, + N runtime.Number, + Hasher runtime.Hasher[Hash], +](encodedJustification []byte) (*GrandpaJustification[Hash, N], error) { + newJustificaiton := decodeGrandpaJustification[Hash, N, Hasher]{} + err := scale.Unmarshal(encodedJustification, &newJustificaiton) + if err != nil { + return nil, err + } + return newJustificaiton.GrandpaJustification(), nil +} + +func (dgj *decodeGrandpaJustification[H, N, Hasher]) UnmarshalSCALE(reader io.Reader) (err error) { + type roundCommitHeader struct { + Round uint64 + Commit primitives.Commit[H, N] + Headers []generic.Header[N, H, Hasher] + } + rch := roundCommitHeader{} + decoder := scale.NewDecoder(reader) + err = decoder.Decode(&rch) + if err != nil { + return + } + + dgj.Justification.Round = rch.Round + dgj.Justification.Commit = rch.Commit + dgj.Justification.VoteAncestries = make([]runtime.Header[N, H], len(rch.Headers)) + for i, header := range rch.Headers { + header := header + dgj.Justification.VoteAncestries[i] = &header + } + return +} + +func (dgj decodeGrandpaJustification[Hash, N, Hasher]) GrandpaJustification() *GrandpaJustification[Hash, N] { + return &GrandpaJustification[Hash, N]{ + Justification: primitives.GrandpaJustification[Hash, N]{ + Round: dgj.Justification.Round, + Commit: dgj.Justification.Commit, + VoteAncestries: dgj.Justification.VoteAncestries, + }, + } +} + +// DecodeGrandpaJustificationVerifyFinalizes will decode a GRANDPA justification and validate the commit and +// the votes' ancestry proofs finalize the given block. 
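+//
+// A minimal usage sketch (hedged: `encoded`, `finalizedHash`, `finalizedNumber`,
+// `setID` and `voters` are placeholders supplied by the caller):
+//
+//	target := HashNumber[hash.H256, uint64]{Hash: finalizedHash, Number: finalizedNumber}
+//	justification, err := DecodeGrandpaJustificationVerifyFinalizes[hash.H256, uint64, runtime.BlakeTwo256](
+//		encoded, target, setID, voters)
+//	if err != nil {
+//		// the bytes either do not decode or do not prove finality of the expected block
+//	}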
+func DecodeGrandpaJustificationVerifyFinalizes[ + Hash runtime.Hash, + N runtime.Number, + Hasher runtime.Hasher[Hash], +]( + encoded []byte, + finalizedTarget HashNumber[Hash, N], + setID uint64, + voters grandpa.VoterSet[string], +) (GrandpaJustification[Hash, N], error) { + justification, err := decodeJustification[Hash, N, Hasher](encoded) + if err != nil { + return GrandpaJustification[Hash, N]{}, fmt.Errorf("error decoding justification for header: %s", err) + } + + decodedTarget := HashNumber[Hash, N]{ + Hash: justification.Justification.Commit.TargetHash, + Number: justification.Justification.Commit.TargetNumber, + } + + if decodedTarget != finalizedTarget { + return GrandpaJustification[Hash, N]{}, fmt.Errorf("invalid commit target in grandpa justification") + } + + return *justification, justification.verifyWithVoterSet(setID, voters) +} + +// Verify will validate the commit and the votes' ancestry proofs. +func (j *GrandpaJustification[Hash, N]) Verify(setID uint64, authorities primitives.AuthorityList) error { + var weights []grandpa.IDWeight[string] + for _, authority := range authorities { + weight := grandpa.IDWeight[string]{ + ID: string(authority.AuthorityID.Bytes()), + Weight: uint64(authority.AuthorityWeight), + } + weights = append(weights, weight) + } + + voters := grandpa.NewVoterSet[string](weights) + if voters != nil { + err := j.verifyWithVoterSet(setID, *voters) + return err + } + return fmt.Errorf("%w", errInvalidAuthoritiesSet) +} + +// Validate the commit and the votes' ancestry proofs. +func (j *GrandpaJustification[Hash, N]) verifyWithVoterSet( + setID uint64, + voters grandpa.VoterSet[string], +) error { + ancestryChain := newAncestryChain[Hash, N](j.Justification.VoteAncestries) + signedPrecommits := make([]grandpa.SignedPrecommit[Hash, N, string, string], 0) + for _, pc := range j.Justification.Commit.Precommits { + signedPrecommits = append(signedPrecommits, grandpa.SignedPrecommit[Hash, N, string, string]{ + Precommit: pc.Precommit, + Signature: string(pc.Signature[:]), + ID: string(pc.ID.Bytes()), + }) + } + commitValidationResult, err := grandpa.ValidateCommit[Hash, N, string, string]( + grandpa.Commit[Hash, N, string, string]{ + TargetHash: j.Justification.Commit.TargetHash, + TargetNumber: j.Justification.Commit.TargetNumber, + Precommits: signedPrecommits, + }, + voters, + ancestryChain, + ) + if err != nil { + return fmt.Errorf("%w: invalid commit in grandpa justification", errBadJustification) + } + + if !commitValidationResult.Valid() { + return fmt.Errorf("%w: invalid commit in grandpa justification", errBadJustification) + } + + // we pick the precommit for the lowest block as the base that + // should serve as the root block for populating ancestry (i.e. 
+ // collect all headers from all precommit blocks to the base) + precommits := j.Justification.Commit.Precommits + var minPrecommit *grandpa.SignedPrecommit[Hash, N, primitives.AuthoritySignature, primitives.AuthorityID] + if len(precommits) == 0 { + panic("can only fail if precommits is empty; commit has been validated above; " + + "valid commits must include precommits") + } + for _, precommit := range precommits { + currPrecommit := precommit + if minPrecommit == nil { + minPrecommit = &currPrecommit + } else if currPrecommit.Precommit.TargetNumber <= minPrecommit.Precommit.TargetNumber { + minPrecommit = &currPrecommit + } + } + + baseHash := minPrecommit.Precommit.TargetHash + visitedHashes := make(map[Hash]struct{}) + for _, signed := range precommits { + msg := grandpa.NewMessage(signed.Precommit) + isValidSignature := primitives.CheckMessageSignature[Hash, N]( + msg, + signed.ID, + signed.Signature, + primitives.RoundNumber(j.Justification.Round), + primitives.SetID(setID), + ) + + if !isValidSignature { + return fmt.Errorf("%w: invalid signature for precommit in grandpa justification", + errBadJustification) + } + + if baseHash == signed.Precommit.TargetHash { + continue + } + + route, err := ancestryChain.Ancestry(baseHash, signed.Precommit.TargetHash) + if err != nil { + return fmt.Errorf("%w: invalid precommit ancestry proof in grandpa justification", + errBadJustification) + } + + // ancestry starts from parent HashField but the precommit target HashField has been + // visited + visitedHashes[signed.Precommit.TargetHash] = struct{}{} + for _, hash := range route { + visitedHashes[hash] = struct{}{} + } + } + + ancestryHashes := make(map[Hash]struct{}) + for _, header := range j.Justification.VoteAncestries { + hash := header.Hash() + ancestryHashes[hash] = struct{}{} + } + + if len(visitedHashes) != len(ancestryHashes) { + return fmt.Errorf("%w: invalid precommit ancestries in grandpa justification with unused headers", + errBadJustification) + } + + // Check if maps are equal + if !reflect.DeepEqual(ancestryHashes, visitedHashes) { + return fmt.Errorf("%w: invalid precommit ancestries in grandpa justification with unused headers", + errBadJustification) + } + + return nil +} + +// Target is the target block NumberField and HashField that this justifications proves finality for +func (j *GrandpaJustification[Hash, N]) Target() HashNumber[Hash, N] { + return HashNumber[Hash, N]{ + Number: j.Justification.Commit.TargetNumber, + Hash: j.Justification.Commit.TargetHash, + } +} + +// ancestryChain a utility trait implementing `grandpa.Chain` using a given set of headers. +// This is useful when validating commits, using the given set of headers to +// verify a valid ancestry route to the target commit block. 
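+//
+// Rough usage sketch (header and hash values are placeholders):
+//
+//	chain := newAncestryChain[hash.H256, uint64](voteAncestries)
+//	route, err := chain.Ancestry(baseHash, targetHash)
+//	// on success, route holds the intermediate hashes between base and target,
+//	// excluding both endpoints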
+type ancestryChain[Hash runtime.Hash, N runtime.Number] struct { + ancestry map[Hash]runtime.Header[N, Hash] +} + +func newAncestryChain[Hash runtime.Hash, N runtime.Number]( + headers []runtime.Header[N, Hash], +) ancestryChain[Hash, N] { + ancestry := make(map[Hash]runtime.Header[N, Hash]) + for _, header := range headers { + hash := header.Hash() + ancestry[hash] = header + } + return ancestryChain[Hash, N]{ + ancestry: ancestry, + } +} + +func (ac ancestryChain[Ordered, N]) Ancestry(base Ordered, block Ordered) ([]Ordered, error) { + route := make([]Ordered, 0) + currentHash := block + + for { + if currentHash == base { + break + } + + br, ok := ac.ancestry[currentHash] + if !ok { + return nil, fmt.Errorf("%w", errBlockNotDescendentOfBase) + } + block = br.ParentHash() + currentHash = block + route = append(route, currentHash) + } + + if len(route) != 0 { + route = route[:len(route)-1] + } + return route, nil +} + +func (ac ancestryChain[Ordered, N]) IsEqualOrDescendantOf(base Ordered, block Ordered) bool { + if base == block { + return true + } + + _, err := ac.Ancestry(base, block) + return err == nil +} diff --git a/internal/client/consensus/grandpa/justification_test.go b/internal/client/consensus/grandpa/justification_test.go new file mode 100644 index 0000000000..b5fbb84d75 --- /dev/null +++ b/internal/client/consensus/grandpa/justification_test.go @@ -0,0 +1,523 @@ +// Copyright 2023 ChainSafe Systems (ON) +// SPDX-License-Identifier: LGPL-3.0-only + +package grandpa + +import ( + "reflect" + "testing" + + primitives "github.com/ChainSafe/gossamer/internal/primitives/consensus/grandpa" + ced25519 "github.com/ChainSafe/gossamer/internal/primitives/core/ed25519" + "github.com/ChainSafe/gossamer/internal/primitives/core/hash" + "github.com/ChainSafe/gossamer/internal/primitives/keyring/ed25519" + "github.com/ChainSafe/gossamer/internal/primitives/runtime" + "github.com/ChainSafe/gossamer/internal/primitives/runtime/generic" + grandpa "github.com/ChainSafe/gossamer/pkg/finality-grandpa" + "github.com/ChainSafe/gossamer/pkg/scale" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func makePrecommit(t *testing.T, + targetHash string, + targetNumber uint64, + round uint64, //nolint:unparam + setID uint64, + voter ed25519.Keyring, +) grandpa.SignedPrecommit[hash.H256, uint64, primitives.AuthoritySignature, primitives.AuthorityID] { + t.Helper() + + precommit := grandpa.Precommit[hash.H256, uint64]{ + TargetHash: hash.H256(targetHash), + TargetNumber: targetNumber, + } + msg := grandpa.NewMessage(precommit) + encoded := primitives.NewLocalizedPayload(primitives.RoundNumber(round), primitives.SetID(setID), msg) + signature := voter.Sign(encoded) + + return grandpa.SignedPrecommit[hash.H256, uint64, primitives.AuthoritySignature, primitives.AuthorityID]{ + Precommit: grandpa.Precommit[hash.H256, uint64]{ + TargetHash: hash.H256(targetHash), + TargetNumber: targetNumber, + }, + Signature: signature, + ID: voter.Pair().Public().(ced25519.Public), + } +} + +func TestJustificationEncoding(t *testing.T) { + var hashA = "a\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" //nolint:lll + var precommits []grandpa.SignedPrecommit[hash.H256, uint64, primitives.AuthoritySignature, primitives.AuthorityID] + precommit := makePrecommit(t, hashA, 1, 1, 1, ed25519.Alice) + precommits = append(precommits, precommit) + + expAncestries := make([]runtime.Header[uint64, hash.H256], 0) + expAncestries 
= append(expAncestries, generic.NewHeader[uint64, hash.H256, runtime.BlakeTwo256]( + 100, + hash.H256(""), + hash.H256(""), + hash.H256(hashA), + runtime.Digest{}), + ) + + expected := primitives.GrandpaJustification[hash.H256, uint64]{ + Round: 2, + Commit: primitives.Commit[hash.H256, uint64]{ + TargetHash: hash.H256( + "b\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", //nolint:lll + ), + TargetNumber: 1, + Precommits: precommits, + }, + VoteAncestries: expAncestries, + } + + encodedJustification, err := scale.Marshal(expected) + require.NoError(t, err) + + justification, err := decodeJustification[hash.H256, uint64, runtime.BlakeTwo256](encodedJustification) + require.NoError(t, err) + require.Equal(t, expected, justification.Justification) +} + +func TestDecodeGrandpaJustificationVerifyFinalizes(t *testing.T) { + var a hash.H256 = "a\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" //nolint:lll + + // Invalid Encoding + invalidEncoding := []byte{21} + _, err := DecodeGrandpaJustificationVerifyFinalizes[hash.H256, uint64, runtime.BlakeTwo256]( + invalidEncoding, + HashNumber[hash.H256, uint64]{}, + 2, + grandpa.VoterSet[string]{}) + require.Error(t, err) + + // Invalid target + justification := primitives.GrandpaJustification[hash.H256, uint64]{ + Commit: primitives.Commit[hash.H256, uint64]{ + TargetHash: a, + TargetNumber: 1, + }, + } + + encWrongTarget, err := scale.Marshal(justification) + require.NoError(t, err) + _, err = DecodeGrandpaJustificationVerifyFinalizes[hash.H256, uint64, runtime.BlakeTwo256]( + encWrongTarget, + HashNumber[hash.H256, uint64]{}, + 2, + grandpa.VoterSet[string]{}) + require.Error(t, err) + require.ErrorContains(t, err, "invalid commit target in grandpa justification") + + headerB := generic.NewHeader[uint64, hash.H256, runtime.BlakeTwo256]( + 2, + hash.H256(""), + hash.H256(""), + a, + runtime.Digest{}) + + hederList := []runtime.Header[uint64, hash.H256]{headerB} + + var precommits []grandpa.SignedPrecommit[hash.H256, uint64, primitives.AuthoritySignature, primitives.AuthorityID] + precommits = append(precommits, makePrecommit(t, string(a), 1, 1, 1, ed25519.Alice)) + precommits = append(precommits, makePrecommit(t, string(a), 1, 1, 1, ed25519.Bob)) + precommits = append(precommits, makePrecommit(t, string(headerB.Hash()), 2, 1, 1, ed25519.Charlie)) + + expectedJustification := primitives.GrandpaJustification[hash.H256, uint64]{ + Round: 1, + Commit: primitives.Commit[hash.H256, uint64]{ + TargetHash: a, + TargetNumber: 1, + Precommits: precommits, + }, + VoteAncestries: hederList, + } + + encodedJustification, err := scale.Marshal(expectedJustification) + require.NoError(t, err) + + target := HashNumber[hash.H256, uint64]{ + Hash: a, + Number: 1, + } + + idWeights := make([]grandpa.IDWeight[string], 0) + for i := 1; i <= 4; i++ { + var id ced25519.Public + switch i { + case 1: + id = ed25519.Alice.Pair().Public().(ced25519.Public) + case 2: + id = ed25519.Bob.Pair().Public().(ced25519.Public) + case 3: + id = ed25519.Charlie.Pair().Public().(ced25519.Public) + case 4: + id = ed25519.Ferdie.Pair().Public().(ced25519.Public) + } + idWeights = append(idWeights, grandpa.IDWeight[string]{ + ID: string(id[:]), Weight: 1, + }) + } + voters := grandpa.NewVoterSet(idWeights) + + newJustification, err := DecodeGrandpaJustificationVerifyFinalizes[hash.H256, uint64, runtime.BlakeTwo256]( + encodedJustification, + 
target, + 1, + *voters) + require.NoError(t, err) + require.Equal(t, expectedJustification, newJustification.Justification) +} + +func TestJustification_verify(t *testing.T) { + // Nil voter case + auths := make(primitives.AuthorityList, 0) + justification := GrandpaJustification[hash.H256, uint64]{} + err := justification.Verify(2, auths) + require.ErrorIs(t, err, errInvalidAuthoritiesSet) + + // happy path + for i := 1; i <= 4; i++ { + var id ced25519.Public + switch i { + case 1: + id = ed25519.Alice.Pair().Public().(ced25519.Public) + case 2: + id = ed25519.Bob.Pair().Public().(ced25519.Public) + case 3: + id = ed25519.Charlie.Pair().Public().(ced25519.Public) + case 4: + id = ed25519.Ferdie.Pair().Public().(ced25519.Public) + } + auths = append(auths, primitives.AuthorityIDWeight{ + AuthorityID: id, + AuthorityWeight: 1, + }) + } + + var a hash.H256 = "a\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" //nolint:lll + headerB := generic.NewHeader[uint64, hash.H256, runtime.BlakeTwo256]( + 2, + hash.H256(""), + hash.H256(""), + a, + runtime.Digest{}) + + headerList := []runtime.Header[uint64, hash.H256]{headerB} + + var precommits []grandpa.SignedPrecommit[hash.H256, uint64, primitives.AuthoritySignature, primitives.AuthorityID] + precommits = append(precommits, makePrecommit(t, string(a), 1, 1, 2, ed25519.Alice)) + precommits = append(precommits, makePrecommit(t, string(a), 1, 1, 2, ed25519.Bob)) + precommits = append(precommits, makePrecommit(t, string(headerB.Hash()), 2, 1, 2, ed25519.Charlie)) + + validJustification := GrandpaJustification[hash.H256, uint64]{ + Justification: primitives.GrandpaJustification[hash.H256, uint64]{ + Round: 1, + Commit: primitives.Commit[hash.H256, uint64]{ + TargetHash: a, + TargetNumber: 1, + Precommits: precommits, + }, + VoteAncestries: headerList, + }, + } + + err = validJustification.Verify(2, auths) + require.NoError(t, err) +} + +func TestJustification_verifyWithVoterSet(t *testing.T) { + // 1) invalid commit + idWeights := make([]grandpa.IDWeight[string], 0) + for i := 1; i <= 4; i++ { + var id ced25519.Public + switch i { + case 1: + id = ed25519.Alice.Pair().Public().(ced25519.Public) + case 2: + id = ed25519.Bob.Pair().Public().(ced25519.Public) + case 3: + id = ed25519.Charlie.Pair().Public().(ced25519.Public) + case 4: + id = ed25519.Ferdie.Pair().Public().(ced25519.Public) + } + idWeights = append(idWeights, grandpa.IDWeight[string]{ + ID: string(id[:]), Weight: 1, + }) + } + voters := grandpa.NewVoterSet(idWeights) + + invalidJustification := GrandpaJustification[hash.H256, uint64]{ + primitives.GrandpaJustification[hash.H256, uint64]{ + Commit: primitives.Commit[hash.H256, uint64]{ + TargetHash: "B", + TargetNumber: 2, + }, + }, + } + + err := invalidJustification.verifyWithVoterSet(2, *voters) + require.ErrorIs(t, err, errBadJustification) + require.Equal(t, err.Error(), "bad justification for header: invalid commit in grandpa justification") + + // 2) visitedHashes != ancestryHashes + headerA := generic.NewHeader[uint64, hash.H256, runtime.BlakeTwo256]( + 1, + hash.H256(""), + hash.H256(""), + hash.H256(""), + runtime.Digest{}) + + headerB := generic.NewHeader[uint64, hash.H256, runtime.BlakeTwo256]( + 2, + hash.H256(""), + hash.H256(""), + headerA.Hash(), + runtime.Digest{}) + + headerList := []runtime.Header[uint64, hash.H256]{ + headerA, + headerB, + } + + var precommits []grandpa.SignedPrecommit[hash.H256, uint64, primitives.AuthoritySignature, 
primitives.AuthorityID] + precommits = append(precommits, makePrecommit(t, string(headerA.Hash()), 1, 1, 2, ed25519.Alice)) + precommits = append(precommits, makePrecommit(t, string(headerA.Hash()), 1, 1, 2, ed25519.Bob)) + precommits = append(precommits, makePrecommit(t, string(headerB.Hash()), 2, 1, 2, ed25519.Charlie)) + + validJustification := GrandpaJustification[hash.H256, uint64]{ + primitives.GrandpaJustification[hash.H256, uint64]{ + Commit: primitives.Commit[hash.H256, uint64]{ + TargetHash: headerA.Hash(), + TargetNumber: 1, + Precommits: precommits, + }, + VoteAncestries: headerList, + Round: 1, + }, + } + + err = validJustification.verifyWithVoterSet(2, *voters) + require.ErrorIs(t, err, errBadJustification) + require.Equal(t, err.Error(), "bad justification for header: "+ + "invalid precommit ancestries in grandpa justification with unused headers") + + // Valid case + headerList = []runtime.Header[uint64, hash.H256]{ + headerB, + } + + validJustification = GrandpaJustification[hash.H256, uint64]{ + primitives.GrandpaJustification[hash.H256, uint64]{ + Commit: primitives.Commit[hash.H256, uint64]{ + TargetHash: headerA.Hash(), + TargetNumber: 1, + Precommits: precommits, + }, + VoteAncestries: headerList, + Round: 1, + }, + } + + err = validJustification.verifyWithVoterSet(2, *voters) + require.NoError(t, err) +} + +func Test_newAncestryChain(t *testing.T) { + dummyHeader := generic.NewHeader[uint64, hash.H256, runtime.BlakeTwo256]( + 1, + hash.H256(""), + hash.H256(""), + hash.H256(""), + runtime.Digest{}) + + expAncestryMap := make(map[hash.H256]runtime.Header[uint64, hash.H256]) + expAncestryMap[dummyHeader.Hash()] = dummyHeader + type testCase struct { + name string + headers []runtime.Header[uint64, hash.H256] + want ancestryChain[hash.H256, uint64] + } + tests := []testCase{ + { + name: "noInputHeaders", + headers: []runtime.Header[uint64, hash.H256]{}, + want: ancestryChain[hash.H256, uint64]{ + ancestry: make(map[hash.H256]runtime.Header[uint64, hash.H256]), + }, + }, + { + name: "validInput", + headers: []runtime.Header[uint64, hash.H256]{ + dummyHeader, + }, + want: ancestryChain[hash.H256, uint64]{ + ancestry: expAncestryMap, + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if got := newAncestryChain[hash.H256, uint64](tt.headers); !reflect.DeepEqual(got, tt.want) { + t.Errorf("newAncestryChain() = %v, want %v", got, tt.want) + } + }) + } +} + +func TestAncestryChain_Ancestry(t *testing.T) { + headerA := generic.NewHeader[uint64, hash.H256, runtime.BlakeTwo256]( + 1, + hash.H256(""), + hash.H256(""), + hash.H256(""), + runtime.Digest{}) + + headerB := generic.NewHeader[uint64, hash.H256, runtime.BlakeTwo256]( + 2, + hash.H256(""), + hash.H256(""), + headerA.Hash(), + runtime.Digest{}) + + headerC := generic.NewHeader[uint64, hash.H256, runtime.BlakeTwo256]( + 3, + hash.H256(""), + hash.H256(""), + headerB.Hash(), + runtime.Digest{}) + + invalidParentHeader := generic.NewHeader[uint64, hash.H256, runtime.BlakeTwo256]( + 2, + hash.H256(""), + hash.H256(""), + hash.H256("invalid"), + runtime.Digest{}) + + headerList := []runtime.Header[uint64, hash.H256]{ + headerA, + headerB, + headerC, + } + invalidHeaderList := []runtime.Header[uint64, hash.H256]{ + invalidParentHeader, + } + validAncestryMap := newAncestryChain[hash.H256, uint64](headerList) + invalidAncestryMap := newAncestryChain[hash.H256, uint64](invalidHeaderList) + + type testCase struct { + name string + chain ancestryChain[hash.H256, uint64] + base hash.H256 + block 
hash.H256 + want []hash.H256 + expErr error + } + tests := []testCase{ + { + name: "baseEqualsBlock", + chain: validAncestryMap, + base: headerA.Hash(), + block: headerA.Hash(), + want: []hash.H256{}, + }, + { + name: "baseEqualsBlock", + chain: validAncestryMap, + base: headerA.Hash(), + block: "notDescendant", + expErr: errBlockNotDescendentOfBase, + }, + { + name: "invalidParentHashField", + chain: invalidAncestryMap, + base: headerA.Hash(), + block: "notDescendant", + expErr: errBlockNotDescendentOfBase, + }, + { + name: "validRoute", + chain: validAncestryMap, + base: headerA.Hash(), + block: headerC.Hash(), + want: []hash.H256{headerB.Hash()}, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := tt.chain.Ancestry(tt.base, tt.block) + assert.ErrorIs(t, err, tt.expErr) + assert.Equal(t, tt.want, got) + }) + } +} + +func TestAncestryChain_IsEqualOrDescendantOf(t *testing.T) { + headerA := generic.NewHeader[uint64, hash.H256, runtime.BlakeTwo256]( + 1, + hash.H256(""), + hash.H256(""), + hash.H256(""), + runtime.Digest{}) + + headerB := generic.NewHeader[uint64, hash.H256, runtime.BlakeTwo256]( + 2, + hash.H256(""), + hash.H256(""), + headerA.Hash(), + runtime.Digest{}) + + headerC := generic.NewHeader[uint64, hash.H256, runtime.BlakeTwo256]( + 3, + hash.H256(""), + hash.H256(""), + headerB.Hash(), + runtime.Digest{}) + + headerList := []runtime.Header[uint64, hash.H256]{ + headerA, + headerB, + headerC, + } + + validAncestryMap := newAncestryChain[hash.H256, uint64](headerList) + + type testCase struct { + name string + chain ancestryChain[hash.H256, uint64] + base hash.H256 + block hash.H256 + want bool + } + tests := []testCase{ + { + name: "baseEqualsBlock", + chain: validAncestryMap, + base: headerA.Hash(), + block: headerA.Hash(), + want: true, + }, + { + name: "baseEqualsBlock", + chain: validAncestryMap, + base: headerA.Hash(), + block: "someInvalidBLock", + want: false, + }, + { + name: "validRoute", + chain: validAncestryMap, + base: headerA.Hash(), + block: headerC.Hash(), + want: true, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got := tt.chain.IsEqualOrDescendantOf(tt.base, tt.block) + assert.Equal(t, tt.want, got) + }) + } +} diff --git a/internal/primitives/consensus/grandpa/app/app.go b/internal/primitives/consensus/grandpa/app/app.go new file mode 100644 index 0000000000..9ffa7079bc --- /dev/null +++ b/internal/primitives/consensus/grandpa/app/app.go @@ -0,0 +1,29 @@ +// Copyright 2024 ChainSafe Systems (ON) +// SPDX-License-Identifier: LGPL-3.0-only + +package app + +import ( + "fmt" + + "github.com/ChainSafe/gossamer/internal/primitives/core/crypto" + "github.com/ChainSafe/gossamer/internal/primitives/core/ed25519" +) + +// Public key used in grandpa +type Public = ed25519.Public + +var _ crypto.Public[Signature] = Public{} + +// NewPublic is constructor for Public +func NewPublic(data []byte) (Public, error) { + if len(data) != 32 { + return Public{}, fmt.Errorf("invalid public key from data: %v", data) + } + pub := Public{} + copy(pub[:], data) + return pub, nil +} + +// Signature is signature type used in grandpa +type Signature = ed25519.Signature diff --git a/internal/primitives/consensus/grandpa/grandpa.go b/internal/primitives/consensus/grandpa/grandpa.go new file mode 100644 index 0000000000..43bc7cd95f --- /dev/null +++ b/internal/primitives/consensus/grandpa/grandpa.go @@ -0,0 +1,98 @@ +// Copyright 2024 ChainSafe Systems (ON) +// SPDX-License-Identifier: LGPL-3.0-only + +package grandpa + 
+import ( + "github.com/ChainSafe/gossamer/internal/log" + "github.com/ChainSafe/gossamer/internal/primitives/consensus/grandpa/app" + "github.com/ChainSafe/gossamer/internal/primitives/runtime" + grandpa "github.com/ChainSafe/gossamer/pkg/finality-grandpa" + "github.com/ChainSafe/gossamer/pkg/scale" + "golang.org/x/exp/constraints" +) + +var logger = log.NewFromGlobal(log.AddContext("consensus", "grandpa")) + +// AuthorityID is the identity of a Grandpa authority. +type AuthorityID = app.Public + +// NewAuthorityID is constructor for AuthorityID +func NewAuthorityID(data []byte) (AuthorityID, error) { + return app.NewPublic(data) +} + +// AuthoritySignature is the signature for a Grandpa authority. +type AuthoritySignature = app.Signature + +// GrandpaEngineID is the ConsensusEngineID of GRANDPA. +var GrandpaEngineID = runtime.ConsensusEngineID{'F', 'R', 'N', 'K'} + +// AuthorityWeight is the weight of an authority. +type AuthorityWeight uint64 + +// AuthorityIndex is the index of an authority. +type AuthorityIndex uint64 + +// SetID is the monotonic identifier of a GRANDPA set of authorities. +type SetID uint64 + +// RoundNumber is the round indicator. +type RoundNumber uint64 + +// AuthorityIDWeight is struct containing AuthorityID and AuthorityWeight +type AuthorityIDWeight struct { + AuthorityID + AuthorityWeight +} + +// AuthorityList is a list of Grandpa authorities with associated weights. +type AuthorityList []AuthorityIDWeight + +// SignedMessage is a signed message. +type SignedMessage[H, N any] grandpa.SignedMessage[H, N, AuthoritySignature, AuthorityID] + +// Commit is a commit message for this chain's block type. +type Commit[H, N any] grandpa.Commit[H, N, AuthoritySignature, AuthorityID] + +// GrandpaJustification is A GRANDPA justification for block finality, it includes +// a commit message and an ancestry proof including all headers routing all +// precommit target blocks to the commit target block. Due to the current voting +// strategy the precommit targets should be the same as the commit target, since +// honest voters don't vote past authority set change blocks. +// +// This is meant to be stored in the db and passed around the network to other +// nodes, and are used by syncing nodes to prove authority set handoffs. +type GrandpaJustification[Ordered runtime.Hash, N runtime.Number] struct { + Round uint64 + Commit Commit[Ordered, N] + VoteAncestries []runtime.Header[N, Ordered] +} + +// CheckMessageSignature will check a message signature by encoding the message as +// a localised payload and verifying the provided signature using the expected +// authority id. +func CheckMessageSignature[H comparable, N constraints.Unsigned]( + message grandpa.Message[H, N], + id AuthorityID, + signature AuthoritySignature, + round RoundNumber, + setID SetID) bool { + + buf := NewLocalizedPayload(round, setID, message) + valid := id.Verify(signature, buf) + + if !valid { + logger.Debugf("Bad signature on message from %v", id) + } + return valid +} + +// LocalizedPayload will encode round message localised to a given round and set id. 
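+//
+// Sketch of the intended pairing with CheckMessageSignature above (the
+// precommit, keypair and authorityID values are placeholders):
+//
+//	payload := NewLocalizedPayload(round, setID, grandpa.NewMessage(precommit))
+//	signature := keypair.Sign(payload)
+//	ok := CheckMessageSignature[hash.H256, uint64](
+//		grandpa.NewMessage(precommit), authorityID, signature, round, setID)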
+func NewLocalizedPayload(round RoundNumber, setID SetID, message any) []byte { + return scale.MustMarshal(struct { + Message any + RoundNumber + SetID + }{message, round, setID}) +} diff --git a/internal/primitives/consensus/grandpa/grandpa_test.go b/internal/primitives/consensus/grandpa/grandpa_test.go new file mode 100644 index 0000000000..8717972bfe --- /dev/null +++ b/internal/primitives/consensus/grandpa/grandpa_test.go @@ -0,0 +1,65 @@ +// Copyright 2024 ChainSafe Systems (ON) +// SPDX-License-Identifier: LGPL-3.0-only + +package grandpa + +import ( + "testing" + + ced25519 "github.com/ChainSafe/gossamer/internal/primitives/core/ed25519" + "github.com/ChainSafe/gossamer/internal/primitives/core/hash" + "github.com/ChainSafe/gossamer/internal/primitives/keyring/ed25519" + grandpa "github.com/ChainSafe/gossamer/pkg/finality-grandpa" + "github.com/stretchr/testify/require" +) + +func makePrecommit(t *testing.T, + precommit grandpa.Precommit[hash.H256, uint64], + round uint64, + setID uint64, + voter ed25519.Keyring, +) grandpa.SignedPrecommit[hash.H256, uint64, AuthoritySignature, AuthorityID] { + t.Helper() + msg := grandpa.NewMessage(precommit) + encoded := NewLocalizedPayload(RoundNumber(round), SetID(setID), msg) + signature := voter.Sign(encoded) + + return grandpa.SignedPrecommit[hash.H256, uint64, AuthoritySignature, AuthorityID]{ + Precommit: precommit, + Signature: signature, + ID: voter.Pair().Public().(ced25519.Public), + } +} + +func TestCheckMessageSignature(t *testing.T) { + precommit := grandpa.Precommit[hash.H256, uint64]{ + TargetHash: hash.H256("a"), + TargetNumber: 1, + } + signedPrecommit := makePrecommit(t, precommit, 1, 1, ed25519.Alice) + valid := CheckMessageSignature[hash.H256, uint64]( + grandpa.NewMessage(precommit), signedPrecommit.ID, signedPrecommit.Signature, 1, 1) + require.True(t, valid) + valid = CheckMessageSignature[hash.H256, uint64]( + grandpa.NewMessage(precommit), signedPrecommit.ID, signedPrecommit.Signature, 2, 1) + require.False(t, valid) + + signedPrecommit = makePrecommit(t, precommit, 2, 1, ed25519.Alice) + valid = CheckMessageSignature[hash.H256, uint64]( + grandpa.NewMessage(precommit), signedPrecommit.ID, signedPrecommit.Signature, 2, 1) + require.True(t, valid) + valid = CheckMessageSignature[hash.H256, uint64]( + grandpa.NewMessage(precommit), signedPrecommit.ID, signedPrecommit.Signature, 1, 1) + require.False(t, valid) + + signedPrecommit = makePrecommit(t, precommit, 3, 3, ed25519.Bob) + valid = CheckMessageSignature[hash.H256, uint64]( + grandpa.NewMessage(precommit), signedPrecommit.ID, signedPrecommit.Signature, 3, 3) + require.True(t, valid) + valid = CheckMessageSignature[hash.H256, uint64]( + grandpa.NewMessage(precommit), ed25519.Bob.Pair().Public().(ced25519.Public), signedPrecommit.Signature, 3, 3) + require.True(t, valid) + valid = CheckMessageSignature[hash.H256, uint64]( + grandpa.NewMessage(precommit), ed25519.Alice.Pair().Public().(ced25519.Public), signedPrecommit.Signature, 3, 3) + require.False(t, valid) +} diff --git a/internal/primitives/core/crypto/crypto.go b/internal/primitives/core/crypto/crypto.go new file mode 100644 index 0000000000..bcdd7427e8 --- /dev/null +++ b/internal/primitives/core/crypto/crypto.go @@ -0,0 +1,214 @@ +// Copyright 2024 ChainSafe Systems (ON) +// SPDX-License-Identifier: LGPL-3.0-only + +package crypto + +import ( + "fmt" + "regexp" + "strconv" + "strings" + + "github.com/ChainSafe/gossamer/internal/primitives/core/hashing" + "github.com/ChainSafe/gossamer/pkg/scale" +) + +// DevPhrase 
is the root phrase for our publicly known keys. +const DevPhrase = "bottom drive obey lake curtain smoke basket hold race lonely fit walk" + +// DeriveJunction is a since derivation junction description. It is the single parameter +// used when creating a new secret key from an existing secret key and, in the case of +// `SoftRaw` and `SoftIndex` a new public key from an existing public key. +type DeriveJunction struct { + inner any +} +type DeriveJunctions interface { + DeriveJunctionSoft | DeriveJunctionHard +} + +func (dj DeriveJunction) Value() any { + if dj.inner == nil { + panic("nil inner for DeriveJunction") + } + return dj.inner +} + +// DeriveJunctionSoft is a soft (vanilla) derivation. Public keys have a correspondent derivation. +type DeriveJunctionSoft [32]byte + +// DeriveJunctionHard is a hard ("hardened") derivation. Public keys do not have a correspondent derivation. +type DeriveJunctionHard [32]byte + +// Harden will consume self to return a hard derive junction with the same chain code. +func (dj *DeriveJunction) Harden() DeriveJunction { + switch inner := dj.inner.(type) { + case DeriveJunctionSoft: + dj.inner = DeriveJunctionHard(inner) + } + return *dj +} + +// NewDeriveJunctionSoft creates a new soft (vanilla) DeriveJunction from a given, encodable, value. +func NewDeriveJunctionSoft(index any) (DeriveJunctionSoft, error) { + var cc = [32]byte{} + data, err := scale.Marshal(index) + if err != nil { + return DeriveJunctionSoft{}, err + } + + if len(data) > 32 { + cc = hashing.BlakeTwo256(data) + } else { + copy(cc[:], data) + } + return DeriveJunctionSoft(cc), nil +} + +// NewDeriveJunctionFromString is constructor of DeriveJunction from string representation. +func NewDeriveJunctionFromString(j string) DeriveJunction { + hard := false + trimmed := strings.TrimPrefix(j, "/") + if trimmed != j { + hard = true + } + code := trimmed + + var res DeriveJunction + n, err := strconv.Atoi(code) + if err == nil { + soft, err := NewDeriveJunctionSoft(n) + if err != nil { + panic(err) + } + res = DeriveJunction{ + inner: soft, + } + } else { + soft, err := NewDeriveJunctionSoft(code) + if err != nil { + panic(err) + } + res = DeriveJunction{ + inner: soft, + } + } + + if hard { + return res.Harden() + } else { + return res + } +} + +// NewDeriveJunction is constructor for DeriveJunction +func NewDeriveJunction[V DeriveJunctions](value V) DeriveJunction { + return DeriveJunction{ + inner: value, + } +} + +var secretPhraseRegex = regexp.MustCompile(`^(?P[\d\w ]+)?(?P(//?[^/]+)*)(///(?P.*))?$`) + +var junctionRegex = regexp.MustCompile(`/(/?[^/]+)`) + +// Trait used for types that are really just a fixed-length array. +type Bytes interface { + // Return a `Vec` filled with raw data. + Bytes() []byte +} + +// Trait suitable for typical cryptographic key public type. +type Public[Signature any] interface { + Bytes + + // Verify a signature on a message. Returns true if the signature is good. + Verify(sig Signature, message []byte) bool +} + +// SecretURI A secret uri (`SURI`) that can be used to generate a key pair. +// +// The `SURI` can be parsed from a string. The string is interpreted in the following way: +// +// - If `string` is a possibly `0x` prefixed 64-digit hex string, then it will be interpreted +// directly as a secret key (aka "seed" in `subkey`). +// - If `string` is a valid BIP-39 key phrase of 12, 15, 18, 21 or 24 words, then the key will +// be derived from it. In this case: +// - the phrase may be followed by one or more items delimited by `/` characters. 
+// - the path may be followed by `///`, in which case everything after the `///` is treated +// +// as a password. +// - If `string` begins with a `/` character it is prefixed with the public `DevPhrase` +// and interpreted as above. +// +// In this case they are interpreted as HDKD junctions; purely numeric items are interpreted as +// integers, non-numeric items as strings. Junctions prefixed with `/` are interpreted as soft +// junctions, and with `//` as hard junctions. +// +// There is no correspondence mapping between `SURI` strings and the keys they represent. +// Two different non-identical strings can actually lead to the same secret being derived. +// Notably, integer junction indices may be legally prefixed with arbitrary number of zeros. +// Similarly an empty password (ending the `SURI` with `///`) is perfectly valid and will +// generally be equivalent to no password at all. +type SecretURI struct { + // The phrase to derive the private key. + // This can either be a 64-bit hex string or a BIP-39 key phrase. + Phrase string + // Optional password as given as part of the uri. + Password *string + // The junctions as part of the uri. + Junctions []DeriveJunction +} + +// NewSecretURI is contructor for SecretURI +func NewSecretURI(s string) (SecretURI, error) { + matches := secretPhraseRegex.FindStringSubmatch(s) + if matches == nil { + return SecretURI{}, fmt.Errorf("invalid format") + } + + var ( + junctions []DeriveJunction + phrase = DevPhrase + password *string + ) + for i, name := range secretPhraseRegex.SubexpNames() { + if i == 0 { + continue + } + switch name { + case "path": + junctionMatches := junctionRegex.FindAllString(matches[i], -1) + for _, jm := range junctionMatches { + junctions = append(junctions, NewDeriveJunctionFromString(jm)) + } + case "phrase": + if matches[i] != "" { + phrase = matches[i] + } + case "password": + if matches[i] != "" { + pw := matches[i] + password = &pw + } + } + } + return SecretURI{ + Phrase: phrase, + Password: password, + Junctions: junctions, + }, nil +} + +// Pair is an interface suitable for typical cryptographic PKI key pair type. +// +// For now it just specifies how to create a key from a phrase and derivation path. +type Pair[Seed, Signature any] interface { + // Derive a child key from a series of given junctions. + Derive(path []DeriveJunction, seed *Seed) (Pair[Seed, Signature], Seed, error) + + // Sign a message. + Sign(message []byte) Signature + + // Get the public key. + Public() Public[Signature] +} diff --git a/internal/primitives/core/ed25519/ed25519.go b/internal/primitives/core/ed25519/ed25519.go new file mode 100644 index 0000000000..d7db6c21c7 --- /dev/null +++ b/internal/primitives/core/ed25519/ed25519.go @@ -0,0 +1,275 @@ +// Copyright 2024 ChainSafe Systems (ON) +// SPDX-License-Identifier: LGPL-3.0-only + +package ed25519 + +import ( + gocrypto "crypto" + "crypto/ed25519" + "crypto/rand" + "encoding/hex" + "fmt" + "strings" + + "github.com/ChainSafe/go-schnorrkel" + "github.com/ChainSafe/gossamer/internal/primitives/core/crypto" + "github.com/ChainSafe/gossamer/internal/primitives/core/hashing" + "github.com/ChainSafe/gossamer/pkg/scale" + "github.com/tyler-smith/go-bip39" +) + +// A secret seed. +type seed [32]byte + +// A Public key. +type Public [32]byte + +// Bytes returns a byte slice +func (p Public) Bytes() []byte { + return p[:] +} + +// Verify a signature on a message. Returns true if the signature is good. 
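+//
+// For example (sketch; pair is any Pair from this package, message any []byte):
+//
+//	sig := pair.Sign(message)
+//	valid := pair.Public().Verify(sig, message)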
+func (p Public) Verify(sig Signature, message []byte) bool { + return ed25519.Verify(p[:], message, sig[:]) +} + +// NewPublic creates a new instance from the given 32-byte `data`. +// +// NOTE: No checking goes on to ensure this is a real public key. Only use it if +// you are certain that the array actually is a pubkey. +func NewPublic(data [32]byte) Public { + return Public(data) +} + +var _ crypto.Public[Signature] = Public{} + +// Derive a single hard junction. +func deriveHardJunction(secretSeed seed, cc [32]byte) seed { + tuple := struct { + ID string + SecretSeed seed + CC [32]byte + }{"Ed25519HDKD", secretSeed, cc} + encoded := scale.MustMarshal(tuple) + return hashing.BlakeTwo256(encoded) +} + +// Pair is a key pair. +type Pair struct { + public gocrypto.PublicKey + secret ed25519.PrivateKey +} + +// Derive a child key from a series of given junctions. +func (p Pair) Derive(path []crypto.DeriveJunction, seed *[32]byte) (crypto.Pair[[32]byte, Signature], [32]byte, error) { + var acc [32]byte + copy(acc[:], p.secret.Seed()) + for _, j := range path { + switch cc := j.Value().(type) { + case crypto.DeriveJunctionSoft: + return Pair{}, [32]byte{}, fmt.Errorf("soft key in path") + case crypto.DeriveJunctionHard: + acc = deriveHardJunction(acc, cc) + } + } + pair := NewPairFromSeed(acc) + return pair, acc, nil +} + +// Seed is the seed for this key. +func (p Pair) Seed() [32]byte { + var seed [32]byte + copy(seed[:], p.secret.Seed()) + return seed +} + +// Public will return the public key. +func (p Pair) Public() crypto.Public[Signature] { + pubKey, ok := p.public.(ed25519.PublicKey) + if !ok { + panic("huh?") + } + if len(pubKey) != 32 { + panic("huh?") + } + var pub Public + copy(pub[:], pubKey) + return pub +} + +// Sign a message. +func (p Pair) Sign(message []byte) Signature { + signed := ed25519.Sign(p.secret, message) + if len(signed) != 64 { + panic("huh?") + } + var sig Signature + copy(sig[:], signed) + return sig +} + +// NewGeneratedPair will generate new secure (random) key pair. +// +// This is only for ephemeral keys really, since you won't have access to the secret key +// for storage. If you want a persistent key pair, use `generate_with_phrase` instead. +func NewGeneratedPair() (Pair, [32]byte) { + seedSlice := make([]byte, 32) + _, err := rand.Read(seedSlice) + if err != nil { + panic(err) + } + + var seed [32]byte + copy(seed[:], seedSlice) + return NewPairFromSeed(seed), seed +} + +// NewGeneratedPairWithPhrase will generate new secure (random) key pair and provide the recovery phrase. +// +// You can recover the same key later with `from_phrase`. +// +// This is generally slower than `generate()`, so prefer that unless you need to persist +// the key from the current session. +func NewGeneratedPairWithPhrase(password *string) (Pair, string, [32]byte) { + entropy, err := bip39.NewEntropy(128) + if err != nil { + panic(err) + } + phrase, err := bip39.NewMnemonic(entropy) + if err != nil { + panic(err) + } + pair, seed, err := NewPairFromPhrase(phrase, password) + if err != nil { + panic(err) + } + return pair, phrase, seed +} + +// NewPairFromPhrase returns the KeyPair from the English BIP39 seed `phrase`, or `None` if it's invalid. 
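+//
+// Sketch (the mnemonic shown is the well-known development phrase DevPhrase;
+// a nil password means no password):
+//
+//	pair, seed, err := NewPairFromPhrase(
+//		"bottom drive obey lake curtain smoke basket hold race lonely fit walk", nil)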
+func NewPairFromPhrase(phrase string, password *string) (pair Pair, seed [32]byte, err error) { + pass := "" + if password != nil { + pass = *password + } + bigSeed, err := schnorrkel.SeedFromMnemonic(phrase, pass) + if err != nil { + return Pair{}, [32]byte{}, err + } + + if !(32 <= len(bigSeed)) { + panic("huh?") + } + + seedSlice := bigSeed[:][0:32] + copy(seed[:], seedSlice) + return NewPairFromSeedSlice(seedSlice), seed, nil +} + +// NewPairFromSeed will generate new key pair from the provided `seed`. +// +// @WARNING: THIS WILL ONLY BE SECURE IF THE `seed` IS SECURE. If it can be guessed +// by an attacker then they can also derive your key. +func NewPairFromSeed(seed [32]byte) Pair { + return NewPairFromSeedSlice(seed[:]) +} + +// NewPairFromSeedSlice will make a new key pair from secret seed material. The slice must be the correct size or +// it will return `None`. +// +// @WARNING: THIS WILL ONLY BE SECURE IF THE `seed` IS SECURE. If it can be guessed +// by an attacker then they can also derive your key. +func NewPairFromSeedSlice(seedSlice []byte) Pair { + secret := ed25519.NewKeyFromSeed(seedSlice) + public := secret.Public() + return Pair{ + public: public, + secret: secret, + } +} + +// NewPairFromStringWithSeed interprets the string `s` in order to generate a key Pair. Returns +// both the pair and an optional seed, in the case that the pair can be expressed as a direct +// derivation from a seed (some cases, such as Sr25519 derivations with path components, cannot). +// +// This takes a helper function to do the key generation from a phrase, password and +// junction iterator. +// +// - If `s` is a possibly `0x` prefixed 64-digit hex string, then it will be interpreted +// directly as a secret key (aka "seed" in `subkey`). +// - If `s` is a valid BIP-39 key phrase of 12, 15, 18, 21 or 24 words, then the key will +// be derived from it. In this case: +// - the phrase may be followed by one or more items delimited by `/` characters. +// - the path may be followed by `///`, in which case everything after the `///` is treated +// +// as a password. +// - If `s` begins with a `/` character it is prefixed with the Substrate public `DevPhrase` +// and +// +// interpreted as above. +// +// In this case they are interpreted as HDKD junctions; purely numeric items are interpreted as +// integers, non-numeric items as strings. Junctions prefixed with `/` are interpreted as soft +// junctions, and with `//` as hard junctions. +// +// There is no correspondence mapping between SURI strings and the keys they represent. +// Two different non-identical strings can actually lead to the same secret being derived. +// Notably, integer junction indices may be legally prefixed with arbitrary number of zeros. +// Similarly an empty password (ending the SURI with `///`) is perfectly valid and will +// generally be equivalent to no password at all. +// +// `nil` is returned if no matches are found. 
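+//
+// Sketch of a typical call (the SURI "//Alice///password" is illustrative and is
+// resolved against the public DevPhrase as described above):
+//
+//	pair, seed, err := NewPairFromStringWithSeed("//Alice///password", nil)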
+func NewPairFromStringWithSeed(s string, passwordOverride *string) ( + pair crypto.Pair[[32]byte, Signature], seed [32]byte, err error, +) { + sURI, err := crypto.NewSecretURI(s) + if err != nil { + return Pair{}, [32]byte{}, err + } + var password *string + if passwordOverride != nil { + password = passwordOverride + } else { + password = sURI.Password + } + + var ( + root Pair + // seed []byte + ) + trimmedPhrase := strings.TrimPrefix(sURI.Phrase, "0x") + if trimmedPhrase != sURI.Phrase { + seedBytes, err := hex.DecodeString(trimmedPhrase) + if err != nil { + return Pair{}, [32]byte{}, err + } + root = NewPairFromSeedSlice(seedBytes) + copy(seed[:], seedBytes) + } else { + root, seed, err = NewPairFromPhrase(sURI.Phrase, password) + if err != nil { + return Pair{}, [32]byte{}, err + } + } + return root.Derive(sURI.Junctions, &seed) +} + +// NewPairFromString interprets the string `s` in order to generate a key pair. +func NewPairFromString(s string, passwordOverride *string) (crypto.Pair[[32]byte, Signature], error) { + pair, _, err := NewPairFromStringWithSeed(s, passwordOverride) + return pair, err +} + +var _ crypto.Pair[[32]byte, Signature] = Pair{} + +// Signature is a signature (a 512-bit value). +type Signature [64]byte + +// NewSignatureFromRaw constructors a new instance from the given 64-byte `data`. +// +// NOTE: No checking goes on to ensure this is a real signature. Only use it if +// you are certain that the array actually is a signature. +func NewSignatureFromRaw(data [64]byte) Signature { + return Signature(data) +} diff --git a/internal/primitives/core/ed25519/ed25519_test.go b/internal/primitives/core/ed25519/ed25519_test.go new file mode 100644 index 0000000000..124a00828a --- /dev/null +++ b/internal/primitives/core/ed25519/ed25519_test.go @@ -0,0 +1,144 @@ +// Copyright 2024 ChainSafe Systems (ON) +// SPDX-License-Identifier: LGPL-3.0-only + +package ed25519_test + +import ( + "encoding/hex" + "fmt" + "testing" + + "github.com/ChainSafe/gossamer/internal/primitives/core/crypto" + "github.com/ChainSafe/gossamer/internal/primitives/core/ed25519" + "github.com/stretchr/testify/require" +) + +func mustHexDecodeString32(t *testing.T, s string) [32]byte { + t.Helper() + seedSlice, err := hex.DecodeString(s) + require.NoError(t, err) + + var seed [32]byte + copy(seed[:], seedSlice) + return seed +} +func mustHexDecodeString64(t *testing.T, s string) [64]byte { + t.Helper() + seedSlice, err := hex.DecodeString(s) + require.NoError(t, err) + + var seed [64]byte + copy(seed[:], seedSlice) + return seed +} + +var password string = "password" + +func TestDefaultPhraseShouldBeUsed(t *testing.T) { + pair, err := ed25519.NewPairFromString("//Alice///password", nil) + require.NoError(t, err) + + pair1, err := ed25519.NewPairFromString( + fmt.Sprintf("%s//Alice", crypto.DevPhrase), &password, + ) + require.NoError(t, err) + + require.Equal(t, pair, pair1) +} + +func TestNewPairFromString_DifferentAliases(t *testing.T) { + pair, err := ed25519.NewPairFromString("//Alice///password", nil) + require.NoError(t, err) + + pair1, err := ed25519.NewPairFromString("//Bob///password", nil) + require.NoError(t, err) + + require.NotEqual(t, pair, pair1) +} + +func TestSeedAndDeriveShouldWork(t *testing.T) { + seed := mustHexDecodeString32(t, "9d61b19deffd5a60ba844af492ec2cc44449c5697b326919703bac031cae7f60") + pair := ed25519.NewPairFromSeed(seed) + require.Equal(t, pair.Seed(), seed) + + path := []crypto.DeriveJunction{crypto.NewDeriveJunction(crypto.DeriveJunctionHard{})} + derived, _, err := 
pair.Derive(path, nil) + require.NoError(t, err) + + expected := mustHexDecodeString32(t, "ede3354e133f9c8e337ddd6ee5415ed4b4ffe5fc7d21e933f4930a3730e5b21c") + require.Equal(t, expected, derived.(ed25519.Pair).Seed()) +} + +func TestVectorShouldWork(t *testing.T) { + seed := mustHexDecodeString32(t, "9d61b19deffd5a60ba844af492ec2cc44449c5697b326919703bac031cae7f60") + expected := mustHexDecodeString32(t, "d75a980182b10ab7d54bfed3c964073a0ee172f3daa62325af021a68f707511a") + + pair := ed25519.NewPairFromSeed(seed) + public := pair.Public() + require.Equal(t, public, ed25519.NewPublic(expected)) + + signature := mustHexDecodeString64(t, + "e5564300c360ac729086e2cc806e828a84877f1eb8e5d974d873e065224901555fb8821590a33bacc61e39701cf9b46bd25bf5f0595bbe24655141438e7a100b") //nolint: lll + message := []byte("") + require.Equal(t, ed25519.NewSignatureFromRaw(signature), pair.Sign(message)) + require.True(t, public.Verify(signature, message)) +} + +func TestVectorByStringShouldWork(t *testing.T) { + pair, err := ed25519.NewPairFromString("0x9d61b19deffd5a60ba844af492ec2cc44449c5697b326919703bac031cae7f60", nil) + require.NoError(t, err) + public := pair.Public() + require.Equal(t, ed25519.NewPublic( + mustHexDecodeString32(t, "d75a980182b10ab7d54bfed3c964073a0ee172f3daa62325af021a68f707511a"), + ), public) + + signature := mustHexDecodeString64(t, + "e5564300c360ac729086e2cc806e828a84877f1eb8e5d974d873e065224901555fb8821590a33bacc61e39701cf9b46bd25bf5f0595bbe24655141438e7a100b") //nolint: lll + message := []byte("") + require.Equal(t, ed25519.NewSignatureFromRaw(signature), pair.Sign(message)) + require.True(t, public.Verify(signature, message)) +} + +func TestGeneratedPairShouldWork(t *testing.T) { + pair, _ := ed25519.NewGeneratedPair() + public := pair.Public() + message := []byte("Something important") + signature := pair.Sign(message) + require.True(t, public.Verify(signature, message)) + require.False(t, public.Verify(signature, []byte("Something else"))) +} + +func TestSeededPairShouldWork(t *testing.T) { + pair := ed25519.NewPairFromSeedSlice([]byte("12345678901234567890123456789012")) + public := pair.Public() + require.Equal(t, public, ed25519.NewPublic( + mustHexDecodeString32(t, "2f8c6129d816cf51c374bc7f08c3e63ed156cf78aefb4a6550d97b87997977ee"), + )) + message := mustHexDecodeString32(t, "2f8c6129d816cf51c374bc7f08c3e63ed156cf78aefb4a6550d97b87997977ee") + signature := pair.Sign(message[:]) + require.True(t, public.Verify(signature, message[:])) + require.False(t, public.Verify(signature, []byte("Other Message"))) +} + +func TestGenerateWithPhraseRecoveryPossible(t *testing.T) { + pair1, phrase, _ := ed25519.NewGeneratedPairWithPhrase(nil) + pair2, _, err := ed25519.NewPairFromPhrase(phrase, nil) + require.NoError(t, err) + require.Equal(t, pair1.Public(), pair2.Public()) +} + +func TestGenerateWithPasswordPhraseRecoverPossible(t *testing.T) { + password := "password" + pair1, phrase, _ := ed25519.NewGeneratedPairWithPhrase(&password) + pair2, _, err := ed25519.NewPairFromPhrase(phrase, &password) + require.NoError(t, err) + require.Equal(t, pair1.Public(), pair2.Public()) +} + +func TestPasswordDoesSomething(t *testing.T) { + password := "password" + pair1, phrase, _ := ed25519.NewGeneratedPairWithPhrase(&password) + pair2, _, err := ed25519.NewPairFromPhrase(phrase, nil) + require.NoError(t, err) + require.NotEqual(t, pair1.Public(), pair2.Public()) +} diff --git a/internal/primitives/core/hash/hash.go b/internal/primitives/core/hash/hash.go new file mode 100644 index 
0000000000..e9235736f0 --- /dev/null +++ b/internal/primitives/core/hash/hash.go @@ -0,0 +1,65 @@ +// Copyright 2024 ChainSafe Systems (ON) +// SPDX-License-Identifier: LGPL-3.0-only + +package hash + +import ( + "crypto/rand" + "encoding/binary" + "fmt" + "io" + + "github.com/ChainSafe/gossamer/pkg/scale" +) + +// H256 is a fixed-size uninterpreted hash type with 32 bytes (256 bits) size. +type H256 string + +// Bytes returns a byte slice +func (h256 H256) Bytes() []byte { + return []byte(h256) +} + +// String returns string representation of H256 +func (h256 H256) String() string { + return fmt.Sprintf("%v", h256.Bytes()) +} + +// MarshalSCALE fulfils the SCALE interface for encoding +func (h256 H256) MarshalSCALE() ([]byte, error) { + var arr [32]byte + copy(arr[:], []byte(h256)) + return scale.Marshal(arr) +} + +// UnmarshalSCALE fulfils the SCALE interface for decoding +func (h256 *H256) UnmarshalSCALE(r io.Reader) error { + var arr [32]byte + decoder := scale.NewDecoder(r) + err := decoder.Decode(&arr) + if err != nil { + return err + } + if arr != [32]byte{} { + *h256 = H256(arr[:]) + } + return nil +} + +// NewH256FromLowUint64BigEndian is constructor for H256 from a uint64 +func NewH256FromLowUint64BigEndian(v uint64) H256 { + b := make([]byte, 8) + binary.BigEndian.PutUint64(b, v) + full := append(b, make([]byte, 24)...) + return H256(full) +} + +// NewRandomH256 is constructor for a random H256 +func NewRandomH256() H256 { + token := make([]byte, 32) + _, err := rand.Read(token) + if err != nil { + panic(err) + } + return H256(token) +} diff --git a/internal/primitives/core/hashing/hashing.go b/internal/primitives/core/hashing/hashing.go new file mode 100644 index 0000000000..6cb31194e9 --- /dev/null +++ b/internal/primitives/core/hashing/hashing.go @@ -0,0 +1,24 @@ +// Copyright 2024 ChainSafe Systems (ON) +// SPDX-License-Identifier: LGPL-3.0-only + +package hashing + +import ( + "golang.org/x/crypto/blake2b" +) + +// BlakeTwo256 returns a Blake2 256-bit hash of the input data +func BlakeTwo256(data []byte) [32]byte { + h, err := blake2b.New256(nil) + if err != nil { + panic(err) + } + _, err = h.Write(data) + if err != nil { + panic(err) + } + encoded := h.Sum(nil) + var arr [32]byte + copy(arr[:], encoded) + return arr +} diff --git a/internal/primitives/keyring/ed25519/ed25519.go b/internal/primitives/keyring/ed25519/ed25519.go new file mode 100644 index 0000000000..556cfe66ec --- /dev/null +++ b/internal/primitives/keyring/ed25519/ed25519.go @@ -0,0 +1,58 @@ +// Copyright 2024 ChainSafe Systems (ON) +// SPDX-License-Identifier: LGPL-3.0-only + +package ed25519 + +import ( + "fmt" + + "github.com/ChainSafe/gossamer/internal/primitives/core/ed25519" +) + +type Keyring uint + +const ( + Alice Keyring = iota + Bob + Charlie + Dave + Eve + Ferdie + One + Two +) + +func (k Keyring) Sign(msg []byte) ed25519.Signature { + return k.Pair().Sign(msg) +} + +func (k Keyring) Pair() ed25519.Pair { + pair, err := ed25519.NewPairFromString(fmt.Sprintf("//%s", k), nil) + if err != nil { + panic("static values are known good; qed") + } + return pair.(ed25519.Pair) +} + +func (k Keyring) String() string { + switch k { + case Alice: + return "Alice" + case Bob: + return "Bob" + case Charlie: + return "Charlie" + case Dave: + return "Dave" + case Eve: + return "Eve" + case Ferdie: + return "Ferdie" + case One: + return "One" + case Two: + return "Two" + default: + panic("unsupported Keyring") + } +} diff --git a/internal/primitives/runtime/digest.go b/internal/primitives/runtime/digest.go new 
file mode 100644 index 0000000000..7ed6358c60 --- /dev/null +++ b/internal/primitives/runtime/digest.go @@ -0,0 +1,80 @@
+// Copyright 2024 ChainSafe Systems (ON)
+// SPDX-License-Identifier: LGPL-3.0-only
+
+package runtime
+
+// DigestItemTypes is the interface constraint of `DigestItem`, listing the
+// supported 'system' digest item types.
+type DigestItemTypes interface {
+	PreRuntime | Consensus | Seal | Other | RuntimeEnvironmentUpdated
+}
+
+// Digest item that is able to encode/decode 'system' digest items and
+// provide opaque access to other items.
+// TODO: implement this as scale.VaryingDataType
+type DigestItem any
+
+// NewDigestItem is constructor for DigestItem
+func NewDigestItem[T DigestItemTypes](item T) DigestItem {
+	return DigestItem(item)
+}
+
+// A pre-runtime digest.
+//
+// These are messages from the consensus engine to the runtime, although
+// the consensus engine can (and should) read them itself to avoid
+// code and state duplication. It is erroneous for a runtime to produce
+// these, but this is not (yet) checked.
+//
+// NOTE: the runtime is not allowed to panic or fail in an `on_initialize`
+// call if an expected `PreRuntime` digest is not present. It is the
+// responsibility of an external block verifier to check this. Runtime API calls
+// will initialize the block without pre-runtime digests, so initialization
+// cannot fail when they are missing.
+type PreRuntime struct {
+	ConsensusEngineID
+	Bytes []byte
+}
+
+// A message from the runtime to the consensus engine. This should *never*
+// be generated by the native code of any consensus engine, but this is not
+// checked (yet).
+type Consensus struct {
+	ConsensusEngineID
+	Bytes []byte
+}
+
+// Put a Seal on it. This is only used by native code, and is never seen
+// by runtimes.
+type Seal struct {
+	ConsensusEngineID
+	Bytes []byte
+}
+
+// Some other thing. Unsupported and experimental.
+type Other []byte
+
+// An indication for the light clients that the runtime execution
+// environment is updated.
+type RuntimeEnvironmentUpdated struct{}
+
+// Digest is a header digest.
+type Digest struct {
+	// A list of logs in the digest.
+	Logs []DigestItem
+}
+
+// Push new digest item.
+func (d *Digest) Push(item DigestItem) {
+	d.Logs = append(d.Logs, item)
+}
+
+// Pop a digest item.
+func (d *Digest) Pop() DigestItem {
+	if len(d.Logs) == 0 {
+		return nil
+	}
+	item := d.Logs[len(d.Logs)-1]
+	d.Logs = d.Logs[:len(d.Logs)-1]
+	return item
+}
diff --git a/internal/primitives/runtime/generic/block.go b/internal/primitives/runtime/generic/block.go new file mode 100644 index 0000000000..a08acf42b6 --- /dev/null +++ b/internal/primitives/runtime/generic/block.go @@ -0,0 +1,72 @@
+// Copyright 2024 ChainSafe Systems (ON)
+// SPDX-License-Identifier: LGPL-3.0-only
+
+package generic
+
+import (
+	"github.com/ChainSafe/gossamer/internal/primitives/core/hash"
+	"github.com/ChainSafe/gossamer/internal/primitives/runtime"
+)
+
+// Something to identify a block.
+type BlockID any
+
+// BlockIDTypes is the interface constraint of `BlockID`.
+type BlockIDTypes[H, N any] interface {
+	BlockIDHash[H] | BlockIDNumber[N]
+}
+
+// NewBlockID is the constructor for `BlockID`.
+func NewBlockID[H, N any, T BlockIDTypes[H, N]](blockID T) BlockID {
+	return BlockID(blockID)
+}
+
+// BlockIDHash is id by block header hash.
+type BlockIDHash[H any] struct {
+	Inner H
+}
+
+// BlockIDNumber is id by block number.
+type BlockIDNumber[N any] struct {
+	Inner N
+}
+
+// Block is a block.
+type Block[N runtime.Number, H runtime.Hash, Hasher runtime.Hasher[H]] struct { + // The block header. + header runtime.Header[N, H] + // The accompanying extrinsics. + extrinsics []runtime.Extrinsic +} + +// Header returns the header. +func (b Block[N, H, Hasher]) Header() runtime.Header[N, H] { + return b.header +} + +// Extrinsics returns the block extrinsics. +func (b Block[N, H, Hasher]) Extrinsics() []runtime.Extrinsic { + return b.extrinsics +} + +// Deconstruct returns both header and extrinsics. +func (b Block[N, H, Hasher]) Deconstruct() (header runtime.Header[N, H], extrinsics []runtime.Extrinsic) { + return b.Header(), b.Extrinsics() +} + +// Hash returns the block hash. +func (b Block[N, H, Hasher]) Hash() H { + hasher := *new(Hasher) + return hasher.HashEncoded(b.header) +} + +// NewBlock is the constructor for `Block`. +func NewBlock[N runtime.Number, H runtime.Hash, Hasher runtime.Hasher[H]]( + header runtime.Header[N, H], extrinsics []runtime.Extrinsic) Block[N, H, Hasher] { + return Block[N, H, Hasher]{ + header: header, + extrinsics: extrinsics, + } +} + +var _ runtime.Block[uint, hash.H256] = Block[uint, hash.H256, runtime.BlakeTwo256]{} diff --git a/internal/primitives/runtime/generic/header.go b/internal/primitives/runtime/generic/header.go new file mode 100644 index 0000000000..ef10c50ab4 --- /dev/null +++ b/internal/primitives/runtime/generic/header.go @@ -0,0 +1,133 @@ +// Copyright 2024 ChainSafe Systems (ON) +// SPDX-License-Identifier: LGPL-3.0-only + +package generic + +import ( + "io" + + "github.com/ChainSafe/gossamer/internal/primitives/core/hash" + "github.com/ChainSafe/gossamer/internal/primitives/runtime" + "github.com/ChainSafe/gossamer/pkg/scale" +) + +// Header is a block header, and implements a compatible encoding to `sp_runtime::generic::Header` +type Header[N runtime.Number, H runtime.Hash, Hasher runtime.Hasher[H]] struct { + // The parent hash. + parentHash H + // The block number. + number N + // The state trie merkle root + stateRoot H + // The merkle root of the extrinsics. + extrinsicsRoot H + // A chain-specific digest of data useful for light clients or referencing auxiliary data. + digest runtime.Digest +} + +// Number returns the block number. +func (h Header[N, H, Hasher]) Number() N { + return h.number +} + +// SetNumber sets the block number. +func (h *Header[N, H, Hasher]) SetNumber(number N) { + h.number = number +} + +// ExtrinsicsRoot returns the extrinsics root. +func (h Header[N, H, Hasher]) ExtrinsicsRoot() H { + return h.extrinsicsRoot +} + +// SetExtrinsicsRoot sets the extrinsics root. +func (h *Header[N, H, Hasher]) SetExtrinsicsRoot(root H) { + h.extrinsicsRoot = root +} + +// StateRoot returns the state root. +func (h Header[N, H, Hasher]) StateRoot() H { + return h.stateRoot +} + +// SetStateRoot sets the state root. +func (h *Header[N, H, Hasher]) SetStateRoot(root H) { + h.stateRoot = root +} + +// ParentHash returns the parent hash. +func (h Header[N, H, Hasher]) ParentHash() H { + return h.parentHash +} + +// SetParentHash sets the parent hash. +func (h *Header[N, H, Hasher]) SetParentHash(hash H) { + h.parentHash = hash +} + +// Digest returns the digest. +func (h Header[N, H, Hasher]) Digest() runtime.Digest { + return h.digest +} + +// DigestMut returns a mutable reference to the stored digest. 
+func (h Header[N, H, Hasher]) DigestMut() *runtime.Digest { + return &h.digest +} + +type encodingHelper[H any] struct { + ParentHash H + // uses compact encoding so we need to cast to uint + // https://github.com/paritytech/substrate/blob/e374a33fe1d99d59eb24a08981090bdb4503e81b/primitives/runtime/src/generic/header.rs#L47 + Number uint + StateRoot H + ExtrinsicsRoot H + Digest runtime.Digest +} + +// MarshalSCALE implements custom SCALE encoding. +func (h Header[N, H, Hasher]) MarshalSCALE() ([]byte, error) { + help := encodingHelper[H]{h.parentHash, uint(h.number), h.stateRoot, h.extrinsicsRoot, h.digest} + return scale.Marshal(help) +} + +// UnmarshalSCALE implements custom SCALE decoding. +func (h *Header[N, H, Hasher]) UnmarshalSCALE(r io.Reader) error { + var header encodingHelper[H] + decoder := scale.NewDecoder(r) + err := decoder.Decode(&header) + if err != nil { + return err + } + h.parentHash = header.ParentHash + h.number = N(header.Number) + h.stateRoot = header.StateRoot + h.extrinsicsRoot = header.ExtrinsicsRoot + h.digest = header.Digest + return nil +} + +// Hash returns the hash of the header. +func (h Header[N, H, Hasher]) Hash() H { + hasher := *new(Hasher) + return hasher.HashEncoded(h) +} + +// NewHeader is the constructor for `Header` +func NewHeader[N runtime.Number, H runtime.Hash, Hasher runtime.Hasher[H]]( + number N, + extrinsicsRoot H, + stateRoot H, + parentHash H, + digest runtime.Digest, +) *Header[N, H, Hasher] { + return &Header[N, H, Hasher]{ + number: number, + extrinsicsRoot: extrinsicsRoot, + stateRoot: stateRoot, + parentHash: parentHash, + digest: digest, + } +} + +var _ runtime.Header[uint64, hash.H256] = &Header[uint64, hash.H256, runtime.BlakeTwo256]{} diff --git a/internal/primitives/runtime/interfaces.go b/internal/primitives/runtime/interfaces.go new file mode 100644 index 0000000000..c239aae471 --- /dev/null +++ b/internal/primitives/runtime/interfaces.go @@ -0,0 +1,105 @@ +// Copyright 2024 ChainSafe Systems (ON) +// SPDX-License-Identifier: LGPL-3.0-only + +package runtime + +import ( + "github.com/ChainSafe/gossamer/internal/primitives/core/hash" + "github.com/ChainSafe/gossamer/internal/primitives/core/hashing" + "github.com/ChainSafe/gossamer/pkg/scale" + "golang.org/x/exp/constraints" +) + +// Number is the header number type +type Number interface { + ~uint | ~uint32 | ~uint64 +} + +// Hash type +type Hash interface { + constraints.Ordered + // Bytes returns a byte slice representation of Hash + Bytes() []byte + // String returns a unique string representation of the hash + String() string +} + +// Hasher is an interface around hashing +type Hasher[H Hash] interface { + // Produce the hash of some byte-slice. + Hash(s []byte) H + + // Produce the hash of some codec-encodable value. + HashEncoded(s any) H +} + +// Blake2-256 Hash implementation. +type BlakeTwo256 struct{} + +// Produce the hash of some byte-slice. +func (bt256 BlakeTwo256) Hash(s []byte) hash.H256 { + h := hashing.BlakeTwo256(s) + return hash.H256(h[:]) +} + +// Produce the hash of some codec-encodable value. +func (bt256 BlakeTwo256) HashEncoded(s any) hash.H256 { + bytes := scale.MustMarshal(s) + return bt256.Hash(bytes) +} + +var _ Hasher[hash.H256] = BlakeTwo256{} + +// Header is the interface for a header. It has types for a `Number`, +// and `Hash`. It provides access to an `ExtrinsicsRoot`, `StateRoot` and +// `ParentHash`, as well as a `Digest` and a block `Number`. +type Header[N Number, H Hash] interface { + // Returns a reference to the header number. 
+ Number() N + // Sets the header number. + SetNumber(number N) + + // Returns a reference to the extrinsics root. + ExtrinsicsRoot() H + // Sets the extrinsic root. + SetExtrinsicsRoot(root H) + + // Returns a reference to the state root. + StateRoot() H + // Sets the state root. + SetStateRoot(root H) + + // Returns a reference to the parent hash. + ParentHash() H + // Sets the parent hash. + SetParentHash(hash H) + + // Returns a reference to the digest. + Digest() Digest + // Get a mutable reference to the digest. + DigestMut() *Digest + + // Returns the hash of the header. + Hash() H +} + +// Block represents a block. It has types for `Extrinsic` pieces of information as well as a `Header`. +// +// You can iterate over each of the `Extrinsics` and retrieve the `Header`. +type Block[N Number, H Hash] interface { + // Returns a reference to the header. + Header() Header[N, H] + // Returns a reference to the list of extrinsics. + Extrinsics() []Extrinsic + // Split the block into header and list of extrinsics. + Deconstruct() (header Header[N, H], extrinsics []Extrinsic) + // Returns the hash of the block. + Hash() H +} + +// Extrinisic is the interface for an `Extrinsic`. +type Extrinsic interface { + // Is this `Extrinsic` signed? + // If no information are available about signed/unsigned, `nil` should be returned. + IsSigned() *bool +} diff --git a/internal/primitives/runtime/runtime.go b/internal/primitives/runtime/runtime.go new file mode 100644 index 0000000000..71b3668935 --- /dev/null +++ b/internal/primitives/runtime/runtime.go @@ -0,0 +1,7 @@ +// Copyright 2024 ChainSafe Systems (ON) +// SPDX-License-Identifier: LGPL-3.0-only + +package runtime + +// Consensus engine unique ID. +type ConsensusEngineID [4]byte diff --git a/lib/grandpa/errors.go b/lib/grandpa/errors.go index 97d21d0bc6..801e46d03f 100644 --- a/lib/grandpa/errors.go +++ b/lib/grandpa/errors.go @@ -83,9 +83,6 @@ var ( // ErrAuthorityNotInSet is returned when a precommit within a justification is signed by a key not in the authority set ErrAuthorityNotInSet = errors.New("authority is not in set") - // errFinalisedBlocksMismatch is returned when we find another block finalised in the same set id and round - errFinalisedBlocksMismatch = errors.New("already have finalised block with the same setID and round") - errVoteToSignatureMismatch = errors.New("votes and authority count mismatch") errVoteBlockMismatch = errors.New("block in vote is not descendant of previously finalised block") errVoteFromSelf = errors.New("got vote from ourselves") diff --git a/lib/grandpa/message_handler.go b/lib/grandpa/message_handler.go index 7bb5e2b1c9..e2dbba21aa 100644 --- a/lib/grandpa/message_handler.go +++ b/lib/grandpa/message_handler.go @@ -9,12 +9,15 @@ import ( "fmt" "github.com/ChainSafe/gossamer/dot/network" - "github.com/ChainSafe/gossamer/dot/types" "github.com/ChainSafe/gossamer/internal/database" + "github.com/ChainSafe/gossamer/internal/primitives/core/hash" + "github.com/ChainSafe/gossamer/internal/primitives/runtime" "github.com/ChainSafe/gossamer/lib/blocktree" "github.com/ChainSafe/gossamer/lib/common" "github.com/ChainSafe/gossamer/lib/crypto/ed25519" - "github.com/ChainSafe/gossamer/pkg/scale" + + client_grandpa "github.com/ChainSafe/gossamer/internal/client/consensus/grandpa" + finality_grandpa "github.com/ChainSafe/gossamer/pkg/finality-grandpa" "github.com/libp2p/go-libp2p/core/peer" ) @@ -401,144 +404,42 @@ func (h *MessageHandler) verifyPreCommitJustification(msg *CatchUpResponse) erro // VerifyBlockJustification 
verifies the finality justification for a block, returns scale encoded justification with // any extra bytes removed. -func (s *Service) VerifyBlockJustification(hash common.Hash, justification []byte) error { - fj := Justification{} - err := scale.Unmarshal(justification, &fj) - if err != nil { - return err - } - - if hash != fj.Commit.Hash { - return fmt.Errorf("%w: justification %s and block hash %s", - ErrJustificationMismatch, fj.Commit.Hash.Short(), hash.Short()) - } - - setID, err := s.grandpaState.GetSetIDByBlockNumber(uint(fj.Commit.Number)) - if err != nil { - return fmt.Errorf("cannot get set ID from block number: %w", err) - } - - has, err := s.blockState.HasFinalisedBlock(fj.Round, setID) +func (s *Service) VerifyBlockJustification(finalizedHash common.Hash, finalizedNumber uint, encoded []byte) ( + round uint64, setID uint64, err error, +) { + setID, err = s.grandpaState.GetSetIDByBlockNumber(finalizedNumber) if err != nil { - return fmt.Errorf("checking if round and set id has finalised block: %w", err) - } - - if has { - storedFinalisedHash, err := s.blockState.GetFinalisedHash(fj.Round, setID) - if err != nil { - return fmt.Errorf("getting finalised hash: %w", err) - } - if storedFinalisedHash != hash { - return fmt.Errorf("%w, setID=%d and round=%d", errFinalisedBlocksMismatch, setID, fj.Round) - } - - return nil - } - - isDescendant, err := isDescendantOfHighestFinalisedBlock(s.blockState, fj.Commit.Hash) - if err != nil { - return fmt.Errorf("checking if descendant of highest block: %w", err) - } - - if !isDescendant { - return errVoteBlockMismatch + return 0, 0, fmt.Errorf("cannot get set ID from block number: %w", err) } auths, err := s.grandpaState.GetAuthorities(setID) if err != nil { - return fmt.Errorf("cannot get authorities for set ID: %w", err) + return 0, 0, fmt.Errorf("cannot get authorities for set ID: %w", err) } - // threshold is two-thirds the number of authorities, - // uses the current set of authorities to define the threshold - threshold := (2 * len(auths) / 3) - - if len(fj.Commit.Precommits) < threshold { - return ErrMinVotesNotMet - } - - authPubKeys := make([]AuthData, len(fj.Commit.Precommits)) - for i, pcj := range fj.Commit.Precommits { - authPubKeys[i] = AuthData{AuthorityID: pcj.AuthorityID} - } - - equivocatoryVoters := getEquivocatoryVoters(authPubKeys) - - var count int - - logger.Debugf( - "verifying justification: set id %d, round %d, hash %s, number %d, sig count %d", - setID, fj.Round, fj.Commit.Hash, fj.Commit.Number, len(fj.Commit.Precommits)) - - for _, just := range fj.Commit.Precommits { - // check if vote was for descendant of committed block - isDescendant, err := s.blockState.IsDescendantOf(hash, just.Vote.Hash) - if err != nil { - return err - } - - if !isDescendant { - return ErrPrecommitBlockMismatch - } - - publicKey, err := ed25519.NewPublicKey(just.AuthorityID[:]) - if err != nil { - return err - } - - if !isInAuthSet(publicKey, auths) { - return ErrAuthorityNotInSet - } + logger.Debugf("verifying justification within set id %d and authorities %d", setID, len(auths)) - // verify signature for each precommit - msg, err := scale.Marshal(FullVote{ - Stage: precommit, - Vote: just.Vote, - Round: fj.Round, - SetID: setID, - }) - if err != nil { - return err + idsAndWeights := make([]finality_grandpa.IDWeight[string], len(auths)) + for idx, auth := range auths { + idsAndWeights[idx] = finality_grandpa.IDWeight[string]{ + ID: string(auth.Key.Encode()), + Weight: 1, } - - ok, err := publicKey.Verify(msg, just.Signature[:]) - if 
err != nil { - return err - } - - if !ok { - return ErrInvalidSignature - } - - if _, ok := equivocatoryVoters[just.AuthorityID]; ok { - continue - } - - count++ } - if count+len(equivocatoryVoters) < threshold { - return ErrMinVotesNotMet + voters := finality_grandpa.NewVoterSet(idsAndWeights) + target := client_grandpa.HashNumber[hash.H256, uint32]{ + Hash: hash.H256(finalizedHash.ToBytes()), + Number: uint32(finalizedNumber), } - err = verifyBlockHashAgainstBlockNumber(s.blockState, fj.Commit.Hash, uint(fj.Commit.Number)) + justification, err := client_grandpa.DecodeGrandpaJustificationVerifyFinalizes[hash.H256, uint32, runtime.BlakeTwo256]( + encoded, target, setID, *voters) if err != nil { - return fmt.Errorf("verifying block hash against block number: %w", err) + return 0, 0, fmt.Errorf("decoding and verifying justification: %w", err) } - for _, preCommit := range fj.Commit.Precommits { - err := verifyBlockHashAgainstBlockNumber(s.blockState, preCommit.Vote.Hash, uint(preCommit.Vote.Number)) - if err != nil { - return fmt.Errorf("verifying block hash against block number: %w", err) - } - } - - err = s.blockState.SetFinalisedHash(hash, fj.Round, setID) - if err != nil { - return fmt.Errorf("setting finalised hash: %w", err) - } - - return nil + return justification.Justification.Round, setID, nil } func verifyBlockHashAgainstBlockNumber(bs BlockState, hash common.Hash, number uint) error { @@ -553,13 +454,3 @@ func verifyBlockHashAgainstBlockNumber(bs BlockState, hash common.Hash, number u } return nil } - -func isInAuthSet(auth *ed25519.PublicKey, set []types.GrandpaVoter) bool { - for _, a := range set { - if bytes.Equal(a.Key.Encode(), auth.Encode()) { - return true - } - } - - return false -} diff --git a/lib/grandpa/message_handler_integration_test.go b/lib/grandpa/message_handler_integration_test.go index 89a43c184b..907f080de1 100644 --- a/lib/grandpa/message_handler_integration_test.go +++ b/lib/grandpa/message_handler_integration_test.go @@ -6,7 +6,6 @@ package grandpa import ( - "errors" "fmt" "testing" "time" @@ -712,299 +711,6 @@ func TestMessageHandler_HandleCatchUpResponse(t *testing.T) { require.Equal(t, round+1, gs.state.round) } -func TestMessageHandler_VerifyBlockJustification_WithEquivocatoryVotes(t *testing.T) { - kr, err := keystore.NewEd25519Keyring() - require.NoError(t, err) - aliceKeyPair := kr.Alice().(*ed25519.Keypair) - - auths := []types.GrandpaVoter{ - { - Key: *kr.Alice().Public().(*ed25519.PublicKey), - }, - { - Key: *kr.Bob().Public().(*ed25519.PublicKey), - }, - { - Key: *kr.Charlie().Public().(*ed25519.PublicKey), - }, - { - Key: *kr.Dave().Public().(*ed25519.PublicKey), - }, - { - Key: *kr.Eve().Public().(*ed25519.PublicKey), - }, - { - Key: *kr.Ferdie().Public().(*ed25519.PublicKey), - }, - { - Key: *kr.George().Public().(*ed25519.PublicKey), - }, - { - Key: *kr.Heather().Public().(*ed25519.PublicKey), - }, - { - Key: *kr.Ian().Public().(*ed25519.PublicKey), - }, - } - - gs, st := newTestService(t, aliceKeyPair) - err = st.Grandpa.SetNextChange(auths, 0) - require.NoError(t, err) - - body, err := types.NewBodyFromBytes([]byte{0}) - require.NoError(t, err) - - block := &types.Block{ - Header: *testHeader, - Body: *body, - } - - err = st.Block.AddBlock(block) - require.NoError(t, err) - - setID, err := st.Grandpa.IncrementSetID() - require.NoError(t, err) - require.Equal(t, uint64(1), setID) - - round := uint64(1) - number := uint32(1) - precommits := buildTestJustification(t, 18, round, setID, kr, precommit) - just := newJustification(round, 
testHash, number, precommits) - data, err := scale.Marshal(*just) - require.NoError(t, err) - err = gs.VerifyBlockJustification(testHash, data) - require.NoError(t, err) -} - -func TestMessageHandler_VerifyBlockJustification(t *testing.T) { - - kr, err := keystore.NewEd25519Keyring() - require.NoError(t, err) - aliceKeyPair := kr.Alice().(*ed25519.Keypair) - - auths := []types.GrandpaVoter{ - { - Key: *kr.Alice().Public().(*ed25519.PublicKey), - }, - { - Key: *kr.Bob().Public().(*ed25519.PublicKey), - }, - { - Key: *kr.Charlie().Public().(*ed25519.PublicKey), - }, - } - - gs, st := newTestService(t, aliceKeyPair) - err = st.Grandpa.SetNextChange(auths, 0) - require.NoError(t, err) - - body, err := types.NewBodyFromBytes([]byte{0}) - require.NoError(t, err) - - block := &types.Block{ - Header: *testHeader, - Body: *body, - } - - err = st.Block.AddBlock(block) - require.NoError(t, err) - - digest2 := types.NewDigest() - prd2, _ := types.NewBabeSecondaryPlainPreDigest(0, 2).ToPreRuntimeDigest() - digest2.Add(*prd2) - - testHeader2 := types.Header{ - ParentHash: testGenesisHeader.Hash(), - Number: 1, - Digest: digest2, - } - - block2 := &types.Block{ - Header: testHeader2, - Body: *body, - } - - err = st.Block.AddBlock(block2) - require.NoError(t, err) - - err = st.Block.SetHeader(&testHeader2) - require.NoError(t, err) - - setID, err := st.Grandpa.IncrementSetID() - require.NoError(t, err) - require.Equal(t, uint64(1), setID) - - round := uint64(1) - number := uint32(1) - precommits := buildTestJustification(t, 2, round, setID, kr, precommit) - just := newJustification(round, testHash, number, precommits) - data, err := scale.Marshal(*just) - require.NoError(t, err) - err = gs.VerifyBlockJustification(testHash, data) - require.NoError(t, err) - - // use wrong hash, shouldn't verify - precommits = buildTestJustification(t, 2, round+1, setID, kr, precommit) - just = newJustification(round+1, testHash, number, precommits) - just.Commit.Precommits[0].Vote.Hash = testHeader2.Hash() - data, err = scale.Marshal(*just) - require.NoError(t, err) - err = gs.VerifyBlockJustification(testHash, data) - require.Equal(t, ErrPrecommitBlockMismatch, err) -} - -func TestMessageHandler_VerifyBlockJustification_invalid(t *testing.T) { - kr, err := keystore.NewEd25519Keyring() - require.NoError(t, err) - aliceKeyPair := kr.Alice().(*ed25519.Keypair) - - auths := []types.GrandpaVoter{ - { - Key: *kr.Alice().Public().(*ed25519.PublicKey), - }, - { - Key: *kr.Bob().Public().(*ed25519.PublicKey), - }, - { - Key: *kr.Charlie().Public().(*ed25519.PublicKey), - }, - } - - gs, st := newTestService(t, aliceKeyPair) - err = st.Grandpa.SetNextChange(auths, 1) - require.NoError(t, err) - - body, err := types.NewBodyFromBytes([]byte{0}) - require.NoError(t, err) - - block := &types.Block{ - Header: *testHeader, - Body: *body, - } - - err = st.Block.AddBlock(block) - require.NoError(t, err) - - setID, err := st.Grandpa.IncrementSetID() - require.NoError(t, err) - require.Equal(t, uint64(1), setID) - - genhash := st.Block.GenesisHash() - round := uint64(2) - number := uint32(2) - - // use wrong hash, shouldn't verify - precommits := buildTestJustification(t, 2, round+1, setID, kr, precommit) - just := newJustification(round+1, testHash, number, precommits) - just.Commit.Precommits[0].Vote.Hash = genhash - data, err := scale.Marshal(*just) - require.NoError(t, err) - err = gs.VerifyBlockJustification(testHash, data) - require.Equal(t, ErrPrecommitBlockMismatch, err) - - // use wrong round, shouldn't verify - precommits = 
buildTestJustification(t, 2, round+1, setID, kr, precommit) - just = newJustification(round+2, testHash, number, precommits) - data, err = scale.Marshal(*just) - require.NoError(t, err) - err = gs.VerifyBlockJustification(testHash, data) - require.Equal(t, ErrInvalidSignature, err) - - // add authority not in set, shouldn't verify - precommits = buildTestJustification(t, len(auths)+1, round+1, setID, kr, precommit) - just = newJustification(round+1, testHash, number, precommits) - data, err = scale.Marshal(*just) - require.NoError(t, err) - err = gs.VerifyBlockJustification(testHash, data) - require.Equal(t, ErrAuthorityNotInSet, err) - - // not enough signatures, shouldn't verify - precommits = buildTestJustification(t, 1, round+1, setID, kr, precommit) - just = newJustification(round+1, testHash, number, precommits) - data, err = scale.Marshal(*just) - require.NoError(t, err) - err = gs.VerifyBlockJustification(testHash, data) - require.Equal(t, ErrMinVotesNotMet, err) - - // mismatch justification header and block header - precommits = buildTestJustification(t, 1, round+1, setID, kr, precommit) - just = newJustification(round+1, testHash, number, precommits) - data, err = scale.Marshal(*just) - require.NoError(t, err) - otherHeader := types.NewEmptyHeader() - err = gs.VerifyBlockJustification(otherHeader.Hash(), data) - require.ErrorIs(t, err, ErrJustificationMismatch) - - expectedErr := fmt.Sprintf("%s: justification %s and block hash %s", ErrJustificationMismatch, - testHash.Short(), otherHeader.Hash().Short()) - assert.ErrorIs(t, err, ErrJustificationMismatch) - require.EqualError(t, err, expectedErr) -} - -func TestMessageHandler_VerifyBlockJustification_ErrFinalisedBlockMismatch(t *testing.T) { - t.Parallel() - - kr, err := keystore.NewEd25519Keyring() - require.NoError(t, err) - aliceKeyPair := kr.Alice().(*ed25519.Keypair) - - auths := []types.GrandpaVoter{ - { - Key: *kr.Alice().Public().(*ed25519.PublicKey), - }, - { - Key: *kr.Bob().Public().(*ed25519.PublicKey), - }, - { - Key: *kr.Charlie().Public().(*ed25519.PublicKey), - }, - } - - gs, st := newTestService(t, aliceKeyPair) - err = st.Grandpa.SetNextChange(auths, 1) - require.NoError(t, err) - - body, err := types.NewBodyFromBytes([]byte{0}) - require.NoError(t, err) - - block := &types.Block{ - Header: *testHeader, - Body: *body, - } - - err = st.Block.AddBlock(block) - require.NoError(t, err) - - setID := uint64(0) - round := uint64(1) - number := uint32(1) - - err = st.Block.SetFinalisedHash(block.Header.Hash(), round, setID) - require.NoError(t, err) - - var testHeader2 = &types.Header{ - ParentHash: testHeader.Hash(), - Number: 2, - Digest: newTestDigest(), - } - - testHash = testHeader2.Hash() - block2 := &types.Block{ - Header: *testHeader2, - Body: *body, - } - err = st.Block.AddBlock(block2) - require.NoError(t, err) - - // justification fails since there is already a block finalised in this round and set id - precommits := buildTestJustification(t, 18, round, setID, kr, precommit) - just := newJustification(round, testHash, number, precommits) - data, err := scale.Marshal(*just) - require.NoError(t, err) - err = gs.VerifyBlockJustification(testHash, data) - require.ErrorIs(t, err, errFinalisedBlocksMismatch) -} - func Test_getEquivocatoryVoters(t *testing.T) { t.Parallel() @@ -1467,123 +1173,3 @@ func signFakeFullVote( return sig } - -func TestService_VerifyBlockJustification(t *testing.T) { //nolint - kr, err := keystore.NewEd25519Keyring() - require.NoError(t, err) - - precommits := buildTestJustification(t, 2, 
1, 0, kr, precommit) - justification := newJustification(1, testHash, 1, precommits) - justificationBytes, err := scale.Marshal(*justification) - require.NoError(t, err) - - type fields struct { - blockStateBuilder func(ctrl *gomock.Controller) BlockState - grandpaStateBuilder func(ctrl *gomock.Controller) GrandpaState - } - type args struct { - hash common.Hash - justification []byte - } - tests := map[string]struct { - fields fields - args args - want []byte - wantErr error - }{ - "invalid_justification": { - fields: fields{ - blockStateBuilder: func(ctrl *gomock.Controller) BlockState { - return nil - }, - grandpaStateBuilder: func(ctrl *gomock.Controller) GrandpaState { - return nil - }, - }, - args: args{ - hash: common.Hash{}, - justification: []byte{1, 2, 3}, - }, - want: nil, - wantErr: errors.New("decoding struct: unmarshalling field at index 1: decoding struct: unmarshalling" + - " field at index 0: EOF"), - }, - "valid_justification": { - fields: fields{ - blockStateBuilder: func(ctrl *gomock.Controller) BlockState { - mockBlockState := NewMockBlockState(ctrl) - mockBlockState.EXPECT().HasFinalisedBlock(uint64(1), uint64(0)).Return(false, nil) - mockBlockState.EXPECT().GetHighestFinalisedHeader().Return(testHeader, nil) - mockBlockState.EXPECT().IsDescendantOf(testHash, testHash). - Return(true, nil).Times(3) - mockBlockState.EXPECT().GetHeader(testHash).Return(testHeader, nil).Times(3) - mockBlockState.EXPECT().SetFinalisedHash(testHash, uint64(1), - uint64(0)).Return(nil) - return mockBlockState - }, - grandpaStateBuilder: func(ctrl *gomock.Controller) GrandpaState { - mockGrandpaState := NewMockGrandpaState(ctrl) - mockGrandpaState.EXPECT().GetSetIDByBlockNumber(uint(1)).Return(uint64(0), nil) - mockGrandpaState.EXPECT().GetAuthorities(uint64(0)).Return([]types.GrandpaVoter{ - {Key: *kr.Alice().Public().(*ed25519.PublicKey), ID: 1}, - {Key: *kr.Bob().Public().(*ed25519.PublicKey), ID: 2}, - {Key: *kr.Charlie().Public().(*ed25519.PublicKey), ID: 3}, - }, nil) - return mockGrandpaState - }, - }, - args: args{ - hash: testHash, - justification: justificationBytes, - }, - want: justificationBytes, - }, - "valid_justification_extra_bytes": { - fields: fields{ - blockStateBuilder: func(ctrl *gomock.Controller) BlockState { - mockBlockState := NewMockBlockState(ctrl) - mockBlockState.EXPECT().HasFinalisedBlock(uint64(1), uint64(0)).Return(false, nil) - mockBlockState.EXPECT().GetHighestFinalisedHeader().Return(testHeader, nil) - mockBlockState.EXPECT().IsDescendantOf(testHash, testHash). 
- Return(true, nil).Times(3) - mockBlockState.EXPECT().GetHeader(testHash).Return(testHeader, nil).Times(3) - mockBlockState.EXPECT().SetFinalisedHash(testHash, uint64(1), - uint64(0)).Return(nil) - return mockBlockState - }, - grandpaStateBuilder: func(ctrl *gomock.Controller) GrandpaState { - mockGrandpaState := NewMockGrandpaState(ctrl) - mockGrandpaState.EXPECT().GetSetIDByBlockNumber(uint(1)).Return(uint64(0), nil) - mockGrandpaState.EXPECT().GetAuthorities(uint64(0)).Return([]types.GrandpaVoter{ - {Key: *kr.Alice().Public().(*ed25519.PublicKey), ID: 1}, - {Key: *kr.Bob().Public().(*ed25519.PublicKey), ID: 2}, - {Key: *kr.Charlie().Public().(*ed25519.PublicKey), ID: 3}, - }, nil) - return mockGrandpaState - }, - }, - args: args{ - hash: testHash, - justification: append(justificationBytes, []byte{1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14}...), - }, - want: justificationBytes, - }, - } - for name, tt := range tests { - tt := tt - t.Run(name, func(t *testing.T) { - t.Parallel() - ctrl := gomock.NewController(t) - s := &Service{ - blockState: tt.fields.blockStateBuilder(ctrl), - grandpaState: tt.fields.grandpaStateBuilder(ctrl), - } - err := s.VerifyBlockJustification(tt.args.hash, tt.args.justification) - if tt.wantErr != nil { - assert.ErrorContains(t, err, tt.wantErr.Error()) - } else { - require.NoError(t, err) - } - }) - } -} diff --git a/lib/grandpa/message_handler_test.go b/lib/grandpa/message_handler_test.go new file mode 100644 index 0000000000..7019abad1f --- /dev/null +++ b/lib/grandpa/message_handler_test.go @@ -0,0 +1,63 @@ +// Copyright 2024 ChainSafe Systems (ON) +// SPDX-License-Identifier: LGPL-3.0-only + +package grandpa + +import ( + "testing" + + "github.com/ChainSafe/gossamer/dot/types" + "github.com/ChainSafe/gossamer/lib/common" + "github.com/ChainSafe/gossamer/lib/crypto/ed25519" + "github.com/stretchr/testify/require" + "go.uber.org/mock/gomock" +) + +func TestVerify_WestendBlock512_Justification(t *testing.T) { + wndSetID0Voters := make([]types.GrandpaVoter, 0) + wndSetID0Authorities := []string{ + "0x959cebf18fecb305b96fd998c95f850145f52cbbb64b3ef937c0575cc7ebd652", + "0x9fc415cce1d0b2eed702c9e05f476217d23b46a8723fd56f08cddad650be7c2d", + "0xfeca0be2c87141f6074b221c919c0161a1c468d9173c5c1be59b68fab9a0ff93", + } + + for idx, pubkey := range wndSetID0Authorities { + edPubKey, err := ed25519.NewPublicKey(common.MustHexToBytes(pubkey)) + require.NoError(t, err) + + wndSetID0Voters = append(wndSetID0Voters, types.GrandpaVoter{ + ID: uint64(idx), + Key: *edPubKey, + }) + } + + const currentSetID uint64 = 0 + const block512Justification = "0xc9020000000000005895897f12e1a670609929433ac7a69dcae90e0cc2d9c" + + "32c0dce0e2a5e5e614e000200000c5895897f12e1a670609929433ac7a69dcae90e0cc2d9c32c0dce0e2a5e5e" + + "614e000200006216ec969bb5133b13f54a6121ef3a908d0a87d8409e2d471c0cad1c28532b6e27d6a8d746b43" + + "df96c2149915252a846227b060372e3bb6f49e91500d3d8ef0d959cebf18fecb305b96fd998c95f850145f52c" + + "bbb64b3ef937c0575cc7ebd6525895897f12e1a670609929433ac7a69dcae90e0cc2d9c32c0dce0e2a5e5e614" + + "e0002000092820b93ac482089fffc8246b4111da2e2b7adc786938c24eb25fe3b97cd21b946b7e12cb6fa5546" + + "b73c047ffc7c73b17a6a750bc6f2858bb0d0a7fff2fdd2029fc415cce1d0b2eed702c9e05f476217d23b46a87" + + "23fd56f08cddad650be7c2d5895897f12e1a670609929433ac7a69dcae90e0cc2d9c32c0dce0e2a5e5e614e00" + + "02000017a338b777152d2213908ab29f961ebbca04e6bd1e4cfde6cb1a0b7b7f244c2670935cdf4c2acb4dd06" + + 
"1913848f5865aa887406a3ea0c8d0dcd4d551ff249900feca0be2c87141f6074b221c919c0161a1c468d9173c5c1be59b68fab9a0ff9300" + + ctrl := gomock.NewController(t) + grandpaMockService := NewMockGrandpaState(ctrl) + grandpaMockService.EXPECT().GetSetIDByBlockNumber(uint(512)).Return(currentSetID, nil) + grandpaMockService.EXPECT().GetAuthorities(currentSetID).Return(wndSetID0Voters, nil) + + service := &Service{ + grandpaState: grandpaMockService, + } + + round, setID, err := service.VerifyBlockJustification( + common.MustHexToHash("0x5895897f12e1a670609929433ac7a69dcae90e0cc2d9c32c0dce0e2a5e5e614e"), + 512, + common.MustHexToBytes(block512Justification)) + + require.NoError(t, err) + require.Equal(t, uint64(0), setID) + require.Equal(t, uint64(713), round) +}