Correct BABE randomness by calculating InOut bytes directly in pallet (#5876)

* vrf: remove Raw* types

* babe: remove Raw* types

* pallet-babe: switch representation of RawVRFOutput to Randomness

* pallet-babe: calculate inout within the pallet

* Remove make_transcript duplication

* Bump spec version

* Fix frame tests

* and_then -> map

* Always enable u64_backend

* Fix nostd compile

* fix import: should not use std

* Remove unused definition of RawVRFOutput

* Remove unused import of RuntimeDebug

Co-authored-by: Gavin Wood <[email protected]>
sorpaas and gavofyork authored May 4, 2020
1 parent 3039413 commit 2b67057
Showing 13 changed files with 145 additions and 240 deletions.
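
The gist of the change: the pallet used to push the raw VRF output from a primary pre-digest straight into the under-construction randomness; it now reconstructs the VRFInOut from the block author's public key and derives the randomness bytes itself. A condensed sketch of that derivation, with names following the frame/babe diff below (the standalone function and its signature are illustrative, not code from this commit):

use sp_application_crypto::Public;
use sp_consensus_vrf::schnorrkel;

// Derive the 32 randomness bytes contributed by a primary block.
// `transcript` is the same VRF transcript the author signed, built by
// sp_consensus_babe::make_transcript(randomness, slot_number, epoch).
fn derive_block_randomness(
	author: &sp_consensus_babe::AuthorityId,
	vrf_output: &schnorrkel::VRFOutput,
	transcript: merlin::Transcript,
) -> Option<schnorrkel::Randomness> {
	// Recover the schnorrkel public key from the authority id bytes.
	let pubkey = schnorrkel::PublicKey::from_bytes(author.as_slice()).ok()?;
	// Re-attach the transcript to the output to rebuild the VRFInOut,
	// then squeeze out the randomness under the BABE VRF inout context.
	let inout = vrf_output.0.attach_input_hash(&pubkey, transcript).ok()?;
	Some(inout.make_bytes(&sp_consensus_babe::BABE_VRF_INOUT_CONTEXT))
}
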
2 changes: 2 additions & 0 deletions Cargo.lock

Some generated files are not rendered by default.

4 changes: 2 additions & 2 deletions bin/node/runtime/src/lib.rs
@@ -83,8 +83,8 @@ pub const VERSION: RuntimeVersion = RuntimeVersion {
// and set impl_version to 0. If only runtime
// implementation changes and behavior does not, then leave spec_version as
// is and increment impl_version.
-spec_version: 245,
-impl_version: 3,
+spec_version: 246,
+impl_version: 0,
apis: RUNTIME_API_VERSIONS,
transaction_version: 1,
};
16 changes: 1 addition & 15 deletions client/consensus/babe/src/authorship.rs
@@ -16,9 +16,8 @@

//! BABE authority selection and slot claiming.
-use merlin::Transcript;
use sp_consensus_babe::{
-	AuthorityId, BabeAuthorityWeight, BABE_ENGINE_ID, BABE_VRF_PREFIX,
+	make_transcript, AuthorityId, BabeAuthorityWeight, BABE_VRF_PREFIX,
SlotNumber, AuthorityPair,
};
use sp_consensus_babe::digests::{
@@ -119,19 +118,6 @@ pub(super) fn secondary_slot_author(
Some(&expected_author.0)
}

-pub(super) fn make_transcript(
-	randomness: &[u8],
-	slot_number: u64,
-	epoch: u64,
-) -> Transcript {
-	let mut transcript = Transcript::new(&BABE_ENGINE_ID);
-	transcript.append_u64(b"slot number", slot_number);
-	transcript.append_u64(b"current epoch", epoch);
-	transcript.append_message(b"chain randomness", randomness);
-	transcript
-}

/// Claim a secondary slot if it is our turn to propose, returning the
/// pre-digest to use when authoring the block, or `None` if it is not our turn
/// to propose.
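
The helper removed above did not disappear: make_transcript now lives in the sp_consensus_babe primitives (hence the new make_transcript import in the first hunk), so the client and the pallet share a single transcript construction. The primitives diff is not rendered on this page; the shared helper is presumably the same construction as the removed code, with the signature implied by the call sites:

use merlin::Transcript;
use sp_consensus_babe::BABE_ENGINE_ID;

// Build the BABE VRF transcript from the epoch randomness, the slot
// number and the epoch index.
pub fn make_transcript(
	randomness: &[u8],
	slot_number: u64,
	epoch: u64,
) -> Transcript {
	let mut transcript = Transcript::new(&BABE_ENGINE_ID);
	transcript.append_u64(b"slot number", slot_number);
	transcript.append_u64(b"current epoch", epoch);
	transcript.append_message(b"chain randomness", randomness);
	transcript
}
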
4 changes: 2 additions & 2 deletions client/consensus/babe/src/verification.rs
@@ -17,15 +17,15 @@
//! Verification for BABE headers.
use sp_runtime::{traits::Header, traits::DigestItemFor};
use sp_core::{Pair, Public};
-use sp_consensus_babe::{AuthoritySignature, SlotNumber, AuthorityPair, AuthorityId};
+use sp_consensus_babe::{make_transcript, AuthoritySignature, SlotNumber, AuthorityPair, AuthorityId};
use sp_consensus_babe::digests::{
PreDigest, PrimaryPreDigest, SecondaryPlainPreDigest, SecondaryVRFPreDigest,
CompatibleDigestItem
};
use sc_consensus_slots::CheckedHeader;
use log::{debug, trace};
use super::{find_pre_digest, babe_err, Epoch, BlockT, Error};
-use super::authorship::{make_transcript, calculate_primary_threshold, check_primary_threshold, secondary_slot_author};
+use super::authorship::{calculate_primary_threshold, check_primary_threshold, secondary_slot_author};

/// BABE verification parameters
pub(super) struct VerificationParams<'a, B: 'a + BlockT> {
2 changes: 2 additions & 0 deletions frame/babe/Cargo.toml
@@ -15,6 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"]
codec = { package = "parity-scale-codec", version = "1.3.0", default-features = false, features = ["derive"] }
serde = { version = "1.0.101", optional = true }
sp-inherents = { version = "2.0.0-dev", default-features = false, path = "../../primitives/inherents" }
+sp-application-crypto = { version = "2.0.0-dev", default-features = false, path = "../../primitives/application-crypto" }
sp-std = { version = "2.0.0-dev", default-features = false, path = "../../primitives/std" }
sp-runtime = { version = "2.0.0-dev", default-features = false, path = "../../primitives/runtime" }
sp-staking = { version = "2.0.0-dev", default-features = false, path = "../../primitives/staking" }
@@ -36,6 +37,7 @@ std = [
"serde",
"codec/std",
"sp-std/std",
"sp-application-crypto/std",
"frame-support/std",
"sp-runtime/std",
"sp-staking/std",
56 changes: 39 additions & 17 deletions frame/babe/src/lib.rs
@@ -34,13 +34,14 @@ use sp_staking::{
SessionIndex,
offence::{Offence, Kind},
};
+use sp_application_crypto::Public;

use codec::{Encode, Decode};
use sp_inherents::{InherentIdentifier, InherentData, ProvideInherent, MakeFatalError};
use sp_consensus_babe::{
BABE_ENGINE_ID, ConsensusLog, BabeAuthorityWeight, SlotNumber,
inherents::{INHERENT_IDENTIFIER, BabeInherentData},
-	digests::{NextEpochDescriptor, RawPreDigest},
+	digests::{NextEpochDescriptor, PreDigest},
};
use sp_consensus_vrf::schnorrkel;
pub use sp_consensus_babe::{AuthorityId, VRF_OUTPUT_LENGTH, RANDOMNESS_LENGTH, PUBLIC_KEY_LENGTH};
@@ -102,7 +103,7 @@ impl EpochChangeTrigger for SameAuthoritiesForever {

const UNDER_CONSTRUCTION_SEGMENT_LENGTH: usize = 256;

-type MaybeVrf = Option<schnorrkel::RawVRFOutput>;
+type MaybeRandomness = Option<schnorrkel::Randomness>;

decl_storage! {
trait Store for Module<T: Trait> as Babe {
@@ -147,11 +148,11 @@ decl_storage! {
/// We reset all segments and return to `0` at the beginning of every
/// epoch.
SegmentIndex build(|_| 0): u32;
-UnderConstruction: map hasher(twox_64_concat) u32 => Vec<schnorrkel::RawVRFOutput>;
+UnderConstruction: map hasher(twox_64_concat) u32 => Vec<schnorrkel::Randomness>;

/// Temporary value (cleared at block finalization) which is `Some`
/// if per-block initialization has already been called for current block.
-Initialized get(fn initialized): Option<MaybeVrf>;
+Initialized get(fn initialized): Option<MaybeRandomness>;

/// How late the current block is compared to its parent.
///
@@ -194,8 +195,8 @@ decl_module! {
// that this block was the first in a new epoch, the changeover logic has
// already occurred at this point, so the under-construction randomness
// will only contain outputs from the right epoch.
-if let Some(Some(vrf_output)) = Initialized::take() {
-	Self::deposit_vrf_output(&vrf_output);
+if let Some(Some(randomness)) = Initialized::take() {
+	Self::deposit_randomness(&randomness);
}

// remove temporary "environment" entry from storage
@@ -238,7 +239,7 @@ impl<T: Trait> FindAuthor<u32> for Module<T> {
{
for (id, mut data) in digests.into_iter() {
if id == BABE_ENGINE_ID {
-let pre_digest: RawPreDigest = RawPreDigest::decode(&mut data).ok()?;
+let pre_digest: PreDigest = PreDigest::decode(&mut data).ok()?;
return Some(pre_digest.authority_index())
}
}
@@ -415,17 +416,17 @@ impl<T: Trait> Module<T> {
<frame_system::Module<T>>::deposit_log(log.into())
}

-fn deposit_vrf_output(vrf_output: &schnorrkel::RawVRFOutput) {
+fn deposit_randomness(randomness: &schnorrkel::Randomness) {
let segment_idx = <SegmentIndex>::get();
let mut segment = <UnderConstruction>::get(&segment_idx);
if segment.len() < UNDER_CONSTRUCTION_SEGMENT_LENGTH {
// push onto current segment: not full.
-segment.push(*vrf_output);
+segment.push(*randomness);
<UnderConstruction>::insert(&segment_idx, &segment);
} else {
// move onto the next segment and update the index.
let segment_idx = segment_idx + 1;
-<UnderConstruction>::insert(&segment_idx, &vec![vrf_output.clone()]);
+<UnderConstruction>::insert(&segment_idx, &vec![randomness.clone()]);
<SegmentIndex>::put(&segment_idx);
}
}
@@ -438,18 +439,18 @@ impl<T: Trait> Module<T> {
return;
}

-let maybe_pre_digest: Option<RawPreDigest> = <frame_system::Module<T>>::digest()
+let maybe_pre_digest: Option<PreDigest> = <frame_system::Module<T>>::digest()
.logs
.iter()
.filter_map(|s| s.as_pre_runtime())
.filter_map(|(id, mut data)| if id == BABE_ENGINE_ID {
-RawPreDigest::decode(&mut data).ok()
+PreDigest::decode(&mut data).ok()
} else {
None
})
.next();

-let maybe_vrf = maybe_pre_digest.and_then(|digest| {
+let maybe_randomness: Option<schnorrkel::Randomness> = maybe_pre_digest.and_then(|digest| {
// on the first non-zero block (i.e. block #1)
// this is where the first epoch (epoch #0) actually starts.
// we need to adjust internal storage accordingly.
Expand Down Expand Up @@ -478,17 +479,38 @@ impl<T: Trait> Module<T> {
Lateness::<T>::put(lateness);
CurrentSlot::put(current_slot);

-if let RawPreDigest::Primary(primary) = digest {
+if let PreDigest::Primary(primary) = digest {
// place the VRF output into the `Initialized` storage item
// and it'll be put onto the under-construction randomness
// later, once we've decided which epoch this block is in.
-Some(primary.vrf_output)
+//
+// Reconstruct the bytes of VRFInOut using the authority id.
+Authorities::get()
+	.get(primary.authority_index as usize)
+	.and_then(|author| {
+		schnorrkel::PublicKey::from_bytes(author.0.as_slice()).ok()
+	})
+	.and_then(|pubkey| {
+		let transcript = sp_consensus_babe::make_transcript(
+			&Self::randomness(),
+			current_slot,
+			EpochIndex::get(),
+		);
+
+		primary.vrf_output.0.attach_input_hash(
+			&pubkey,
+			transcript
+		).ok()
+	})
+	.map(|inout| {
+		inout.make_bytes(&sp_consensus_babe::BABE_VRF_INOUT_CONTEXT)
+	})
} else {
None
}
});

-Initialized::put(maybe_vrf);
+Initialized::put(maybe_randomness);

// enact epoch change, if necessary.
T::EpochChangeTrigger::trigger::<T>(now)
@@ -577,7 +599,7 @@ impl<T: Trait> pallet_session::OneSessionHandler<T::AccountId> for Module<T> {
fn compute_randomness(
last_epoch_randomness: schnorrkel::Randomness,
epoch_index: u64,
-rho: impl Iterator<Item=schnorrkel::RawVRFOutput>,
+rho: impl Iterator<Item=schnorrkel::Randomness>,
rho_size_hint: Option<usize>,
) -> schnorrkel::Randomness {
let mut s = Vec::with_capacity(40 + rho_size_hint.unwrap_or(0) * VRF_OUTPUT_LENGTH);
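
Downstream of this, only the item type of rho changes: the accumulated values are already randomness bytes, so the epoch-randomness fold itself is untouched. For orientation, here is a sketch of what the collapsed body of compute_randomness plausibly does; the concatenate-and-hash shape is suggested by the capacity calculation above, but the body is not part of this diff:

use sp_consensus_babe::VRF_OUTPUT_LENGTH;
use sp_consensus_vrf::schnorrkel;

// Sketch: fold the previous epoch randomness, the epoch index and every
// accumulated per-block randomness value into one 32-byte output.
fn compute_randomness(
	last_epoch_randomness: schnorrkel::Randomness,
	epoch_index: u64,
	rho: impl Iterator<Item = schnorrkel::Randomness>,
	rho_size_hint: Option<usize>,
) -> schnorrkel::Randomness {
	let mut s = Vec::with_capacity(40 + rho_size_hint.unwrap_or(0) * VRF_OUTPUT_LENGTH);
	s.extend_from_slice(&last_epoch_randomness);
	s.extend_from_slice(&epoch_index.to_le_bytes());

	for vrf_output in rho {
		s.extend_from_slice(&vrf_output[..]);
	}

	sp_io::hashing::blake2_256(&s)
}
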
41 changes: 30 additions & 11 deletions frame/babe/src/mock.rs
@@ -30,11 +30,12 @@ use frame_support::{
weights::Weight,
};
use sp_io;
-use sp_core::H256;
-use sp_consensus_vrf::schnorrkel::{RawVRFOutput, RawVRFProof};
+use sp_core::{H256, U256, crypto::Pair};
+use sp_consensus_babe::AuthorityPair;
+use sp_consensus_vrf::schnorrkel::{VRFOutput, VRFProof};

impl_outer_origin!{
-pub enum Origin for Test where system = frame_system {}
+	pub enum Origin for Test where system = frame_system {}
}

type DummyValidatorId = u64;
@@ -109,16 +110,20 @@ impl Trait for Test {
type EpochChangeTrigger = crate::ExternalTrigger;
}

-pub fn new_test_ext(authorities: Vec<DummyValidatorId>) -> sp_io::TestExternalities {
+pub fn new_test_ext(authorities_len: usize) -> (Vec<AuthorityPair>, sp_io::TestExternalities) {
+	let pairs = (0..authorities_len).map(|i| {
+		AuthorityPair::from_seed(&U256::from(i).into())
+	}).collect::<Vec<_>>();

let mut t = frame_system::GenesisConfig::default().build_storage::<Test>().unwrap();
GenesisConfig {
-authorities: authorities.into_iter().map(|a| (UintAuthorityId(a).to_public_key(), 1)).collect(),
+authorities: pairs.iter().map(|a| (a.public(), 1)).collect(),
}.assimilate_storage::<Test>(&mut t).unwrap();
-t.into()
+(pairs, t.into())
}

pub fn go_to_block(n: u64, s: u64) {
-let pre_digest = make_pre_digest(0, s, RawVRFOutput([1; 32]), RawVRFProof([0xff; 64]));
+let pre_digest = make_secondary_plain_pre_digest(0, s);
System::initialize(&n, &Default::default(), &Default::default(), &pre_digest, InitKind::Full);
System::set_block_number(n);
if s > 1 {
@@ -140,11 +145,11 @@ pub fn progress_to_block(n: u64) {
pub fn make_pre_digest(
authority_index: sp_consensus_babe::AuthorityIndex,
slot_number: sp_consensus_babe::SlotNumber,
-	vrf_output: RawVRFOutput,
-	vrf_proof: RawVRFProof,
+	vrf_output: VRFOutput,
+	vrf_proof: VRFProof,
) -> Digest {
-let digest_data = sp_consensus_babe::digests::RawPreDigest::Primary(
-	sp_consensus_babe::digests::RawPrimaryPreDigest {
+let digest_data = sp_consensus_babe::digests::PreDigest::Primary(
+	sp_consensus_babe::digests::PrimaryPreDigest {
authority_index,
slot_number,
vrf_output,
Expand All @@ -155,6 +160,20 @@ pub fn make_pre_digest(
Digest { logs: vec![log] }
}

+pub fn make_secondary_plain_pre_digest(
+	authority_index: sp_consensus_babe::AuthorityIndex,
+	slot_number: sp_consensus_babe::SlotNumber,
+) -> Digest {
+	let digest_data = sp_consensus_babe::digests::PreDigest::SecondaryPlain(
+		sp_consensus_babe::digests::SecondaryPlainPreDigest {
+			authority_index,
+			slot_number,
+		}
+	);
+	let log = DigestItem::PreRuntime(sp_consensus_babe::BABE_ENGINE_ID, digest_data.encode());
+	Digest { logs: vec![log] }
+}

pub type System = frame_system::Module<Test>;
pub type Babe = Module<Test>;
pub type Session = pallet_session::Module<Test>;
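
With new_test_ext now returning the generated sr25519 authority pairs alongside the externalities, tests can keep advancing blocks with VRF-free secondary-plain pre-digests and still have keys at hand when a primary pre-digest with a genuine VRF output is needed. A hypothetical usage sketch (test name and assertions are illustrative, not taken from the updated test suite):

#[test]
fn mock_exposes_authority_pairs() {
	let (pairs, mut ext) = new_test_ext(4);
	ext.execute_with(|| {
		// Block progression uses secondary-plain pre-digests, so no VRF
		// signing is needed just to advance the chain.
		progress_to_block(3);
		assert_eq!(System::block_number(), 3);
		// The pairs are available for building primary pre-digests with
		// real VRF outputs/proofs where a test needs them.
		assert_eq!(pairs.len(), 4);
	});
}
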