#![cfg_attr(not(feature = "std"), no_std)]
#![allow(clippy::large_enum_variant)]
#![allow(clippy::type_complexity)]
#![allow(clippy::too_many_arguments)]
pub use pallet::*;
use crate::weights::WeightInfo;
use bp_header_chain::{justification::GrandpaJustification, InitializationData};
use bp_runtime::{BlockNumberOf, Chain, ChainId, HashOf, HasherOf, HeaderOf};
use bridges::{
header_chain as bp_header_chain, header_chain::ProofTriePointer, runtime as bp_runtime,
};
use finality_grandpa::voter_set::VoterSet;
use frame_support::{ensure, pallet_prelude::*, transactional, StorageHasher};
use frame_system::{ensure_signed, pallet_prelude::BlockNumberFor, RawOrigin};
use sp_consensus_grandpa::{ConsensusLog, GRANDPA_ENGINE_ID};
use sp_core::crypto::ByteArray;
use sp_runtime::traits::{BadOrigin, Header as HeaderT, Zero};
use t3rn_primitives::light_client::LightClientAsyncAPI;
use sp_std::{convert::TryInto, vec, vec::Vec};
use sp_trie::{read_trie_value, LayoutV1, StorageProof};
#[cfg(feature = "testing")]
pub mod mock;
pub mod bridges;
pub mod light_clients;
mod side_effects;
pub mod types;
pub mod weights;
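// Convenience aliases for the block number, hash, hasher and header types of the
// configured bridged (source) chain.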
pub type BridgedBlockNumber<T, I> = BlockNumberOf<<T as Config<I>>::BridgedChain>;
pub type BridgedBlockHash<T, I> = HashOf<<T as Config<I>>::BridgedChain>;
pub type BridgedBlockHasher<T, I> = HasherOf<<T as Config<I>>::BridgedChain>;
pub type BridgedHeader<T, I> = HeaderOf<<T as Config<I>>::BridgedChain>;
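/// Origin of a verified event payload: either a 20-byte EVM contract address or a
/// 32-byte WASM (pallet-contracts style) account id.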
#[derive(Encode, Decode, Clone, PartialEq, Eq, RuntimeDebug)]
pub enum VMSource {
EVM([u8; 20]),
WASM([u8; 32]),
}
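/// Re-encode a bridged block number into the local chain's block number type.
/// Fails with a `DispatchError` if the two SCALE encodings are incompatible.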
pub fn to_local_block_number<T: Config<I>, I: 'static>(
block_number: BridgedBlockNumber<T, I>,
) -> Result<BlockNumberFor<T>, DispatchError> {
let local_block_number: BlockNumberFor<T> =
Decode::decode(&mut block_number.encode().as_slice()).map_err(|_e| {
DispatchError::Other(
"LightClient::Grandpa - failed to decode block number from bridged header",
)
})?;
Ok(local_block_number)
}
use crate::types::{
GrandpaHeaderData, ParachainInclusionProof, ParachainRegistrationData,
RelaychainInclusionProof, RelaychainRegistrationData,
};
use frame_system::pallet_prelude::*;
use t3rn_primitives::ExecutionSource;
use t3rn_primitives::light_client::InclusionReceipt;
#[frame_support::pallet]
pub mod pallet {
use super::*;
use num_traits::One;
use sp_runtime::traits::Saturating;
use t3rn_primitives::{light_client::LightClient, GatewayVendor};
#[pallet::config]
pub trait Config<I: 'static = ()>: frame_system::Config {
type BridgedChain: Chain;
#[pallet::constant]
type HeadersToStore: Get<u32>;
type WeightInfo: WeightInfo;
type FastConfirmationOffset: Get<BlockNumberFor<Self>>;
type RationalConfirmationOffset: Get<BlockNumberFor<Self>>;
type FinalizedConfirmationOffset: Get<BlockNumberFor<Self>>;
type EpochOffset: Get<BlockNumberFor<Self>>;
type LightClientAsyncAPI: LightClientAsyncAPI<Self>;
type MyVendor: Get<GatewayVendor>;
type RuntimeEvent: From<Event<Self, I>>
+ IsType<<Self as frame_system::Config>::RuntimeEvent>;
}
#[pallet::event]
#[pallet::generate_deposit(pub(super) fn deposit_event)]
pub enum Event<T: Config<I>, I: 'static = ()> {
HeadersAdded(BridgedBlockNumber<T, I>),
}
#[pallet::pallet]
#[pallet::without_storage_info]
pub struct Pallet<T, I = ()>(pub PhantomData<(T, I)>);
#[pallet::hooks]
impl<T: Config<I>, I: 'static> Hooks<BlockNumberFor<T>> for Pallet<T, I> {}
#[pallet::error]
pub enum Error<T, I = ()> {
EmptyRangeSubmitted,
RangeToLarge,
NoFinalizedHeader,
InvalidAuthoritySet,
InvalidGrandpaJustification,
InvalidRangeLinkage,
InvalidJustificationLinkage,
ParachainEntryNotFound,
StorageRootNotFound,
InclusionDataDecodeError,
InvalidStorageProof,
EventNotIncluded,
HeaderDecodingError,
HeaderDataDecodingError,
StorageRootMismatch,
UnknownHeader,
UnexpectedEventLength,
UnexpectedSource,
EventDecodingFailed,
UnkownSideEffect,
UnsupportedScheduledChange,
Halted,
BlockHeightConversionError,
InvalidPayloadSource,
InvalidSourceFormat,
}
#[pallet::storage]
#[pallet::getter(fn get_initial_hash)]
pub(super) type InitialHash<T: Config<I>, I: 'static = ()> =
StorageValue<_, BridgedBlockHash<T, I>, OptionQuery>;
#[pallet::storage]
#[pallet::getter(fn get_best_block_hash)]
pub(super) type BestFinalizedHash<T: Config<I>, I: 'static = ()> =
StorageValue<_, BridgedBlockHash<T, I>, OptionQuery>;
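/// Ring buffer of imported header hashes. Together with `ImportedHashesPointer` it
/// bounds storage to the most recent `HeadersToStore` headers.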
#[pallet::storage]
#[pallet::getter(fn get_imported_hashes)]
pub(super) type ImportedHashes<T: Config<I>, I: 'static = ()> =
StorageMap<_, Identity, u32, BridgedBlockHash<T, I>>;
#[pallet::storage]
#[pallet::getter(fn get_submissions_counter)]
pub(super) type SubmissionsCounter<T: Config<I>, I: 'static = ()> =
StorageValue<_, BlockNumberFor<T>, ValueQuery>;
#[pallet::storage]
#[pallet::getter(fn get_imported_hashes_pointer)]
pub(super) type ImportedHashesPointer<T: Config<I>, I: 'static = ()> =
StorageValue<_, u32, OptionQuery>;
#[pallet::storage]
#[pallet::getter(fn get_imported_headers)]
pub(super) type ImportedHeaders<T: Config<I>, I: 'static = ()> =
StorageMap<_, Identity, BridgedBlockHash<T, I>, BridgedHeader<T, I>>;
#[pallet::storage]
pub(super) type RelayChainId<T: Config<I>, I: 'static = ()> =
StorageValue<_, ChainId, OptionQuery>;
#[pallet::storage]
pub(super) type CurrentAuthoritySet<T: Config<I>, I: 'static = ()> =
StorageValue<_, bp_header_chain::AuthoritySet, OptionQuery>;
#[pallet::storage]
pub(super) type ParachainIdMap<T: Config<I>, I: 'static = ()> =
StorageMap<_, Identity, ChainId, ParachainRegistrationData>;
#[pallet::storage]
pub(super) type PalletOwner<T: Config<I>, I: 'static = ()> =
StorageValue<_, T::AccountId, OptionQuery>;
#[pallet::storage]
#[pallet::getter(fn is_halted)]
pub(super) type IsHalted<T: Config<I>, I: 'static = ()> = StorageValue<_, bool, ValueQuery>;
#[pallet::storage]
#[pallet::getter(fn ever_initialized)]
pub(super) type EverInitialized<T: Config<I>, I: 'static = ()> =
StorageValue<_, bool, ValueQuery>;
#[pallet::call]
impl<T: Config<I>, I: 'static> Pallet<T, I> {
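/// Submit a linked range of headers plus a GRANDPA-finalized header and its
/// justification. The fee is refunded (`Pays::No`) only when the call actually
/// advanced the imported-headers ring buffer.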
#[pallet::weight(Weight::from_parts(10_000, 0u64) + T::DbWeight::get().writes(1))]
pub fn submit_headers(
origin: OriginFor<T>,
range: Vec<BridgedHeader<T, I>>,
signed_header: BridgedHeader<T, I>,
justification: GrandpaJustification<BridgedHeader<T, I>>,
) -> DispatchResultWithPostInfo {
let _ = ensure_signed(origin)?;
let pointer_prior = <ImportedHashesPointer<T, I>>::get().unwrap_or_default();
Pallet::<T, I>::verify_and_store_headers(range, signed_header, justification)?;
let pointer_post = <ImportedHashesPointer<T, I>>::get().unwrap_or_default();
if pointer_prior != pointer_post {
let counter = <SubmissionsCounter<T, I>>::get();
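// `get_latest_heartbeat` takes `&self` (presumably via the `LightClient` trait
// imported above), hence the throwaway pallet instance below.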
match Pallet::<T, I>(PhantomData).get_latest_heartbeat() {
Ok(heartbeat) => {
let verifier = T::MyVendor::get();
T::LightClientAsyncAPI::on_new_epoch(verifier, counter, heartbeat);
},
Err(e) => {
log::error!(
"Failed to get latest heartbeat after submit_headers: {:?}",
e
);
},
}
<SubmissionsCounter<T, I>>::put(
counter
.saturating_add(frame_system::pallet_prelude::BlockNumberFor::<T>::one()),
);
Ok(Pays::No.into())
} else {
Ok(Pays::Yes.into())
}
}
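/// Clear all light-client storage so the pallet can be re-initialized. Root only.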
#[pallet::weight(Weight::from_parts(100_000, 0))]
pub fn reset(origin: OriginFor<T>) -> DispatchResultWithPostInfo {
ensure_root(origin)?;
<EverInitialized<T, I>>::kill();
<BestFinalizedHash<T, I>>::kill();
for _ in <ParachainIdMap<T, I>>::drain() {}
for _ in <ImportedHashes<T, I>>::drain() {}
for _ in <ImportedHeaders<T, I>>::drain() {}
<InitialHash<T, I>>::kill();
<ImportedHashesPointer<T, I>>::kill();
<RelayChainId<T, I>>::kill();
<CurrentAuthoritySet<T, I>>::kill();
<IsHalted<T, I>>::kill();
<PalletOwner<T, I>>::kill();
Ok(().into())
}
}
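/// Enact a scheduled GRANDPA authority-set change announced in `header`, if any.
/// Forced changes and scheduled changes with a non-zero delay are rejected.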
pub(crate) fn try_enact_authority_change_single<T: Config<I>, I: 'static>(
header: &BridgedHeader<T, I>,
current_set_id: sp_consensus_grandpa::SetId,
) -> Result<bool, sp_runtime::DispatchError> {
let mut change_enacted = false;
ensure!(
super::find_forced_change(header).is_none(),
<Error<T, I>>::UnsupportedScheduledChange
);
if let Some(change) = find_scheduled_change(header) {
ensure!(
change.delay == Zero::zero(),
<Error<T, I>>::UnsupportedScheduledChange
);
let next_authorities = bp_header_chain::AuthoritySet {
authorities: change.next_authorities,
set_id: current_set_id + 1,
};
<CurrentAuthoritySet<T, I>>::put(&next_authorities);
change_enacted = true;
log::info!(
"Transitioned from authority set {} to {}! New authorities are: {:?}",
current_set_id,
current_set_id + 1,
next_authorities,
);
};
Ok(change_enacted)
}
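/// Verify a GRANDPA justification for the given header hash and number against the
/// supplied authority set.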
pub(crate) fn verify_justification_single<T: Config<I>, I: 'static>(
justification: &GrandpaJustification<BridgedHeader<T, I>>,
hash: BridgedBlockHash<T, I>,
number: BridgedBlockNumber<T, I>,
authority_set: bp_header_chain::AuthoritySet,
) -> Result<(), Error<T, I>> {
use bp_header_chain::justification::verify_justification;
let voter_set =
VoterSet::new(authority_set.authorities).ok_or(Error::<T, I>::InvalidAuthoritySet)?;
let set_id = authority_set.set_id;
verify_justification::<BridgedHeader<T, I>>(
(hash, number),
set_id,
&voter_set,
justification,
)
.map_err(|_| {
log::error!("Received invalid justification for {:?}", hash);
Error::<T, I>::InvalidGrandpaJustification
})
}
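/// Bootstrap the light client with the first trusted relay-chain header, authority
/// set and owner. Fails if a relay chain was already initialized.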
pub(crate) fn initialize_relay_chain<T: Config<I>, I: 'static>(
init_params: InitializationData<BridgedHeader<T, I>>,
owner: T::AccountId,
) -> DispatchResult {
can_init_relay_chain::<T, I>()?;
let InitializationData {
header,
authority_list,
set_id,
is_halted,
gateway_id,
} = init_params;
let initial_hash = header.hash();
<InitialHash<T, I>>::put(initial_hash);
<BestFinalizedHash<T, I>>::put(initial_hash);
<ImportedHeaders<T, I>>::insert(initial_hash, header);
<ImportedHashesPointer<T, I>>::put(0);
<RelayChainId<T, I>>::put(gateway_id);
let authority_set = bp_header_chain::AuthoritySet::new(authority_list, set_id);
<CurrentAuthoritySet<T, I>>::put(authority_set);
<IsHalted<T, I>>::put(is_halted);
<EverInitialized<T, I>>::put(true);
<PalletOwner<T, I>>::put(owner);
Ok(())
}
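/// Store a header in the ring buffer at `buffer_index`, pruning whatever header
/// previously occupied that slot, and advance the index modulo `HeadersToStore`.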
pub(crate) fn write_and_clean_header_data<T: Config<I>, I: 'static>(
buffer_index: &mut u32,
header: &BridgedHeader<T, I>,
hash: BridgedBlockHash<T, I>,
is_signed_header: bool,
) -> Result<(), &'static str> {
if let Ok(hash) = <ImportedHashes<T, I>>::try_get(*buffer_index) {
<ImportedHeaders<T, I>>::remove(hash);
}
<ImportedHeaders<T, I>>::insert(hash, header.clone());
<ImportedHashes<T, I>>::insert(*buffer_index, hash);
if is_signed_header {
<BestFinalizedHash<T, I>>::put(hash);
}
*buffer_index = (*buffer_index + 1) % T::HeadersToStore::get();
Ok(())
}
pub fn ensure_operational_single<T: Config<I>, I: 'static>() -> Result<(), Error<T, I>> {
if <IsHalted<T, I>>::get() {
Err(<Error<T, I>>::Halted)
} else {
Ok(())
}
}
}
impl<T: Config<I>, I: 'static> Pallet<T, I> {
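/// Verify the justification for `signed_header`, enact any scheduled authority
/// change, and store the header range. Both the range and the signed header must
/// link to the previously best finalized header via parent hashes; the call is
/// transactional, so nothing is written on failure.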
#[transactional]
pub(crate) fn verify_and_store_headers(
range: Vec<BridgedHeader<T, I>>,
signed_header: BridgedHeader<T, I>,
justification: GrandpaJustification<BridgedHeader<T, I>>,
) -> DispatchResult {
let mut best_finalized_hash =
<BestFinalizedHash<T, I>>::get().ok_or(Error::<T, I>::NoFinalizedHeader)?;
let (signed_hash, signed_number) = (signed_header.hash(), signed_header.number());
let authority_set =
<CurrentAuthoritySet<T, I>>::get().ok_or(Error::<T, I>::InvalidAuthoritySet)?;
let set_id = authority_set.set_id;
verify_justification_single::<T, I>(
&justification,
signed_hash,
*signed_number,
authority_set,
)?;
let _enacted = try_enact_authority_change_single::<T, I>(&signed_header, set_id)?;
let mut buffer_index = <ImportedHashesPointer<T, I>>::get().unwrap_or_default();
for header in range {
if best_finalized_hash == *header.parent_hash() {
write_and_clean_header_data::<T, I>(
&mut buffer_index,
&header,
header.hash(),
false,
)?;
best_finalized_hash = header.hash();
} else {
return Err(Error::<T, I>::InvalidRangeLinkage.into())
}
}
if best_finalized_hash == *signed_header.parent_hash() {
write_and_clean_header_data::<T, I>(
&mut buffer_index,
&signed_header,
signed_hash,
true,
)?;
} else {
return Err(Error::<T, I>::InvalidJustificationLinkage.into())
}
<ImportedHashesPointer<T, I>>::set(Some(buffer_index));
Self::deposit_event(Event::HeadersAdded(*signed_number));
Ok(())
}
pub fn best_finalized_map() -> BridgedHeader<T, I> {
let hash = <BestFinalizedHash<T, I>>::get().unwrap_or_default();
<ImportedHeaders<T, I>>::get(hash).unwrap_or_else(|| {
<BridgedHeader<T, I>>::new(
Default::default(),
Default::default(),
Default::default(),
Default::default(),
Default::default(),
)
})
}
pub fn is_known_header(hash: BridgedBlockHash<T, I>) -> bool {
<ImportedHeaders<T, I>>::contains_key(hash)
}
pub fn parse_finalized_storage_proof<R>(
hash: BridgedBlockHash<T, I>,
storage_proof: sp_trie::StorageProof,
parse: impl FnOnce(bp_runtime::StorageProofChecker<BridgedBlockHasher<T, I>>) -> R,
) -> Result<R, DispatchError> {
let header = <ImportedHeaders<T, I>>::get(hash).ok_or(Error::<T, I>::UnknownHeader)?;
let storage_proof_checker =
bp_runtime::StorageProofChecker::new(*header.state_root(), storage_proof)
.map_err(|_| Error::<T, I>::StorageRootMismatch)?;
Ok(parse(storage_proof_checker))
}
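/// Register either the relay chain (first call) or a parachain (subsequent calls)
/// from SCALE-encoded registration data. Owner or root only.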
pub fn initialize(
origin: T::RuntimeOrigin,
gateway_id: ChainId,
encoded_registration_data: Vec<u8>,
) -> DispatchResult {
ensure_owner_or_root_single::<T, I>(origin)?;
match <RelayChainId<T, I>>::get() {
Some(relay_chain_id) => {
ensure!(relay_chain_id != gateway_id, "chain_id already initialized");
let parachain_registration_data: ParachainRegistrationData =
Decode::decode(&mut &*encoded_registration_data)
.map_err(|_| "Parachain registration decoding error")?;
ensure!(
parachain_registration_data.relay_gateway_id == relay_chain_id,
"Invalid relay chain id"
);
<ParachainIdMap<T, I>>::insert(gateway_id, parachain_registration_data);
Ok(())
},
None => {
let registration_data: RelaychainRegistrationData<T::AccountId> =
Decode::decode(&mut &*encoded_registration_data)
.map_err(|_| "Decoding Error")?;
let header: BridgedHeader<T, I> =
Decode::decode(&mut ®istration_data.first_header[..])
.map_err(|_| "header decoding error")?;
let init_data = InitializationData {
header,
authority_list: registration_data
.authorities
.iter()
.map(|id| {
sp_consensus_grandpa::AuthorityId::from_slice(&id.encode()).unwrap()
})
.map(|authority| (authority, 1))
.collect::<Vec<_>>(),
set_id: registration_data.authority_set_id,
is_halted: false,
gateway_id,
};
initialize_relay_chain::<T, I>(init_data, registration_data.owner)
},
}
}
pub fn set_owner(
origin: T::RuntimeOrigin,
_gateway_id: ChainId,
encoded_new_owner: Vec<u8>,
) -> Result<(), &'static str> {
ensure_owner_or_root_single::<T, I>(origin)?;
let new_owner: Option<T::AccountId> =
Decode::decode(&mut &*encoded_new_owner).map_err(|_| "New Owner decoding error")?;
match new_owner {
Some(new_owner) => {
PalletOwner::<T, I>::put(&new_owner);
log::info!("Setting pallet Owner to: {:?}", new_owner);
},
None => {
PalletOwner::<T, I>::set(None);
log::info!("Removed Owner of pallet.");
},
}
Ok(())
}
pub fn set_operational(origin: OriginFor<T>, operational: bool) -> Result<(), &'static str> {
ensure_owner_or_root_single::<T, I>(origin)?;
<IsHalted<T, I>>::put(!operational);
Ok(())
}
pub fn submit_encoded_headers(encoded_header_data: Vec<u8>) -> Result<(), DispatchError> {
ensure_operational_single::<T, I>()?;
let data: GrandpaHeaderData<BridgedHeader<T, I>> =
Decode::decode(&mut &*encoded_header_data)
.map_err(|_| Error::<T, I>::HeaderDataDecodingError)?;
Pallet::<T, I>::verify_and_store_headers(
data.range,
data.signed_header,
data.justification,
)?;
Ok(())
}
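/// Check that an encoded event was emitted by the expected source. A source whose
/// first 12 bytes are zero is treated as an EVM (H160) address, anything else as a
/// 32-byte WASM account. The message is assumed to start with the pallet index
/// followed by the event variant index (0 for an EVM `Log`, 3 for a contracts
/// `ContractEmitted`), followed by the emitting address.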
pub fn check_vm_source(
source: ExecutionSource,
message: Vec<u8>,
) -> Result<VMSource, DispatchError> {
if source[0..12] == [0u8; 12] {
ensure!(message.len() >= 22, Error::<T, I>::UnexpectedEventLength);
ensure!(message[1] == 0u8, Error::<T, I>::UnexpectedSource);
let assumed_evm_source_bytes = &message[2..22];
if &source[12..] != assumed_evm_source_bytes {
return Err(Error::<T, I>::UnexpectedSource.into())
}
let source = sp_core::H160::from_slice(&source[12..]);
Ok(VMSource::EVM(source.0))
} else {
ensure!(message.len() >= 34, Error::<T, I>::UnexpectedEventLength);
ensure!(message[1] == 3u8, Error::<T, I>::UnexpectedSource);
let assumed_wasm_source_bytes = &message[2..34];
if &source[..] != assumed_wasm_source_bytes {
return Err(Error::<T, I>::UnexpectedSource.into())
}
let source = sp_core::H256::from_slice(&source[..]);
Ok(VMSource::WASM(source.0))
}
}
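/// Prove that an event payload is included in a finalized block of `gateway_id`.
/// For the relay chain the including header must already be imported; for a
/// parachain its header is first proven against the relay chain's `Paras::Heads`
/// storage. Optionally the emitting source is checked against `maybe_source`.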
pub fn confirm_event_inclusion(
gateway_id: ChainId,
encoded_inclusion_proof: Vec<u8>,
maybe_source: Option<ExecutionSource>,
) -> Result<InclusionReceipt<BlockNumberFor<T>>, DispatchError> {
let is_relaychain = Some(gateway_id) == <RelayChainId<T, I>>::get();
let (payload_proof, encoded_payload, header, header_hash) = if is_relaychain {
let proof: RelaychainInclusionProof<BridgedHeader<T, I>> =
Decode::decode(&mut &*encoded_inclusion_proof)
.map_err(|_| Error::<T, I>::HeaderDataDecodingError)?;
let header = <ImportedHeaders<T, I>>::get(proof.block_hash)
.ok_or(Error::<T, I>::UnknownHeader)?;
(
proof.payload_proof,
proof.encoded_payload,
header,
proof.block_hash,
)
} else {
let proof: ParachainInclusionProof<BridgedHeader<T, I>> =
Decode::decode(&mut &*encoded_inclusion_proof)
.map_err(|_| Error::<T, I>::HeaderDataDecodingError)?;
let header = verify_header_storage_proof::<T, I>(
proof.relay_block_hash,
proof.header_proof,
<ParachainIdMap<T, I>>::get(gateway_id)
.ok_or(Error::<T, I>::ParachainEntryNotFound)?,
)?;
(
proof.payload_proof,
proof.encoded_payload,
header,
proof.relay_block_hash,
)
};
let message =
verify_event_storage_proof::<T, I>(payload_proof, header.clone(), encoded_payload)?;
if let Some(source) = maybe_source {
Self::check_vm_source(source, message.clone())?;
}
Ok(InclusionReceipt::<BlockNumberFor<T>> {
height: to_local_block_number::<T, I>(*header.number())?,
including_header: header_hash.encode(),
message,
})
}
pub fn get_latest_finalized_header() -> Option<Vec<u8>> {
if let Some(header_hash) = <BestFinalizedHash<T, I>>::get() {
return Some(header_hash.encode())
}
None
}
}
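/// Read the value under `key` from a storage proof anchored at one of the header's
/// trie roots, selected by `trie_type`.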
pub(crate) fn verify_storage_proof<T: Config<I>, I: 'static>(
header: BridgedHeader<T, I>,
key: Vec<u8>,
proof: StorageProof,
trie_type: ProofTriePointer,
) -> Result<Vec<u8>, &'static str> {
let root = get_header_roots::<T, I>(header, trie_type)?;
let db = proof.into_memory_db::<BridgedBlockHasher<T, I>>();
match read_trie_value::<LayoutV1<BridgedBlockHasher<T, I>>, _>(
&db,
&root,
key.as_ref(),
None,
None,
) {
Ok(Some(value)) => Ok(value),
_ => Err(Error::<T, I>::InvalidStorageProof.into()),
}
}
pub(crate) fn get_header_roots<T: pallet::Config<I>, I>(
header: BridgedHeader<T, I>,
trie_type: ProofTriePointer,
) -> Result<BridgedBlockHash<T, I>, DispatchError> {
match trie_type {
ProofTriePointer::State => Ok(*header.state_root()),
ProofTriePointer::Transaction => Ok(*header.extrinsics_root()),
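// Events live in the state trie on Substrate chains, so receipt proofs are checked
// against the state root as well.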
ProofTriePointer::Receipts => Ok(*header.state_root()),
}
}
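/// Extract the first scheduled GRANDPA authority-set change from the header digest, if any.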
pub(crate) fn find_scheduled_change<H: HeaderT>(
header: &H,
) -> Option<sp_consensus_grandpa::ScheduledChange<H::Number>> {
use sp_runtime::generic::OpaqueDigestItemId;
let id = OpaqueDigestItemId::Consensus(&GRANDPA_ENGINE_ID);
let filter_log = |log: ConsensusLog<H::Number>| match log {
ConsensusLog::ScheduledChange(change) => Some(change),
_ => None,
};
header
.digest()
.convert_first(|l| l.try_to(id).and_then(filter_log))
}
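/// Accept either root or the configured `PalletOwner` as origin.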
fn ensure_owner_or_root_single<T: Config<I>, I: 'static>(
origin: OriginFor<T>,
) -> Result<(), &'static str> {
match origin.into() {
Ok(RawOrigin::Root) => Ok(()),
Ok(RawOrigin::Signed(ref signer))
if <PalletOwner<T, I>>::exists()
&& Some(signer) == <PalletOwner<T, I>>::get().as_ref() =>
Ok(()),
_ => Err(BadOrigin.into()),
}
}
fn can_init_relay_chain<T: Config<I>, I: 'static>() -> DispatchResult {
ensure!(
!<BestFinalizedHash<T, I>>::exists(),
"can_init_relay_chain -- chain_id already initialized"
);
Ok(())
}
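/// Ensure the bridged header was produced strictly after the side effect's
/// submission height on the local chain.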
fn executed_after_creation<T: Config<I>, I: 'static>(
submission_target_height: BlockNumberFor<T>,
header: &BridgedHeader<T, I>,
) -> Result<(), &'static str> {
let submission_target: BridgedBlockNumber<T, I> =
Decode::decode(&mut &*submission_target_height.encode())
.map_err(|_| "Invalid block number")?;
ensure!(
submission_target < *header.number(),
"Transaction executed before SideEffect creation"
);
Ok(())
}
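/// Extract the first forced GRANDPA authority-set change from the header digest, if any.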
pub(crate) fn find_forced_change<H: HeaderT>(
header: &H,
) -> Option<(H::Number, sp_consensus_grandpa::ScheduledChange<H::Number>)> {
use sp_runtime::generic::OpaqueDigestItemId;
let id = OpaqueDigestItemId::Consensus(&GRANDPA_ENGINE_ID);
let filter_log = |log: ConsensusLog<H::Number>| match log {
ConsensusLog::ForcedChange(delay, change) => Some((delay, change)),
_ => None,
};
header
.digest()
.convert_first(|l| l.try_to(id).and_then(filter_log))
}
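/// Prove that `encoded_payload` is contained in the block's `System::Events`
/// storage value.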
pub(crate) fn verify_event_storage_proof<T: Config<I>, I: 'static>(
storage_proof: StorageProof,
header: BridgedHeader<T, I>,
encoded_payload: Vec<u8>,
) -> Result<Vec<u8>, DispatchError> {
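// Storage key of `System::Events`: twox_128("System") ++ twox_128("Events").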
let key: Vec<u8> = [
38, 170, 57, 78, 234, 86, 48, 224, 124, 72, 174, 12, 149, 88, 206, 247, 128, 212, 30, 94,
22, 5, 103, 101, 188, 132, 97, 133, 16, 114, 201, 215,
]
.to_vec();
let verified_block_events =
verify_storage_proof::<T, I>(header, key, storage_proof, ProofTriePointer::Receipts)?;
ensure!(
is_sub(verified_block_events.as_slice(), encoded_payload.as_slice()),
Error::<T, I>::EventNotIncluded
);
Ok(encoded_payload)
}
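/// Prove a parachain header against the relay chain's `Paras::Heads` storage and
/// decode it.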
pub(crate) fn verify_header_storage_proof<T: Config<I>, I: 'static>(
relay_block_hash: BridgedBlockHash<T, I>,
proof: StorageProof,
parachain: ParachainRegistrationData,
) -> Result<BridgedHeader<T, I>, DispatchError> {
let relay_header =
<ImportedHeaders<T, I>>::get(relay_block_hash).ok_or(Error::<T, I>::UnknownHeader)?;
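// Storage key prefix of `Paras::Heads` on the relay chain:
// twox_128("Paras") ++ twox_128("Heads"); the Twox64Concat-hashed para id is appended below.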
let mut key: Vec<u8> = [
205, 113, 11, 48, 189, 46, 171, 3, 82, 221, 204, 38, 65, 122, 161, 148, 27, 60, 37, 47,
203, 41, 216, 142, 255, 79, 61, 229, 222, 68, 118, 195,
]
.to_vec();
let mut arg = Twox64Concat::hash(parachain.id.encode().as_ref());
key.append(&mut arg);
let encoded_header_vec =
verify_storage_proof::<T, I>(relay_header, key, proof, ProofTriePointer::State)?;
let encoded_header: Vec<u8> = Decode::decode(&mut &encoded_header_vec[..])
.map_err(|_| Error::<T, I>::HeaderDecodingError)?;
let header: BridgedHeader<T, I> =
Decode::decode(&mut &*encoded_header).map_err(|_| Error::<T, I>::HeaderDecodingError)?;
Ok(header)
}
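/// Returns `true` if `needle` occurs as a contiguous subsequence of `haystack`.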
pub(crate) fn is_sub<T: PartialEq>(mut haystack: &[T], needle: &[T]) -> bool {
while !haystack.is_empty() {
if haystack.starts_with(needle) {
return true
}
haystack = &haystack[1..];
}
false
}
#[cfg(all(not(feature = "testing"), test))]
pub mod tests {
#[test]
fn panic_without_testing_feature() {
panic!("Please use the feature testing when running tests.\n\nUse: cargo test --features testing\n\n");
}
}
#[cfg(all(feature = "testing", test))]
pub mod tests {
use super::*;
use crate::mock::{
produce_mock_headers_range, run_test, test_header, test_header_range,
test_header_with_correct_parent, AccountId, RuntimeOrigin as Origin, TestHeader,
TestNumber, TestRuntime,
};
use bp_runtime::ChainId;
use bridges::{
header_chain as bp_header_chain, runtime as bp_runtime,
test_utils::{
authorities, authority_list, make_default_justification, make_justification_for_header,
JustificationGeneratorParams, ALICE, BOB, DAVE,
},
};
use codec::Encode;
use frame_support::{assert_noop, assert_ok};
use sp_consensus_grandpa::AuthorityId;
use sp_core::{crypto::AccountId32, H160, H256};
use sp_runtime::{Digest, DigestItem, DispatchError};
use crate::types::GrandpaHeaderData;
fn initialize_relaychain(
origin: Origin,
) -> Result<RelaychainRegistrationData<AccountId>, DispatchError> {
let genesis = test_header_with_correct_parent(0, None);
let init_data = RelaychainRegistrationData::<AccountId> {
authorities: authorities(),
first_header: genesis.encode(),
authority_set_id: 1,
owner: 1u64,
};
initialize_custom_relaychain(origin, *b"pdot", init_data)
}
fn initialize_named_relaychain(
origin: Origin,
gateway_id: ChainId,
) -> Result<RelaychainRegistrationData<AccountId>, DispatchError> {
let genesis = test_header(0);
let init_data = RelaychainRegistrationData::<AccountId> {
authorities: authorities(),
first_header: genesis.encode(),
authority_set_id: 1,
owner: 1u64,
};
initialize_custom_relaychain(origin, gateway_id, init_data)
}
fn initialize_custom_relaychain(
origin: Origin,
gateway_id: ChainId,
init_data: RelaychainRegistrationData<AccountId>,
) -> Result<RelaychainRegistrationData<AccountId>, DispatchError> {
Pallet::<TestRuntime>::initialize(origin, gateway_id, init_data.encode()).map(|_| init_data)
}
fn initialize_parachain(origin: Origin) -> Result<ParachainRegistrationData, DispatchError> {
let _genesis = test_header(0);
let init_data = ParachainRegistrationData {
relay_gateway_id: *b"pdot",
id: 0,
};
initialize_custom_parachain(origin, *b"moon", init_data)
}
fn initialize_named_parachain(
origin: Origin,
gateway_id: ChainId,
) -> Result<ParachainRegistrationData, DispatchError> {
let init_data = ParachainRegistrationData {
relay_gateway_id: *b"pdot",
id: 0,
};
initialize_custom_parachain(origin, gateway_id, init_data)
}
fn initialize_custom_parachain(
origin: Origin,
gateway_id: ChainId,
init_data: ParachainRegistrationData,
) -> Result<ParachainRegistrationData, DispatchError> {
Pallet::<TestRuntime>::initialize(origin, gateway_id, init_data.encode()).map(|_| init_data)
}
pub fn submit_headers(from: u8, to: u8) -> Result<GrandpaHeaderData<TestHeader>, &'static str> {
let data = produce_mock_headers_range(from, to);
Pallet::<TestRuntime>::submit_encoded_headers(data.encode())?;
Ok(data)
}
fn next_block() {
use frame_support::traits::OnInitialize;
let current_number = frame_system::Pallet::<TestRuntime>::block_number();
frame_system::Pallet::<TestRuntime>::set_block_number(current_number + 1);
let _ = <Pallet<TestRuntime> as OnInitialize<u32>>::on_initialize(current_number);
}
fn change_log(delay: u32) -> Digest {
let consensus_log =
ConsensusLog::<TestNumber>::ScheduledChange(sp_consensus_grandpa::ScheduledChange {
next_authorities: vec![(ALICE.into(), 1), (BOB.into(), 1)],
delay,
});
Digest {
logs: vec![DigestItem::Consensus(
GRANDPA_ENGINE_ID,
consensus_log.encode(),
)],
}
}
fn forced_change_log(delay: u32) -> Digest {
let consensus_log = ConsensusLog::<TestNumber>::ForcedChange(
delay,
sp_consensus_grandpa::ScheduledChange {
next_authorities: vec![(ALICE.into(), 1), (BOB.into(), 1)],
delay,
},
);
Digest {
logs: vec![DigestItem::Consensus(
GRANDPA_ENGINE_ID,
consensus_log.encode(),
)],
}
}
#[test]
fn init_root_or_owner_origin_can_initialize_pallet() {
run_test(|| {
assert_noop!(initialize_relaychain(Origin::signed(1)), "Bad origin");
assert_ok!(initialize_relaychain(Origin::root()));
BestFinalizedHash::<TestRuntime>::set(None);
PalletOwner::<TestRuntime>::put(2);
RelayChainId::<TestRuntime>::set(None);
assert_ok!(initialize_relaychain(Origin::signed(2)));
})
}
#[test]
fn can_register_with_valid_data_and_signer() {
run_test(|| {
assert_ok!(initialize_relaychain(Origin::root()));
assert_ok!(initialize_parachain(Origin::root()));
})
}
use hex_literal::hex;
#[test]
fn can_register_again_after_reset_with_valid_data_and_signer() {
run_test(|| {
assert_ok!(initialize_relaychain(Origin::root()));
assert_ok!(initialize_parachain(Origin::root()));
assert_eq!(
InitialHash::<TestRuntime>::get(),
Some(
hex!("dcdd89927d8a348e00257e1ecc8617f45edb5118efff3ea2f9961b2ad9b7690a").into()
)
);
assert_eq!(
ImportedHeaders::<TestRuntime>::iter_keys().collect::<Vec<H256>>(),
vec![
hex!("dcdd89927d8a348e00257e1ecc8617f45edb5118efff3ea2f9961b2ad9b7690a").into()
]
);
assert_eq!(
ImportedHashes::<TestRuntime>::iter_keys().collect::<Vec<u32>>(),
Vec::<u32>::new()
);
assert_ok!(Pallet::<TestRuntime>::reset(Origin::root()));
assert_eq!(InitialHash::<TestRuntime>::get(), None);
assert_eq!(
ImportedHeaders::<TestRuntime>::iter_keys().collect::<Vec<H256>>(),
vec![]
);
assert_eq!(
ImportedHashes::<TestRuntime>::iter_keys().collect::<Vec<u32>>(),
Vec::<u32>::new()
);
assert_ok!(initialize_relaychain(Origin::root()));
assert_ok!(initialize_parachain(Origin::root()));
assert_eq!(
InitialHash::<TestRuntime>::get(),
Some(
hex!("dcdd89927d8a348e00257e1ecc8617f45edb5118efff3ea2f9961b2ad9b7690a").into()
)
);
})
}
#[test]
fn cant_register_duplicate_gateway_ids() {
run_test(|| {
assert_ok!(initialize_relaychain(Origin::root()));
assert_noop!(
initialize_relaychain(Origin::root()),
"chain_id already initialized"
);
})
}
#[test]
fn cant_register_parachain_without_relaychain() {
run_test(|| {
assert_noop!(initialize_parachain(Origin::root()), "Decoding Error");
})
}
#[test]
fn cant_register_parachain_with_wrong_relaychain_id() {
run_test(|| {
assert_ok!(initialize_relaychain(Origin::root()));
let _genesis = test_header(0);
let init_data = ParachainRegistrationData {
relay_gateway_id: *b"roco",
id: 0,
};
assert_noop!(
initialize_custom_parachain(Origin::root(), *b"moon", init_data),
"Invalid relay chain id"
);
})
}
#[test]
fn cant_register_relaychain_as_non_root() {
run_test(|| {
assert_noop!(initialize_relaychain(Origin::signed(1)), "Bad origin");
})
}
#[test]
fn cant_register_parachain_as_non_root() {
run_test(|| {
assert_ok!(initialize_relaychain(Origin::root()));
assert_noop!(initialize_parachain(Origin::signed(0)), "Bad origin");
})
}
#[test]
fn init_storage_entries_are_correctly_initialized() {
let header = test_header(0);
run_test(|| {
assert_eq!(BestFinalizedHash::<TestRuntime>::get(), None);
let _init_data = initialize_relaychain(Origin::root()).unwrap();
assert!(<ImportedHeaders<TestRuntime>>::contains_key(header.hash()));
assert_eq!(BestFinalizedHash::<TestRuntime>::get(), Some(header.hash()));
assert_eq!(
CurrentAuthoritySet::<TestRuntime>::get()
.unwrap()
.authorities,
authority_list()
);
assert_eq!(IsHalted::<TestRuntime>::get(), false);
})
}
#[test]
fn init_can_only_initialize_pallet_once() {
run_test(|| {
let _ = initialize_relaychain(Origin::root());
assert_noop!(
initialize_relaychain(Origin::root()),
"chain_id already initialized"
);
})
}
#[test]
fn root_can_reset_pallet() {
run_test(|| {
let _ = initialize_relaychain(Origin::root());
let _ = Pallet::<TestRuntime>::reset(Origin::root());
assert_eq!(EverInitialized::<TestRuntime>::get(), false);
assert_eq!(BestFinalizedHash::<TestRuntime>::get(), None);
assert_eq!(InitialHash::<TestRuntime>::get(), None);
assert_eq!(ImportedHashesPointer::<TestRuntime>::get(), None);
assert_eq!(RelayChainId::<TestRuntime>::get(), None);
assert_eq!(CurrentAuthoritySet::<TestRuntime>::get(), None);
assert_eq!(IsHalted::<TestRuntime>::get(), false);
assert_eq!(PalletOwner::<TestRuntime>::get(), None);
assert_ok!(initialize_relaychain(Origin::root()));
assert_noop!(
Pallet::<TestRuntime>::reset(Origin::signed(1)),
DispatchError::BadOrigin
);
})
}
#[test]
fn pallet_owner_may_change_owner() {
run_test(|| {
let default_gateway: ChainId = *b"pdot";
assert_ok!(Pallet::<TestRuntime>::set_owner(
Origin::root(),
default_gateway,
Some(1u64).encode(),
));
assert_noop!(
Pallet::<TestRuntime>::set_operational(Origin::signed(2), false),
DispatchError::BadOrigin,
);
assert_ok!(Pallet::<TestRuntime>::set_operational(
Origin::root(),
false,
));
let owner: Option<AccountId> = None;
assert_ok!(Pallet::<TestRuntime>::set_owner(
Origin::signed(1),
default_gateway,
owner.encode(),
));
assert_noop!(
Pallet::<TestRuntime>::set_operational(Origin::signed(1), true),
DispatchError::BadOrigin,
);
assert_noop!(
Pallet::<TestRuntime>::set_operational(Origin::signed(2), true),
DispatchError::BadOrigin,
);
assert_ok!(Pallet::<TestRuntime>::set_operational(Origin::root(), true,));
});
}
#[test]
fn pallet_may_be_halted_by_root() {
let _default_gateway: ChainId = *b"pdot";
run_test(|| {
let _ = initialize_relaychain(Origin::root());
assert_ok!(Pallet::<TestRuntime>::set_operational(
Origin::root(),
false,
));
assert_noop!(submit_headers(1, 3), "Halted");
assert_ok!(Pallet::<TestRuntime>::set_operational(Origin::root(), true,));
});
}
#[test]
fn pallet_may_be_halted_by_owner() {
let _default_gateway: ChainId = *b"pdot";
run_test(|| {
PalletOwner::<TestRuntime>::put(2);
assert_ok!(Pallet::<TestRuntime>::set_operational(
Origin::signed(2),
false,
));
assert_ok!(Pallet::<TestRuntime>::set_operational(
Origin::signed(2),
true,
));
assert_noop!(
Pallet::<TestRuntime>::set_operational(Origin::signed(1), false),
DispatchError::BadOrigin,
);
assert_noop!(
Pallet::<TestRuntime>::set_operational(Origin::signed(1), true),
DispatchError::BadOrigin,
);
assert_ok!(Pallet::<TestRuntime>::set_operational(
Origin::signed(2),
false,
));
assert_noop!(
Pallet::<TestRuntime>::set_operational(Origin::signed(1), true),
DispatchError::BadOrigin,
);
});
}
#[test]
fn pallet_rejects_transactions_if_halted() {
run_test(|| {
let _gateway_a: ChainId = *b"pdot";
let _ = initialize_relaychain(Origin::root());
<IsHalted<TestRuntime>>::put(true);
assert_noop!(submit_headers(1, 3), "Halted",);
})
}
#[test]
fn successfully_imports_headers_with_valid_finality() {
run_test(|| {
let _ = initialize_relaychain(Origin::root());
let data = submit_headers(1, 3).unwrap();
assert_eq!(
<BestFinalizedHash<TestRuntime>>::get(),
Some(data.signed_header.hash())
);
assert!(<ImportedHeaders<TestRuntime>>::contains_key(
data.signed_header.hash()
));
assert!(<ImportedHeaders<TestRuntime>>::contains_key(
data.range[0].hash()
));
assert!(<ImportedHeaders<TestRuntime>>::contains_key(
data.range[1].hash()
));
})
}
#[test]
fn reject_header_range_gap() {
run_test(|| {
let _ = initialize_relaychain(Origin::root());
assert_noop!(
submit_headers(2, 5),
Error::<TestRuntime>::InvalidRangeLinkage
);
assert_ok!(submit_headers(1, 5));
assert_noop!(
submit_headers(5, 10),
Error::<TestRuntime>::InvalidRangeLinkage
);
assert_ok!(submit_headers(6, 10));
})
}
#[test]
fn reject_range_with_invalid_range_linkage() {
run_test(|| {
let _ = initialize_relaychain(Origin::root());
assert_ok!(submit_headers(1, 5));
let headers: Vec<TestHeader> = test_header_range(10);
let signed_header: &TestHeader = headers.last().unwrap();
let justification = make_default_justification(&signed_header.clone());
let mut range: Vec<TestHeader> = headers[6..10].to_vec();
range[1] = range[2].clone();
let data = GrandpaHeaderData::<TestHeader> {
signed_header: signed_header.clone(),
range,
justification,
};
assert_noop!(
Pallet::<TestRuntime>::submit_encoded_headers(data.encode()),
Error::<TestRuntime>::InvalidRangeLinkage
);
})
}
#[test]
fn reject_range_with_invalid_grandpa_linkage() {
run_test(|| {
let _ = initialize_relaychain(Origin::root());
assert_ok!(submit_headers(1, 5));
let headers: Vec<TestHeader> = test_header_range(10);
let signed_header: &TestHeader = headers.last().unwrap();
let justification = make_default_justification(&signed_header.clone());
let range: Vec<TestHeader> = headers[6..9].to_vec();
let data = GrandpaHeaderData::<TestHeader> {
signed_header: signed_header.clone(),
range,
justification,
};
assert_noop!(
Pallet::<TestRuntime>::submit_encoded_headers(data.encode()),
Error::<TestRuntime>::InvalidJustificationLinkage
);
})
}
#[test]
fn rejects_justification_that_skips_authority_set_transition() {
run_test(|| {
let _ = initialize_relaychain(Origin::root());
let headers: Vec<TestHeader> = test_header_range(5);
let signed_header: &TestHeader = headers.last().unwrap();
let mut range: Vec<TestHeader> = headers[1..5].to_vec();
range.reverse();
let params = JustificationGeneratorParams::<TestHeader> {
set_id: 2,
header: signed_header.clone(),
..Default::default()
};
let justification = make_justification_for_header(params);
let data = GrandpaHeaderData::<TestHeader> {
signed_header: signed_header.clone(),
range,
justification,
};
assert_noop!(
Pallet::<TestRuntime>::submit_encoded_headers(data.encode()),
Error::<TestRuntime>::InvalidGrandpaJustification
);
})
}
#[test]
fn does_not_import_header_with_invalid_finality_proof() {
run_test(|| {
let _ = initialize_relaychain(Origin::root());
let headers: Vec<TestHeader> = test_header_range(5);
let signed_header: &TestHeader = headers.last().unwrap();
let mut range: Vec<TestHeader> = headers[1..5].to_vec();
range.reverse();
let mut justification = make_default_justification(&signed_header.clone());
justification.round = 42;
let justification = justification;
let data = GrandpaHeaderData::<TestHeader> {
signed_header: signed_header.clone(),
range,
justification,
};
assert_noop!(
Pallet::<TestRuntime>::submit_encoded_headers(data.encode()),
Error::<TestRuntime>::InvalidGrandpaJustification
);
})
}
#[test]
fn disallows_invalid_justification() {
run_test(|| {
let genesis = test_header(0);
let default_gateway: ChainId = *b"pdot";
let different_authorities: Vec<AuthorityId> = vec![ALICE.into(), DAVE.into()];
let init_data = RelaychainRegistrationData::<AccountId> {
authorities: different_authorities,
first_header: genesis.encode(),
authority_set_id: 1,
owner: 1,
};
assert_ok!(initialize_custom_relaychain(
Origin::root(),
default_gateway,
init_data,
));
let headers: Vec<TestHeader> = test_header_range(5);
let signed_header: &TestHeader = headers.last().unwrap();
let mut range: Vec<TestHeader> = headers[1..5].to_vec();
range.reverse();
let justification = make_default_justification(&signed_header.clone());
let data = GrandpaHeaderData::<TestHeader> {
signed_header: signed_header.clone(),
range,
justification,
};
assert_noop!(
Pallet::<TestRuntime>::submit_encoded_headers(data.encode()),
Error::<TestRuntime>::InvalidGrandpaJustification
);
})
}
#[test]
fn importing_header_ensures_that_chain_is_extended() {
run_test(|| {
let _ = initialize_relaychain(Origin::root());
assert_ok!(submit_headers(1, 5));
assert_noop!(
submit_headers(4, 8),
Error::<TestRuntime>::InvalidRangeLinkage
);
assert_ok!(submit_headers(6, 10));
})
}
#[test]
fn importing_header_enacts_new_authority_set() {
run_test(|| {
let _ = initialize_relaychain(Origin::root());
let next_set_id = 2;
let next_authorities = vec![(ALICE.into(), 1), (BOB.into(), 1)];
let headers: Vec<TestHeader> = test_header_range(2);
let mut signed_header = headers[2].clone();
let range: Vec<TestHeader> = headers[1..2].to_vec();
signed_header.digest = change_log(0);
let justification = make_default_justification(&signed_header);
let data = GrandpaHeaderData::<TestHeader> {
signed_header: signed_header.clone(),
range,
justification,
};
assert_ok!(Pallet::<TestRuntime>::submit_encoded_headers(data.encode()));
assert_eq!(
<BestFinalizedHash<TestRuntime>>::get(),
Some(signed_header.hash())
);
assert!(<ImportedHeaders<TestRuntime>>::contains_key(
signed_header.hash()
));
assert_eq!(
<CurrentAuthoritySet<TestRuntime>>::get(),
Some(bp_header_chain::AuthoritySet::new(
next_authorities,
next_set_id
)),
);
})
}
#[test]
fn importing_header_rejects_header_with_scheduled_change_delay() {
run_test(|| {
let _ = initialize_relaychain(Origin::root());
let headers: Vec<TestHeader> = test_header_range(2);
let mut signed_header = headers[2].clone();
let mut range: Vec<TestHeader> = headers[1..2].to_vec();
range.reverse();
signed_header.digest = change_log(1);
let justification = make_default_justification(&signed_header);
let data = GrandpaHeaderData::<TestHeader> {
signed_header,
range,
justification,
};
assert_noop!(
Pallet::<TestRuntime>::submit_encoded_headers(data.encode()),
<Error<TestRuntime>>::UnsupportedScheduledChange
);
})
}
#[test]
fn importing_header_rejects_header_with_forced_changes() {
run_test(|| {
let _ = initialize_relaychain(Origin::root());
let headers: Vec<TestHeader> = test_header_range(2);
let mut signed_header = headers[2].clone();
let mut range: Vec<TestHeader> = headers[1..2].to_vec();
range.reverse();
signed_header.digest = forced_change_log(0);
let justification = make_default_justification(&signed_header);
let data = GrandpaHeaderData::<TestHeader> {
signed_header,
range,
justification,
};
assert_noop!(
Pallet::<TestRuntime>::submit_encoded_headers(data.encode()),
<Error<TestRuntime>>::UnsupportedScheduledChange
);
})
}
#[test]
fn confirm_valid_evm_source_from_encoded_evm_event() {
run_test(|| {
#[derive(Encode, Decode, Clone, PartialEq, Eq, Debug)]
struct Log {
address: H160,
topics: Vec<H256>,
data: Vec<u8>,
}
#[derive(Encode, Decode, Clone, PartialEq, Eq, Debug)]
enum EvmEventsMock {
Log(Log),
}
let evm_source: ExecutionSource = hex_literal::hex!(
"0000000000000000000000003333333333333333333333333333333333333333"
);
let evm_address = H160::from_slice(
hex_literal::hex!("3333333333333333333333333333333333333333").as_slice(),
);
let mut message = EvmEventsMock::Log(Log {
address: evm_address,
topics: vec![],
data: vec![],
})
.encode();
message.insert(0, 120u8);
assert_eq!(
Pallet::<TestRuntime>::check_vm_source(evm_source, message),
Ok(VMSource::EVM(evm_address.0))
);
});
}
#[test]
fn confirm_valid_account_id_source_from_encoded_wasm_event() {
run_test(|| {
#[derive(Encode, Decode, Clone, PartialEq, Eq, Debug)]
enum WasmEventsMock {
Instantiated {
deployer: AccountId32,
contract: AccountId32,
},
Terminated {
contract: AccountId32,
beneficiary: AccountId32,
},
CodeStored {
code_hash: H256,
},
ContractEmitted {
contract: AccountId32,
data: Vec<u8>,
},
}
let wasm_source: ExecutionSource = hex_literal::hex!(
"4444444400000000000000003333333333333333333333333333333333333333"
);
let wasm_address: AccountId32 = wasm_source.into();
let mut message = WasmEventsMock::ContractEmitted {
contract: wasm_address,
data: vec![1, 2, 3],
}
.encode();
message.insert(0, 121u8);
assert_eq!(
Pallet::<TestRuntime>::check_vm_source(wasm_source, message),
Ok(VMSource::WASM(wasm_source))
);
});
}
#[test]
fn parse_finalized_storage_proof_rejects_proof_on_unknown_header() {
run_test(|| {
assert_noop!(
Pallet::<TestRuntime>::parse_finalized_storage_proof(
Default::default(),
sp_trie::StorageProof::new(vec![]),
|_| (),
),
Error::<TestRuntime>::UnknownHeader,
);
});
}
#[test]
fn parse_finalized_storage_accepts_valid_proof() {
run_test(|| {
let (state_root, storage_proof) = bp_runtime::craft_valid_storage_proof();
let mut header = test_header(2);
header.set_state_root(state_root);
let hash = header.hash();
<BestFinalizedHash<TestRuntime>>::put(hash);
<ImportedHeaders<TestRuntime>>::insert(hash, header);
assert_ok!(
Pallet::<TestRuntime>::parse_finalized_storage_proof(hash, storage_proof, |_| (),),
(),
);
});
}
#[test]
fn should_prune_headers_over_headers_to_keep_parameter() {
run_test(|| {
let _ = initialize_relaychain(Origin::root());
let headers = test_header_range(111);
assert_ok!(submit_headers(1, 5));
assert_eq!(
<ImportedHeaders<TestRuntime>>::contains_key(headers[1].hash(),),
true
);
assert_ok!(submit_headers(6, 7));
assert_eq!(
<ImportedHeaders<TestRuntime>>::contains_key(headers[3].hash(),),
true
);
assert_eq!(
<ImportedHeaders<TestRuntime>>::contains_key(headers[2].hash(),),
false
);
assert_eq!(
<ImportedHeaders<TestRuntime>>::contains_key(headers[1].hash(),),
false
);
assert_ok!(submit_headers(8, 10));
assert_eq!(
<ImportedHeaders<TestRuntime>>::contains_key(headers[5].hash(),),
false
);
assert_eq!(
<ImportedHeaders<TestRuntime>>::contains_key(headers[4].hash(),),
false
);
assert_eq!(
<ImportedHeaders<TestRuntime>>::contains_key(headers[3].hash(),),
false
);
assert_eq!(
<ImportedHeaders<TestRuntime>>::contains_key(headers[6].hash(),),
true
);
assert_ok!(submit_headers(11, 15));
assert_eq!(
<ImportedHeaders<TestRuntime>>::contains_key(headers[10].hash(),),
false
);
assert_eq!(
<ImportedHeaders<TestRuntime>>::contains_key(headers[11].hash(),),
true
);
assert_eq!(
<ImportedHeaders<TestRuntime>>::contains_key(headers[12].hash(),),
true
);
})
}
}