Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
31 commits
Select commit Hold shift + click to select a range
a095d40
Make parachain relayer relay a single sourceID
doubledup Apr 13, 2023
62b73d0
Switch to parachainID in outbound queue
doubledup Apr 14, 2023
7610dc9
Set up sourceID config for parachain relayer
doubledup Apr 14, 2023
13e1ced
s/SourceID/LaneID/g
doubledup Apr 14, 2023
8ad9875
s/parachain_id/origin/g
doubledup Apr 14, 2023
0f43390
Add handler to submit() and Message
doubledup Apr 14, 2023
66cdefb
Serialize origin as a number
doubledup Apr 14, 2023
4e2f2ad
Merge branch 'main' into david/sno-425
doubledup Apr 14, 2023
52fd689
Fix message fields in benchmark
doubledup Apr 14, 2023
40bc9c4
Make Bob's id different from Alice's
doubledup Apr 14, 2023
b58bb24
Remove unused test imports
doubledup Apr 14, 2023
c127ca7
Remove endianness TODO
doubledup Apr 17, 2023
3d7176c
Leave return value as nil slice
doubledup Apr 17, 2023
22a095a
Replace sort with reverse
doubledup Apr 17, 2023
530454b
Clean up block scanning loop
doubledup Apr 17, 2023
6dc3628
Tweak scanForOutboundQueueProofs
doubledup Apr 17, 2023
ff458c2
Handle partially relayed message blocks
doubledup Apr 17, 2023
194edfd
Set lane-id for parachain relayer
doubledup Apr 17, 2023
b39f554
Merge branch 'main' into david/sno-425
doubledup Apr 17, 2023
5663790
Remove extra block number decrement
doubledup Apr 18, 2023
850551f
Error when proof & digest item hashes are mismatched
doubledup Apr 18, 2023
38a16ca
Rename alice & bob parachain ids
doubledup Apr 18, 2023
520e9de
Replace digest item commitment with hash
doubledup Apr 18, 2023
9f9ba92
Remove unused hashSides field
doubledup Apr 18, 2023
651093c
Fix proof RPC method name
doubledup Apr 18, 2023
68cf78a
s/sourceID/laneID/g
doubledup Apr 18, 2023
69300fa
Finish renaming test paraID
doubledup Apr 18, 2023
c65fd9d
Update Cumulus submodule
doubledup Apr 18, 2023
ee8e7f1
Merge branch 'main' into david/sno-425
doubledup Apr 18, 2023
5bf7ef0
Remove rogue hashSides reference
doubledup Apr 18, 2023
b6bd532
Fix lane-id reference for parachain relayer config
doubledup Apr 18, 2023
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion core/packages/test/config/parachain-relay.json
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@
"BasicInboundChannel": null
},
"beefy-activation-block": 0,
"basicChannelSourceIDs": []
"lane-id": 1000
},
"sink": {
"ethereum": {
Expand Down
4 changes: 2 additions & 2 deletions core/packages/test/scripts/start-relayer.sh
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ config_relayer(){
--arg k1 "$(address_for BasicInboundChannel)" \
--arg k2 "$(address_for BeefyClient)" \
--arg eth_endpoint_ws $eth_endpoint_ws \
--arg basic_parachain_account_ids $basic_parachain_account_ids \
--arg laneID $ASSET_HUB_PARAID \
--arg eth_gas_limit $eth_gas_limit \
'
.source.contracts.BasicInboundChannel = $k1
Expand All @@ -31,7 +31,7 @@ config_relayer(){
| .source.ethereum.endpoint = $eth_endpoint_ws
| .sink.ethereum.endpoint = $eth_endpoint_ws
| .sink.ethereum."gas-limit" = $eth_gas_limit
| .source.basicChannelAccounts = ($basic_parachain_account_ids | split(","))
| .source."lane-id" = $laneID
' \
config/parachain-relay.json > $output_dir/parachain-relay.json

Expand Down
2 changes: 1 addition & 1 deletion cumulus
1 change: 1 addition & 0 deletions parachain/Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

4 changes: 2 additions & 2 deletions parachain/pallets/inbound-queue/src/test.rs
Original file line number Diff line number Diff line change
Expand Up @@ -4,14 +4,14 @@ use frame_support::{
assert_noop, assert_ok,
dispatch::DispatchError,
parameter_types,
traits::{tokens::WithdrawConsequence, ConstU64, Everything, GenesisBuild},
traits::{ConstU64, Everything, GenesisBuild},
};
use sp_core::{H160, H256};
use sp_keyring::AccountKeyring as Keyring;
use sp_runtime::{
testing::Header,
traits::{BlakeTwo256, IdentifyAccount, IdentityLookup, Verify},
ArithmeticError, MultiSignature, TokenError,
ArithmeticError, MultiSignature,
};
use sp_std::convert::From;

Expand Down
5 changes: 4 additions & 1 deletion parachain/pallets/outbound-queue/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,8 @@ sp-std = { git = "https://github.com/paritytech/substrate.git", branch = "master
sp-io = { git = "https://github.com/paritytech/substrate.git", branch = "master", default-features = false }
sp-runtime = { git = "https://github.com/paritytech/substrate.git", branch = "master", default-features = false }

polkadot-parachain = { git = "https://github.com/paritytech/polkadot", branch = "master", default-features = false }

snowbridge-core = { path = "../../primitives/core", default-features = false }
snowbridge-outbound-queue-merkle-proof = { path = "merkle-proof", default-features = false }
ethabi = { git = "https://github.com/Snowfork/ethabi-decode.git", package = "ethabi-decode", branch = "master", default-features = false }
Expand All @@ -49,7 +51,8 @@ std = [
"sp-io/std",
"snowbridge-core/std",
"snowbridge-outbound-queue-merkle-proof/std",
"ethabi/std"
"ethabi/std",
"polkadot-parachain/std"
]
runtime-benchmarks = [
"snowbridge-core/runtime-benchmarks",
Expand Down
5 changes: 3 additions & 2 deletions parachain/pallets/outbound-queue/src/benchmarking.rs
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
//! BasicOutboundChannel pallet benchmarking
use super::*;

use frame_benchmarking::{account, benchmarks, impl_benchmark_test_suite};
use frame_benchmarking::{benchmarks, impl_benchmark_test_suite};
use frame_support::traits::OnInitialize;

#[allow(unused_imports)]
Expand All @@ -21,8 +21,9 @@ benchmarks! {
for _ in 0 .. m {
let payload: Vec<u8> = (0..).take(p as usize).collect();
<MessageQueue<T>>::try_append(Message {
source_id: account("", 0, 0),
origin: 1000.into(),
nonce: 0u64,
handler: 0,
payload: payload.try_into().unwrap(),
}).unwrap();
}
Expand Down
47 changes: 20 additions & 27 deletions parachain/pallets/outbound-queue/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -11,9 +11,10 @@ mod test;
use codec::{Decode, Encode, MaxEncodedLen};
use ethabi::{self, Token};
use frame_support::{
dispatch::DispatchResult, ensure, pallet_prelude::Member, traits::Get, weights::Weight,
BoundedVec, CloneNoBound, Parameter, PartialEqNoBound, RuntimeDebugNoBound,
dispatch::DispatchResult, ensure, traits::Get, weights::Weight, BoundedVec, CloneNoBound,
PartialEqNoBound, RuntimeDebugNoBound,
};
use polkadot_parachain::primitives::Id as ParaId;
use scale_info::TypeInfo;
use sp_core::H256;
use sp_io::offchain_index::set;
Expand All @@ -29,28 +30,24 @@ pub use weights::WeightInfo;
Encode, Decode, CloneNoBound, PartialEqNoBound, RuntimeDebugNoBound, MaxEncodedLen, TypeInfo,
)]
#[scale_info(skip_type_params(M))]
#[codec(mel_bound(SourceId: MaxEncodedLen))]
pub struct Message<SourceId, M: Get<u32>>
where
SourceId: Parameter + Member + MaxEncodedLen,
{
pub struct Message<M: Get<u32>> {
/// ID of source parachain
source_id: SourceId,
origin: ParaId,
/// Unique nonce to prevent replaying messages
#[codec(compact)]
nonce: u64,
/// Handler to dispatch the message to
handler: u16,
/// Payload for target application.
payload: BoundedVec<u8, M>,
}

impl<SourceId, M: Get<u32>> Into<Token> for Message<SourceId, M>
where
SourceId: Decode + Parameter + Member + MaxEncodedLen, //+ TypeInfo,
{
impl<M: Get<u32>> Into<Token> for Message<M> {
fn into(self) -> Token {
Token::Tuple(vec![
Token::Bytes(self.source_id.encode()),
Token::Uint(u32::from(self.origin).into()),
Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Had to add an explicit conversion here because `Token::Uint` has a `From<u32>` impl but no `From<polkadot_parachain::primitives::Id>`.

Token::Uint(self.nonce.into()),
Token::Uint(self.handler.into()),
Token::Bytes(self.payload.to_vec()),
])
}
Expand Down Expand Up @@ -80,9 +77,6 @@ pub mod pallet {

type Hashing: Hash<Output = H256>;

/// ID of message source
type SourceId: Parameter + Member + PartialEq + MaxEncodedLen;

/// Max bytes in a message payload
#[pallet::constant]
type MaxMessagePayloadSize: Get<u32>;
Expand All @@ -99,7 +93,7 @@ pub mod pallet {
#[pallet::generate_deposit(pub(super) fn deposit_event)]
pub enum Event<T: Config> {
MessageAccepted(u64),
Committed { hash: H256, data: Vec<Message<T::SourceId, T::MaxMessagePayloadSize>> },
Committed { hash: H256, data: Vec<Message<T::MaxMessagePayloadSize>> },
}

#[pallet::error]
Expand All @@ -121,12 +115,12 @@ pub mod pallet {
#[pallet::storage]
pub(super) type MessageQueue<T: Config> = StorageValue<
_,
BoundedVec<Message<T::SourceId, T::MaxMessagePayloadSize>, T::MaxMessagesPerCommit>,
BoundedVec<Message<T::MaxMessagePayloadSize>, T::MaxMessagesPerCommit>,
ValueQuery,
>;

#[pallet::storage]
pub type Nonce<T: Config> = StorageMap<_, Twox64Concat, T::SourceId, u64, ValueQuery>;
pub type Nonce<T: Config> = StorageMap<_, Twox64Concat, ParaId, u64, ValueQuery>;

#[pallet::genesis_config]
pub struct GenesisConfig<T: Config> {
Expand Down Expand Up @@ -169,7 +163,7 @@ pub mod pallet {

impl<T: Config> Pallet<T> {
/// Submit message on the outbound channel
pub fn submit(source_id: &T::SourceId, payload: &[u8]) -> DispatchResult {
pub fn submit(origin: &ParaId, handler: u16, payload: &[u8]) -> DispatchResult {
ensure!(
<MessageQueue<T>>::decode_len().unwrap_or(0) <
T::MaxMessagesPerCommit::get() as usize,
Expand All @@ -178,18 +172,19 @@ pub mod pallet {

let message_payload =
payload.to_vec().try_into().map_err(|_| Error::<T>::PayloadTooLarge)?;
let nonce = <Nonce<T>>::get(source_id);
let nonce = <Nonce<T>>::get(origin);
let next_nonce = nonce.checked_add(1).ok_or(Error::<T>::Overflow)?;

<MessageQueue<T>>::try_append(Message {
source_id: source_id.clone(),
origin: origin.clone(),
nonce,
handler,
payload: message_payload,
})
.map_err(|_| Error::<T>::QueueSizeLimitReached)?;
Self::deposit_event(Event::MessageAccepted(nonce));

<Nonce<T>>::set(source_id, next_nonce);
<Nonce<T>>::set(origin, next_nonce);

Ok(())
}
Expand All @@ -210,7 +205,7 @@ pub mod pallet {
return T::WeightInfo::on_commit_no_messages()
}

// Store these for the on_commit call at the end
// Store these to return the on_commit weight
let message_count = message_queue.len() as u32;
let average_payload_size = Self::average_payload_size(&message_queue);

Expand All @@ -236,9 +231,7 @@ pub mod pallet {
return T::WeightInfo::on_commit(message_count, average_payload_size)
}

fn average_payload_size(
messages: &[Message<T::SourceId, T::MaxMessagePayloadSize>],
) -> u32 {
fn average_payload_size(messages: &[Message<T::MaxMessagePayloadSize>]) -> u32 {
let sum: usize = messages.iter().fold(0, |acc, x| acc + (*x).payload.len());
// We overestimate message payload size rather than underestimate.
// So add 1 here to account for integer division truncation.
Expand Down
37 changes: 18 additions & 19 deletions parachain/pallets/outbound-queue/src/test.rs
Original file line number Diff line number Diff line change
Expand Up @@ -4,8 +4,8 @@ use frame_support::{
assert_noop, assert_ok, parameter_types,
traits::{Everything, GenesisBuild, OnInitialize},
};
use polkadot_parachain::primitives::Id as ParaId;
use sp_core::H256;
use sp_keyring::AccountKeyring as Keyring;
use sp_runtime::{
testing::Header,
traits::{BlakeTwo256, IdentifyAccount, IdentityLookup, Keccak256, Verify},
Expand Down Expand Up @@ -69,7 +69,6 @@ parameter_types! {
}

impl outbound_channel::Config for Test {
type SourceId = AccountId;
type RuntimeEvent = RuntimeEvent;
type Hashing = Keccak256;
type MaxMessagePayloadSize = MaxMessagePayloadSize;
Expand Down Expand Up @@ -101,26 +100,26 @@ fn run_to_block(n: u64) {
#[test]
fn test_submit() {
new_tester().execute_with(|| {
let source_id: &AccountId = &Keyring::Bob.into();
let parachain_id: &ParaId = &ParaId::new(1000);

assert_ok!(BasicOutboundChannel::submit(source_id, &vec![0, 1, 2]));
assert_ok!(BasicOutboundChannel::submit(parachain_id, 0, &vec![0, 1, 2]));

assert_eq!(<Nonce<Test>>::get(source_id), 1);
assert_eq!(<Nonce<Test>>::get(parachain_id), 1);
assert_eq!(<MessageQueue<Test>>::get().len(), 1);
});
}

#[test]
fn test_submit_exceeds_queue_limit() {
new_tester().execute_with(|| {
let source_id: &AccountId = &Keyring::Bob.into();
let parachain_id: &ParaId = &ParaId::new(1000);

let max_messages = MaxMessagesPerCommit::get();
(0..max_messages)
.for_each(|_| BasicOutboundChannel::submit(source_id, &vec![0, 1, 2]).unwrap());
.for_each(|_| BasicOutboundChannel::submit(parachain_id, 0, &vec![0, 1, 2]).unwrap());

assert_noop!(
BasicOutboundChannel::submit(source_id, &vec![0, 1, 2]),
BasicOutboundChannel::submit(parachain_id, 0, &vec![0, 1, 2]),
Error::<Test>::QueueSizeLimitReached,
);
})
Expand All @@ -129,7 +128,7 @@ fn test_submit_exceeds_queue_limit() {
#[test]
fn test_submit_exceeds_payload_limit() {
new_tester().execute_with(|| {
let source_id: &AccountId = &Keyring::Bob.into();
let parachain_id: &ParaId = &ParaId::new(1000);

let max_payload_bytes = MaxMessagePayloadSize::get() - 1;

Expand All @@ -139,7 +138,7 @@ fn test_submit_exceeds_payload_limit() {
payload.push(10);

assert_noop!(
BasicOutboundChannel::submit(source_id, payload.as_slice()),
BasicOutboundChannel::submit(parachain_id, 0, payload.as_slice()),
Error::<Test>::PayloadTooLarge,
);
})
Expand All @@ -148,30 +147,30 @@ fn test_submit_exceeds_payload_limit() {
#[test]
fn test_commit_single_user() {
new_tester().execute_with(|| {
let source_id: &AccountId = &Keyring::Bob.into();
let parachain_id: &ParaId = &ParaId::new(1000);

assert_ok!(BasicOutboundChannel::submit(source_id, &vec![0, 1, 2]));
assert_ok!(BasicOutboundChannel::submit(parachain_id, 0, &vec![0, 1, 2]));
run_to_block(2);
BasicOutboundChannel::commit(Weight::MAX);

assert_eq!(<Nonce<Test>>::get(source_id), 1);
assert_eq!(<Nonce<Test>>::get(parachain_id), 1);
assert_eq!(<MessageQueue<Test>>::get().len(), 0);
})
}

#[test]
fn test_commit_multi_user() {
new_tester().execute_with(|| {
let alice: &AccountId = &Keyring::Alice.into();
let bob: &AccountId = &Keyring::Bob.into();
let parachain0: &ParaId = &ParaId::new(1000);
let parachain1: &ParaId = &ParaId::new(1001);

assert_ok!(BasicOutboundChannel::submit(alice, &vec![0, 1, 2]));
assert_ok!(BasicOutboundChannel::submit(bob, &vec![0, 1, 2]));
assert_ok!(BasicOutboundChannel::submit(parachain0, 0, &vec![0, 1, 2]));
assert_ok!(BasicOutboundChannel::submit(parachain1, 0, &vec![0, 1, 2]));
run_to_block(2);
BasicOutboundChannel::commit(Weight::MAX);

assert_eq!(<Nonce<Test>>::get(alice), 1);
assert_eq!(<Nonce<Test>>::get(bob), 1);
assert_eq!(<Nonce<Test>>::get(parachain0), 1);
assert_eq!(<Nonce<Test>>::get(parachain1), 1);
assert_eq!(<MessageQueue<Test>>::get().len(), 0);
})
}
Loading