Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
27 commits
Select commit Hold shift + click to select a range
a0a8dc0
Attempt to point to new dependencies. Weird compile errors. Will revi…
JoshOrndorff May 3, 2021
b37ecb1
getting closer
JoshOrndorff May 4, 2021
0a04b63
More updates to Cargo.lock. Back to weird compile errors.
JoshOrndorff May 4, 2021
c6c3ac8
patch author inherent (even though it will be pruned soon)
JoshOrndorff May 4, 2021
ac7405c
EVM to right version
JoshOrndorff May 4, 2021
c2d1699
Temporarily disable author inherent, to work around some errors.
JoshOrndorff May 4, 2021
f0d2dcf
change scale codec dep made a difference
JoshOrndorff May 4, 2021
345efc2
Moar progress (reenable author inherent)
JoshOrndorff May 4, 2021
8c03178
tiny comment improvement
JoshOrndorff May 4, 2021
c2ef3ec
Fix pallet mocks
JoshOrndorff May 4, 2021
cb4697d
Fix long-standing sudo omission - no noticeable difference though. weird
JoshOrndorff May 4, 2021
4467989
It was ParachainSystem that needed Event<T> Thanks @thiolliere
JoshOrndorff May 4, 2021
432ad4d
runtime compiles
JoshOrndorff May 4, 2021
0201056
everything compiles (still some cleanup)
JoshOrndorff May 4, 2021
7a22bfb
Author inherent from nimbus
JoshOrndorff May 4, 2021
114d10d
bump cumulonimbus
JoshOrndorff May 4, 2021
a43fb57
🎵 bumpity-bump-bump ⛄
JoshOrndorff May 4, 2021
c76f23f
remove rand hack
JoshOrndorff May 4, 2021
66f9093
Author slot filter from nimbus
JoshOrndorff May 5, 2021
b5d8c5e
Oops, update client-side too
JoshOrndorff May 5, 2021
5da1a77
prune dead code and dealias
JoshOrndorff May 5, 2021
7a6b95e
Bump some versions and update types so we don't get confused.
JoshOrndorff May 5, 2021
acb46f0
Implement runtime api (but don't call it yet)
JoshOrndorff May 5, 2021
818eba1
s/author_inherent/pallet_author_inherent/
notlesh May 5, 2021
cae4326
checkpoint: nimbus poc worker.
JoshOrndorff May 5, 2021
a445342
finish temporary nimbus poc worker (got some wacky naming going on th…
JoshOrndorff May 5, 2021
4b77cd4
Hack command.rs to work with polkadot launch
JoshOrndorff May 5, 2021
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1,801 changes: 1,105 additions & 696 deletions Cargo.lock

Large diffs are not rendered by default.

5 changes: 1 addition & 4 deletions Cargo.toml
Original file line number Diff line number Diff line change
@@ -1,8 +1,5 @@
[workspace]
members = [
'runtime',
'node',
]
members = ['runtime', 'node']

[profile.release]
panic = 'unwind'
35 changes: 35 additions & 0 deletions client/nimbus-poc/Cargo.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,35 @@
[package]
name = "nimbus-poc"
description = "Temporary solution to make slot prediction work before wiring in keystore"
version = "0.1.0"
edition = "2018"

[dependencies]
# Substrate deps
sp-consensus = { git = "https://github.com/paritytech/substrate", branch = "rococo-v1" }
sp-inherents = { git = "https://github.com/paritytech/substrate", branch = "rococo-v1" }
sp-runtime = { git = "https://github.com/paritytech/substrate", branch = "rococo-v1" }
sp-core = { git = "https://github.com/paritytech/substrate", branch = "rococo-v1" }
sp-blockchain = { git = "https://github.com/paritytech/substrate", branch = "rococo-v1" }
sp-block-builder = { git = "https://github.com/paritytech/substrate", branch = "rococo-v1" }
sp-api = { git = "https://github.com/paritytech/substrate", branch = "rococo-v1" }
sc-client-api = { git = "https://github.com/paritytech/substrate", branch = "rococo-v1" }
substrate-prometheus-endpoint = { git = "https://github.com/paritytech/substrate", branch = "rococo-v1" }
sp-keystore = { git = "https://github.com/paritytech/substrate", branch = "rococo-v1" }

# Polkadot dependencies
polkadot-service = { git = "https://github.com/paritytech/polkadot", branch = "rococo-v1" }

# Cumulus dependencies
author-filter-api = { git = "https://github.com/purestake/cumulus", branch = "nimbus" }
cumulus-client-consensus-common = { git = "https://github.com/purestake/cumulus", branch = "nimbus" }
cumulus-primitives-core = { git = "https://github.com/purestake/cumulus", branch = "nimbus" }
cumulus-primitives-parachain-inherent = { git = "https://github.com/purestake/cumulus", branch = "nimbus" }

# Other deps
futures = { version = "0.3.8", features = ["compat"] }
codec = { package = "parity-scale-codec", version = "2.0.0", features = ["derive"] }
tracing = "0.1.22"
async-trait = "0.1.42"
parking_lot = "0.9"
log = "0.4"
158 changes: 158 additions & 0 deletions client/nimbus-poc/src/import_queue.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,158 @@
// Copyright 2019 Parity Technologies (UK) Ltd.
// This file is part of Cumulus.

// Cumulus is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.

// Cumulus is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.

// You should have received a copy of the GNU General Public License
// along with Cumulus. If not, see <http://www.gnu.org/licenses/>.

use std::{marker::PhantomData, sync::Arc};

use sp_api::ProvideRuntimeApi;
use sp_block_builder::BlockBuilder as BlockBuilderApi;
use sp_blockchain::Result as ClientResult;
use sp_consensus::{
error::Error as ConsensusError,
import_queue::{BasicQueue, CacheKeyId, Verifier as VerifierT},
BlockImport, BlockImportParams, BlockOrigin, ForkChoiceStrategy,
};
use sp_inherents::InherentDataProviders;
use sp_runtime::{
generic::BlockId,
traits::{Block as BlockT, Header as HeaderT},
Justifications,
};
use log::debug;

/// A verifier that checks the inherents of an incoming block and pops the
/// seal digest off its header before import.
///
/// TODO: compare two digests. The first comes from the runtime and contains
/// the author-inherent data; the second will, in the future, be a signature,
/// but for now is just inserted at seal time by the consensus engine to mock
/// this stuff out.
struct Verifier<Client, Block> {
    // Client handle used to call the `check_inherents` runtime API.
    client: Arc<Client>,
    // Providers that assemble the inherent data the block body is checked against.
    inherent_data_providers: InherentDataProviders,
    // No `Block` value is stored; marker only ties the generic parameter to the type.
    _marker: PhantomData<Block>,
}

#[async_trait::async_trait]
impl<Client, Block> VerifierT<Block> for Verifier<Client, Block>
where
    Block: BlockT,
    Client: ProvideRuntimeApi<Block> + Send + Sync,
    <Client as ProvideRuntimeApi<Block>>::Api: BlockBuilderApi<Block>,
{
    /// Verify a block prior to import: pop the (currently unchecked) seal
    /// digest off the header, run the runtime's `check_inherents` on the
    /// reassembled block, and build the `BlockImportParams` handed to the
    /// downstream block import. Returns a `String` error on any failure.
    async fn verify(
        &mut self,
        origin: BlockOrigin,
        mut header: Block::Header,
        justifications: Option<Justifications>,
        mut body: Option<Vec<Block::Extrinsic>>,
    ) -> Result<
        (
            BlockImportParams<Block, ()>,
            Option<Vec<(CacheKeyId, Vec<u8>)>>,
        ),
        String,
    > {
        debug!(target: crate::LOG_TARGET, "🪲 Header hash before popping digest {:?}", header.hash());
        // Grab the digest from the seal.
        // Even though we do literally nothing with it, we can go ahead and pop
        // it off already; it is re-attached below as a post-digest.
        let seal = match header.digest_mut().pop() {
            Some(x) => x,
            None => return Err("HeaderUnsealed".into()),
        };

        debug!(target: crate::LOG_TARGET, "🪲 Header hash after popping digest {:?}", header.hash());
        //let signing_author = seal...

        //Grab the digest from the runtime
        //let claimed_author = header.digest.logs.find(...)

        // if signing_author != claimed_author {
        //     // TODO actually verify the signature
        //     return Err("Invalid signature")
        // }

        // This part copied from Basti. I guess this is the inherent checking.
        // The body is temporarily taken out of the `Option` so it can be
        // reassembled into a full block for the runtime call.
        if let Some(inner_body) = body.take() {
            let inherent_data = self
                .inherent_data_providers
                .create_inherent_data()
                .map_err(|e| e.into_string())?;

            // NOTE: the header is cloned *after* the seal was popped, so the
            // runtime sees the pre-seal header.
            let block = Block::new(header.clone(), inner_body);

            // `check_inherents` runs against the parent's state, since the
            // block being verified has not been imported yet.
            let inherent_res = self
                .client
                .runtime_api()
                .check_inherents(
                    &BlockId::Hash(*header.parent_hash()),
                    block.clone(),
                    inherent_data,
                )
                .map_err(|e| format!("{:?}", e))?;

            if !inherent_res.ok() {
                // Surface the first inherent error as a string and bail.
                inherent_res.into_errors().try_for_each(|(i, e)| {
                    Err(self.inherent_data_providers.error_to_string(&i, &e))
                })?;
            }

            // Take the body back out of the temporary block.
            let (_, inner_body) = block.deconstruct();
            body = Some(inner_body);
        }

        let mut block_import_params = BlockImportParams::new(origin, header);
        // Re-attach the seal as a post-digest so the imported block keeps it.
        block_import_params.post_digests.push(seal);
        block_import_params.body = body;
        block_import_params.justifications = justifications;

        // Best block is determined by the relay chain, or if we are doing the
        // initial sync we import all blocks as new best.
        block_import_params.fork_choice = Some(ForkChoiceStrategy::Custom(
            origin == BlockOrigin::NetworkInitialSync,
        ));

        debug!(target: crate::LOG_TARGET, "🪲 Just finished verifier. posthash from params is {:?}", &block_import_params.post_hash());

        Ok((block_import_params, None))
    }
}

/// Start an import queue for a Cumulus collator that does not use any special
/// authoring logic.
///
/// Wraps the caller's `block_import` in a [`BasicQueue`] driven by a
/// [`Verifier`] built from the given client and inherent-data providers.
pub fn import_queue<Client, Block: BlockT, I>(
    client: Arc<Client>,
    block_import: I,
    inherent_data_providers: InherentDataProviders,
    spawner: &impl sp_core::traits::SpawnEssentialNamed,
    registry: Option<&substrate_prometheus_endpoint::Registry>,
) -> ClientResult<BasicQueue<Block, I::Transaction>>
where
    I: BlockImport<Block, Error = ConsensusError> + Send + Sync + 'static,
    I::Transaction: Send,
    Client: ProvideRuntimeApi<Block> + Send + Sync + 'static,
    <Client as ProvideRuntimeApi<Block>>::Api: BlockBuilderApi<Block>,
{
    // The verifier needs nothing beyond the client handle and the
    // inherent-data providers, so it is built inline.
    Ok(BasicQueue::new(
        Verifier {
            client,
            inherent_data_providers,
            _marker: PhantomData,
        },
        Box::new(block_import),
        None, // no justification import
        spawner,
        registry,
    ))
}
Loading