This repository was archived by the owner on Nov 15, 2023. It is now read-only.
Merged
Changes from 1 commit (of 27 in this pull request)

Commits
e4364e3
First experiments with Sassafras equivocations report
davxy Sep 12, 2022
b267418
Preparation for sassafras client tests
davxy Sep 20, 2022
1292e86
Cleanup and first working client test with multiple peers
davxy Sep 23, 2022
d0c8bc2
Merge branch 'davxy-sassafras-protocol' into davxy/sassafras-protocol…
davxy Sep 24, 2022
0a20351
Dummy commit
davxy Sep 24, 2022
326ac44
Conflicts resolution
davxy Sep 24, 2022
5b43f28
Test code refactory
davxy Sep 27, 2022
2dd6086
Better submit-tickets extrinsic tag
davxy Sep 27, 2022
829b90b
Refactory of client tests
davxy Oct 6, 2022
a2d66b6
Aux data revert implementation
davxy Oct 6, 2022
ca4b563
Handle skipped epochs on block-import
davxy Oct 7, 2022
e7be289
Skipped epoch test and fix
davxy Oct 11, 2022
7690a5c
Fix to epoch start slot computation
davxy Oct 14, 2022
6c20538
Minor tweaks
davxy Oct 17, 2022
2361064
Trivial comments refactory
davxy Oct 24, 2022
a26a31e
Do not alter original epoch changes node on epoch skip
davxy Oct 24, 2022
f3ebc2b
Insert tickets aux data after block import
davxy Oct 24, 2022
84bfdff
Tests environment refactory
davxy Oct 24, 2022
cfe639e
Use in-memory keystore for tests
davxy Oct 24, 2022
51e81a2
Push lock file
davxy Oct 24, 2022
04e92f6
Use test accounts keyring
davxy Oct 24, 2022
193134a
Test for secondary slots claims
davxy Oct 24, 2022
8bd8aed
Improved tests after epoch changes tree fix
davxy Oct 25, 2022
76c24bd
Tests for blocks verification
davxy Oct 29, 2022
950020b
Next epoch tickets incremental sort
davxy Oct 29, 2022
94e9ee8
Incremental sortition test
davxy Oct 29, 2022
d1a7edd
Set proper tickets tx longevity
davxy Oct 29, 2022
Aux data revert implementation
davxy committed Oct 6, 2022
commit a2d66b624e2813ff1e57dc361319236bef42c49e
3 changes: 2 additions & 1 deletion bin/node-sassafras/node/src/command.rs
@@ -96,7 +96,8 @@ pub fn run() -> sc_cli::Result<()> {
 			runner.async_run(|config| {
 				let PartialComponents { client, task_manager, backend, .. } =
 					service::new_partial(&config)?;
-				let aux_revert = Box::new(|client, _, blocks| {
+				let aux_revert = Box::new(|client, backend, blocks| {
+					sc_consensus_sassafras::revert(backend, blocks)?;
 					sc_finality_grandpa::revert(client, blocks)?;
 					Ok(())
 				});
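For context on the hunk above: aux_revert is the auxiliary-data cleanup hook that the node's revert subcommand invokes alongside the chain revert, passing the full client, the backend and the number of blocks to roll back. Before this commit the backend argument was ignored; it is now forwarded to the new sc_consensus_sassafras::revert so the protocol's aux data is pruned together with GRANDPA's. A shape-only sketch with placeholder types (FullClient, FullBackend and a plain u32 block number are illustrative stand-ins, not taken from the node code):

use std::sync::Arc;

// Placeholder service types, only here to make the closure's shape concrete.
struct FullClient;
struct FullBackend;

type AuxRevert = Box<dyn FnOnce(Arc<FullClient>, Arc<FullBackend>, u32) -> Result<(), String>>;

fn main() {
	// Chain every consensus-specific revert; before this commit only the
	// GRANDPA step was present and the backend argument was unused.
	let aux_revert: AuxRevert = Box::new(|_client, _backend, _blocks| {
		// sc_consensus_sassafras::revert(backend, blocks)?;
		// sc_finality_grandpa::revert(client, blocks)?;
		Ok(())
	});
	let _ = aux_revert(Arc::new(FullClient), Arc::new(FullBackend), 11);
}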
75 changes: 73 additions & 2 deletions client/consensus/sassafras/src/aux_schema.rs
@@ -17,14 +17,20 @@
 // along with this program. If not, see <https://www.gnu.org/licenses/>.
 
 //! Schema for auxiliary data persistence.
+//!
+//! TODO-SASS-P2 : RENAME FROM aux_schema.rs => aux_data.rs
+
+use std::{collections::HashSet, sync::Arc};
 
 use scale_codec::{Decode, Encode};
 
 use sc_client_api::backend::AuxStore;
 use sc_consensus_epochs::{EpochChangesFor, SharedEpochChanges};
-use sp_blockchain::{Error as ClientError, Result as ClientResult};
+
+use sc_client_api::{blockchain::Backend as _, Backend as BackendT};
+use sp_blockchain::{Error as ClientError, HeaderBackend, HeaderMetadata, Result as ClientResult};
 use sp_consensus_sassafras::SassafrasBlockWeight;
-use sp_runtime::traits::Block as BlockT;
+use sp_runtime::traits::{Block as BlockT, NumberFor, SaturatedConversion, Zero};
 
 use crate::Epoch;
 
@@ -99,3 +105,68 @@ pub fn load_block_weight<H: Encode, B: AuxStore>(
 ) -> ClientResult<Option<SassafrasBlockWeight>> {
 	load_decode(backend, block_weight_key(block_hash).as_slice())
 }
+
+/// Reverts protocol aux data from the best block to at most the last finalized block.
+///
+/// Epoch-changes and block weights announced after the revert point are removed.
+pub fn revert<Block, Backend>(backend: Arc<Backend>, blocks: NumberFor<Block>) -> ClientResult<()>
+where
+	Block: BlockT,
+	Backend: BackendT<Block>,
+{
+	let blockchain = backend.blockchain();
+	let best_number = blockchain.info().best_number;
+	let finalized = blockchain.info().finalized_number;
+
+	let revertible = blocks.min(best_number - finalized);
+	if revertible == Zero::zero() {
+		return Ok(())
+	}
+
+	let revert_up_to_number = best_number - revertible;
+	let revert_up_to_hash = blockchain.hash(revert_up_to_number)?.ok_or(ClientError::Backend(
+		format!("Unexpected hash lookup failure for block number: {}", revert_up_to_number),
+	))?;
+
+	// Revert epoch changes tree.
+
+	// Load the epoch changes tree from the aux store.
+	let epoch_changes = load_epoch_changes::<Block, _>(&*backend)?;
+	let mut epoch_changes = epoch_changes.shared_data();
+
+	if revert_up_to_number == Zero::zero() {
+		// Special case, no epoch changes data were present on genesis.
+		*epoch_changes = EpochChangesFor::<Block, Epoch>::new();
+	} else {
+		let descendent_query = sc_consensus_epochs::descendent_query(blockchain);
+		epoch_changes.revert(descendent_query, revert_up_to_hash, revert_up_to_number);
+	}
+
+	// Remove block weights added after the revert point.
+
+	let mut weight_keys = HashSet::with_capacity(revertible.saturated_into());
+
+	let leaves = backend.blockchain().leaves()?.into_iter().filter(|&leaf| {
+		sp_blockchain::tree_route(blockchain, revert_up_to_hash, leaf)
+			.map(|route| route.retracted().is_empty())
+			.unwrap_or_default()
+	});
+
+	for mut hash in leaves {
+		loop {
+			let meta = blockchain.header_metadata(hash)?;
+			if meta.number <= revert_up_to_number || !weight_keys.insert(block_weight_key(hash)) {
+				// We've reached the revert point or an already processed branch, stop here.
+				break
+			}
+			hash = meta.parent;
+		}
+	}
+
+	let weight_keys: Vec<_> = weight_keys.iter().map(|val| val.as_slice()).collect();
+
+	// Write epoch changes and remove weights in one shot.
+	write_epoch_changes::<Block, _, _>(&epoch_changes, |values| {
+		AuxStore::insert_aux(&*backend, values, weight_keys.iter())
+	})
+}
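The weight-cleanup pass above starts from every leaf that descends from the revert point (the tree_route(..).retracted().is_empty() filter) and walks parent links backwards, collecting weight keys until it hits the revert point or a branch that an earlier walk already covered. A self-contained toy run of the same walk over a five-block tree; the single-character block "hashes", the block numbers and the fork layout are made up for illustration and are not part of the patch:

use std::collections::{HashMap, HashSet};

fn main() {
	// Toy chain: 1 <- 2 <- 3 <- 4 (canon leaf), plus a fork 3 <- 5 (second leaf).
	// Pretend the revert point computed above is block number 2.
	let parent: HashMap<char, char> = [('2', '1'), ('3', '2'), ('4', '3'), ('5', '3')].into();
	let number: HashMap<char, u32> = [('1', 1), ('2', 2), ('3', 3), ('4', 4), ('5', 5)].into();
	let revert_up_to_number = 2;

	// Same loop structure as in `revert`: walk back from each leaf and stop at the
	// revert point or on a branch that a previous walk already visited.
	let mut to_remove = HashSet::new();
	for leaf in ['4', '5'] {
		let mut hash = leaf;
		loop {
			if number[&hash] <= revert_up_to_number || !to_remove.insert(hash) {
				break
			}
			hash = parent[&hash];
		}
	}

	// Blocks 3, 4 and 5 lose their weight entries; 1 and 2 (at or below the
	// revert point) keep theirs.
	let mut removed: Vec<_> = to_remove.into_iter().collect();
	removed.sort();
	assert_eq!(removed, vec!['3', '4', '5']);
}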
2 changes: 2 additions & 0 deletions client/consensus/sassafras/src/lib.rs
@@ -94,7 +94,9 @@ mod block_import;
 mod tests;
 mod verification;
 
+// Export core components.
 pub use authorship::{start_sassafras, SassafrasParams, SassafrasWorker};
+pub use aux_schema::revert;
 pub use block_import::{block_import, SassafrasBlockImport};
 pub use verification::SassafrasVerifier;
 
25 changes: 7 additions & 18 deletions client/consensus/sassafras/src/tests.rs
@@ -22,11 +22,10 @@
 use super::*;
 
 use futures::executor::block_on;
-use std::{cell::RefCell, sync::Arc};
+use std::sync::Arc;
 
-use sc_block_builder::{BlockBuilder, BlockBuilderProvider};
-use sc_client_api::TransactionFor;
-use sc_consensus::{BlockImport, BoxBlockImport, BoxJustificationImport};
+use sc_block_builder::BlockBuilderProvider;
+use sc_consensus::{BlockImport, BoxJustificationImport};
 use sc_keystore::LocalKeystore;
 use sc_network_test::*;
 use sp_application_crypto::key_types::SASSAFRAS;
@@ -188,13 +187,8 @@ impl TestEnvironment {
 
 	// Propose and import a new Sassafras block on top of the given parent.
 	// This skips verification.
-	fn propose_and_import_block(
-		//<Transaction: Send + 'static>(
-		&mut self,
-		parent_id: BlockId,
-		slot: Option<Slot>,
-	) -> Hash {
-		let mut parent = self.client.header(&parent_id).unwrap().unwrap();
+	fn propose_and_import_block(&mut self, parent_id: BlockId, slot: Option<Slot>) -> Hash {
+		let parent = self.client.header(&parent_id).unwrap().unwrap();
 
 		let proposer = block_on(self.init(&parent)).unwrap();
 
@@ -269,12 +263,7 @@ impl TestEnvironment {
 	// Propose and import n valid Sassafras blocks that are built on top of the given parent.
 	// The proposer takes care of producing epoch change digests according to the epoch
 	// duration (which is set to 6 slots in the test runtime).
-	fn propose_and_import_blocks(
-		//)<Transaction: Send + 'static>(
-		&mut self,
-		mut parent_id: BlockId,
-		n: usize,
-	) -> Vec<Hash> {
+	fn propose_and_import_blocks(&mut self, mut parent_id: BlockId, n: usize) -> Vec<Hash> {
 		let mut hashes = Vec::with_capacity(n);
 
 		for _ in 0..n {
@@ -382,7 +371,7 @@ fn revert_prunes_epoch_changes_and_removes_weights() {
 	assert_eq!(epoch_changes.shared_data().tree().roots().count(), 1);
 
 	// Revert canon chain to block #10 (best(21) - 11)
-	// crate::revert(client.clone(), backend, 11).unwrap();
+	crate::aux_schema::revert(backend, 11).unwrap();
 
 	// Load and check epoch changes.
 
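The numbers in the comment above fall straight out of the clamping logic in revert(): with a 21-block canon chain, a request to roll back 11 blocks, and (as assumed here for illustration) nothing beyond genesis finalized, the revert point lands on block #10. A tiny standalone check of that arithmetic:

fn main() {
	// Mirrors the clamping in `revert`: never roll back past the last finalized
	// block, then prune aux data for everything above the revert point.
	let (best_number, finalized, blocks) = (21u32, 0u32, 11u32);
	let revertible = blocks.min(best_number - finalized); // 11
	let revert_up_to_number = best_number - revertible; // block #10
	assert_eq!((revertible, revert_up_to_number), (11, 10));
}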