Skip to content
This repository was archived by the owner on Nov 15, 2023. It is now read-only.
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Next Next commit
BlockId removal: refactor: HeaderBackend::header
It changes the argument type of the following methods from `BlockId<Block>` to `Block::Hash`:
- `HeaderBackend::header`
- `Client::header`
- `PeersClient::header`
- `ChainApi::block_header`

This PR is part of the `BlockId::Number` refactoring analysis (paritytech/substrate#11292).
  • Loading branch information
michalkucharczyk committed Dec 8, 2022
commit 4f484d7a0747ea0af8e36d1d2e577d7a20e02d4b
2 changes: 1 addition & 1 deletion bin/node/bench/src/construct.rs
Original file line number Diff line number Diff line change
Expand Up @@ -143,7 +143,7 @@ impl core::Benchmark for ConstructionBenchmark {
proposer_factory.init(
&context
.client
.header(&BlockId::number(0))
.header(context.client.chain_info().genesis_hash)
.expect("Database error querying block #0")
.expect("Block #0 should exist"),
),
Expand Down
5 changes: 2 additions & 3 deletions bin/node/cli/src/service.rs
Original file line number Diff line number Diff line change
Expand Up @@ -640,9 +640,8 @@ mod tests {
Ok((node, setup_handles.unwrap()))
},
|service, &mut (ref mut block_import, ref babe_link)| {
let parent_id = BlockId::number(service.client().chain_info().best_number);
let parent_header = service.client().header(&parent_id).unwrap().unwrap();
let parent_hash = parent_header.hash();
let parent_hash = service.client().chain_info().best_hash;
let parent_header = service.client().header(parent_hash).unwrap().unwrap();
let parent_number = *parent_header.number();

futures::executor::block_on(service.transaction_pool().maintain(
Expand Down
7 changes: 3 additions & 4 deletions bin/node/inspect/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -147,18 +147,17 @@ impl<TBlock: Block, TPrinter: PrettyPrinter<TBlock>> Inspector<TBlock, TPrinter>
.block_body(hash)?
.ok_or_else(|| Error::NotFound(not_found.clone()))?;
let header =
self.chain.header(id)?.ok_or_else(|| Error::NotFound(not_found.clone()))?;
self.chain.header(hash)?.ok_or_else(|| Error::NotFound(not_found.clone()))?;
TBlock::new(header, body)
},
BlockAddress::Hash(hash) => {
let id = BlockId::hash(hash);
let not_found = format!("Could not find block {:?}", id);
let not_found = format!("Could not find block {:?}", BlockId::<TBlock>::Hash(hash));
let body = self
.chain
.block_body(hash)?
.ok_or_else(|| Error::NotFound(not_found.clone()))?;
let header =
self.chain.header(id)?.ok_or_else(|| Error::NotFound(not_found.clone()))?;
self.chain.header(hash)?.ok_or_else(|| Error::NotFound(not_found.clone()))?;
TBlock::new(header, body)
},
})
Expand Down
10 changes: 4 additions & 6 deletions client/api/src/in_mem.rs
Original file line number Diff line number Diff line change
Expand Up @@ -225,7 +225,7 @@ impl<Block: BlockT> Blockchain<Block> {
/// Set an existing block as head.
pub fn set_head(&self, hash: Block::Hash) -> sp_blockchain::Result<()> {
let header = self
.header(BlockId::Hash(hash))?
.header(hash)?
.ok_or_else(|| sp_blockchain::Error::UnknownBlock(format!("{}", hash)))?;

self.apply_head(&header)
Expand Down Expand Up @@ -336,11 +336,9 @@ impl<Block: BlockT> Blockchain<Block> {
impl<Block: BlockT> HeaderBackend<Block> for Blockchain<Block> {
fn header(
&self,
id: BlockId<Block>,
hash: Block::Hash,
) -> sp_blockchain::Result<Option<<Block as BlockT>::Header>> {
Ok(self
.id(id)
.and_then(|hash| self.storage.read().blocks.get(&hash).map(|b| b.header().clone())))
Ok(self.storage.read().blocks.get(&hash).map(|b| b.header().clone()))
}

fn info(&self) -> blockchain::Info<Block> {
Expand Down Expand Up @@ -387,7 +385,7 @@ impl<Block: BlockT> HeaderMetadata<Block> for Blockchain<Block> {
&self,
hash: Block::Hash,
) -> Result<CachedHeaderMetadata<Block>, Self::Error> {
self.header(BlockId::hash(hash))?
self.header(hash)?
.map(|header| CachedHeaderMetadata::from(&header))
.ok_or_else(|| {
sp_blockchain::Error::UnknownBlock(format!("header not found: {}", hash))
Expand Down
2 changes: 1 addition & 1 deletion client/authority-discovery/src/worker/tests.rs
Original file line number Diff line number Diff line change
Expand Up @@ -58,7 +58,7 @@ impl ProvideRuntimeApi<Block> for TestApi {
impl<Block: BlockT> HeaderBackend<Block> for TestApi {
fn header(
&self,
_id: BlockId<Block>,
_hash: Block::Hash,
) -> std::result::Result<Option<Block::Header>, sp_blockchain::Error> {
Ok(None)
}
Expand Down
36 changes: 14 additions & 22 deletions client/basic-authorship/src/basic_authorship.rs
Original file line number Diff line number Diff line change
Expand Up @@ -617,8 +617,7 @@ mod tests {
block_on(
txpool.maintain(chain_event(
client
.header(&BlockId::Number(0u64))
.expect("header get error")
.expect_header(BlockId::Number(0u64))
.expect("there should be header"),
)),
);
Expand All @@ -628,7 +627,7 @@ mod tests {

let cell = Mutex::new((false, time::Instant::now()));
let proposer = proposer_factory.init_with_now(
&client.header(&BlockId::number(0)).unwrap().unwrap(),
&client.expect_header(BlockId::number(0)).unwrap(),
Box::new(move || {
let mut value = cell.lock();
if !value.0 {
Expand Down Expand Up @@ -672,7 +671,7 @@ mod tests {

let cell = Mutex::new((false, time::Instant::now()));
let proposer = proposer_factory.init_with_now(
&client.header(&BlockId::number(0)).unwrap().unwrap(),
&client.expect_header(BlockId::number(0)).unwrap(),
Box::new(move || {
let mut value = cell.lock();
if !value.0 {
Expand Down Expand Up @@ -705,15 +704,13 @@ mod tests {
);

let genesis_hash = client.info().best_hash;
let block_id = BlockId::Hash(genesis_hash);

block_on(txpool.submit_at(&BlockId::number(0), SOURCE, vec![extrinsic(0)])).unwrap();

block_on(
txpool.maintain(chain_event(
client
.header(&BlockId::Number(0u64))
.expect("header get error")
.expect_header(BlockId::Number(0u64))
.expect("there should be header"),
)),
);
Expand All @@ -722,7 +719,7 @@ mod tests {
ProposerFactory::new(spawner.clone(), client.clone(), txpool.clone(), None, None);

let proposer = proposer_factory.init_with_now(
&client.header(&block_id).unwrap().unwrap(),
&client.header(genesis_hash).unwrap().unwrap(),
Box::new(move || time::Instant::now()),
);

Expand All @@ -734,7 +731,7 @@ mod tests {
assert_eq!(proposal.block.extrinsics().len(), 1);

let api = client.runtime_api();
api.execute_block(&block_id, proposal.block).unwrap();
api.execute_block(&BlockId::Hash(genesis_hash), proposal.block).unwrap();

let state = backend.state_at(genesis_hash).unwrap();

Expand Down Expand Up @@ -791,7 +788,7 @@ mod tests {
expected_block_extrinsics,
expected_pool_transactions| {
let proposer = proposer_factory.init_with_now(
&client.header(&BlockId::number(number)).unwrap().unwrap(),
&client.expect_header(BlockId::number(number)).unwrap(),
Box::new(move || time::Instant::now()),
);

Expand All @@ -813,8 +810,7 @@ mod tests {
block_on(
txpool.maintain(chain_event(
client
.header(&BlockId::Number(0u64))
.expect("header get error")
.expect_header(BlockId::Number(0u64))
.expect("there should be header"),
)),
);
Expand All @@ -827,8 +823,7 @@ mod tests {
block_on(
txpool.maintain(chain_event(
client
.header(&BlockId::Number(1))
.expect("header get error")
.expect_header(BlockId::Number(1))
.expect("there should be header"),
)),
);
Expand All @@ -851,8 +846,7 @@ mod tests {
client.clone(),
);
let genesis_header = client
.header(&BlockId::Number(0u64))
.expect("header get error")
.expect_header(BlockId::Number(0u64))
.expect("there should be header");

let extrinsics_num = 5;
Expand Down Expand Up @@ -966,8 +960,7 @@ mod tests {
block_on(
txpool.maintain(chain_event(
client
.header(&BlockId::Number(0u64))
.expect("header get error")
.expect_header(BlockId::Number(0u64))
.expect("there should be header"),
)),
);
Expand All @@ -978,7 +971,7 @@ mod tests {

let cell = Mutex::new(time::Instant::now());
let proposer = proposer_factory.init_with_now(
&client.header(&BlockId::number(0)).unwrap().unwrap(),
&client.expect_header(BlockId::number(0)).unwrap(),
Box::new(move || {
let mut value = cell.lock();
let old = *value;
Expand Down Expand Up @@ -1029,8 +1022,7 @@ mod tests {
block_on(
txpool.maintain(chain_event(
client
.header(&BlockId::Number(0u64))
.expect("header get error")
.expect_header(BlockId::Number(0u64))
.expect("there should be header"),
)),
);
Expand All @@ -1043,7 +1035,7 @@ mod tests {
let cell = Arc::new(Mutex::new((0, time::Instant::now())));
let cell2 = cell.clone();
let proposer = proposer_factory.init_with_now(
&client.header(&BlockId::number(0)).unwrap().unwrap(),
&client.expect_header(BlockId::number(0)).unwrap(),
Box::new(move || {
let mut value = cell.lock();
let (called, old) = *value;
Expand Down
4 changes: 2 additions & 2 deletions client/consensus/aura/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -967,7 +967,7 @@ mod tests {
compatibility_mode: Default::default(),
};

let head = client.header(&BlockId::Number(0)).unwrap().unwrap();
let head = client.expect_header(BlockId::Number(0)).unwrap();

let res = runtime
.block_on(worker.on_slot(SlotInfo {
Expand All @@ -981,6 +981,6 @@ mod tests {
.unwrap();

// The returned block should be imported and we should be able to get its header by now.
assert!(client.header(&BlockId::Hash(res.block.hash())).unwrap().is_some());
assert!(client.header(res.block.hash()).unwrap().is_some());
}
}
4 changes: 2 additions & 2 deletions client/consensus/babe/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -1417,7 +1417,7 @@ where
let parent_hash = *block.header.parent_hash();
let parent_header = self
.client
.header(BlockId::Hash(parent_hash))
.header(parent_hash)
.map_err(|e| ConsensusError::ChainLookup(e.to_string()))?
.ok_or_else(|| {
ConsensusError::ChainLookup(
Expand Down Expand Up @@ -1654,7 +1654,7 @@ where

let finalized_slot = {
let finalized_header = client
.header(BlockId::Hash(info.finalized_hash))
.header(info.finalized_hash)
.map_err(|e| ConsensusError::ClientImport(e.to_string()))?
.expect(
"best finalized hash was given by client; finalized headers must exist in db; qed",
Expand Down
40 changes: 20 additions & 20 deletions client/consensus/babe/src/tests.rs
Original file line number Diff line number Diff line change
Expand Up @@ -678,18 +678,18 @@ fn propose_and_import_blocks<Transaction: Send + 'static>(
client: &PeersFullClient,
proposer_factory: &mut DummyFactory,
block_import: &mut BoxBlockImport<TestBlock, Transaction>,
parent_id: BlockId<TestBlock>,
parent_hash: Hash,
n: usize,
runtime: &Runtime,
) -> Vec<Hash> {
let mut hashes = Vec::with_capacity(n);
let mut parent_header = client.header(&parent_id).unwrap().unwrap();
let mut parent_header = client.header(parent_hash).unwrap().unwrap();

for _ in 0..n {
let block_hash =
propose_and_import_block(&parent_header, None, proposer_factory, block_import, runtime);
hashes.push(block_hash);
parent_header = client.header(&BlockId::Hash(block_hash)).unwrap().unwrap();
parent_header = client.header(block_hash).unwrap().unwrap();
}

hashes
Expand All @@ -713,7 +713,7 @@ fn importing_block_one_sets_genesis_epoch() {

let mut block_import = data.block_import.lock().take().expect("import set up during init");

let genesis_header = client.header(&BlockId::Number(0)).unwrap().unwrap();
let genesis_header = client.header(client.chain_info().genesis_hash).unwrap().unwrap();

let block_hash = propose_and_import_block(
&genesis_header,
Expand Down Expand Up @@ -779,10 +779,10 @@ fn revert_prunes_epoch_changes_and_removes_weights() {
// \ revert *---- G(#13) ---- H(#19) ---#20 < fork #3
// \ to #10
// *-----E(#7)---#11 < fork #1
let canon = propose_and_import_blocks_wrap(BlockId::Number(0), 21);
let fork1 = propose_and_import_blocks_wrap(BlockId::Hash(canon[0]), 10);
let fork2 = propose_and_import_blocks_wrap(BlockId::Hash(canon[7]), 10);
let fork3 = propose_and_import_blocks_wrap(BlockId::Hash(canon[11]), 8);
let canon = propose_and_import_blocks_wrap(client.chain_info().genesis_hash, 21);
let fork1 = propose_and_import_blocks_wrap(canon[0], 10);
let fork2 = propose_and_import_blocks_wrap(canon[7], 10);
let fork3 = propose_and_import_blocks_wrap(canon[11], 8);

// We should be tracking a total of 9 epochs in the fork tree
assert_eq!(epoch_changes.shared_data().tree().iter().count(), 8);
Expand Down Expand Up @@ -854,7 +854,7 @@ fn revert_not_allowed_for_finalized() {
)
};

let canon = propose_and_import_blocks_wrap(BlockId::Number(0), 3);
let canon = propose_and_import_blocks_wrap(client.chain_info().genesis_hash, 3);

// Finalize best block
client.finalize_block(canon[2], None, false).unwrap();
Expand Down Expand Up @@ -912,12 +912,12 @@ fn importing_epoch_change_block_prunes_tree() {

// Create and import the canon chain and keep track of fork blocks (A, C, D)
// from the diagram above.
let canon = propose_and_import_blocks_wrap(BlockId::Number(0), 30);
let canon = propose_and_import_blocks_wrap(client.chain_info().genesis_hash, 30);

// Create the forks
let fork_1 = propose_and_import_blocks_wrap(BlockId::Hash(canon[0]), 10);
let fork_2 = propose_and_import_blocks_wrap(BlockId::Hash(canon[12]), 15);
let fork_3 = propose_and_import_blocks_wrap(BlockId::Hash(canon[18]), 10);
let fork_1 = propose_and_import_blocks_wrap(canon[0], 10);
let fork_2 = propose_and_import_blocks_wrap(canon[12], 15);
let fork_3 = propose_and_import_blocks_wrap(canon[18], 10);

// We should be tracking a total of 9 epochs in the fork tree
assert_eq!(epoch_changes.shared_data().tree().iter().count(), 9);
Expand All @@ -928,7 +928,7 @@ fn importing_epoch_change_block_prunes_tree() {
// We finalize block #13 from the canon chain, so on the next epoch
// change the tree should be pruned, to not contain F (#7).
client.finalize_block(canon[12], None, false).unwrap();
propose_and_import_blocks_wrap(BlockId::Hash(client.chain_info().best_hash), 7);
propose_and_import_blocks_wrap(client.chain_info().best_hash, 7);

let nodes: Vec<_> = epoch_changes.shared_data().tree().iter().map(|(h, _, _)| *h).collect();

Expand All @@ -941,7 +941,7 @@ fn importing_epoch_change_block_prunes_tree() {

// finalizing block #25 from the canon chain should prune out the second fork
client.finalize_block(canon[24], None, false).unwrap();
propose_and_import_blocks_wrap(BlockId::Hash(client.chain_info().best_hash), 8);
propose_and_import_blocks_wrap(client.chain_info().best_hash, 8);

let nodes: Vec<_> = epoch_changes.shared_data().tree().iter().map(|(h, _, _)| *h).collect();

Expand Down Expand Up @@ -973,7 +973,7 @@ fn verify_slots_are_strictly_increasing() {
mutator: Arc::new(|_, _| ()),
};

let genesis_header = client.header(&BlockId::Number(0)).unwrap().unwrap();
let genesis_header = client.header(client.chain_info().genesis_hash).unwrap().unwrap();

// we should have no issue importing this block
let b1 = propose_and_import_block(
Expand All @@ -984,7 +984,7 @@ fn verify_slots_are_strictly_increasing() {
&runtime,
);

let b1 = client.header(&BlockId::Hash(b1)).unwrap().unwrap();
let b1 = client.header(b1).unwrap().unwrap();

// we should fail to import this block since the slot number didn't increase.
// we will panic due to the `PanickingBlockImport` defined above.
Expand Down Expand Up @@ -1077,9 +1077,9 @@ fn obsolete_blocks_aux_data_cleanup() {
// G --- A1 --- A2 --- A3 --- A4 ( < fork1 )
// \-----C4 --- C5 ( < fork3 )

let fork1_hashes = propose_and_import_blocks_wrap(BlockId::Number(0), 4);
let fork2_hashes = propose_and_import_blocks_wrap(BlockId::Number(0), 2);
let fork3_hashes = propose_and_import_blocks_wrap(BlockId::Number(3), 2);
let fork1_hashes = propose_and_import_blocks_wrap(client.chain_info().genesis_hash, 4);
let fork2_hashes = propose_and_import_blocks_wrap(client.chain_info().genesis_hash, 2);
let fork3_hashes = propose_and_import_blocks_wrap(fork1_hashes[2], 2);

// Check that aux data is present for all but the genesis block.
assert!(aux_data_check(&[client.chain_info().genesis_hash], false));
Expand Down
Loading