This repository was archived by the owner on Nov 15, 2023. It is now read-only.
Merged
32 commits
de742d7
update primitives
rphmeier Aug 13, 2020
6c019fa
correct parent_head field
rphmeier Aug 13, 2020
427938f
make hrmp field pub
rphmeier Aug 13, 2020
01e72da
refactor validation data: runtime
rphmeier Aug 13, 2020
7500247
refactor validation data: messages
rphmeier Aug 13, 2020
5df917b
add arguments to full_validation_data runtime API
rphmeier Aug 13, 2020
d55d3e3
port runtime API
rphmeier Aug 13, 2020
3e807d9
mostly port over candidate validation
rphmeier Aug 13, 2020
cab7f23
remove some parameters from ValidationParams
rphmeier Aug 13, 2020
174b7f1
guide: update candidate validation
rphmeier Aug 13, 2020
0734164
update candidate outputs
rphmeier Aug 13, 2020
378e4d7
update ValidationOutputs in primitives
rphmeier Aug 13, 2020
5d068fd
port over candidate validation
rphmeier Aug 13, 2020
f02fbf3
add a new test for no-transient behavior
rphmeier Aug 13, 2020
60e0f55
update util runtime API wrappers
rphmeier Aug 13, 2020
8b40845
candidate backing
rphmeier Aug 13, 2020
2a275c7
fix missing imports
rphmeier Aug 14, 2020
6a81c63
change some fields of validation data around
rphmeier Aug 17, 2020
1b0f77d
runtime API impl
rphmeier Aug 17, 2020
54c7029
update candidate validation
rphmeier Aug 17, 2020
41ced5e
fix backing tests
rphmeier Aug 17, 2020
02a2a51
grumbles from review
rphmeier Aug 17, 2020
b46787a
fix av-store tests
rphmeier Aug 17, 2020
16a1a42
fix some more crates
rphmeier Aug 17, 2020
9ccb9a3
fix provisioner tests
rphmeier Aug 17, 2020
c8e0305
fix availability distribution tests
rphmeier Aug 17, 2020
3dccc79
Merge branch 'master' into rh-validation-data-refactor-impl
rphmeier Aug 17, 2020
caee3c0
port collation-generation to new validation data
rphmeier Aug 17, 2020
46385b2
Merge branch 'master' into rh-validation-data-refactor-impl
rphmeier Aug 17, 2020
46e390c
fix overseer tests
rphmeier Aug 17, 2020
5097f55
Update roadmap/implementers-guide/src/node/utility/candidate-validati…
rphmeier Aug 18, 2020
a7702aa
Merge branch 'master' into rh-validation-data-refactor-impl
rphmeier Aug 18, 2020
14 changes: 0 additions & 14 deletions Cargo.lock

Generated file; diff not rendered by default.

1 change: 0 additions & 1 deletion Cargo.toml
@@ -70,7 +70,6 @@ members = [
"parachain/test-parachains",
"parachain/test-parachains/adder",
"parachain/test-parachains/adder/collator",
"parachain/test-parachains/code-upgrader",
]

[badges]
106 changes: 39 additions & 67 deletions node/collation-generation/src/lib.rs
@@ -33,13 +33,13 @@ use polkadot_node_subsystem::{
FromOverseer, SpawnedSubsystem, Subsystem, SubsystemContext, SubsystemError, SubsystemResult,
};
use polkadot_node_subsystem_util::{
self as util, request_availability_cores_ctx, request_global_validation_data_ctx,
request_local_validation_data_ctx, request_validators_ctx,
self as util, request_availability_cores_ctx, request_full_validation_data_ctx,
request_validators_ctx,
};
use polkadot_primitives::v1::{
collator_signature_payload, validation_data_hash, AvailableData, CandidateCommitments,
CandidateDescriptor, CandidateReceipt, CoreState, GlobalValidationData, Hash,
LocalValidationData, OccupiedCoreAssumption, PoV,
collator_signature_payload, AvailableData, CandidateCommitments,
CandidateDescriptor, CandidateReceipt, CoreState, Hash, OccupiedCoreAssumption,
PersistedValidationData, PoV,
};
use sp_core::crypto::Pair;
use std::sync::Arc;
@@ -186,13 +186,11 @@ async fn handle_new_activations<Context: SubsystemContext>(
for relay_parent in activated.iter().copied() {
// double-future magic happens here: the first layer of requests takes a mutable borrow of the context, and
// returns a receiver. The second layer of requests actually polls those receivers to completion.
let (global_validation_data, availability_cores, validators) = join!(
request_global_validation_data_ctx(relay_parent, ctx).await?,
let (availability_cores, validators) = join!(
request_availability_cores_ctx(relay_parent, ctx).await?,
request_validators_ctx(relay_parent, ctx).await?,
);

let global_validation_data = global_validation_data??;
let availability_cores = availability_cores??;
let n_validators = validators??.len();
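
A small self-contained sketch of the "double-future" pattern described in the comment above (illustrative only, not part of this diff; request_value is a hypothetical stand-in for the request_*_ctx wrappers):

use futures::channel::oneshot;
use futures::join;

// Hypothetical stand-in for a `request_*_ctx` wrapper: the first "layer"
// sends the request (here it is answered immediately) and returns a
// receiver; that receiver is the second layer.
async fn request_value(x: u32) -> oneshot::Receiver<u32> {
    let (tx, rx) = oneshot::channel();
    let _ = tx.send(x * 2);
    rx
}

async fn demo() -> Result<(u32, u32), oneshot::Canceled> {
    // First layer: issue both requests sequentially (in the real subsystem
    // each briefly needs the single mutable context handle).
    let (a_rx, b_rx) = (request_value(1).await, request_value(2).await);
    // Second layer: poll both receivers to completion concurrently.
    let (a, b) = join!(a_rx, b_rx);
    Ok((a?, b?))
}

fn main() {
    assert_eq!(futures::executor::block_on(demo()), Ok((2, 4)));
}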

@@ -212,9 +210,10 @@
continue;
}

// we get local validation data synchronously for each core instead of within the subtask loop,
// because we have only a single mutable handle to the context, so the work can't really be distributed
let local_validation_data = match request_local_validation_data_ctx(
// we get validation data synchronously for each core instead of
// within the subtask loop, because we have only a single mutable handle to the
// context, so the work can't really be distributed
let validation_data = match request_full_validation_data_ctx(
relay_parent,
scheduled_core.para_id,
assumption,
@@ -223,29 +222,31 @@
.await?
.await??
{
Some(local_validation_data) => local_validation_data,
Some(v) => v,
None => continue,
};

let task_global_validation_data = global_validation_data.clone();
let task_config = config.clone();
let mut task_sender = sender.clone();
ctx.spawn("collation generation collation builder", Box::pin(async move {
let validation_data_hash =
validation_data_hash(&task_global_validation_data, &local_validation_data);
let persisted_validation_data_hash = validation_data.persisted.hash();

let collation = (task_config.collator)(&task_global_validation_data, &local_validation_data).await;
let collation = (task_config.collator)(&validation_data).await;

let pov_hash = collation.proof_of_validity.hash();

let signature_payload = collator_signature_payload(
&relay_parent,
&scheduled_core.para_id,
&validation_data_hash,
&persisted_validation_data_hash,
&pov_hash,
);

let erasure_root = match erasure_root(n_validators, local_validation_data, task_global_validation_data, collation.proof_of_validity.clone()) {
let erasure_root = match erasure_root(
n_validators,
validation_data.persisted,
collation.proof_of_validity.clone(),
) {
Ok(erasure_root) => erasure_root,
Err(err) => {
log::error!(target: "collation_generation", "failed to calculate erasure root for para_id {}: {:?}", scheduled_core.para_id, err);
@@ -268,7 +269,7 @@ async fn handle_new_activations<Context: SubsystemContext>(
para_id: scheduled_core.para_id,
relay_parent,
collator: task_config.key.public(),
validation_data_hash,
persisted_validation_data_hash,
pov_hash,
},
};
@@ -287,17 +288,11 @@ async fn handle_new_activations<Context: SubsystemContext>(

fn erasure_root(
n_validators: usize,
local_validation_data: LocalValidationData,
global_validation_data: GlobalValidationData,
persisted_validation: PersistedValidationData,
pov: PoV,
) -> Result<Hash> {
let omitted_validation = polkadot_primitives::v1::OmittedValidationData {
global_validation: global_validation_data,
local_validation: local_validation_data,
};

let available_data = AvailableData {
omitted_validation,
validation_data: persisted_validation,
pov,
};
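
For context on what erasure_root produces: the encoded AvailableData is erasure-coded into one chunk per validator, and a Merkle root over those chunks becomes the candidate's erasure root. The sketch below is a deliberately simplified, self-contained illustration of that idea only; it is not part of this diff and not the polkadot_erasure_coding implementation (real erasure coding generates parity chunks via Reed-Solomon so the data can be reconstructed from a subset of chunks, and the real merklization differs):

use sp_core::hashing::blake2_256;

// Simplified: split the encoded data evenly instead of Reed-Solomon coding it.
// (Real coding yields exactly n_validators chunks; this split may yield fewer.)
fn naive_chunks(encoded: &[u8], n_validators: usize) -> Vec<Vec<u8>> {
    let n = n_validators.max(1);
    let chunk_len = ((encoded.len() + n - 1) / n).max(1); // ceiling division, at least 1
    encoded.chunks(chunk_len).map(|c| c.to_vec()).collect()
}

// Basic binary Merkle root over chunk hashes, duplicating a lone last node,
// standing in for the real trie-based root computation.
fn merkle_root(chunks: &[Vec<u8>]) -> [u8; 32] {
    let mut layer: Vec<[u8; 32]> = chunks.iter().map(|c| blake2_256(c)).collect();
    while layer.len() > 1 {
        layer = layer
            .chunks(2)
            .map(|pair| {
                let right = pair.get(1).unwrap_or(&pair[0]);
                let mut buf = Vec::with_capacity(64);
                buf.extend_from_slice(&pair[0]);
                buf.extend_from_slice(right);
                blake2_256(&buf)
            })
            .collect();
    }
    layer.first().copied().unwrap_or([0u8; 32])
}

fn main() {
    let encoded_available_data = b"SCALE-encoded AvailableData bytes".to_vec();
    let chunks = naive_chunks(&encoded_available_data, 10);
    println!("illustrative erasure root: {:02x?}", merkle_root(&chunks));
}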

@@ -322,8 +317,8 @@ mod tests {
subsystem_test_harness, TestSubsystemContextHandle,
};
use polkadot_primitives::v1::{
BlockData, BlockNumber, CollatorPair, GlobalValidationData, Id as ParaId,
LocalValidationData, PoV, ScheduledCore,
BlockData, BlockNumber, CollatorPair, Id as ParaId,
PersistedValidationData, PoV, ScheduledCore, ValidationData,
};
use std::pin::Pin;

Expand Down Expand Up @@ -355,7 +350,7 @@ mod tests {
fn test_config<Id: Into<ParaId>>(para_id: Id) -> Arc<CollationGenerationConfig> {
Arc::new(CollationGenerationConfig {
key: CollatorPair::generate().0,
collator: Box::new(|_gvd: &GlobalValidationData, _lvd: &LocalValidationData| {
collator: Box::new(|_vd: &ValidationData| {
Box::new(TestCollator)
}),
para_id: para_id.into(),
@@ -370,27 +365,21 @@
}

#[test]
fn requests_validation_and_availability_per_relay_parent() {
fn requests_availability_per_relay_parent() {
let activated_hashes: Vec<Hash> = vec![
[1; 32].into(),
[4; 32].into(),
[9; 32].into(),
[16; 32].into(),
];

let requested_validation_data = Arc::new(Mutex::new(Vec::new()));
let requested_availability_cores = Arc::new(Mutex::new(Vec::new()));

let overseer_requested_validation_data = requested_validation_data.clone();
let overseer_requested_availability_cores = requested_availability_cores.clone();
let overseer = |mut handle: TestSubsystemContextHandle<CollationGenerationMessage>| async move {
loop {
match handle.try_recv().await {
None => break,
Some(AllMessages::RuntimeApi(RuntimeApiMessage::Request(hash, RuntimeApiRequest::GlobalValidationData(tx)))) => {
overseer_requested_validation_data.lock().await.push(hash);
tx.send(Ok(Default::default())).unwrap();
}
Some(AllMessages::RuntimeApi(RuntimeApiMessage::Request(hash, RuntimeApiRequest::AvailabilityCores(tx)))) => {
overseer_requested_availability_cores.lock().await.push(hash);
tx.send(Ok(vec![])).unwrap();
@@ -408,7 +397,7 @@ mod tests {
let subsystem_activated_hashes = activated_hashes.clone();
subsystem_test_harness(overseer, |mut ctx| async move {
handle_new_activations(
test_config(123),
test_config(123u32),
&subsystem_activated_hashes,
&mut ctx,
&tx,
@@ -417,41 +406,30 @@ mod tests {
.unwrap();
});

let mut requested_validation_data = Arc::try_unwrap(requested_validation_data)
.expect("overseer should have shut down by now")
.into_inner();
requested_validation_data.sort();
let mut requested_availability_cores = Arc::try_unwrap(requested_availability_cores)
.expect("overseer should have shut down by now")
.into_inner();
requested_availability_cores.sort();

assert_eq!(requested_validation_data, activated_hashes);
assert_eq!(requested_availability_cores, activated_hashes);
}

#[test]
fn requests_local_validation_for_scheduled_matches() {
fn requests_validation_data_for_scheduled_matches() {
let activated_hashes: Vec<Hash> = vec![
Hash::repeat_byte(1),
Hash::repeat_byte(4),
Hash::repeat_byte(9),
Hash::repeat_byte(16),
];

let requested_local_validation_data = Arc::new(Mutex::new(Vec::new()));
let requested_full_validation_data = Arc::new(Mutex::new(Vec::new()));

let overseer_requested_local_validation_data = requested_local_validation_data.clone();
let overseer_requested_full_validation_data = requested_full_validation_data.clone();
let overseer = |mut handle: TestSubsystemContextHandle<CollationGenerationMessage>| async move {
loop {
match handle.try_recv().await {
None => break,
Some(AllMessages::RuntimeApi(RuntimeApiMessage::Request(
_hash,
RuntimeApiRequest::GlobalValidationData(tx),
))) => {
tx.send(Ok(Default::default())).unwrap();
}
Some(AllMessages::RuntimeApi(RuntimeApiMessage::Request(
hash,
RuntimeApiRequest::AvailabilityCores(tx),
@@ -470,13 +448,13 @@
}
Some(AllMessages::RuntimeApi(RuntimeApiMessage::Request(
hash,
RuntimeApiRequest::LocalValidationData(
RuntimeApiRequest::FullValidationData(
_para_id,
_occupied_core_assumption,
tx,
),
))) => {
overseer_requested_local_validation_data
overseer_requested_full_validation_data
.lock()
.await
.push(hash);
@@ -503,15 +481,15 @@
.unwrap();
});

let requested_local_validation_data = Arc::try_unwrap(requested_local_validation_data)
let requested_full_validation_data = Arc::try_unwrap(requested_full_validation_data)
.expect("overseer should have shut down by now")
.into_inner();

// the only activated hash should be from the 4 hash:
// each activated hash generates two scheduled cores: one with its value * 4, one with its value * 5
// given that the test configuration has a para_id of 16, there's only one way to get that value: with the 4
// hash.
assert_eq!(requested_local_validation_data, vec![[4; 32].into()]);
assert_eq!(requested_full_validation_data, vec![[4; 32].into()]);
}

#[test]
@@ -527,12 +505,6 @@ mod tests {
loop {
match handle.try_recv().await {
None => break,
Some(AllMessages::RuntimeApi(RuntimeApiMessage::Request(
_hash,
RuntimeApiRequest::GlobalValidationData(tx),
))) => {
tx.send(Ok(Default::default())).unwrap();
}
Some(AllMessages::RuntimeApi(RuntimeApiMessage::Request(
hash,
RuntimeApiRequest::AvailabilityCores(tx),
@@ -551,7 +523,7 @@
}
Some(AllMessages::RuntimeApi(RuntimeApiMessage::Request(
_hash,
RuntimeApiRequest::LocalValidationData(
RuntimeApiRequest::FullValidationData(
_para_id,
_occupied_core_assumption,
tx,
@@ -599,8 +571,8 @@ mod tests {
// we don't care too much about the commitments_hash right now, but let's ensure that we've calculated the
// correct descriptor
let expect_pov_hash = test_collation().proof_of_validity.hash();
let expect_validation_data_hash =
validation_data_hash::<BlockNumber>(&Default::default(), &Default::default());
let expect_validation_data_hash
= PersistedValidationData::<BlockNumber>::default().hash();
let expect_relay_parent = Hash::repeat_byte(4);
let expect_payload = collator_signature_payload(
&expect_relay_parent,
Expand All @@ -613,7 +585,7 @@ mod tests {
para_id: config.para_id,
relay_parent: expect_relay_parent,
collator: config.key.public(),
validation_data_hash: expect_validation_data_hash,
persisted_validation_data_hash: expect_validation_data_hash,
pov_hash: expect_pov_hash,
};
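
On the expected hash computed above: as is conventional for SCALE-encodable primitives, PersistedValidationData::hash() can be taken to be the Blake2-256 hash of the struct's SCALE encoding (stated here as an assumption for illustration, not verified against the collapsed lines). A self-contained sketch with a reduced, hypothetical stand-in struct:

use parity_scale_codec::Encode;
use sp_core::hashing::blake2_256;

// Hypothetical, reduced stand-in for PersistedValidationData; the real type
// carries the parent head data, relay-chain block number, and more.
#[derive(Encode, Default)]
struct DemoPersistedValidationData {
    parent_head: Vec<u8>,
    block_number: u32,
}

fn main() {
    let data = DemoPersistedValidationData::default();
    // Assumption: hashing is Blake2-256 over the SCALE encoding.
    let hash = blake2_256(&data.encode());
    println!("persisted_validation_data_hash = {:02x?}", hash);
}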

Expand All @@ -632,7 +604,7 @@ mod tests {
&collator_signature_payload(
&descriptor.relay_parent,
&descriptor.para_id,
&descriptor.validation_data_hash,
&descriptor.persisted_validation_data_hash,
&descriptor.pov_hash,
)
.as_ref(),