allow enable, warning & ignore + output improvement
Daanvdplas committed Oct 18, 2023
commit 8fc1fcaad8c8fc7e593ca11fb90ddd5fbd90d20d
15 changes: 15 additions & 0 deletions substrate/client/cli/src/arg_enums.rs
@@ -70,6 +70,21 @@ impl std::fmt::Display for WasmExecutionMethod {
}
}

/// What to do when an extrinsic's worst-case weight exceeds the max extrinsic weight
/// per block.
#[allow(missing_docs)]
#[derive(Debug, Clone, Copy, ValueEnum, PartialEq)]
#[value(rename_all = "kebab-case")]
pub enum SanityWeightCheck {
Error,
Warning,
Ignore,
}

/// The default [`SanityWeightCheck`].
pub const DEFAULT_SANITY_WEIGHT_CHECK: SanityWeightCheck =
SanityWeightCheck::Error;

/// Converts the execution method and instantiation strategy command line arguments
/// into an execution method which can be used internally.
pub fn execution_method_from_cli(
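For orientation, here is a minimal, self-contained sketch of the intended semantics of the three modes. The `handle_violation` helper and its message are hypothetical stand-ins for illustration only; the actual handling added to `writer.rs` appears further down in this diff.

```rust
use std::io::{Error, ErrorKind};

#[derive(Debug, Clone, Copy, PartialEq)]
pub enum SanityWeightCheck {
    Error,
    Warning,
    Ignore,
}

/// Hypothetical helper showing what each mode is meant to do when an
/// extrinsic's worst-case weight exceeds the max extrinsic weight.
fn handle_violation(mode: SanityWeightCheck, msg: &str) -> Result<(), Error> {
    match mode {
        // `ignore`: skip the check entirely.
        SanityWeightCheck::Ignore => Ok(()),
        // `warning`: report the violation but let the benchmark run succeed.
        SanityWeightCheck::Warning => {
            eprintln!("WARNING: {msg}");
            Ok(())
        },
        // `error` (the default): turn the violation into a hard failure.
        SanityWeightCheck::Error => Err(Error::new(ErrorKind::Other, msg.to_string())),
    }
}

fn main() {
    assert!(handle_violation(SanityWeightCheck::Warning, "weight exceeded").is_ok());
    assert!(handle_violation(SanityWeightCheck::Error, "weight exceeded").is_err());
}
```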
12 changes: 11 additions & 1 deletion substrate/client/cli/src/params/shared_params.rs
@@ -16,7 +16,7 @@
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.

use crate::arg_enums::TracingReceiver;
use crate::{SanityWeightCheck, DEFAULT_SANITY_WEIGHT_CHECK, arg_enums::TracingReceiver};
use clap::Args;
use sc_service::config::BasePath;
use std::path::PathBuf;
@@ -71,6 +71,16 @@ pub struct SharedParams {
/// Receiver to process tracing messages.
#[arg(long, value_name = "RECEIVER", value_enum, ignore_case = true, default_value_t = TracingReceiver::Log)]
pub tracing_receiver: TracingReceiver,

/// Sanity weight check for benchmarks: checks whether an extrinsic's worst-case weight exceeds
/// the max extrinsic weight per block.
#[arg(
long,
value_name = "OUTPUT",
value_enum,
default_value_t = DEFAULT_SANITY_WEIGHT_CHECK,
)]
pub sanity_weight_check: SanityWeightCheck,
}

impl SharedParams {
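To see what the new `SharedParams` field looks like on the command line, here is a self-contained sketch assuming clap 4's derive API; the `Cli` struct and binary are illustrative and not part of the commit.

```rust
use clap::{Parser, ValueEnum};

#[derive(Debug, Clone, Copy, ValueEnum, PartialEq)]
#[value(rename_all = "kebab-case")]
pub enum SanityWeightCheck {
    Error,
    Warning,
    Ignore,
}

#[derive(Parser)]
struct Cli {
    /// Sanity weight check for benchmarks.
    #[arg(long, value_name = "OUTPUT", value_enum, default_value_t = SanityWeightCheck::Error)]
    sanity_weight_check: SanityWeightCheck,
}

fn main() {
    // `--sanity-weight-check warning` parses to `SanityWeightCheck::Warning`;
    // omitting the flag falls back to the default, `SanityWeightCheck::Error`.
    let cli = Cli::parse();
    println!("sanity weight check mode: {:?}", cli.sanity_weight_check);
}
```

In other words, the benchmarking CLI gains `--sanity-weight-check <OUTPUT>` with possible values `error`, `warning` and `ignore`, defaulting to `error` (the value encoded by `DEFAULT_SANITY_WEIGHT_CHECK` in `arg_enums.rs`).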
@@ -557,7 +557,7 @@ impl PalletCmd {

// Sanity check for benchmarks. Checks whether an extrinsic's maximum weight (based on max
// component) exceeds the max extrinsic weight.
writer::sanity_weight_check(all_results, max_extrinsic_weight, db_weight, self.sanity_weight_check_warning)?;
writer::sanity_weight_check(all_results, max_extrinsic_weight, db_weight, self.shared_params.sanity_weight_check)?;

Ok(())
}
7 changes: 0 additions & 7 deletions substrate/utils/frame/benchmarking-cli/src/pallet/mod.rs
@@ -201,11 +201,4 @@ pub struct PalletCmd {
/// This exists only to restore legacy behaviour. It should never actually be needed.
#[arg(long)]
pub unsafe_overwrite_results: bool,

/// Sanity weight check for benchmarks. Checks whether an extrinsic's maximum weight exceeds the max
/// extrinsic weight. If present, it takes the max value of the component range.
///
/// Default will error out if max extrinsic weight is exceeded.
#[arg(long)]
pub sanity_weight_check_warning: bool,
}
119 changes: 65 additions & 54 deletions substrate/utils/frame/benchmarking-cli/src/pallet/writer.rs
@@ -19,14 +19,14 @@

use std::{
collections::{HashMap, HashSet},
fmt::Write,
fs,
path::PathBuf,
};

use inflector::Inflector;
use itertools::Itertools;
use serde::Serialize;
use sc_cli::SanityWeightCheck;

use crate::{
pallet::command::{ComponentRange, PovEstimationMode, PovModesMap},
@@ -184,12 +184,12 @@ pub(crate) fn sanity_weight_check(
all_results: HashMap<(String, String), Vec<BenchmarkData>>,
max_extrinsic_weight: Weight,
db_weight: RuntimeDbWeight,
sanity_weight_check_warning: bool,
sanity_weight_check: SanityWeightCheck,
) -> Result<(), std::io::Error> {
let mut err_message = String::new();
let mut sanity_weight_check_failed = false;

// Helper function to return max component value.
if sanity_weight_check == SanityWeightCheck::Ignore {
return Ok(());
}
// Helper function to return the max component value (i.e. the max complexity parameter).
fn max_component(parameter: &ComponentSlope, component_ranges: &Vec<ComponentRange>) -> u64 {
for component in component_ranges {
if parameter.name == component.name {
@@ -199,65 +199,86 @@ pub(crate) fn sanity_weight_check(
0
}

println!("\n\x1B[1mSanity Weight Check 🧐:\x1B[0m each extrinsic's weight function is evaluated \
in the worst case scenario and compared with the maximum extrinsic weight (the maximum weight \
that a single extrinsic with `DispatchClass::Normal` may take up in a block). In other words, \
each extrinsic is checked for whether it would fit in a block that is otherwise empty of \
`DispatchClass::Normal` extrinsics.\n\n\x1B[4mResults:\x1B[0m\n"
);
let mut sanity_weight_check_passed = true;
// Loop through all benchmark results.
for ((_, _), results) in all_results.iter() {
// Per pallet where there can be multiple instances of a pallet.
// Per pallet, because there can be multiple instances of a pallet.
for result in results {
// Constant `ref_time` & `pov_size`.
let mut total_weight = Weight::from_parts(
result.base_weight.try_into().unwrap(),
result.base_calculated_proof_size.try_into().unwrap(),
);
// `ref_time` multiplied by complexity parameter.
for component in &result.component_weight {
total_weight = total_weight.saturating_add(
Weight::from_parts(component.slope.try_into().unwrap(), 0)
.saturating_mul(max_component(&component, &result.component_ranges)),
);
}
// Constant storage reads.
total_weight =
total_weight.saturating_add(db_weight.reads(result.base_reads.try_into().unwrap()));
// Storage reads multiplied by complexity parameter.
for component in &result.component_reads {
total_weight = total_weight.saturating_add(
db_weight
.reads(component.slope.try_into().unwrap())
.saturating_mul(max_component(&component, &result.component_ranges)),
);
}
// Constant storage writes.
total_weight =
total_weight.saturating_add(db_weight.writes(result.base_writes.try_into().unwrap()));
// Storage writes multiplied by complexity parameter.
for component in &result.component_writes {
total_weight = total_weight.saturating_add(
db_weight
.writes(component.slope.try_into().unwrap())
.saturating_mul(max_component(&component, &result.component_ranges)),
);
}
// `pov_size` multiplied by complexity parameter.
for component in &result.component_calculated_proof_size {
total_weight = total_weight.saturating_add(
Weight::from_parts(0, component.slope.try_into().unwrap())
.saturating_mul(max_component(&component, &result.component_ranges)),
);
}
Contributor: kinda baffled that all of this math to calculate the final weight of a weight-fn, for a given parameter, needs to be re-created here. Is this really the only place where we have this logic?

Contributor Author: This logic only exists in the benchmark templates (i.e. here & here)
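For readers following this thread: the hunk above rebuilds an extrinsic's worst-case weight from the fitted benchmark results. Writing ĉ_i for the maximum value of component i's range, the quantity compared against the max extrinsic weight is roughly:

```
worst_case.ref_time   = base_weight
                      + Σ_i slope_i · ĉ_i
                      + db.read  · (base_reads  + Σ_i read_slope_i  · ĉ_i)
                      + db.write · (base_writes + Σ_i write_slope_i · ĉ_i)

worst_case.proof_size = base_calculated_proof_size + Σ_i proof_slope_i · ĉ_i
```

An extrinsic fails the check if either component exceeds the corresponding part of the max extrinsic weight; the percentages printed below are exactly these two ratios.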

// Compare the worst-case extrinsic weight against the maximum extrinsic weight.
if total_weight.ref_time() > max_extrinsic_weight.ref_time() ||
total_weight.proof_size() > max_extrinsic_weight.proof_size()
{
sanity_weight_check_failed = true;
let error = format!("\nExtrinsic {} exceeds the max extrinsic weight", result.name);
println!("\u{1b}[31mWARNING!!!\u{1b}[39m {:?}", error);
println!("Extrinsic {}: {:?}\nPercentage of max extrinsic weight: {:.2}% (ref_time), {:.2}% (proof_size)",
result.name,
total_weight,
(total_weight.ref_time() as f64 / max_extrinsic_weight.ref_time() as f64) * 100.0,
(total_weight.proof_size() as f64 / max_extrinsic_weight.proof_size() as f64) * 100.0,
);
write!(&mut err_message, "{}", error);
total_weight.proof_size() > max_extrinsic_weight.proof_size() {
sanity_weight_check_passed = false;
println!("\x1B[31m\x1B[1mWARNING!!!\x1B[0m");
println!("\x1B[31mThe following extrinsic exceeds the maximum extrinsic weight:\x1B[0m");
}
println!("- \x1B[1m'{}'\x1B[0m: {:?}\nPercentage of max extrinsic weight: {:.2}% (ref_time), {:.2}% (proof_size)\n",
result.name,
total_weight,
(total_weight.ref_time() as f64 / max_extrinsic_weight.ref_time() as f64) * 100.0,
(total_weight.proof_size() as f64 / max_extrinsic_weight.proof_size() as f64) * 100.0,
);
}
}
if sanity_weight_check_failed && !sanity_weight_check_warning {
Err(io_error(&err_message))
} else {
Ok(())
match sanity_weight_check_passed {
false => {
println!("\x1B[31mOne or more extrinsics failed the Sanity Weight Check; please review \
the extrinsics' logic and/or the associated benchmark functions.\x1B[0m\n");
if sanity_weight_check == SanityWeightCheck::Error {
return Err(io_error(&String::from("One or more extrinsics exceed the maximum extrinsic weight")));
}
},
true => {
println!("\x1B[32mYour extrinsics passed the Sanity Weight Check 😃!\x1B[0m\n");
},
}
Ok(())
}

// Get an iterator of errors.
@@ -1031,9 +1052,6 @@ mod test {
&AnalysisChoice::MedianSlopes,
1_000_000,
0,
Weight::MAX,
RuntimeDbWeight { read: 1000, write: 1000 },
false,
)
.unwrap();
let result =
@@ -1091,9 +1109,6 @@ mod test {
&AnalysisChoice::MedianSlopes,
1_000_000,
0,
Weight::MAX,
RuntimeDbWeight { read: 1000, write: 1000 },
false,
)
.unwrap();
let result =
@@ -1151,9 +1166,6 @@ mod test {
&AnalysisChoice::MedianSlopes,
1_000_000,
0,
Weight::MAX,
RuntimeDbWeight { read: 1000, write: 1000 },
false,
)
.unwrap();
let result =
@@ -1209,9 +1221,6 @@ mod test {
&AnalysisChoice::MedianSlopes,
1_000_000,
0,
Weight::MAX,
RuntimeDbWeight { read: 1000, write: 1000 },
false,
)
.unwrap();
let result =
@@ -1269,9 +1278,6 @@ mod test {
&AnalysisChoice::MedianSlopes,
1_000_000,
0,
Weight::MAX,
RuntimeDbWeight { read: 1000, write: 1000 },
false,
)
.unwrap();
let result =
@@ -1300,9 +1306,6 @@ mod test {
&AnalysisChoice::MedianSlopes,
1_000_000,
0,
Weight::MAX,
RuntimeDbWeight { read: 1000, write: 1000 },
false,
)
.unwrap();

@@ -1340,7 +1343,7 @@ mod test {

#[test]
fn sanity_check_works() {
assert!(map_results(
let mapped_results = map_results(
&[test_data(b"first", b"first", BenchmarkParameter::a, 10, 3),],
&test_storage_info(),
&Default::default(),
@@ -1350,11 +1353,28 @@ mod test {
&AnalysisChoice::MedianSlopes,
1_000_000,
0,
).unwrap();

assert!(sanity_weight_check(
mapped_results.clone(),
Weight::from_parts(20_000, 1_000_000),
RuntimeDbWeight { read: 1000, write: 1000 },
true,
)
.is_err());
SanityWeightCheck::Error,
).is_err());

assert!(sanity_weight_check(
mapped_results.clone(),
Weight::from_parts(20_000, 1_000_000),
RuntimeDbWeight { read: 1000, write: 1000 },
SanityWeightCheck::Warning,
).is_ok());

assert!(sanity_weight_check(
mapped_results,
Weight::from_parts(20_000, 1_000_000),
RuntimeDbWeight { read: 1000, write: 1000 },
SanityWeightCheck::Ignore,
).is_ok());
}

#[test]
Expand All @@ -1369,9 +1389,6 @@ mod test {
&AnalysisChoice::MedianSlopes,
1_000_000,
2,
Weight::MAX,
RuntimeDbWeight { read: 1000, write: 1000 },
false,
)
.unwrap();
let with_layer = &mapped_results
Expand All @@ -1387,9 +1404,6 @@ mod test {
&AnalysisChoice::MedianSlopes,
1_000_000,
0,
Weight::MAX,
RuntimeDbWeight { read: 1000, write: 1000 },
false,
)
.unwrap();
let without_layer = &mapped_results
Expand Down Expand Up @@ -1423,9 +1437,6 @@ mod test {
&AnalysisChoice::MedianSlopes,
1_000_000,
0,
Weight::MAX,
RuntimeDbWeight { read: 1000, write: 1000 },
false,
)
.unwrap();
