-
Notifications
You must be signed in to change notification settings - Fork 2.7k
Add JSON format to import blocks and set it as default #5816
Changes from 61 commits
0bb1eb7
b291ff1
49b0144
cb6aded
155695d
e59e1ad
7b729fe
69a8bff
ab173a5
e22ed9b
00fbd6c
e7a3c4d
56d49fa
ff75523
7c66af2
419b0ab
be1d626
a67a75d
e1ce1aa
30f60de
ecf8729
7b8f34c
1a9e910
61260b5
4ca8fe6
c986b44
8b3a17a
c57415b
32eb4af
412e942
2266d67
f62d5e0
ad44445
17f4c74
994157c
4d1234e
37ae67b
757693d
9865fae
1150f12
377823c
576606c
9b4658a
76a5954
f848a05
d924935
3d00933
fb040fc
4f9bcf8
9a134eb
fdd3683
d98c92d
344286e
5bdc4fd
5d1f1e3
768c309
a17cc1e
9d7d7a1
03a5cc8
ef02ec3
fb91c53
4f067ef
0c47a45
196de60
731f1b7
f43f3de
cdc1290
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,209 @@ | ||
| // Copyright 2020 Parity Technologies (UK) Ltd. | ||
| // This file is part of Substrate. | ||
|
|
||
| // Substrate is free software: you can redistribute it and/or modify | ||
| // it under the terms of the GNU General Public License as published by | ||
| // the Free Software Foundation, either version 3 of the License, or | ||
| // (at your option) any later version. | ||
|
|
||
| // Substrate is distributed in the hope that it will be useful, | ||
| // but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
| // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||
| // GNU General Public License for more details. | ||
|
|
||
| // You should have received a copy of the GNU General Public License | ||
| // along with Substrate. If not, see <http://www.gnu.org/licenses/>. | ||
|
|
||
| #![cfg(unix)] | ||
|
|
||
| use assert_cmd::cargo::cargo_bin; | ||
| use std::{process::Command, fs, path::PathBuf}; | ||
| use tempfile::{tempdir, TempDir}; | ||
| use regex::Regex; | ||
|
|
||
| pub mod common; | ||
|
|
||
/// Returns `true` when the captured node output contains the word "Error".
fn contains_error(output: &str) -> bool {
	output.contains("Error")
}
|
|
||
/// Helper struct to execute the export/import/revert tests.
/// The fields are paths to a temporary directory
struct ExportImportRevertExecutor<'a> {
	// Directory the dev node ran in; all other paths live underneath it.
	base_path: &'a TempDir,
	// File `export-blocks` writes to and `import-blocks` reads from.
	exported_blocks_file: &'a PathBuf,
	// Location of the node database (created as `base_path/db` by the test).
	db_path: &'a PathBuf,
	// Number of blocks the last `run_export` exported; `None` until an
	// export has run (and reset after each import check).
	num_exported_blocks: Option<u64>,
}
|
|
||
/// Format options for export / import commands.
enum FormatOpt {
	// JSON is the default; no extra CLI flag is passed.
	Json,
	// Selected by passing `--binary` to the sub-command.
	Binary,
}
|
|
||
/// Command corresponding to the different commands we would like to run.
///
/// NOTE(review): this enum shadows the `std::process::Command` imported at the
/// top of the file — any use of the process API below must be fully qualified
/// as `std::process::Command`, or this enum should be renamed (e.g.
/// `SubCommand`) together with all its uses.
enum Command {
	ExportBlocks,
	ImportBlocks,
}
|
|
||
| impl ToString for Command { | ||
| fn to_string(&self) -> String { | ||
| match self { | ||
| Command::ExportBlocks => String::from("export-blocks"), | ||
| Command::ImportBlocks => String::from("import-blocks"), | ||
| } | ||
| } | ||
| } | ||
|
|
||
impl<'a> ExportImportRevertExecutor<'a> {
	/// Creates a new executor over the given temporary directory and paths.
	///
	/// No export has happened yet, so `num_exported_blocks` starts out `None`.
	fn new(
		base_path: &'a TempDir,
		exported_blocks_file: &'a PathBuf,
		db_path: &'a PathBuf,
	) -> Self {
		Self { base_path, exported_blocks_file, db_path, num_exported_blocks: None }
	}
|
|
||
| /// Helper method to run a command. Returns a string corresponding to what has been logged. | ||
| fn run_block_command(&self, | ||
| command: Command, | ||
| format_opt: FormatOpt, | ||
| expected_to_fail: bool | ||
| ) -> String { | ||
| let command = command.to_string(); | ||
| // Adding "--binary" if need be. | ||
| let arguments: Vec<&str> = match format_opt { | ||
| FormatOpt::Binary => vec![&command, "--dev", "--pruning", "archive", "--binary", "-d"], | ||
| FormatOpt::Json => vec![&command, "--dev", "--pruning", "archive", "-d"], | ||
| }; | ||
|
|
||
| let tmp: TempDir; | ||
| // Setting base_path to be a temporary folder if we are importing blocks. | ||
| // This allows us to make sure we are importing from scratch. | ||
| let base_path = match command { | ||
| Command::ExportBlocks => &self.base_path.path(), | ||
| Command::ImportBlocks => { | ||
| tmp = tempdir().unwrap(); | ||
| tmp.path() | ||
| } | ||
| }; | ||
|
|
||
| // Running the command and capturing the output. | ||
| let output = Command::new(cargo_bin("substrate")) | ||
| .args(&arguments) | ||
| .arg(&base_path) | ||
| .arg(&self.exported_blocks_file) | ||
| .output() | ||
| .unwrap(); | ||
|
|
||
| let logged_output = String::from_utf8_lossy(&output.stderr).to_string(); | ||
|
|
||
| if expected_to_fail { | ||
| // Checking that we did indeed find an error. | ||
| assert!(contains_error(&logged_output), "expected to error but did not error!"); | ||
| assert!(!output.status.success()); | ||
| } else { | ||
| // Making sure no error were logged. | ||
| assert!(!contains_error(&logged_output), "expected not to error but error'd!"); | ||
| assert!(output.status.success()); | ||
| } | ||
|
|
||
| logged_output | ||
| } | ||
|
|
||
| /// Runs the `export-blocks` command. | ||
| fn run_export(&mut self, fmt_opt: FormatOpt) { | ||
| let log = self.run_block_command(Command::ExportBlocks, fmt_opt, false); | ||
|
|
||
| // Using regex to find out how many block we exported. | ||
| let re = Regex::new(r"Exporting blocks from #\d* to #(?P<exported_blocks>\d*)").unwrap(); | ||
| let caps = re.captures(&log).unwrap(); | ||
| // Saving the number of blocks we've exported for further use. | ||
| self.num_exported_blocks = Some(caps["exported_blocks"].parse::<u64>().unwrap()); | ||
|
|
||
| let metadata = fs::metadata(&self.exported_blocks_file).unwrap(); | ||
| assert!(metadata.len() > 0, "file exported_blocks should not be empty"); | ||
|
|
||
| let _ = fs::remove_dir_all(&self.db_path); | ||
|
Member
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Why delete files manually if
Contributor
Author
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. So this was part of the original test and I believe the main idea is to run the node, export the blocks, and then delete the DB to make sure that Maybe what we should add is a test to see that import blocks actually imported blocks? Not sure how to do that, I wouldn't really know what we could query...
Contributor
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. @arkpar Yes that was my code actually. First create the temp directory, export, delete the directory, import and then only let the destructor clean-up. It was done on purpose but you could also create another tempdir instead of reusing one, it might make the code more understandable.
Contributor
Author
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. So keep in mind that now that we've changed the test flow, we need to keep the same directory active for the whole testing period, in order to keep the db :) |
||
| } | ||
|
|
||
| /// Runs the `import-blocks` command, asserting that an error was found or | ||
| /// not depending on `expected_to_fail`. | ||
| fn run_import(&mut self, fmt_opt: FormatOpt, expected_to_fail: bool) { | ||
| let log = self.run_block_command(Command::ImportBlocks, fmt_opt, expected_to_fail); | ||
|
|
||
| if !expected_to_fail { | ||
| // Using regex to find out how much block we imported, | ||
| // and what's the best current block. | ||
| let re = Regex::new(r"Imported (?P<imported>\d*) blocks. Best: #(?P<best>\d*)").unwrap(); | ||
| let caps = re.captures(&log).expect("capture should have succeeded"); | ||
| let imported = caps["imported"].parse::<u64>().unwrap(); | ||
| let best = caps["best"].parse::<u64>().unwrap(); | ||
|
|
||
| assert_eq!( | ||
| imported, | ||
| best, | ||
| "numbers of blocks imported and best number differs" | ||
| ); | ||
| assert_eq!( | ||
| best, | ||
| self.num_exported_blocks.expect("number of exported blocks cannot be None; qed"), | ||
| "best block number and number of expected blocks should not differ" | ||
| ); | ||
| } | ||
| self.num_exported_blocks = None; | ||
| } | ||
|
|
||
| /// Runs the `revert` command. | ||
| fn run_revert(&self) { | ||
| let output = Command::new(cargo_bin("substrate")) | ||
| .args(&["revert", "--dev", "--pruning", "archive", "-d"]) | ||
| .arg(&self.base_path.path()) | ||
| .output() | ||
| .unwrap(); | ||
|
|
||
| let logged_output = String::from_utf8_lossy(&output.stderr).to_string(); | ||
|
|
||
| // Reverting should not log any error. | ||
| assert!(!contains_error(&logged_output)); | ||
| // Command should never fail. | ||
| assert!(output.status.success()); | ||
| } | ||
|
|
||
	/// Helper function that runs the whole export / import / revert flow and checks for errors.
	///
	/// `expected_to_fail` applies to the import step only; the export and
	/// revert steps are always expected to succeed.
	fn run(&mut self, export_fmt: FormatOpt, import_fmt: FormatOpt, expected_to_fail: bool) {
		self.run_export(export_fmt);
		self.run_import(import_fmt, expected_to_fail);
		self.run_revert();
	}
}
|
|
||
#[test]
fn export_import_revert() {
	let base_path = tempdir().expect("could not create a temp dir");
	let exported_blocks_file = base_path.path().join("exported_blocks");
	let db_path = base_path.path().join("db");

	// Let a dev node run for a while first so there are blocks to export.
	common::run_dev_node_for_a_while(base_path.path());

	let mut executor =
		ExportImportRevertExecutor::new(&base_path, &exported_blocks_file, &db_path);

	// Exercise every export/import format combination: matching formats must
	// succeed, mismatched formats must fail.
	// Binary and binary should work.
	executor.run(FormatOpt::Binary, FormatOpt::Binary, false);
	// Binary and JSON should fail.
	executor.run(FormatOpt::Binary, FormatOpt::Json, true);
	// JSON and JSON should work.
	executor.run(FormatOpt::Json, FormatOpt::Json, false);
	// JSON and binary should fail.
	executor.run(FormatOpt::Json, FormatOpt::Binary, true);
}
This file was deleted.
Uh oh!
There was an error while loading. Please reload this page.