diff --git a/hash-db/Cargo.toml b/hash-db/Cargo.toml
index 0283eb8a..0c3f4416 100644
--- a/hash-db/Cargo.toml
+++ b/hash-db/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "hash-db"
-version = "0.11.0"
+version = "0.12.0"
 authors = ["Parity Technologies <admin@parity.io>"]
 description = "Trait for hash-keyed databases."
 license = "Apache-2.0"
diff --git a/hash-db/src/lib.rs b/hash-db/src/lib.rs
index 262e4fbc..e71af41d 100644
--- a/hash-db/src/lib.rs
+++ b/hash-db/src/lib.rs
@@ -20,8 +20,6 @@
 #[cfg(feature = "std")]
 use std::fmt::Debug;
 #[cfg(feature = "std")]
-use std::collections::HashMap;
-#[cfg(feature = "std")]
 use std::hash;
 #[cfg(feature = "std")]
 pub trait DebugIfStd: Debug {}
@@ -102,22 +100,22 @@ impl<'a, K, V> PlainDBRef<K, V> for &'a mut PlainDB<K, V> {
 pub trait HashDB<H: Hasher, T>: Send + Sync + AsHashDB<H, T> {
 	/// Look up a given hash into the bytes that hash to it, returning None if the
 	/// hash is not known.
-	fn get(&self, key: &H::Out) -> Option<T>;
+	fn get(&self, key: &H::Out, prefix: &[u8]) -> Option<T>;

 	/// Check for the existance of a hash-key.
-	fn contains(&self, key: &H::Out) -> bool;
+	fn contains(&self, key: &H::Out, prefix: &[u8]) -> bool;

 	/// Insert a datum item into the DB and return the datum's hash for a later lookup. Insertions
 	/// are counted and the equivalent number of `remove()`s must be performed before the data
 	/// is considered dead.
-	fn insert(&mut self, value: &[u8]) -> H::Out;
+	fn insert(&mut self, prefix: &[u8], value: &[u8]) -> H::Out;

 	/// Like `insert()`, except you provide the key and the data is all moved.
-	fn emplace(&mut self, key: H::Out, value: T);
+	fn emplace(&mut self, key: H::Out, prefix: &[u8], value: T);

 	/// Remove a datum previously inserted. Insertions can be "owed" such that the same number of `insert()`s may
 	/// happen without the data being eventually being inserted into the DB. It can be "owed" more than once.
-	fn remove(&mut self, key: &H::Out);
+	fn remove(&mut self, key: &H::Out, prefix: &[u8]);
 }

 /// Trait for immutable reference of HashDB.
@@ -125,22 +123,22 @@ pub trait HashDBRef<H: Hasher, T> {
 	/// Look up a given hash into the bytes that hash to it, returning None if the
 	/// hash is not known.
-	fn get(&self, key: &H::Out) -> Option<T>;
+	fn get(&self, key: &H::Out, prefix: &[u8]) -> Option<T>;

 	/// Check for the existance of a hash-key.
-	fn contains(&self, key: &H::Out) -> bool;
+	fn contains(&self, key: &H::Out, prefix: &[u8]) -> bool;
 }

 #[cfg(feature = "std")]
 impl<'a, H: Hasher, T> HashDBRef<H, T> for &'a HashDB<H, T> {
-	fn get(&self, key: &H::Out) -> Option<T> { HashDB::get(*self, key) }
-	fn contains(&self, key: &H::Out) -> bool { HashDB::contains(*self, key) }
+	fn get(&self, key: &H::Out, prefix: &[u8]) -> Option<T> { HashDB::get(*self, key, prefix) }
+	fn contains(&self, key: &H::Out, prefix: &[u8]) -> bool { HashDB::contains(*self, key, prefix) }
 }

 #[cfg(feature = "std")]
 impl<'a, H: Hasher, T> HashDBRef<H, T> for &'a mut HashDB<H, T> {
-	fn get(&self, key: &H::Out) -> Option<T> { HashDB::get(*self, key) }
-	fn contains(&self, key: &H::Out) -> bool { HashDB::contains(*self, key) }
+	fn get(&self, key: &H::Out, prefix: &[u8]) -> Option<T> { HashDB::get(*self, key, prefix) }
+	fn contains(&self, key: &H::Out, prefix: &[u8]) -> bool { HashDB::contains(*self, key, prefix) }
 }

 /// Upcast trait for HashDB.
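Note (not part of the patch): the effect of the new `prefix` parameter is easiest to see through `memory-db`. The sketch below is illustrative only; it is written against the 0.12 API as changed above and assumes the `PrefixedKey` key function and the test-support `keccak-hasher` crate that appear further down in this diff. With `HashKey` the prefix would be ignored when forming the backing-map key, and the final assertion would not hold.

```rust
// Illustrative sketch: how a caller uses the prefix-aware HashDB API.
use hash_db::HashDB;
use keccak_hasher::KeccakHasher;
use memory_db::{MemoryDB, PrefixedKey};

fn main() {
	let mut db = MemoryDB::<KeccakHasher, PrefixedKey<KeccakHasher>, Vec<u8>>::default();

	// The returned key is still just the hash of the value, so identical
	// payloads hash to the same key regardless of the prefix they carry...
	let left = db.insert(b"left", b"same payload");
	let right = db.insert(b"right", b"same payload");
	assert_eq!(left, right);

	// ...but with `PrefixedKey` the backing-map entry also encodes the prefix,
	// so lookups must supply the prefix the value was stored under.
	assert!(db.contains(&left, b"left"));
	assert!(db.contains(&right, b"right"));
	assert!(!db.contains(&left, b"elsewhere"));
}
```

This is what lets a prefixed backend keep apart identical node payloads that live at different positions in the trie, which a purely hash-keyed store cannot do.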
diff --git a/hash256-std-hasher/Cargo.toml b/hash256-std-hasher/Cargo.toml index f0871c43..87d4ff07 100644 --- a/hash256-std-hasher/Cargo.toml +++ b/hash256-std-hasher/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "hash256-std-hasher" description = "Standard library hasher for 256-bit prehashed keys." -version = "0.11.0" +version = "0.12.0" authors = ["Parity Technologies "] license = "Apache-2.0" homepage = "https://github.com/paritytech/trie" diff --git a/memory-db/Cargo.toml b/memory-db/Cargo.toml index d6800084..6b741278 100644 --- a/memory-db/Cargo.toml +++ b/memory-db/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "memory-db" -version = "0.11.0" +version = "0.12.0" authors = ["Parity Technologies "] description = "In-memory implementation of hash-db, useful for tests" repository = "https://github.com/paritytech/parity-common" @@ -8,10 +8,10 @@ license = "Apache-2.0" [dependencies] heapsize = "0.4" -hash-db = { path = "../hash-db", version = "0.11.0"} +hash-db = { path = "../hash-db", version = "0.12.0"} [dev-dependencies] -keccak-hasher = { path = "../test-support/keccak-hasher", version = "0.11.0"} +keccak-hasher = { path = "../test-support/keccak-hasher", version = "0.12.0"} criterion = "0.2.8" [[bench]] diff --git a/memory-db/src/lib.rs b/memory-db/src/lib.rs index 96051340..0516c8b2 100644 --- a/memory-db/src/lib.rs +++ b/memory-db/src/lib.rs @@ -22,13 +22,8 @@ use hash_db::{HashDB, HashDBRef, PlainDB, PlainDBRef, Hasher as KeyHasher, AsHas use heapsize::HeapSizeOf; use std::collections::hash_map::Entry; use std::collections::HashMap; -use std::hash; use std::mem; -// Backing `HashMap` parametrized with a `Hasher` for the keys `Hasher::Out` and the `Hasher::StdHasher` -// as hash map builder. -type FastMap = HashMap<::Out, T, hash::BuildHasherDefault<::StdHasher>>; - /// Reference-counted memory-based `HashDB` implementation. /// /// Use `new()` to create a new database. 
Insert items with `insert()`, remove items @@ -44,67 +39,118 @@ type FastMap = HashMap<::Out, T, hash::BuildHasherDefault< /// /// use hash_db::{Hasher, HashDB}; /// use keccak_hasher::KeccakHasher; -/// use memory_db::MemoryDB; +/// use memory_db::{MemoryDB, HashKey}; /// fn main() { -/// let mut m = MemoryDB::>::default(); +/// let mut m = MemoryDB::, Vec>::default(); /// let d = "Hello world!".as_bytes(); /// -/// let k = m.insert(d); -/// assert!(m.contains(&k)); -/// assert_eq!(m.get(&k).unwrap(), d); +/// let k = m.insert(&[], d); +/// assert!(m.contains(&k, &[])); +/// assert_eq!(m.get(&k, &[]).unwrap(), d); /// -/// m.insert(d); -/// assert!(m.contains(&k)); +/// m.insert(&[], d); +/// assert!(m.contains(&k, &[])); /// -/// m.remove(&k); -/// assert!(m.contains(&k)); +/// m.remove(&k, &[]); +/// assert!(m.contains(&k, &[])); /// -/// m.remove(&k); -/// assert!(!m.contains(&k)); +/// m.remove(&k, &[]); +/// assert!(!m.contains(&k, &[])); /// -/// m.remove(&k); -/// assert!(!m.contains(&k)); +/// m.remove(&k, &[]); +/// assert!(!m.contains(&k, &[])); /// -/// m.insert(d); -/// assert!(!m.contains(&k)); +/// m.insert(&[], d); +/// assert!(!m.contains(&k, &[])); -/// m.insert(d); -/// assert!(m.contains(&k)); -/// assert_eq!(m.get(&k).unwrap(), d); +/// m.insert(&[], d); +/// assert!(m.contains(&k, &[])); +/// assert_eq!(m.get(&k, &[]).unwrap(), d); /// -/// m.remove(&k); -/// assert!(!m.contains(&k)); +/// m.remove(&k, &[]); +/// assert!(!m.contains(&k, &[])); /// } /// ``` #[derive(Clone, PartialEq)] -pub struct MemoryDB { - data: FastMap, +pub struct MemoryDB + where + H: KeyHasher, + KF: KeyFunction, +{ + data: HashMap, hashed_null_node: H::Out, null_node_data: T, + _kf: ::std::marker::PhantomData, +} + +pub trait KeyFunction { + type Key: Send + Sync + Clone + std::hash::Hash + std::cmp::Eq ; + + fn key(hash: &H::Out, prefix: &[u8]) -> Self::Key; +} + + +/// Make database key from hash and prefix. +pub fn prefixed_key(key: &H::Out, prefix: &[u8]) -> Vec { + let mut prefixed_key = Vec::with_capacity(key.as_ref().len() + prefix.len()); + prefixed_key.extend_from_slice(prefix); + prefixed_key.extend_from_slice(key.as_ref()); + prefixed_key +} + +/// Make database key from hash only. +pub fn hash_key(key: &H::Out, _prefix: &[u8]) -> H::Out { + key.clone() +} + +/// Key function that only uses the hash +pub struct HashKey(std::marker::PhantomData); + +impl KeyFunction for HashKey { + type Key = H::Out; + + fn key(hash: &H::Out, prefix: &[u8]) -> H::Out { + hash_key::(hash, prefix) + } } -impl<'a, H, T> Default for MemoryDB +/// Key function that concatenates prefix and hash. +pub struct PrefixedKey(std::marker::PhantomData); + +impl KeyFunction for PrefixedKey { + type Key = Vec; + + fn key(hash: &H::Out, prefix: &[u8]) -> Vec { + prefixed_key::(hash, prefix) + } +} + +impl<'a, H, KF, T> Default for MemoryDB where H: KeyHasher, - T: From<&'a [u8]> + T: From<&'a [u8]>, + KF: KeyFunction, { fn default() -> Self { Self::from_null_node(&[0u8][..], [0u8][..].into()) } } -impl MemoryDB +/// Create a new `MemoryDB` from a given null key/data +impl MemoryDB where H: KeyHasher, T: Default, + KF: KeyFunction, { /// Remove an element and delete it from storage if reference count reaches zero. /// If the value was purged, return the old value. 
- pub fn remove_and_purge(&mut self, key: &::Out) -> Option { + pub fn remove_and_purge(&mut self, key: &::Out, prefix: &[u8]) -> Option { if key == &self.hashed_null_node { return None; } - match self.data.entry(key.clone()) { + let key = KF::key(key, prefix); + match self.data.entry(key) { Entry::Occupied(mut entry) => if entry.get().1 == 1 { Some(entry.remove().0) @@ -120,23 +166,24 @@ where } } -impl<'a, H: KeyHasher, T> MemoryDB where T: From<&'a [u8]> { +impl<'a, H: KeyHasher, KF, T> MemoryDB +where + H: KeyHasher, + T: From<&'a [u8]>, + KF: KeyFunction, +{ /// Create a new `MemoryDB` from a given null key/data pub fn from_null_node(null_key: &'a [u8], null_node_data: T) -> Self { MemoryDB { - data: FastMap::::default(), + data: HashMap::default(), hashed_null_node: H::hash(null_key), null_node_data, + _kf: Default::default(), } } - /// Create a new `MemoryDB` from a given null key/data pub fn new(data: &'a [u8]) -> Self { - MemoryDB { - data: FastMap::::default(), - hashed_null_node: H::hash(data), - null_node_data: data.into(), - } + Self::from_null_node(data, data.into()) } /// Clear all data from the database. @@ -149,15 +196,15 @@ impl<'a, H: KeyHasher, T> MemoryDB where T: From<&'a [u8]> { /// /// use hash_db::{Hasher, HashDB}; /// use keccak_hasher::KeccakHasher; - /// use memory_db::MemoryDB; + /// use memory_db::{MemoryDB, HashKey}; /// /// fn main() { - /// let mut m = MemoryDB::>::default(); + /// let mut m = MemoryDB::, Vec>::default(); /// let hello_bytes = "Hello world!".as_bytes(); - /// let hash = m.insert(hello_bytes); - /// assert!(m.contains(&hash)); + /// let hash = m.insert(&[], hello_bytes); + /// assert!(m.contains(&hash, &[])); /// m.clear(); - /// assert!(!m.contains(&hash)); + /// assert!(!m.contains(&hash, &[])); /// } /// ``` pub fn clear(&mut self) { @@ -170,8 +217,8 @@ impl<'a, H: KeyHasher, T> MemoryDB where T: From<&'a [u8]> { } /// Return the internal map of hashes to data, clearing the current state. - pub fn drain(&mut self) -> FastMap { - mem::replace(&mut self.data, FastMap::::default()) + pub fn drain(&mut self) -> HashMap { + mem::replace(&mut self.data, Default::default()) } /// Grab the raw information associated with a key. Returns None if the key @@ -179,11 +226,11 @@ impl<'a, H: KeyHasher, T> MemoryDB where T: From<&'a [u8]> { /// /// Even when Some is returned, the data is only guaranteed to be useful /// when the refs > 0. - pub fn raw(&self, key: &::Out) -> Option<(&T, i32)> { + pub fn raw(&self, key: &::Out, prefix: &[u8]) -> Option<(&T, i32)> { if key == &self.hashed_null_node { return Some((&self.null_node_data, 1)); } - self.data.get(key).map(|(value, count)| (value, *count)) + self.data.get(&KF::key(key, prefix)).map(|(value, count)| (value, *count)) } /// Consolidate all the entries of `other` into `self`. @@ -205,10 +252,10 @@ impl<'a, H: KeyHasher, T> MemoryDB where T: From<&'a [u8]> { } /// Get the keys in the database together with number of underlying references. 
- pub fn keys(&self) -> HashMap { + pub fn keys(&self) -> HashMap { self.data.iter() .filter_map(|(k, v)| if v.1 != 0 { - Some((*k, v.1)) + Some((k.clone(), v.1)) } else { None }) @@ -216,10 +263,11 @@ impl<'a, H: KeyHasher, T> MemoryDB where T: From<&'a [u8]> { } } -impl MemoryDB +impl MemoryDB where H: KeyHasher, T: HeapSizeOf, + KF: KeyFunction, { /// Returns the size of allocated heap memory pub fn mem_used(&self) -> usize { @@ -228,27 +276,29 @@ where } } -impl PlainDB for MemoryDB +impl PlainDB for MemoryDB where H: KeyHasher, T: Default + PartialEq + for<'a> From<&'a [u8]> + Clone + Send + Sync, + KF: Send + Sync + KeyFunction, + KF::Key: std::borrow::Borrow<[u8]> + for <'a> From<&'a [u8]>, { fn get(&self, key: &H::Out) -> Option { - match self.data.get(key) { + match self.data.get(key.as_ref()) { Some(&(ref d, rc)) if rc > 0 => Some(d.clone()), _ => None } } fn contains(&self, key: &H::Out) -> bool { - match self.data.get(key) { + match self.data.get(key.as_ref()) { Some(&(_, x)) if x > 0 => true, _ => false } } fn emplace(&mut self, key: H::Out, value: T) { - match self.data.entry(key) { + match self.data.entry(key.as_ref().into()) { Entry::Occupied(mut entry) => { let &mut (ref mut old_value, ref mut rc) = entry.get_mut(); if *rc <= 0 { @@ -263,7 +313,7 @@ where } fn remove(&mut self, key: &H::Out) { - match self.data.entry(*key) { + match self.data.entry(key.as_ref().into()) { Entry::Occupied(mut entry) => { let &mut (_, ref mut rc) = entry.get_mut(); *rc -= 1; @@ -275,86 +325,121 @@ where } } -impl PlainDBRef for MemoryDB +impl PlainDBRef for MemoryDB where H: KeyHasher, T: Default + PartialEq + for<'a> From<&'a [u8]> + Clone + Send + Sync, + KF: Send + Sync + KeyFunction, + KF::Key: std::borrow::Borrow<[u8]> + for <'a> From<&'a [u8]>, { fn get(&self, key: &H::Out) -> Option { PlainDB::get(self, key) } fn contains(&self, key: &H::Out) -> bool { PlainDB::contains(self, key) } } -impl HashDB for MemoryDB +impl HashDB for MemoryDB where H: KeyHasher, T: Default + PartialEq + for<'a> From<&'a [u8]> + Clone + Send + Sync, + KF: Send + Sync + KeyFunction, { - fn get(&self, key: &H::Out) -> Option { + fn get(&self, key: &H::Out, prefix: &[u8]) -> Option { if key == &self.hashed_null_node { return Some(self.null_node_data.clone()); } - PlainDB::get(self, key) + let key = KF::key(key, prefix); + match self.data.get(&key) { + Some(&(ref d, rc)) if rc > 0 => Some(d.clone()), + _ => None + } } - fn contains(&self, key: &H::Out) -> bool { + fn contains(&self, key: &H::Out, prefix: &[u8]) -> bool { if key == &self.hashed_null_node { return true; } - PlainDB::contains(self, key) + let key = KF::key(key, prefix); + match self.data.get(&key) { + Some(&(_, x)) if x > 0 => true, + _ => false + } } - fn emplace(&mut self, key: H::Out, value: T) { + fn emplace(&mut self, key: H::Out, prefix: &[u8], value: T) { if value == self.null_node_data { return; } - PlainDB::emplace(self, key, value) + let key = KF::key(&key, prefix); + match self.data.entry(key) { + Entry::Occupied(mut entry) => { + let &mut (ref mut old_value, ref mut rc) = entry.get_mut(); + if *rc <= 0 { + *old_value = value; + } + *rc += 1; + }, + Entry::Vacant(entry) => { + entry.insert((value, 1)); + }, + } } - fn insert(&mut self, value: &[u8]) -> H::Out { + fn insert(&mut self, prefix: &[u8], value: &[u8]) -> H::Out { if T::from(value) == self.null_node_data { return self.hashed_null_node.clone(); } let key = H::hash(value); - PlainDB::emplace(self, key.clone(), value.into()); - + HashDB::emplace(self, key, prefix, value.into()); key 
} - fn remove(&mut self, key: &H::Out) { + fn remove(&mut self, key: &H::Out, prefix: &[u8]) { if key == &self.hashed_null_node { return; } - PlainDB::remove(self, key) + let key = KF::key(key, prefix); + match self.data.entry(key) { + Entry::Occupied(mut entry) => { + let &mut (_, ref mut rc) = entry.get_mut(); + *rc -= 1; + }, + Entry::Vacant(entry) => { + entry.insert((T::default(), -1)); + }, + } } } -impl HashDBRef for MemoryDB +impl HashDBRef for MemoryDB where H: KeyHasher, T: Default + PartialEq + for<'a> From<&'a [u8]> + Clone + Send + Sync, + KF: Send + Sync + KeyFunction, { - fn get(&self, key: &H::Out) -> Option { HashDB::get(self, key) } - fn contains(&self, key: &H::Out) -> bool { HashDB::contains(self, key) } + fn get(&self, key: &H::Out, prefix: &[u8]) -> Option { HashDB::get(self, key, prefix) } + fn contains(&self, key: &H::Out, prefix: &[u8]) -> bool { HashDB::contains(self, key, prefix) } } -impl AsPlainDB for MemoryDB +impl AsPlainDB for MemoryDB where H: KeyHasher, T: Default + PartialEq + for<'a> From<&'a[u8]> + Clone + Send + Sync, + KF: Send + Sync + KeyFunction, + KF::Key: std::borrow::Borrow<[u8]> + for <'a> From<&'a [u8]>, { fn as_plain_db(&self) -> &PlainDB { self } fn as_plain_db_mut(&mut self) -> &mut PlainDB { self } } -impl AsHashDB for MemoryDB +impl AsHashDB for MemoryDB where H: KeyHasher, T: Default + PartialEq + for<'a> From<&'a[u8]> + Clone + Send + Sync, + KF: Send + Sync + KeyFunction, { fn as_hash_db(&self) -> &HashDB { self } fn as_hash_db_mut(&mut self) -> &mut HashDB { self } @@ -362,7 +447,7 @@ where #[cfg(test)] mod tests { - use super::{MemoryDB, HashDB, KeyHasher}; + use super::{MemoryDB, HashDB, KeyHasher, HashKey}; use keccak_hasher::KeccakHasher; #[test] @@ -370,55 +455,53 @@ mod tests { let hello_bytes = b"Hello world!"; let hello_key = KeccakHasher::hash(hello_bytes); - let mut m = MemoryDB::>::default(); - m.remove(&hello_key); - assert_eq!(m.raw(&hello_key).unwrap().1, -1); + let mut m = MemoryDB::, Vec>::default(); + m.remove(&hello_key, &[]); + assert_eq!(m.raw(&hello_key, &[]).unwrap().1, -1); m.purge(); - assert_eq!(m.raw(&hello_key).unwrap().1, -1); - m.insert(hello_bytes); - assert_eq!(m.raw(&hello_key).unwrap().1, 0); + assert_eq!(m.raw(&hello_key, &[]).unwrap().1, -1); + m.insert(&[], hello_bytes); + assert_eq!(m.raw(&hello_key, &[]).unwrap().1, 0); m.purge(); - assert_eq!(m.raw(&hello_key), None); - - let mut m = MemoryDB::>::default(); - assert!(m.remove_and_purge(&hello_key).is_none()); - assert_eq!(m.raw(&hello_key).unwrap().1, -1); - m.insert(hello_bytes); - m.insert(hello_bytes); - assert_eq!(m.raw(&hello_key).unwrap().1, 1); - assert_eq!(&*m.remove_and_purge(&hello_key).unwrap(), hello_bytes); - assert_eq!(m.raw(&hello_key), None); - assert!(m.remove_and_purge(&hello_key).is_none()); + assert_eq!(m.raw(&hello_key, &[]), None); + + let mut m = MemoryDB::, Vec>::default(); + assert!(m.remove_and_purge(&hello_key, &[]).is_none()); + assert_eq!(m.raw(&hello_key, &[]).unwrap().1, -1); + m.insert(&[], hello_bytes); + m.insert(&[], hello_bytes); + assert_eq!(m.raw(&hello_key, &[]).unwrap().1, 1); + assert_eq!(&*m.remove_and_purge(&hello_key, &[]).unwrap(), hello_bytes); + assert_eq!(m.raw(&hello_key, &[]), None); + assert!(m.remove_and_purge(&hello_key, &[]).is_none()); } #[test] fn consolidate() { - let mut main = MemoryDB::>::default(); - let mut other = MemoryDB::>::default(); - let remove_key = other.insert(b"doggo"); - main.remove(&remove_key); + let mut main = MemoryDB::, Vec>::default(); + let mut other = MemoryDB::, 
Vec>::default(); + let remove_key = other.insert(&[], b"doggo"); + main.remove(&remove_key, &[]); - let insert_key = other.insert(b"arf"); - main.emplace(insert_key, "arf".as_bytes().to_vec()); + let insert_key = other.insert(&[], b"arf"); + main.emplace(insert_key, &[], "arf".as_bytes().to_vec()); - let negative_remove_key = other.insert(b"negative"); - other.remove(&negative_remove_key); // ref cnt: 0 - other.remove(&negative_remove_key); // ref cnt: -1 - main.remove(&negative_remove_key); // ref cnt: -1 + let negative_remove_key = other.insert(&[], b"negative"); + other.remove(&negative_remove_key, &[]); // ref cnt: 0 + other.remove(&negative_remove_key, &[]); // ref cnt: -1 + main.remove(&negative_remove_key, &[]); // ref cnt: -1 main.consolidate(other); - let overlay = main.drain(); - - assert_eq!(overlay.get(&remove_key).unwrap(), &("doggo".as_bytes().to_vec(), 0)); - assert_eq!(overlay.get(&insert_key).unwrap(), &("arf".as_bytes().to_vec(), 2)); - assert_eq!(overlay.get(&negative_remove_key).unwrap(), &("negative".as_bytes().to_vec(), -2)); + assert_eq!(main.raw(&remove_key, &[]).unwrap(), (&"doggo".as_bytes().to_vec(), 0)); + assert_eq!(main.raw(&insert_key, &[]).unwrap(), (&"arf".as_bytes().to_vec(), 2)); + assert_eq!(main.raw(&negative_remove_key, &[]).unwrap(), (&"negative".as_bytes().to_vec(), -2)); } #[test] fn default_works() { - let mut db = MemoryDB::>::default(); + let mut db = MemoryDB::, Vec>::default(); let hashed_null_node = KeccakHasher::hash(&[0u8][..]); - assert_eq!(db.insert(&[0u8][..]), hashed_null_node); + assert_eq!(db.insert(&[], &[0u8][..]), hashed_null_node); } } diff --git a/test-support/keccak-hasher/Cargo.toml b/test-support/keccak-hasher/Cargo.toml index dc0704ec..a801e293 100644 --- a/test-support/keccak-hasher/Cargo.toml +++ b/test-support/keccak-hasher/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "keccak-hasher" -version = "0.11.0" +version = "0.12.0" authors = ["Parity Technologies "] description = "Keccak-256 implementation of the Hasher trait" repository = "https://github.com/paritytech/parity/" @@ -8,5 +8,5 @@ license = "Apache-2.0" [dependencies] tiny-keccak = "1.4.2" -hash-db = { path = "../../hash-db", version = "0.11.0" } -hash256-std-hasher = { path = "../../hash256-std-hasher", version = "0.11.0" } +hash-db = { path = "../../hash-db", version = "0.12.0" } +hash256-std-hasher = { path = "../../hash256-std-hasher", version = "0.12.0" } diff --git a/test-support/reference-trie/Cargo.toml b/test-support/reference-trie/Cargo.toml index e89da269..a4b768cd 100644 --- a/test-support/reference-trie/Cargo.toml +++ b/test-support/reference-trie/Cargo.toml @@ -1,22 +1,22 @@ [package] name = "reference-trie" -version = "0.11.0" +version = "0.12.0" authors = ["Parity Technologies "] description = "Simple reference trie format" repository = "https://github.com/paritytech/trie/" license = "Apache-2.0" [dependencies] -hash-db = { path = "../../hash-db" , version = "0.11.0"} -hash256-std-hasher = { path = "../../hash256-std-hasher", version = "0.11.0" } -keccak-hasher = { path = "../keccak-hasher", version = "0.11.0" } -trie-db = { path = "../../trie-db", version = "0.11.0"} -trie-root = { path = "../../trie-root", version = "0.11.0" } +hash-db = { path = "../../hash-db" , version = "0.12.0"} +hash256-std-hasher = { path = "../../hash256-std-hasher", version = "0.12.0" } +keccak-hasher = { path = "../keccak-hasher", version = "0.12.0" } +trie-db = { path = "../../trie-db", version = "0.12.0"} +trie-root = { path = "../../trie-root", 
version = "0.12.0" } parity-codec = "3.0" parity-codec-derive = "3.0" [dev-dependencies] -trie-bench = { path = "../trie-bench", version = "0.11.0" } +trie-bench = { path = "../trie-bench", version = "0.12.0" } criterion = "0.2.8" [[bench]] diff --git a/test-support/reference-trie/src/lib.rs b/test-support/reference-trie/src/lib.rs index 9415a41f..1d7edbd8 100644 --- a/test-support/reference-trie/src/lib.rs +++ b/test-support/reference-trie/src/lib.rs @@ -161,7 +161,6 @@ impl Decode for NodeHeader { BRANCH_NODE_WITH_VALUE => NodeHeader::Branch(true), i @ LEAF_NODE_OFFSET ... LEAF_NODE_LAST => NodeHeader::Leaf((i - LEAF_NODE_OFFSET) as usize), i @ EXTENSION_NODE_OFFSET ... EXTENSION_NODE_LAST => NodeHeader::Extension((i - EXTENSION_NODE_OFFSET) as usize), - _ => unreachable!(), }) } } diff --git a/test-support/trie-bench/Cargo.toml b/test-support/trie-bench/Cargo.toml index 5e7d83c2..26d2d116 100644 --- a/test-support/trie-bench/Cargo.toml +++ b/test-support/trie-bench/Cargo.toml @@ -1,16 +1,16 @@ [package] name = "trie-bench" description = "Standard benchmarking suite for tries" -version = "0.11.0" +version = "0.12.0" authors = ["Parity Technologies "] license = "Apache-2.0" [dependencies] -keccak-hasher = { path = "../keccak-hasher", version = "0.11.0" } -trie-standardmap = { path = "../trie-standardmap", version = "0.11.0" } -hash-db = { path = "../../hash-db" , version = "0.11.0"} -memory-db = { path = "../../memory-db", version = "0.11.0" } -trie-root = { path = "../../trie-root", version = "0.11.0" } -trie-db = { path = "../../trie-db", version = "0.11.0" } +keccak-hasher = { path = "../keccak-hasher", version = "0.12.0" } +trie-standardmap = { path = "../trie-standardmap", version = "0.12.0" } +hash-db = { path = "../../hash-db" , version = "0.12.0"} +memory-db = { path = "../../memory-db", version = "0.12.0" } +trie-root = { path = "../../trie-root", version = "0.12.0" } +trie-db = { path = "../../trie-db", version = "0.12.0" } criterion = "0.2.8" parity-codec = "3.0" diff --git a/test-support/trie-bench/src/lib.rs b/test-support/trie-bench/src/lib.rs index 72017a8f..f21d4379 100644 --- a/test-support/trie-bench/src/lib.rs +++ b/test-support/trie-bench/src/lib.rs @@ -27,7 +27,7 @@ use parity_codec::{Encode, Compact}; use criterion::{Criterion, black_box, Fun}; use keccak_hasher::KeccakHasher; use hash_db::Hasher; -use memory_db::MemoryDB; +use memory_db::{MemoryDB, HashKey}; use trie_db::{NodeCodec, TrieDB, TrieDBMut, Trie, TrieMut}; use trie_root::{TrieStream, trie_root}; use trie_standardmap::*; @@ -54,7 +54,7 @@ where trie_root::(d.0.clone()) })), Fun::new("Fill", |b, d: &TrieInsertionList| b.iter(&mut ||{ - let mut memdb = MemoryDB::new(&N::empty_node()[..]); + let mut memdb = MemoryDB::<_, HashKey<_>, _>::new(&N::empty_node()[..]); let mut root = H::Out::default(); let mut t = TrieDBMut::::new(&mut memdb, &mut root); for i in d.0.iter() { @@ -62,7 +62,7 @@ where } })), Fun::new("Iter", |b, d: &TrieInsertionList| { - let mut memdb = MemoryDB::new(&N::empty_node()[..]); + let mut memdb = MemoryDB::<_, HashKey<_>, _>::new(&N::empty_node()[..]); let mut root = H::Out::default(); { let mut t = TrieDBMut::::new(&mut memdb, &mut root); diff --git a/test-support/trie-standardmap/Cargo.toml b/test-support/trie-standardmap/Cargo.toml index e65f6e6e..542c27ce 100644 --- a/test-support/trie-standardmap/Cargo.toml +++ b/test-support/trie-standardmap/Cargo.toml @@ -1,11 +1,11 @@ [package] name = "trie-standardmap" description = "Standard test map for profiling tries" -version = "0.11.0" 
+version = "0.12.0" authors = ["Parity Technologies "] license = "Apache-2.0" [dependencies] -keccak-hasher = { path = "../keccak-hasher", version = "0.11.0"} -hash-db = { path = "../../hash-db" , version = "0.11.0"} +keccak-hasher = { path = "../keccak-hasher", version = "0.12.0"} +hash-db = { path = "../../hash-db" , version = "0.12.0"} criterion = "0.2.8" diff --git a/trie-db/Cargo.toml b/trie-db/Cargo.toml index 856f2be6..a55a60e9 100644 --- a/trie-db/Cargo.toml +++ b/trie-db/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "trie-db" -version = "0.11.0" +version = "0.12.0" authors = ["Parity Technologies "] description = "Merkle-Patricia Trie generic over key hasher and node encoding" repository = "https://github.com/paritytech/parity-common" @@ -10,16 +10,16 @@ license = "Apache-2.0" log = "0.4" rand = "0.6" elastic-array = "0.10" -hash-db = { path = "../hash-db" , version = "0.11.0"} +hash-db = { path = "../hash-db" , version = "0.12.0"} [dev-dependencies] env_logger = "0.6" -memory-db = { path = "../memory-db", version = "0.11.0" } -trie-root = { path = "../trie-root", version = "0.11.0"} -trie-standardmap = { path = "../test-support/trie-standardmap", version = "0.11.0" } -keccak-hasher = { path = "../test-support/keccak-hasher", version = "0.11.0" } +memory-db = { path = "../memory-db", version = "0.12.0" } +trie-root = { path = "../trie-root", version = "0.12.0"} +trie-standardmap = { path = "../test-support/trie-standardmap", version = "0.12.0" } +keccak-hasher = { path = "../test-support/keccak-hasher", version = "0.12.0" } # DISABLE the following line when publishing until cyclic dependencies are resolved https://github.com/rust-lang/cargo/issues/4242 -reference-trie = { path = "../test-support/reference-trie", version = "0.11.0" } +reference-trie = { path = "../test-support/reference-trie", version = "0.12.0" } hex-literal = "0.1" criterion = "0.2.8" diff --git a/trie-db/src/fatdb.rs b/trie-db/src/fatdb.rs index a21d2e99..4e04976d 100644 --- a/trie-db/src/fatdb.rs +++ b/trie-db/src/fatdb.rs @@ -113,7 +113,7 @@ where .map(|res| { res.map(|(hash, value)| { let aux_hash = H::hash(&hash); - (self.trie.db().get(&aux_hash).expect("Missing fatdb hash").into_vec(), value) + (self.trie.db().get(&aux_hash, &[]).expect("Missing fatdb hash").into_vec(), value) }) }) } @@ -121,14 +121,14 @@ where #[cfg(test)] mod test { - use memory_db::MemoryDB; + use memory_db::{MemoryDB, HashKey}; use DBValue; use keccak_hasher::KeccakHasher; use reference_trie::{RefFatDBMut, RefFatDB, Trie, TrieMut}; #[test] fn fatdb_to_trie() { - let mut memdb = MemoryDB::::default(); + let mut memdb = MemoryDB::, DBValue>::default(); let mut root = Default::default(); { let mut t = RefFatDBMut::new(&mut memdb, &mut root); diff --git a/trie-db/src/fatdbmut.rs b/trie-db/src/fatdbmut.rs index 5e692178..0dffd063 100644 --- a/trie-db/src/fatdbmut.rs +++ b/trie-db/src/fatdbmut.rs @@ -85,7 +85,7 @@ where // insert if it doesn't exist. if out.is_none() { let aux_hash = H::hash(hash.as_ref()); - db.emplace(aux_hash, DBValue::from_slice(key)); + db.emplace(aux_hash, &[], DBValue::from_slice(key)); } Ok(out) } @@ -97,7 +97,7 @@ where // remove if it already exists. 
if out.is_some() { let aux_hash = H::hash(hash.as_ref()); - self.raw.db_mut().remove(&aux_hash); + self.raw.db_mut().remove(&aux_hash, &[]); } Ok(out) @@ -107,14 +107,14 @@ where #[cfg(test)] mod test { use DBValue; - use memory_db::MemoryDB; + use memory_db::{MemoryDB, HashKey}; use hash_db::Hasher; use keccak_hasher::KeccakHasher; use reference_trie::{RefFatDBMut, RefTrieDB, Trie, TrieMut}; #[test] fn fatdbmut_to_trie() { - let mut memdb = MemoryDB::default(); + let mut memdb = MemoryDB::, _>::default(); let mut root = Default::default(); { let mut t = RefFatDBMut::new(&mut memdb, &mut root); @@ -126,7 +126,7 @@ mod test { #[test] fn fatdbmut_insert_remove_key_mapping() { - let mut memdb = MemoryDB::default(); + let mut memdb = MemoryDB::, _>::default(); let mut root = Default::default(); let key = [0x01u8, 0x23]; let val = [0x01u8, 0x24]; @@ -135,8 +135,8 @@ mod test { let mut t = RefFatDBMut::new(&mut memdb, &mut root); t.insert(&key, &val).unwrap(); assert_eq!(t.get(&key), Ok(Some(DBValue::from_slice(&val)))); - assert_eq!(t.db().get(&aux_hash), Some(DBValue::from_slice(&key))); + assert_eq!(t.db().get(&aux_hash, &[]), Some(DBValue::from_slice(&key))); t.remove(&key).unwrap(); - assert_eq!(t.db().get(&aux_hash), None); + assert_eq!(t.db().get(&aux_hash, &[]), None); } } diff --git a/trie-db/src/lookup.rs b/trie-db/src/lookup.rs index cc6657e2..fce4fef3 100644 --- a/trie-db/src/lookup.rs +++ b/trie-db/src/lookup.rs @@ -40,12 +40,14 @@ where { /// Look up the given key. If the value is found, it will be passed to the given /// function to decode or copy. - pub fn look_up(mut self, mut key: NibbleSlice) -> Result, H::Out, C::Error> { + pub fn look_up(mut self, key: NibbleSlice) -> Result, H::Out, C::Error> { + let mut partial = key; let mut hash = self.hash; + let mut key_nibbles = 0; // this loop iterates through non-inline nodes. for depth in 0.. { - let node_data = match self.db.get(&hash) { + let node_data = match self.db.get(&hash, &key.encoded_leftmost(key_nibbles, false)) { Some(value) => value, None => return Err(Box::new(match depth { 0 => TrieError::InvalidStateRoot(hash), @@ -67,25 +69,27 @@ where }; match decoded { Node::Leaf(slice, value) => { - return Ok(match slice == key { + return Ok(match slice == partial { true => Some(self.query.decode(value)), false => None, }) } Node::Extension(slice, item) => { - if key.starts_with(&slice) { + if partial.starts_with(&slice) { node_data = item; - key = key.mid(slice.len()); + partial = partial.mid(slice.len()); + key_nibbles += slice.len(); } else { return Ok(None) } } - Node::Branch(children, value) => match key.is_empty() { + Node::Branch(children, value) => match partial.is_empty() { true => return Ok(value.map(move |val| self.query.decode(val))), - false => match children[key.at(0) as usize] { + false => match children[partial.at(0) as usize] { Some(x) => { node_data = x; - key = key.mid(1); + partial = partial.mid(1); + key_nibbles += 1; } None => return Ok(None) } diff --git a/trie-db/src/nibbleslice.rs b/trie-db/src/nibbleslice.rs index bea42b07..d73c0705 100644 --- a/trie-db/src/nibbleslice.rs +++ b/trie-db/src/nibbleslice.rs @@ -18,6 +18,9 @@ use std::cmp::*; use std::fmt; use elastic_array::ElasticArray36; +/// Empty slice encoded as non-leaf partial key +pub const EMPTY_ENCODED: &[u8] = &[0]; + /// Nibble-orientated view onto byte-slice, allowing nibble-precision offsets. /// /// This is an immutable struct. No operations actually change it. 
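Note (not part of the patch): the `EMPTY_ENCODED` constant added above pins down what the `prefix` arguments threaded through `HashDB` actually contain: the leftmost, already-consumed part of the trie key, in the same non-leaf encoding produced by `NibbleSlice::encoded(false)`, with `&[0]` standing for the empty slice at the root. A small sketch of that encoding follows; it assumes the `NibbleSlice` re-export used by the tests later in this diff and that `encoded`/`encoded_leftmost` are publicly callable.

```rust
// Illustrative sketch of the prefix encoding used for database keys.
use reference_trie::NibbleSlice;

fn main() {
	// The empty prefix (used for the root node) encodes to a single 0x00 byte,
	// i.e. exactly the new `EMPTY_ENCODED` constant.
	let empty = NibbleSlice::new(&[]);
	assert_eq!(&empty.encoded(false)[..], &[0x00][..]);

	// After descending two nibbles into the key 0x0123, the prefix handed to
	// the database is the non-leaf encoding of those two nibbles.
	let key = [0x01u8, 0x23];
	let prefix = NibbleSlice::new(&key).encoded_leftmost(2, false);
	assert_eq!(&prefix[..], &[0x00, 0x01][..]);

	// An odd number of nibbles sets the "odd" flag (0x10) in the first byte
	// and packs the first nibble into its low half.
	let odd = NibbleSlice::new(&key).encoded_leftmost(3, false);
	assert_eq!(&odd[..], &[0x10, 0x12][..]);
}
```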
@@ -96,7 +99,11 @@ impl<'a> NibbleSlice<'a> { /// Create a new nibble slice from the given HPE encoded data (e.g. output of `encoded()`). pub fn from_encoded(data: &'a [u8]) -> (NibbleSlice, bool) { - (Self::new_offset(data, if data[0] & 16 == 16 {1} else {2}), data[0] & 32 == 32) + if data.is_empty() { + (Self::new(&[]), false) + } else { + (Self::new_offset(data, if data[0] & 16 == 16 {1} else {2}), data[0] & 32 == 32) + } } /// Is this an empty slice? @@ -215,6 +222,12 @@ impl<'a> fmt::Debug for NibbleSlice<'a> { } } +/// Join two encoded nibble slices. +pub fn combine_encoded(prefix: &[u8], extension: &[u8]) -> ElasticArray36 { + let slice = NibbleSlice::new_composed(&NibbleSlice::from_encoded(&prefix).0, &NibbleSlice::from_encoded(extension).0); + slice.encoded(false) +} + #[cfg(test)] mod tests { use super::NibbleSlice; diff --git a/trie-db/src/recorder.rs b/trie-db/src/recorder.rs index 67f7b937..1e45f531 100644 --- a/trie-db/src/recorder.rs +++ b/trie-db/src/recorder.rs @@ -74,7 +74,7 @@ impl Recorder { #[cfg(test)] mod tests { - use memory_db::MemoryDB; + use memory_db::{MemoryDB, HashKey}; use hash_db::Hasher; use keccak_hasher::KeccakHasher; use reference_trie::{RefTrieDB, RefTrieDBMut, Trie, TrieMut, Recorder, Record}; @@ -131,7 +131,7 @@ mod tests { #[test] fn trie_record() { - let mut db = MemoryDB::default(); + let mut db = MemoryDB::, _>::default(); let mut root = Default::default(); { let mut x = RefTrieDBMut::new(&mut db, &mut root); diff --git a/trie-db/src/sectriedb.rs b/trie-db/src/sectriedb.rs index a5773827..f284d2ad 100644 --- a/trie-db/src/sectriedb.rs +++ b/trie-db/src/sectriedb.rs @@ -77,7 +77,7 @@ where #[cfg(test)] mod test { - use memory_db::MemoryDB; + use memory_db::{MemoryDB, HashKey}; use hash_db::Hasher; use keccak_hasher::KeccakHasher; use reference_trie::{RefTrieDBMut, RefSecTrieDB, Trie, TrieMut}; @@ -85,7 +85,7 @@ mod test { #[test] fn trie_to_sectrie() { - let mut db = MemoryDB::default(); + let mut db = MemoryDB::, DBValue>::default(); let mut root = Default::default(); { let mut t = RefTrieDBMut::new(&mut db, &mut root); diff --git a/trie-db/src/sectriedbmut.rs b/trie-db/src/sectriedbmut.rs index 7b657d09..ef4dc87b 100644 --- a/trie-db/src/sectriedbmut.rs +++ b/trie-db/src/sectriedbmut.rs @@ -87,7 +87,7 @@ where #[cfg(test)] mod test { - use memory_db::MemoryDB; + use memory_db::{MemoryDB, HashKey}; use hash_db::Hasher; use keccak_hasher::KeccakHasher; use reference_trie::{RefTrieDB, RefSecTrieDBMut, Trie, TrieMut}; @@ -95,7 +95,7 @@ mod test { #[test] fn sectrie_to_trie() { - let mut memdb = MemoryDB::default(); + let mut memdb = MemoryDB::, DBValue>::default(); let mut root = Default::default(); { let mut t = RefSecTrieDBMut::new(&mut memdb, &mut root); diff --git a/trie-db/src/triedb.rs b/trie-db/src/triedb.rs index 81a14bcd..4cbf2106 100644 --- a/trie-db/src/triedb.rs +++ b/trie-db/src/triedb.rs @@ -14,13 +14,14 @@ use std::fmt; use hash_db::{Hasher, HashDBRef}; -use nibbleslice::NibbleSlice; +use nibbleslice::{self, NibbleSlice, combine_encoded}; use super::node::{Node, OwnedNode}; use node_codec::NodeCodec; use super::lookup::Lookup; use super::{Result, DBValue, Trie, TrieItem, TrieError, TrieIterator, Query}; use std::marker::PhantomData; use std::borrow::Cow; +use elastic_array::ElasticArray36; /// A `Trie` implementation using a generic `HashDB` backing database, a `Hasher` /// implementation to generate keys and a `NodeCodec` implementation to encode/decode @@ -44,7 +45,7 @@ use std::borrow::Cow; /// use memory_db::*; /// /// fn main() { 
-/// let mut memdb = MemoryDB::default(); +/// let mut memdb = MemoryDB::, _>::default(); /// let mut root = Default::default(); /// RefTrieDBMut::new(&mut memdb, &mut root).insert(b"foo", b"bar").unwrap(); /// let t = RefTrieDB::new(&memdb, &root).unwrap(); @@ -72,7 +73,7 @@ where /// Create a new trie with the backing database `db` and `root` /// Returns an error if `root` does not exist pub fn new(db: &'db HashDBRef, root: &'db H::Out) -> Result { - if !db.contains(root) { + if !db.contains(root, nibbleslice::EMPTY_ENCODED) { Err(Box::new(TrieError::InvalidStateRoot(*root))) } else { Ok(TrieDB {db, root, hash_count: 0, codec_marker: PhantomData}) @@ -85,7 +86,7 @@ where /// Get the data of the root node. pub fn root_data(&self) -> Result { self.db - .get(self.root) + .get(self.root, nibbleslice::EMPTY_ENCODED) .ok_or_else(|| Box::new(TrieError::InvalidStateRoot(*self.root))) } @@ -93,11 +94,11 @@ where /// This could be a simple identity operation in the case that the node is sufficiently small, but /// may require a database lookup. If `is_root_data` then this is root-data and /// is known to be literal. - fn get_raw_or_lookup(&'db self, node: &[u8], is_root_node: bool) -> Result, H::Out, C::Error> { - match (is_root_node, C::try_decode_hash(node)) { + fn get_raw_or_lookup(&'db self, node: &[u8], partial_key: &[u8]) -> Result, H::Out, C::Error> { + match (partial_key.is_empty(), C::try_decode_hash(node)) { (false, Some(key)) => { self.db - .get(&key) + .get(&key, partial_key) .map(|v| Cow::Owned(v)) .ok_or_else(|| Box::new(TrieError::IncompleteDatabase(key))) } @@ -136,9 +137,9 @@ where C: NodeCodec + 'db { trie: &'db TrieDB<'db, H, C>, - key: &'a[u8], + node_key: &'a[u8], + partial_key: ElasticArray36, index: Option, - is_root: bool, } impl<'db, 'a, H, C> fmt::Debug for TrieAwareDebugNode<'db, 'a, H, C> @@ -147,7 +148,7 @@ where C: NodeCodec { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - if let Ok(node) = self.trie.get_raw_or_lookup(self.key, self.is_root) { + if let Ok(node) = self.trie.get_raw_or_lookup(self.node_key, &self.partial_key) { match C::decode(&node) { Ok(Node::Leaf(slice, value)) => match (f.debug_struct("Node::Leaf"), self.index) { @@ -163,13 +164,23 @@ where (ref mut d, _) => d, } .field("slice", &slice) - .field("item", &TrieAwareDebugNode{trie: self.trie, key: item, index: None, is_root: false }) + .field("item", &TrieAwareDebugNode{ + trie: self.trie, + node_key: item, + partial_key: combine_encoded(&self.partial_key, item), + index: None, + }) .finish(), Ok(Node::Branch(ref nodes, ref value)) => { let nodes: Vec> = nodes.into_iter() .enumerate() .filter_map(|(i, n)| n.map(|n| (i, n))) - .map(|(i, n)| TrieAwareDebugNode { trie: self.trie, index: Some(i as u8), key: n, is_root: false }) + .map(|(i, n)| TrieAwareDebugNode { + trie: self.trie, + index: Some(i as u8), + node_key: n, + partial_key: combine_encoded(&self.partial_key, n), + }) .collect(); match (f.debug_struct("Node::Branch"), self.index) { (ref mut d, Some(ref i)) => d.field("index", i), @@ -183,14 +194,14 @@ where Err(e) => f.debug_struct("BROKEN_NODE") .field("index", &self.index) - .field("key", &self.key) + .field("key", &self.node_key) .field("error", &format!("ERROR decoding node branch Rlp: {}", e)) .finish() } } else { f.debug_struct("BROKEN_NODE") .field("index", &self.index) - .field("key", &self.key) + .field("key", &self.node_key) .field("error", &"Not found") .finish() } @@ -208,9 +219,9 @@ where .field("hash_count", &self.hash_count) .field("root", &TrieAwareDebugNode { trie: 
self, - key: &root_rlp[..], + node_key: &root_rlp[..], + partial_key: Default::default(), index: None, - is_root: true, }) .finish() } @@ -258,14 +269,16 @@ impl<'a, H: Hasher, C: NodeCodec> TrieDBIterator<'a, H, C> { Ok(r) } - fn seek<'key>(&mut self, node_data: &DBValue, mut key: NibbleSlice<'key>) -> Result<(), H::Out, C::Error> { + fn seek<'key>(&mut self, node_data: &DBValue, key: NibbleSlice<'key>) -> Result<(), H::Out, C::Error> { let mut node_data = Cow::Borrowed(node_data); + let mut partial = key; + let mut full_key_nibbles = 0; loop { let (data, mid) = { let node = C::decode(&node_data).expect("encoded data read from db; qed"); match node { Node::Leaf(slice, _) => { - if slice >= key { + if slice >= partial { self.trail.push(Crumb { status: Status::Entering, node: node.clone().into(), @@ -281,20 +294,20 @@ impl<'a, H: Hasher, C: NodeCodec> TrieDBIterator<'a, H, C> { return Ok(()) }, Node::Extension(ref slice, ref item) => { - if key.starts_with(slice) { + if partial.starts_with(slice) { self.trail.push(Crumb { status: Status::At, node: node.clone().into(), }); self.key_nibbles.extend(slice.iter()); - let data = self.db.get_raw_or_lookup(&*item, false)?; + let data = self.db.get_raw_or_lookup(&*item, &key.encoded_leftmost(full_key_nibbles, false))?; (data, slice.len()) } else { self.descend(&node_data)?; return Ok(()) } }, - Node::Branch(ref nodes, _) => match key.is_empty() { + Node::Branch(ref nodes, _) => match partial.is_empty() { true => { self.trail.push(Crumb { status: Status::Entering, @@ -303,14 +316,14 @@ impl<'a, H: Hasher, C: NodeCodec> TrieDBIterator<'a, H, C> { return Ok(()) }, false => { - let i = key.at(0); + let i = partial.at(0); self.trail.push(Crumb { status: Status::AtChild(i as usize), node: node.clone().into(), }); self.key_nibbles.push(i); if let Some(ref child) = nodes[i as usize] { - let child = self.db.get_raw_or_lookup(&*child, false)?; + let child = self.db.get_raw_or_lookup(&*child, &key.encoded_leftmost(full_key_nibbles, false))?; (child, 1) } else { return Ok(()) @@ -322,13 +335,14 @@ impl<'a, H: Hasher, C: NodeCodec> TrieDBIterator<'a, H, C> { }; node_data = data; - key = key.mid(mid); + full_key_nibbles += mid; + partial = partial.mid(mid); } } /// Descend into a payload. 
fn descend(&mut self, d: &[u8]) -> Result<(), H::Out, C::Error> { - let node_data = &self.db.get_raw_or_lookup(d, self.key_nibbles.is_empty())?; + let node_data = &self.db.get_raw_or_lookup(d, &self.key_nibbles)?; let node = C::decode(&node_data).expect("encoded node read from db; qed"); Ok(self.descend_into_node(node.into())) } @@ -403,7 +417,7 @@ impl<'a, H: Hasher, C: NodeCodec> Iterator for TrieDBIterator<'a, H, C> { return Some(Ok((self.key(), v.clone()))); }, (Status::At, &OwnedNode::Extension(_, ref d)) => { - IterStep::Descend::(self.db.get_raw_or_lookup(&*d, false)) + IterStep::Descend::(self.db.get_raw_or_lookup(&*d, &self.key_nibbles)) }, (Status::At, &OwnedNode::Branch(_)) => IterStep::Continue, (Status::AtChild(i), &OwnedNode::Branch(ref branch)) if branch.index(i).is_some() => { @@ -412,7 +426,9 @@ impl<'a, H: Hasher, C: NodeCodec> Iterator for TrieDBIterator<'a, H, C> { i => *self.key_nibbles.last_mut() .expect("pushed as 0; moves sequentially; removed afterwards; qed") = i as u8, } - IterStep::Descend::(self.db.get_raw_or_lookup(&branch.index(i).expect("this arm guarded by branch[i].is_some(); qed"), false)) + IterStep::Descend::(self.db.get_raw_or_lookup( + &branch.index(i).expect("this arm guarded by branch[i].is_some(); qed"), + &self.key_nibbles)) }, (Status::AtChild(i), &OwnedNode::Branch(_)) => { if i == 0 { @@ -443,7 +459,7 @@ impl<'a, H: Hasher, C: NodeCodec> Iterator for TrieDBIterator<'a, H, C> { #[cfg(test)] mod tests { - use memory_db::MemoryDB; + use memory_db::{MemoryDB, HashKey}; use keccak_hasher::KeccakHasher; use DBValue; use reference_trie::{RefTrieDB, RefTrieDBMut, RefLookup, Trie, TrieMut, NibbleSlice}; @@ -455,7 +471,7 @@ mod tests { (hex!("0103000000000000000469").to_vec(), hex!("ffffffffff").to_vec()), ]; - let mut memdb = MemoryDB::default(); + let mut memdb = MemoryDB::, DBValue>::default(); let mut root = Default::default(); { let mut t = RefTrieDBMut::new(&mut memdb, &mut root); @@ -483,7 +499,7 @@ mod tests { (hex!("0103000000000000000469").to_vec(), hex!("ffffffffff").to_vec()), ]; - let mut memdb = MemoryDB::default(); + let mut memdb = MemoryDB::, DBValue>::default(); let mut root = Default::default(); { let mut t = RefTrieDBMut::new(&mut memdb, &mut root); @@ -507,7 +523,7 @@ mod tests { fn iterator() { let d = vec![DBValue::from_slice(b"A"), DBValue::from_slice(b"AA"), DBValue::from_slice(b"AB"), DBValue::from_slice(b"B")]; - let mut memdb = MemoryDB::::default(); + let mut memdb = MemoryDB::, DBValue>::default(); let mut root = Default::default(); { let mut t = RefTrieDBMut::new(&mut memdb, &mut root); @@ -525,7 +541,7 @@ mod tests { fn iterator_seek() { let d = vec![ DBValue::from_slice(b"A"), DBValue::from_slice(b"AA"), DBValue::from_slice(b"AB"), DBValue::from_slice(b"B") ]; - let mut memdb = MemoryDB::::default(); + let mut memdb = MemoryDB::, DBValue>::default(); let mut root = Default::default(); { let mut t = RefTrieDBMut::new(&mut memdb, &mut root); @@ -564,7 +580,7 @@ mod tests { #[test] fn get_len() { - let mut memdb = MemoryDB::::default(); + let mut memdb = MemoryDB::, DBValue>::default(); let mut root = Default::default(); { let mut t = RefTrieDBMut::new(&mut memdb, &mut root); @@ -582,7 +598,7 @@ mod tests { fn debug_output_supports_pretty_print() { let d = vec![ DBValue::from_slice(b"A"), DBValue::from_slice(b"AA"), DBValue::from_slice(b"AB"), DBValue::from_slice(b"B") ]; - let mut memdb = MemoryDB::::default(); + let mut memdb = MemoryDB::, DBValue>::default(); let mut root = Default::default(); let root = { let mut t = 
RefTrieDBMut::new(&mut memdb, &mut root); @@ -650,7 +666,7 @@ mod tests { fn test_lookup_with_corrupt_data_returns_decoder_error() { use std::marker::PhantomData; - let mut memdb = MemoryDB::::default(); + let mut memdb = MemoryDB::, DBValue>::default(); let mut root = Default::default(); { let mut t = RefTrieDBMut::new(&mut memdb, &mut root); diff --git a/trie-db/src/triedbmut.rs b/trie-db/src/triedbmut.rs index c02b0bb9..e0e515f4 100644 --- a/trie-db/src/triedbmut.rs +++ b/trie-db/src/triedbmut.rs @@ -21,7 +21,7 @@ use node_codec::NodeCodec; use super::{DBValue, node::NodeKey}; use hash_db::{HashDB, Hasher}; -use nibbleslice::NibbleSlice; +use nibbleslice::{self, NibbleSlice, combine_encoded}; use std::collections::{HashSet, VecDeque}; use std::marker::PhantomData; @@ -56,6 +56,32 @@ fn empty_children() -> Box<[Option>; 16]> { ]) } +struct Partial<'key> { + key: NibbleSlice<'key>, + split: usize, +} + +impl<'key> Partial<'key> { + fn new(key: NibbleSlice) -> Partial { + Partial { + key, + split: 0, + } + } + + fn advance(&mut self, by: usize) { + self.split += by; + } + + fn mid(&self) -> NibbleSlice<'key> { + self.key.mid(self.split) + } + + fn encoded_prefix(&self) -> NodeKey { + self.key.encoded_leftmost(self.split, false) + } +} + /// Node types in the Trie. #[derive(Debug)] enum Node { @@ -131,20 +157,21 @@ where fn into_encoded(self, mut child_cb: F) -> Vec where C: NodeCodec, - F: FnMut(NodeHandle) -> ChildReference, + F: FnMut(NodeHandle, &NodeKey) -> ChildReference, H: Hasher, { match self { Node::Empty => C::empty_node(), Node::Leaf(partial, value) => C::leaf_node(&partial, &value), - Node::Extension(partial, child) => C::ext_node(&partial, child_cb(child)), + Node::Extension(partial, child) => C::ext_node(&partial, child_cb(child, &partial)), Node::Branch(mut children, value) => { C::branch_node( // map the `NodeHandle`s from the Branch to `ChildReferences` children.iter_mut() .map(Option::take) - .map(|maybe_child| - maybe_child.map(|child| child_cb(child)) + .enumerate() + .map(|(i, maybe_child)| + maybe_child.map(|child| child_cb(child, &NibbleSlice::new_offset(&[i as u8], 1).encoded(false))) ), value ) @@ -268,7 +295,7 @@ impl<'a, H> Index<&'a StorageHandle> for NodeStorage { /// use memory_db::*; /// /// fn main() { -/// let mut memdb = MemoryDB::default(); +/// let mut memdb = MemoryDB::, DBValue>::default(); /// let mut root = Default::default(); /// let mut t = RefTrieDBMut::new(&mut memdb, &mut root); /// assert!(t.is_empty()); @@ -289,7 +316,7 @@ where db: &'a mut HashDB, root: &'a mut H::Out, root_handle: NodeHandle, - death_row: HashSet, + death_row: HashSet<(H::Out, NodeKey)>, /// The number of hash operations this trie has performed. /// Note that none are performed until changes are committed. hash_count: usize, @@ -320,7 +347,7 @@ where /// Create a new trie with the backing database `db` and `root. /// Returns an error if `root` does not exist. 
pub fn from_existing(db: &'a mut HashDB, root: &'a mut H::Out) -> Result { - if !db.contains(root) { + if !db.contains(root, nibbleslice::EMPTY_ENCODED) { return Err(Box::new(TrieError::InvalidStateRoot(*root))); } @@ -346,8 +373,8 @@ where } // cache a node by hash - fn cache(&mut self, hash: H::Out) -> Result { - let node_encoded = self.db.get(&hash).ok_or_else(|| Box::new(TrieError::IncompleteDatabase(hash)))?; + fn cache(&mut self, hash: H::Out, key: &[u8]) -> Result { + let node_encoded = self.db.get(&hash, key).ok_or_else(|| Box::new(TrieError::IncompleteDatabase(hash)))?; let node = Node::from_encoded::( &node_encoded, &*self.db, @@ -358,22 +385,22 @@ where // inspect a node, choosing either to replace, restore, or delete it. // if restored or replaced, returns the new node along with a flag of whether it was changed. - fn inspect(&mut self, stored: Stored, inspector: F) -> Result, bool)>, H::Out, C::Error> - where F: FnOnce(&mut Self, Node) -> Result, H::Out, C::Error> { + fn inspect(&mut self, stored: Stored, key: &mut Partial, inspector: F) -> Result, bool)>, H::Out, C::Error> + where F: FnOnce(&mut Self, Node, &mut Partial) -> Result, H::Out, C::Error> { Ok(match stored { - Stored::New(node) => match inspector(self, node)? { + Stored::New(node) => match inspector(self, node, key)? { Action::Restore(node) => Some((Stored::New(node), false)), Action::Replace(node) => Some((Stored::New(node), true)), Action::Delete => None, }, - Stored::Cached(node, hash) => match inspector(self, node)? { + Stored::Cached(node, hash) => match inspector(self, node, key)? { Action::Restore(node) => Some((Stored::Cached(node, hash), false)), Action::Replace(node) => { - self.death_row.insert(hash); + self.death_row.insert((hash, key.encoded_prefix())); Some((Stored::New(node), true)) } Action::Delete => { - self.death_row.insert(hash); + self.death_row.insert((hash, key.encoded_prefix())); None } }, @@ -430,21 +457,22 @@ where } /// insert a key-value pair into the trie, creating new nodes if necessary. - fn insert_at(&mut self, handle: NodeHandle, partial: NibbleSlice, value: DBValue, old_val: &mut Option) -> Result<(StorageHandle, bool), H::Out, C::Error> { + fn insert_at(&mut self, handle: NodeHandle, key: &mut Partial, value: DBValue, old_val: &mut Option) -> Result<(StorageHandle, bool), H::Out, C::Error> { let h = match handle { NodeHandle::InMemory(h) => h, - NodeHandle::Hash(h) => self.cache(h)?, + NodeHandle::Hash(h) => self.cache(h, &key.encoded_prefix())?, }; let stored = self.storage.destroy(h); - let (new_stored, changed) = self.inspect(stored, move |trie, stored| { - trie.insert_inspector(stored, partial, value, old_val).map(|a| a.into_action()) + let (new_stored, changed) = self.inspect(stored, key, move |trie, stored, key| { + trie.insert_inspector(stored, key, value, old_val).map(|a| a.into_action()) })?.expect("Insertion never deletes."); Ok((self.storage.alloc(new_stored), changed)) } /// the insertion inspector. 
- fn insert_inspector(&mut self, node: Node, partial: NibbleSlice, value: DBValue, old_val: &mut Option) -> Result, H::Out, C::Error> { + fn insert_inspector(&mut self, node: Node, key: &mut Partial, value: DBValue, old_val: &mut Option) -> Result, H::Out, C::Error> { + let partial = key.mid(); trace!(target: "trie", "augmented (partial: {:?}, value: {:#x?})", partial, value); Ok(match node { @@ -466,10 +494,10 @@ where } } else { let idx = partial.at(0) as usize; - let partial = partial.mid(1); + key.advance(1); if let Some(child) = children[idx].take() { // original had something there. recurse down into it. - let (new_child, changed) = self.insert_at(child, partial, value, old_val)?; + let (new_child, changed) = self.insert_at(child, key, value, old_val)?; children[idx] = Some(new_child.into()); if !changed { // the new node we composed didn't change. that means our branch is untouched too. @@ -477,7 +505,7 @@ where } } else { // original had nothing there. compose a leaf. - let leaf = self.storage.alloc(Stored::New(Node::Leaf(partial.encoded(true), value))); + let leaf = self.storage.alloc(Stored::New(Node::Leaf(key.mid().encoded(true), value))); children[idx] = Some(leaf.into()); } @@ -515,7 +543,7 @@ where }; // always replace because whatever we get out here is not the branch we started with. - let branch_action = self.insert_inspector(branch, partial, value, old_val)?.unwrap_node(); + let branch_action = self.insert_inspector(branch, key, value, old_val)?.unwrap_node(); InsertAction::Replace(branch_action) } else if cp == existing_key.len() { trace!(target: "trie", "complete-prefix (cp={:?}): AUGMENT-AT-END", cp); @@ -524,7 +552,8 @@ where // make a stub branch and an extension. let branch = Node::Branch(empty_children(), Some(stored_value)); // augment the new branch. - let branch = self.insert_inspector(branch, partial.mid(cp), value, old_val)?.unwrap_node(); + key.advance(cp); + let branch = self.insert_inspector(branch, key, value, old_val)?.unwrap_node(); // always replace since we took a leaf and made an extension. let branch_handle = self.storage.alloc(Stored::New(branch)).into(); @@ -538,7 +567,8 @@ where // augment it. this will result in the Leaf -> cp == 0 routine, // which creates a branch. - let augmented_low = self.insert_inspector(low, partial.mid(cp), value, old_val)?.unwrap_node(); + key.advance(cp); + let augmented_low = self.insert_inspector(low, key, value, old_val)?.unwrap_node(); // make an extension using it. this is a replacement. InsertAction::Replace(Node::Extension( @@ -569,7 +599,7 @@ where }; // continue inserting. - let branch_action = self.insert_inspector(Node::Branch(children, None), partial, value, old_val)?.unwrap_node(); + let branch_action = self.insert_inspector(Node::Branch(children, None), key, value, old_val)?.unwrap_node(); InsertAction::Replace(branch_action) } else if cp == existing_key.len() { trace!(target: "trie", "complete-prefix (cp={:?}): AUGMENT-AT-END", cp); @@ -577,7 +607,8 @@ where // fully-shared prefix. // insert into the child node. - let (new_child, changed) = self.insert_at(child_branch, partial.mid(cp), value, old_val)?; + key.advance(cp); + let (new_child, changed) = self.insert_at(child_branch, key, value, old_val)?; let new_ext = Node::Extension(existing_key.encoded(false), new_child.into()); // if the child branch wasn't changed, meaning this extension remains the same. @@ -591,7 +622,8 @@ where // partially-shared. let low = Node::Extension(existing_key.mid(cp).encoded(false), child_branch); // augment the extension. 
this will take the cp == 0 path, creating a branch. - let augmented_low = self.insert_inspector(low, partial.mid(cp), value, old_val)?.unwrap_node(); + key.advance(cp); + let augmented_low = self.insert_inspector(low, key, value, old_val)?.unwrap_node(); // always replace, since this extension is not the one we started with. // this is known because the partial key is only the common prefix. @@ -605,35 +637,38 @@ where } /// Remove a node from the trie based on key. - fn remove_at(&mut self, handle: NodeHandle, partial: NibbleSlice, old_val: &mut Option) -> Result, H::Out, C::Error> { + fn remove_at(&mut self, handle: NodeHandle, key: &mut Partial, old_val: &mut Option) -> Result, H::Out, C::Error> { let stored = match handle { NodeHandle::InMemory(h) => self.storage.destroy(h), NodeHandle::Hash(h) => { - let handle = self.cache(h)?; + let handle = self.cache(h, &key.encoded_prefix())?; self.storage.destroy(handle) } }; - let opt = self.inspect(stored, move |trie, node| trie.remove_inspector(node, partial, old_val))?; + let opt = self.inspect(stored, key, move |trie, node, key| trie.remove_inspector(node, key, old_val))?; Ok(opt.map(|(new, changed)| (self.storage.alloc(new), changed))) } /// the removal inspector - fn remove_inspector(&mut self, node: Node, partial: NibbleSlice, old_val: &mut Option) -> Result, H::Out, C::Error> { + fn remove_inspector(&mut self, node: Node, key: &mut Partial, old_val: &mut Option) -> Result, H::Out, C::Error> { + let partial = key.mid(); Ok(match (node, partial.is_empty()) { (Node::Empty, _) => Action::Delete, (Node::Branch(c, None), true) => Action::Restore(Node::Branch(c, None)), (Node::Branch(children, Some(val)), true) => { *old_val = Some(val); // always replace since we took the value out. - Action::Replace(self.fix(Node::Branch(children, None))?) + Action::Replace(self.fix(Node::Branch(children, None), key.encoded_prefix())?) } (Node::Branch(mut children, value), false) => { let idx = partial.at(0) as usize; if let Some(child) = children[idx].take() { trace!(target: "trie", "removing value out of branch child, partial={:?}", partial); - match self.remove_at(child, partial.mid(1), old_val)? { + let prefix = key.encoded_prefix(); + key.advance(1); + match self.remove_at(child, key, old_val)? { Some((new, changed)) => { children[idx] = Some(new.into()); let branch = Node::Branch(children, value); @@ -648,7 +683,7 @@ where // the child we took was deleted. // the node may need fixing. trace!(target: "trie", "branch child deleted, partial={:?}", partial); - Action::Replace(self.fix(Node::Branch(children, value))?) + Action::Replace(self.fix(Node::Branch(children, value), prefix)?) } } } else { @@ -675,14 +710,16 @@ where if cp == existing_len { // try to remove from the child branch. trace!(target: "trie", "removing from extension child, partial={:?}", partial); - match self.remove_at(child_branch, partial.mid(cp), old_val)? { + let prefix = key.encoded_prefix(); + key.advance(cp); + match self.remove_at(child_branch, key, old_val)? { Some((new_child, changed)) => { let new_child = new_child.into(); // if the child branch was unchanged, then the extension is too. // otherwise, this extension may need fixing. 
match changed { - true => Action::Replace(self.fix(Node::Extension(encoded, new_child))?), + true => Action::Replace(self.fix(Node::Extension(encoded, new_child), prefix)?), false => Action::Restore(Node::Extension(encoded, new_child)), } } @@ -706,7 +743,7 @@ where /// _invalid state_ means: /// - Branch node where there is only a single entry; /// - Extension node followed by anything other than a Branch node. - fn fix(&mut self, node: Node) -> Result, H::Out, C::Error> { + fn fix(&mut self, node: Node, key: NodeKey) -> Result, H::Out, C::Error> { match node { Node::Branch(mut children, value) => { // if only a single value, transmute to leaf/extension and feed through fixed. @@ -735,7 +772,7 @@ where let new_partial = NibbleSlice::new_offset(&[a], 1).encoded(false); let child = children[a as usize].take().expect("used_index only set if occupied; qed"); let new_node = Node::Extension(new_partial, child); - self.fix(new_node) + self.fix(new_node, key) } (UsedIndex::None, Some(value)) => { // make a leaf. @@ -753,7 +790,7 @@ where let stored = match child { NodeHandle::InMemory(h) => self.storage.destroy(h), NodeHandle::Hash(h) => { - let handle = self.cache(h)?; + let handle = self.cache(h, &combine_encoded(&key, &partial))?; self.storage.destroy(handle) } }; @@ -768,20 +805,21 @@ where // combine with node below. if let Some(hash) = maybe_hash { // delete the cached child since we are going to replace it. - self.death_row.insert(hash); + self.death_row.insert((hash, key.clone())); } let partial = NibbleSlice::from_encoded(&partial).0; let sub_partial = NibbleSlice::from_encoded(&sub_partial).0; let new_partial = NibbleSlice::new_composed(&partial, &sub_partial); trace!(target: "trie", "fixing: extension combination. new_partial={:?}", new_partial); - self.fix(Node::Extension(new_partial.encoded(false), sub_child)) + let new_partial = new_partial.encoded(false); + self.fix(Node::Extension(new_partial, sub_child), key) } Node::Leaf(sub_partial, value) => { // combine with node below. if let Some(hash) = maybe_hash { // delete the cached child since we are going to replace it. - self.death_row.insert(hash); + self.death_row.insert((hash, key)); } let partial = NibbleSlice::from_encoded(&partial).0; let sub_partial = NibbleSlice::from_encoded(&sub_partial).0; @@ -815,8 +853,8 @@ where // always kill all the nodes on death row. trace!(target: "trie", "{:?} nodes to remove from db", self.death_row.len()); - for hash in self.death_row.drain() { - self.db.remove(&hash); + for (hash, prefix) in self.death_row.drain() { + self.db.remove(&hash, &prefix); } let handle = match self.root_handle() { @@ -826,10 +864,13 @@ where match self.storage.destroy(handle) { Stored::New(node) => { - let encoded_root = node.into_encoded::<_, C, H>(|child| self.commit_child(child) ); + let encoded_root = node.into_encoded::<_, C, H>(|child, k| { + let combined = combine_encoded(nibbleslice::EMPTY_ENCODED, k); + self.commit_child(child, &combined) + }); trace!(target: "trie", "encoded root node: {:#x?}", &encoded_root[..]); - *self.root = self.db.insert(&encoded_root[..]); + *self.root = self.db.insert(nibbleslice::EMPTY_ENCODED, &encoded_root[..]); self.hash_count += 1; self.root_handle = NodeHandle::Hash(*self.root); @@ -847,16 +888,22 @@ where /// case where we can fit the actual data in the `Hasher`s output type, we /// store the data inline. This function is used as the callback to the /// `into_encoded` method of `Node`. 
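A pending deletion on the death row now remembers the prefix the node was stored under, and `commit` drains `(hash, prefix)` pairs into `db.remove(&hash, &prefix)`; new nodes likewise go in via `db.insert(prefix, ..)`. The sketch below is not the crate's code: under an assumed `prefix || hash` backing-key layout (the concrete types here are placeholders for `H::Out` and the trie's `NodeKey`), it only illustrates why a bare hash is no longer enough to address a node once the backend keys its entries by prefix.

use std::collections::{HashMap, HashSet};

// Illustrative stand-ins for `H::Out` and the trie's key prefix type.
type Hash = [u8; 32];
type Prefix = Vec<u8>;

/// Toy prefixed store: the backing key is `prefix || hash`, so the same hash
/// stored under two different prefixes occupies two distinct entries.
struct PrefixedStore {
    data: HashMap<Vec<u8>, Vec<u8>>,
}

impl PrefixedStore {
    fn backing_key(hash: &Hash, prefix: &[u8]) -> Vec<u8> {
        let mut k = prefix.to_vec();
        k.extend_from_slice(hash);
        k
    }
    fn insert(&mut self, prefix: &[u8], hash: Hash, value: Vec<u8>) {
        self.data.insert(Self::backing_key(&hash, prefix), value);
    }
    fn remove(&mut self, hash: &Hash, prefix: &[u8]) {
        self.data.remove(&Self::backing_key(hash, prefix));
    }
}

/// Deferred deletions, drained at commit time like the loop in `commit` above.
fn drain_death_row(store: &mut PrefixedStore, death_row: &mut HashSet<(Hash, Prefix)>) {
    for (hash, prefix) in death_row.drain() {
        store.remove(&hash, &prefix);
    }
}

This is also why `fix` and `commit_child` now take a key/prefix argument: whenever a cached child is destroyed or re-encoded, the caller must supply the prefix it lives under in the database.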
- fn commit_child(&mut self, handle: NodeHandle) -> ChildReference { + fn commit_child(&mut self, handle: NodeHandle, prefix: &NodeKey) -> ChildReference { match handle { NodeHandle::Hash(hash) => ChildReference::Hash(hash), NodeHandle::InMemory(storage_handle) => { match self.storage.destroy(storage_handle) { Stored::Cached(_, hash) => ChildReference::Hash(hash), Stored::New(node) => { - let encoded = node.into_encoded::<_, C, H>(|node_handle| self.commit_child(node_handle) ); + let encoded = { + let commit_child = |node_handle, partial: &NodeKey| { + let combined = combine_encoded(&prefix, partial); + self.commit_child(node_handle, &combined) + }; + node.into_encoded::<_, C, H>(commit_child) + }; if encoded.len() >= H::LENGTH { - let hash = self.db.insert(&encoded[..]); + let hash = self.db.insert(&prefix, &encoded[..]); self.hash_count +=1; ChildReference::Hash(hash) } else { @@ -917,7 +964,7 @@ where let root_handle = self.root_handle(); let (new_handle, changed) = self.insert_at( root_handle, - NibbleSlice::new(key), + &mut Partial::new(NibbleSlice::new(key)), DBValue::from_slice(value), &mut old_val, )?; @@ -932,10 +979,10 @@ where trace!(target: "trie", "remove: key={:#x?}", key); let root_handle = self.root_handle(); - let key = NibbleSlice::new(key); + let mut key = Partial::new(NibbleSlice::new(key)); let mut old_val = None; - match self.remove_at(root_handle, key, &mut old_val)? { + match self.remove_at(root_handle, &mut key, &mut old_val)? { Some((handle, changed)) => { trace!(target: "trie", "remove: altered trie={}", changed); self.root_handle = NodeHandle::InMemory(handle); @@ -966,7 +1013,7 @@ mod tests { use env_logger; use standardmap::*; use DBValue; - use memory_db::MemoryDB; + use memory_db::{MemoryDB, PrefixedKey}; use hash_db::{Hasher, HashDB}; use keccak_hasher::KeccakHasher; use reference_trie::{RefTrieDBMut, TrieMut, NodeCodec, @@ -1010,7 +1057,7 @@ mod tests { }.make_with(&mut seed); let real = ref_trie_root(x.clone()); - let mut memdb = MemoryDB::default(); + let mut memdb = MemoryDB::, DBValue>::default(); let mut root = Default::default(); let mut memtrie = populate_trie(&mut memdb, &mut root, &x); @@ -1040,7 +1087,7 @@ mod tests { #[test] fn init() { - let mut memdb = MemoryDB::default(); + let mut memdb = MemoryDB::, DBValue>::default(); let mut root = Default::default(); let mut t = RefTrieDBMut::new(&mut memdb, &mut root); assert_eq!(*t.root(), ReferenceNodeCodec::hashed_null_node()); @@ -1048,7 +1095,7 @@ mod tests { #[test] fn insert_on_empty() { - let mut memdb = MemoryDB::default(); + let mut memdb = MemoryDB::, DBValue>::default(); let mut root = Default::default(); let mut t = RefTrieDBMut::new(&mut memdb, &mut root); t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]).unwrap(); @@ -1059,12 +1106,12 @@ mod tests { fn remove_to_empty() { let big_value = b"00000000000000000000000000000000"; - let mut memdb = MemoryDB::default(); + let mut memdb = MemoryDB::, DBValue>::default(); let mut root = Default::default(); let mut t1 = RefTrieDBMut::new(&mut memdb, &mut root); t1.insert(&[0x01, 0x23], big_value).unwrap(); t1.insert(&[0x01, 0x34], big_value).unwrap(); - let mut memdb2 = MemoryDB::default(); + let mut memdb2 = MemoryDB::, DBValue>::default(); let mut root2 = Default::default(); let mut t2 = RefTrieDBMut::new(&mut memdb2, &mut root2); @@ -1076,7 +1123,7 @@ mod tests { #[test] fn insert_replace_root() { - let mut memdb = MemoryDB::default(); + let mut memdb = MemoryDB::, DBValue>::default(); let mut root = Default::default(); let mut t = 
RefTrieDBMut::new(&mut memdb, &mut root); t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]).unwrap(); @@ -1086,7 +1133,7 @@ mod tests { #[test] fn insert_make_branch_root() { - let mut memdb = MemoryDB::default(); + let mut memdb = MemoryDB::, DBValue>::default(); let mut root = Default::default(); let mut t = RefTrieDBMut::new(&mut memdb, &mut root); t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]).unwrap(); @@ -1099,7 +1146,7 @@ mod tests { #[test] fn insert_into_branch_root() { - let mut memdb = MemoryDB::default(); + let mut memdb = MemoryDB::, DBValue>::default(); let mut root = Default::default(); let mut t = RefTrieDBMut::new(&mut memdb, &mut root); t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]).unwrap(); @@ -1114,7 +1161,7 @@ mod tests { #[test] fn insert_value_into_branch_root() { - let mut memdb = MemoryDB::default(); + let mut memdb = MemoryDB::, DBValue>::default(); let mut root = Default::default(); let mut t = RefTrieDBMut::new(&mut memdb, &mut root); t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]).unwrap(); @@ -1127,7 +1174,7 @@ mod tests { #[test] fn insert_split_leaf() { - let mut memdb = MemoryDB::default(); + let mut memdb = MemoryDB::, DBValue>::default(); let mut root = Default::default(); let mut t = RefTrieDBMut::new(&mut memdb, &mut root); t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]).unwrap(); @@ -1140,7 +1187,7 @@ mod tests { #[test] fn insert_split_extenstion() { - let mut memdb = MemoryDB::default(); + let mut memdb = MemoryDB::, DBValue>::default(); let mut root = Default::default(); let mut t = RefTrieDBMut::new(&mut memdb, &mut root); t.insert(&[0x01, 0x23, 0x45], &[0x01]).unwrap(); @@ -1158,7 +1205,7 @@ mod tests { let big_value0 = b"00000000000000000000000000000000"; let big_value1 = b"11111111111111111111111111111111"; - let mut memdb = MemoryDB::default(); + let mut memdb = MemoryDB::, DBValue>::default(); let mut root = Default::default(); let mut t = RefTrieDBMut::new(&mut memdb, &mut root); t.insert(&[0x01u8, 0x23], big_value0).unwrap(); @@ -1173,7 +1220,7 @@ mod tests { fn insert_duplicate_value() { let big_value = b"00000000000000000000000000000000"; - let mut memdb = MemoryDB::default(); + let mut memdb = MemoryDB::, DBValue>::default(); let mut root = Default::default(); let mut t = RefTrieDBMut::new(&mut memdb, &mut root); t.insert(&[0x01u8, 0x23], big_value).unwrap(); @@ -1186,7 +1233,7 @@ mod tests { #[test] fn test_at_empty() { - let mut memdb = MemoryDB::default(); + let mut memdb = MemoryDB::, DBValue>::default(); let mut root = Default::default(); let t = RefTrieDBMut::new(&mut memdb, &mut root); assert_eq!(t.get(&[0x5]).unwrap(), None); @@ -1194,7 +1241,7 @@ mod tests { #[test] fn test_at_one() { - let mut memdb = MemoryDB::default(); + let mut memdb = MemoryDB::, DBValue>::default(); let mut root = Default::default(); let mut t = RefTrieDBMut::new(&mut memdb, &mut root); t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]).unwrap(); @@ -1205,7 +1252,7 @@ mod tests { #[test] fn test_at_three() { - let mut memdb = MemoryDB::default(); + let mut memdb = MemoryDB::, DBValue>::default(); let mut root = Default::default(); let mut t = RefTrieDBMut::new(&mut memdb, &mut root); t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]).unwrap(); @@ -1235,12 +1282,12 @@ mod tests { }.make_with(&mut seed); let real = ref_trie_root(x.clone()); - let mut memdb = MemoryDB::default(); + let mut memdb = MemoryDB::, DBValue>::default(); let mut root = Default::default(); let mut memtrie = populate_trie(&mut memdb, &mut root, &x); let mut y = x.clone(); y.sort_by(|ref a, ref b| a.0.cmp(&b.0)); - let mut 
memdb2 = MemoryDB::default(); + let mut memdb2 = MemoryDB::, DBValue>::default(); let mut root2 = Default::default(); let mut memtrie_sorted = populate_trie(&mut memdb2, &mut root2, &y); if *memtrie.root() != real || *memtrie_sorted.root() != real { @@ -1262,7 +1309,7 @@ mod tests { #[test] fn test_trie_existing() { - let mut db = MemoryDB::default(); + let mut db = MemoryDB::, DBValue>::default(); let mut root = Default::default(); { let mut t = RefTrieDBMut::new(&mut db, &mut root); @@ -1285,7 +1332,7 @@ mod tests { count: 4, }.make_with(&mut seed); - let mut db = MemoryDB::default(); + let mut db = MemoryDB::, DBValue>::default(); let mut root = Default::default(); let mut t = RefTrieDBMut::new(&mut db, &mut root); for &(ref key, ref value) in &x { @@ -1313,7 +1360,7 @@ mod tests { count: 4, }.make_with(&mut seed); - let mut db = MemoryDB::default(); + let mut db = MemoryDB::, DBValue>::default(); let mut root = Default::default(); let mut t = RefTrieDBMut::new(&mut db, &mut root); for &(ref key, ref value) in &x { diff --git a/trie-root/Cargo.toml b/trie-root/Cargo.toml index 73f7b96f..0ec344d4 100644 --- a/trie-root/Cargo.toml +++ b/trie-root/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "trie-root" -version = "0.11.0" +version = "0.12.0" authors = ["Parity Technologies "] description = "In-memory patricia trie operations" repository = "https://github.com/paritytech/parity-common" @@ -8,14 +8,14 @@ license = "Apache-2.0" categories = [ "no-std" ] [dependencies] -hash-db = { path = "../hash-db", default-features = false, version = "0.11.0"} +hash-db = { path = "../hash-db", default-features = false } [dev-dependencies] hex-literal = "0.1" -keccak-hasher = { path = "../test-support/keccak-hasher", version = "0.11.0" } -trie-standardmap = { path = "../test-support/trie-standardmap", version = "0.11.0" } +keccak-hasher = { path = "../test-support/keccak-hasher" } +trie-standardmap = { path = "../test-support/trie-standardmap" } # DISABLE the following line when publishing until cyclic dependencies are resolved https://github.com/rust-lang/cargo/issues/4242 -reference-trie = { path = "../test-support/reference-trie", version = "0.11.0" } +reference-trie = { path = "../test-support/reference-trie" } [features] default = ["std"]
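On the test side, every `MemoryDB::default()` becomes an explicitly parameterised database so that the key function matches the prefixed lookups above. The generic arguments between `MemoryDB::` and `, DBValue>` did not survive this rendering of the diff, so the `KeccakHasher`/`PrefixedKey<KeccakHasher>` parameters in the sketch below are an assumption inferred from the `use memory_db::{MemoryDB, PrefixedKey};` import in the test module, not a quotation of the patch; the same goes for the `trie_db::DBValue` import path.

// Hedged reconstruction of the updated test set-up; generic parameters and
// import paths are assumptions, not copied from the patch.
use memory_db::{MemoryDB, PrefixedKey};
use keccak_hasher::KeccakHasher;
use reference_trie::{RefTrieDBMut, TrieMut};
use trie_db::DBValue;

fn main() {
    // Assumed arguments: hasher, key function, value type.
    let mut memdb = MemoryDB::<KeccakHasher, PrefixedKey<KeccakHasher>, DBValue>::default();
    let mut root = Default::default();
    let mut t = RefTrieDBMut::new(&mut memdb, &mut root);
    t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]).unwrap();
    assert_eq!(t.get(&[0x01u8, 0x23]).unwrap().unwrap(), DBValue::from_slice(&[0x01u8, 0x23]));
}

Choosing the key function at the type level keeps the trie code itself agnostic: it only passes prefixes through, and the backend decides whether they become part of the stored key.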