-
Notifications
You must be signed in to change notification settings - Fork 678
perf: optimize debug_storageRangeAt
#809
Changes from all commits
013adb9
c5d60eb
7b40bfa
02fb847
6e68425
b9c72c9
cfce53b
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,11 @@ | ||
| declare module "merkle-patricia-tree/secure" { | ||
|
Contributor
Author
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Just adding a type for SecureTrie |
||
| import { CheckpointTrie } from "merkle-patricia-tree"; | ||
| export default class SecureTrie extends CheckpointTrie { | ||
| copy(): SecureTrie; | ||
| static prove( | ||
| trie: SecureTrie, | ||
| key: LargeNumber, | ||
| cb: Callback<MerkleProof> | ||
| ): void; | ||
| } | ||
| } | ||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -17,7 +17,11 @@ import { | |
| RuntimeError, | ||
| RETURN_TYPES, | ||
| Snapshots, | ||
| StepEvent | ||
| StepEvent, | ||
| StorageKeys, | ||
| StorageRangeResult, | ||
| StorageRecords, | ||
| RangedStorageKeys | ||
| } from "@ganache/ethereum-utils"; | ||
| import TransactionManager from "./data-managers/transaction-manager"; | ||
| import SecureTrie from "merkle-patricia-tree/secure"; | ||
|
|
@@ -39,7 +43,8 @@ const { | |
| RPCQUANTITY_EMPTY, | ||
| BUFFER_32_ZERO, | ||
| BUFFER_256_ZERO, | ||
| RPCQUANTITY_ZERO | ||
| RPCQUANTITY_ZERO, | ||
| findInsertPosition | ||
| } = utils; | ||
|
|
||
| type SimulationTransaction = { | ||
|
|
@@ -325,7 +330,7 @@ export default class Blockchain extends Emittery.Typed< | |
| }: { | ||
| block: Block; | ||
| serialized: Buffer; | ||
| storageKeys: Map<string, Buffer>; | ||
| storageKeys: StorageKeys; | ||
| }) => { | ||
| const { blocks } = this; | ||
| blocks.latest = block; | ||
|
|
@@ -371,8 +376,8 @@ export default class Blockchain extends Emittery.Typed< | |
| }); | ||
|
|
||
| // save storage keys to the database | ||
| storageKeys.forEach((value, key) => { | ||
| this.storageKeys.put(key, value); | ||
| storageKeys.forEach(value => { | ||
| this.storageKeys.put(value.hashedKey, value.key); | ||
| }); | ||
|
|
||
| blockLogs.blockNumber = blockNumberQ; | ||
|
|
@@ -448,7 +453,7 @@ export default class Blockchain extends Emittery.Typed< | |
| #handleNewBlockData = async (blockData: { | ||
| block: Block; | ||
| serialized: Buffer; | ||
| storageKeys: Map<string, Buffer>; | ||
| storageKeys: StorageKeys; | ||
| }) => { | ||
| this.#blockBeingSavedPromise = this.#blockBeingSavedPromise | ||
| .then(() => this.#saveNewBlock(blockData)) | ||
|
|
@@ -866,11 +871,11 @@ export default class Blockchain extends Emittery.Typed< | |
| transaction: Transaction, | ||
| options: TransactionTraceOptions, | ||
| keys?: Buffer[], | ||
| contractAddress?: string | ||
| contractAddress?: Buffer | ||
| ) => { | ||
| let currentDepth = -1; | ||
| const storageStack: TraceStorageMap[] = []; | ||
| const storage = {}; | ||
| const storage: StorageRecords = {}; | ||
|
|
||
| // TODO: gas could go theoretically go over Number.MAX_SAFE_INTEGER. | ||
| // (Ganache v2 didn't handle this possibility either, so it hasn't been | ||
|
|
@@ -995,42 +1000,42 @@ export default class Blockchain extends Emittery.Typed< | |
| } | ||
| }; | ||
|
|
||
| let txHashCurrentlyProcessing: string = null; | ||
| const transactionHash = Data.from(transaction.hash()).toString(); | ||
| const transactionHash = transaction.hash(); | ||
| let txHashCurrentlyProcessing: Buffer = null; | ||
|
|
||
| const beforeTxListener = async (tx: Transaction) => { | ||
| txHashCurrentlyProcessing = Data.from(tx.hash()).toString(); | ||
| if (txHashCurrentlyProcessing == transactionHash) { | ||
| const beforeTxListener = (tx: Transaction) => { | ||
| txHashCurrentlyProcessing = tx.hash(); | ||
| if (txHashCurrentlyProcessing.equals(transactionHash)) { | ||
|
Contributor
Author
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Comparing buffers is faster than converting them to a string and then comparing strings. |
||
| if (keys && contractAddress) { | ||
| keys.forEach(async key => { | ||
| // get the raw key using the hashed key | ||
| let rawKey = await this.#database.storageKeys.get( | ||
| Data.from(key).toString() | ||
| ); | ||
|
|
||
| vm.stateManager.getContractStorage( | ||
| Address.from(contractAddress).toBuffer(), | ||
| rawKey, | ||
| (err: Error, result: Buffer) => { | ||
| if (err) { | ||
| throw err; | ||
| const database = this.#database; | ||
|
Contributor
Author
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. For reasons,
Contributor
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. How did you measure the performance difference @davidmurdoch ?
Contributor
Author
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Don't need to. Just look at what typescript generates for our ES target. It's a function call, a Map I'd love to generate multiple builds so we can ship without these sorts of polyfills. I like the "true" private fields the syntax provides, and don't think I want to remove their use all together. Happy to have your review on this! |
||
| return Promise.all( | ||
|
Contributor
Author
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. The |
||
| keys.map(async key => { | ||
| // get the raw key using the hashed key | ||
| const rawKey: Buffer = await database.storageKeys.get(key); | ||
|
Contributor
Author
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. I've changed the way the keys are stored so we don't need to convert to a string any more. |
||
|
|
||
| vm.stateManager.getContractStorage( | ||
| contractAddress, | ||
| rawKey, | ||
| (err: Error, result: Buffer) => { | ||
| if (err) { | ||
| throw err; | ||
| } | ||
|
|
||
| storage[Data.from(key, key.length).toString()] = { | ||
| key: Data.from(rawKey, rawKey.length), | ||
| value: Data.from(result, 32) | ||
| }; | ||
|
Contributor
Author
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more.
By default, `Data` removes leading zeroes |
||
| } | ||
|
|
||
| const keccakHashedKey = Data.from(key).toJSON(); | ||
| storage[keccakHashedKey] = { | ||
| key: Data.from(rawKey).toJSON(), | ||
| value: Data.from(result, 32).toJSON() | ||
| }; | ||
| } | ||
| ); | ||
| }); | ||
| ); | ||
| }) | ||
| ); | ||
| } | ||
| vm.on("step", stepListener); | ||
| } | ||
| }; | ||
|
|
||
| const afterTxListener = () => { | ||
| if (txHashCurrentlyProcessing == transactionHash) { | ||
| if (txHashCurrentlyProcessing.equals(transactionHash)) { | ||
| removeListeners(); | ||
| } | ||
| }; | ||
|
|
@@ -1221,17 +1226,7 @@ export default class Blockchain extends Emittery.Typed< | |
| contractAddress: string, | ||
| startKey: string | Buffer, | ||
| maxResult: number | ||
| ) { | ||
| type StorageRangeResult = { | ||
| nextKey: null | string; | ||
| storage: any; | ||
| }; | ||
|
|
||
| const result: StorageRangeResult = { | ||
| nextKey: null, | ||
| storage: {} | ||
| }; | ||
|
|
||
| ): Promise<StorageRangeResult> { | ||
| // #1 - get block information | ||
| const targetBlock = await this.blocks.getByHash(blockHash); | ||
|
|
||
|
|
@@ -1254,10 +1249,8 @@ export default class Blockchain extends Emittery.Typed< | |
| ); | ||
|
|
||
| // get the contractAddress account storage trie | ||
| const addressDataPromise = this.getFromTrie( | ||
| trie, | ||
| Address.from(contractAddress).toBuffer() | ||
| ); | ||
| const contractAddressBuffer = Address.from(contractAddress).toBuffer(); | ||
|
Contributor
Author
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. We use this buffer a few times, so let's compute it just once. |
||
| const addressDataPromise = this.getFromTrie(trie, contractAddressBuffer); | ||
| const addressData = await addressDataPromise; | ||
| if (!addressData) { | ||
| throw new Error(`account ${contractAddress} doesn't exist`); | ||
|
|
@@ -1274,42 +1267,49 @@ export default class Blockchain extends Emittery.Typed< | |
| Buffer /*codeHash*/ | ||
| ])[2]; | ||
|
|
||
| let keys: Buffer[] = []; | ||
| return new Promise((resolve, reject) => { | ||
| storageTrie | ||
| .createReadStream() | ||
| .on("data", data => { | ||
| keys.push(data.key); | ||
| }) | ||
| .on("end", () => { | ||
| // #4 - sort and filter keys | ||
| const sortedKeys = keys.sort((a, b) => Buffer.compare(a, b)); | ||
|
|
||
| // find starting point in array of sorted keys | ||
| const startKeyBuffer = Data.from(startKey).toBuffer(); | ||
| keys = sortedKeys.filter(key => { | ||
| if (Buffer.compare(startKeyBuffer, key) <= 0) { | ||
| return key; | ||
| } | ||
| }); | ||
| return new Promise<RangedStorageKeys>((resolve, reject) => { | ||
| const startKeyBuffer = Data.from(startKey).toBuffer(); | ||
| const compare = (a: Buffer, b: Buffer) => a.compare(b) < 0; | ||
|
|
||
| const keys: Buffer[] = []; | ||
| const handleData = ({ key }) => { | ||
| // ignore anything that comes before our starting point | ||
| if (startKeyBuffer.compare(key) > 0) return; | ||
|
Contributor
Author
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. If the key comes before our start key we can just ignore it. No need to sort data we know we'll throw away! |
||
|
|
||
| // #4 - sort and filter keys | ||
| // insert the key exactly where it needs to go in the array | ||
| const position = findInsertPosition(keys, key, compare); | ||
davidmurdoch marked this conversation as resolved.
Show resolved
Hide resolved
|
||
| // ignore if the value couldn't possibly be relevant | ||
| if (position > maxResult) return; | ||
| keys.splice(position, 0, key); | ||
|
Comment on lines
+1279
to
+1284
Contributor
Author
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more.
There are other data structures better suited for fast insertion, but ultimately we'll need to convert it into an array anyway, so my guess is that an array would end up winning out in practical use anyway. |
||
| }; | ||
|
|
||
| // only take the maximum number of entries requested | ||
| keys = keys.slice(0, maxResult + 1); | ||
| if (keys.length > maxResult) { | ||
| // assign nextKey and remove it from array of keys | ||
| const nextKey = keys.pop(); | ||
| result.nextKey = Data.from(nextKey).toJSON(); | ||
| } | ||
| const handleEnd = () => { | ||
| if (keys.length > maxResult) { | ||
|
Contributor
Author
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. If we have more data than we want to return we need to trim it down, and grab the `nextKey`. |
||
| // we collected too much data, so we've got to trim it a bit | ||
| resolve({ | ||
| // only take the maximum number of entries requested | ||
| keys: keys.slice(0, maxResult), | ||
| // assign nextKey | ||
| nextKey: Data.from(keys[maxResult]) | ||
| }); | ||
| } else { | ||
| resolve({ | ||
| keys, | ||
| nextKey: null | ||
| }); | ||
| } | ||
| }; | ||
|
|
||
| resolve(keys); | ||
| }); | ||
| const rs = storageTrie.createReadStream(); | ||
|
Contributor
Author
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. We can't get keys to come back already ordered because the merkle patricia tree library we use parallelizes up to 500 database requests at once. This just means we have to sort client side, which could mean performance takes a hit. But generally I think callers to Where we'd take a hit is when we only need to return a small amount of storage. In that case we still have to load all of it. I think turning the priority queue it uses internally into a FIFO queue would actually be very complicated, because it resolves both branches and leaves, and it'd have to ensure the proper ordering there. Not positive though. Since that's not going to happen any time soon, the next best thing is to just insert them in order as we get them, instead of a big ol' sort at the end. This will suck a bit more after we get the in-memory cache in place. We won't need any parallelization in that case; and it'll actually just slow everything down (it'll still be faster than hitting leveldb, of course). Thanks for watching. Don't forget to smash that like button! |
||
| rs.on("data", handleData).on("error", reject).on("end", handleEnd); | ||
|
Contributor
Author
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. In case you miss it... I added an error handler here. |
||
| }); | ||
| }; | ||
| const keys = await getStorageKeys(); | ||
| const { keys, nextKey } = await getStorageKeys(); | ||
|
|
||
| // #5 - rerun every transaction in that block prior to and including the requested transaction | ||
| // prepare block to be run in traceTransaction | ||
| const transactionHashBuffer = Data.from(transaction.hash()).toBuffer(); | ||
| const transactionHashBuffer = transaction.hash(); | ||
| const newBlock = this.#prepareNextBlock( | ||
| targetBlock, | ||
| parentBlock, | ||
|
|
@@ -1318,7 +1318,7 @@ export default class Blockchain extends Emittery.Typed< | |
| // get storage data given a set of keys | ||
| const options = { | ||
| disableMemory: true, | ||
| disableStack: false, | ||
| disableStack: true, | ||
davidmurdoch marked this conversation as resolved.
Show resolved
Hide resolved
|
||
| disableStorage: false | ||
| }; | ||
|
|
||
|
|
@@ -1327,13 +1327,15 @@ export default class Blockchain extends Emittery.Typed< | |
| newBlock, | ||
| transaction, | ||
| options, | ||
| keys as Buffer[], | ||
| contractAddress | ||
| keys, | ||
| contractAddressBuffer | ||
| ); | ||
| result.storage = storage; | ||
|
|
||
| // #6 - send back results | ||
| return result; | ||
| return { | ||
| storage, | ||
| nextKey | ||
| }; | ||
| } | ||
|
|
||
| /** | ||
|
|
||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -49,7 +49,10 @@ export default class Database extends Emittery { | |
| } | ||
|
|
||
| #initialize = async () => { | ||
| const levelupOptions: any = { valueEncoding: "binary" }; | ||
| const levelupOptions: any = { | ||
| keyEncoding: "binary", | ||
|
Contributor
Author
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. This makes it so that we don't have to stringify our keys, which are almost always, if not always, already a Buffer. |
||
| valueEncoding: "binary" | ||
| }; | ||
| const store = this.#options.db; | ||
| let db: levelup.LevelUp; | ||
| if (store) { | ||
|
|
||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -7,7 +7,8 @@ import { | |
| RETURN_TYPES, | ||
| Executables, | ||
| TraceDataFactory, | ||
| StepEvent | ||
| StepEvent, | ||
| StorageKeys | ||
| } from "@ganache/ethereum-utils"; | ||
| import { utils, Quantity, Data } from "@ganache/utils"; | ||
| import { promisify } from "util"; | ||
|
|
@@ -39,7 +40,7 @@ export default class Miner extends Emittery.Typed< | |
| block: { | ||
| block: Block; | ||
| serialized: Buffer; | ||
| storageKeys: Map<string, Buffer>; | ||
| storageKeys: StorageKeys; | ||
| }; | ||
| }, | ||
| "idle" | ||
|
|
@@ -179,7 +180,7 @@ export default class Miner extends Emittery.Typed< | |
| let keepMining = true; | ||
| const priced = this.#priced; | ||
| const legacyInstamine = this.#options.legacyInstamine; | ||
| const storageKeys = new Map<string, Buffer>(); | ||
| const storageKeys: StorageKeys = new Map(); | ||
| let blockTransactions: Transaction[]; | ||
| do { | ||
| keepMining = false; | ||
|
|
@@ -231,9 +232,9 @@ export default class Miner extends Emittery.Typed< | |
| if (event.opcode.name === "SSTORE") { | ||
| const key = TraceData.from( | ||
| event.stack[event.stack.length - 1].toArrayLike(Buffer) | ||
| ); | ||
| const hashedKey = Data.from(keccak(key.toBuffer())).toString(); | ||
| storageKeys.set(hashedKey, key.toBuffer()); | ||
| ).toBuffer(); | ||
| const hashedKey = keccak(key); | ||
| storageKeys.set(hashedKey.toString(), { key, hashedKey }); | ||
|
Contributor
Author
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Trading some memory for CPU. Previously the consumer of this data would convert the Map's keys ( |
||
| } | ||
| next(); | ||
| }; | ||
|
|
||
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Just fixing some types