Skip to content

Commit

Permalink
fix(pg): enable caching in simulation
Browse files Browse the repository at this point in the history
  • Loading branch information
sam-goldman committed Mar 21, 2024
1 parent eef7def commit fd5ac04
Show file tree
Hide file tree
Showing 4 changed files with 43 additions and 47 deletions.
5 changes: 5 additions & 0 deletions .changeset/heavy-vans-teach.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
---
'@sphinx-labs/plugins': patch
---

Enable caching in simulation
41 changes: 0 additions & 41 deletions packages/core/src/networks.ts
Original file line number Diff line number Diff line change
Expand Up @@ -337,44 +337,3 @@ export const shouldUseHigherMaxGasLimit = (chainId: bigint) => {
throw new Error(`Unsupported network id ${chainId}`)
}
}

/**
 * The number of blocks that Hardhat rewinds when forking the given network. Rewinding the block
 * number protects against chain reorgs. Copied from Hardhat:
 * https://github.com/NomicFoundation/hardhat/blob/caa504fe0e53c183578f42d66f4740b8ec147051/packages/hardhat-core/src/internal/hardhat-network/provider/utils/reorgs-protection.ts
 *
 * @param chainIdStr - The chain ID as a decimal string.
 * @returns The reorg depth (in blocks) for the chain, or a default of 30 blocks for chains
 * Hardhat doesn't list explicitly.
 */
export const getLargestPossibleReorg = (chainIdStr: string): bigint => {
  // Per-chain reorg depths mirrored from Hardhat's reorgs-protection table.
  const reorgDepths = new Map<number, bigint>([
    [1, BigInt(5)], // mainnet
    [42, BigInt(5)], // Kovan
    [5, BigInt(5)], // Goerli
    [4, BigInt(5)], // Rinkeby
    [3, BigInt(100)], // Ropsten
    [100, BigInt(38)], // xDai
  ])

  // Any chain not in the table gets Hardhat's generic fallback of 30 blocks.
  return reorgDepths.get(Number(chainIdStr)) ?? BigInt(30)
}
8 changes: 8 additions & 0 deletions packages/plugins/src/hardhat.config.js
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,14 @@ module.exports = {
typeof blockNumber === 'string' ? Number(blockNumber) : undefined,
},
blockGasLimit: Number(blockGasLimit),
// We don't use Hardhat's genesis accounts, so we set this to an empty array. This eliminates
// 20 RPC calls that Hardhat sends at the beginning of every simulation to get the nonce of
// each genesis account. (There's one RPC call per genesis account). Hardhat needs to get
// these nonces on forked networks because the private keys are publicly known.
//
// If a user's script uses one of these genesis accounts, Hardhat will fetch its nonce on an
// as-needed basis, which is the behavior that we want.
accounts: [],
},
},
}
36 changes: 30 additions & 6 deletions packages/plugins/src/hardhat/simulate.ts
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,6 @@ import {
MerkleRootStatus,
SphinxJsonRpcProvider,
fetchNameForNetwork,
getLargestPossibleReorg,
isFork,
stripLeadingZero,
isLiveNetwork,
Expand All @@ -36,6 +35,10 @@ import {
} from '@sphinx-labs/core'
import { ethers } from 'ethers'
import { HardhatEthersProvider } from '@nomicfoundation/hardhat-ethers/internal/hardhat-ethers-provider'
import {
FALLBACK_MAX_REORG,
getLargestPossibleReorg,
} from 'hardhat/internal/hardhat-network/provider/utils/reorgs-protection'
import pLimit from 'p-limit'

import {
Expand Down Expand Up @@ -145,10 +148,27 @@ export const simulate = async (
}

if ((await isLiveNetwork(provider)) || (await isFork(provider))) {
// Use the same block number as the Forge script that collected the user's transactions. This
// reduces the chance that the simulation throws an error or stalls, which can occur when using
// the most recent block number.
envVars['SPHINX_INTERNAL__BLOCK_NUMBER'] = networkConfig.blockNumber
// Use the block number from the Forge script minus the largest possible chain reorg size, which
// is determined by Hardhat. We must subtract the reorg size so that Hardhat caches the RPC
// calls in the simulation. Otherwise, Hardhat will send hundreds of RPC calls, which frequently
// causes rate limit errors, especially for public or free tier RPC endpoints.
//
// Subtracting the reorg size can lead to the following edge case:
// 1. User executes a transaction on the live network.
// 2. User calls Sphinx's Propose or Deploy command using a script that relies on the state that
// resulted from the transaction in the previous step.
// 3. The collection process works correctly because Foundry uses the latest block number.
// 4. The simulation uses a block where the transaction doesn't exist yet, causing an error.
//
// This edge case is unlikely to happen in practice because the reorg size is pretty small. For
// example, it's 5 blocks on Ethereum, and 30 blocks on most other networks. A reorg size of 30
// blocks corresponds to 15 minutes on Rootstock, which is one of the slowest networks that
// Sphinx supports as of now. If the edge case occurs, it will naturally resolve itself if the
// user continues to attempt to propose/deploy. This is because the corresponding block will
// eventually be included in the simulation after there have been enough block confirmations.
const blockNumber =
BigInt(networkConfig.blockNumber) - BigInt(getLargestReorg(chainId))
envVars['SPHINX_INTERNAL__BLOCK_NUMBER'] = blockNumber.toString()
} else {
// The network is a non-forked local node (i.e. an Anvil or Hardhat node with a fresh state). We
// do not hardcode the block number in the Hardhat config to avoid the following edge case:
Expand Down Expand Up @@ -177,7 +197,7 @@ export const simulate = async (
// is meant to protect against chain reorgs on forks of live networks.
// 3. The simulation fails because the transactions executed in step 1 don't exist on the
// Hardhat fork.
const blocksToFastForward = getLargestPossibleReorg(chainId)
const blocksToFastForward = getLargestReorg(chainId)
const blocksHex = stripLeadingZero(ethers.toBeHex(blocksToFastForward))
await provider.send(
'hardhat_mine', // The `hardhat_mine` RPC method works on Anvil and Hardhat nodes.
Expand Down Expand Up @@ -591,6 +611,10 @@ export const createHardhatEthersProviderProxy = (
return proxy
}

/**
 * Returns the largest possible reorg depth for the given chain, deferring to Hardhat's own
 * table and falling back to Hardhat's default when the chain is unknown to it.
 *
 * @param chainId - The chain ID as a decimal string.
 * @returns The reorg depth in blocks.
 */
const getLargestReorg = (chainId: string): bigint => {
  const knownReorg = getLargestPossibleReorg(Number(chainId))
  return knownReorg ?? FALLBACK_MAX_REORG
}

/**
 * Builds the error message shown when a simulation finishes successfully but a contract is
 * missing from its expected address.
 *
 * NOTE(review): the misspelling "Mesage" is part of the exported name; renaming it would break
 * callers, so it's kept as-is.
 *
 * @param address - The expected contract address.
 * @returns The formatted error message, with the address on its own line.
 */
export const getUndeployedContractErrorMesage = (address: string): string => {
  const header = `Simulation succeeded, but the following contract wasn't deployed at its expected address:\n`
  return header + address
}

0 comments on commit fd5ac04

Please sign in to comment.