diff --git a/.changeset/rich-turkeys-act.md b/.changeset/rich-turkeys-act.md new file mode 100644 index 000000000..97bf90ebb --- /dev/null +++ b/.changeset/rich-turkeys-act.md @@ -0,0 +1,6 @@ +--- +'@sphinx-labs/plugins': patch +'@sphinx-labs/core': patch +--- + +Keep previous deployment artifacts diff --git a/packages/core/src/artifacts.ts b/packages/core/src/artifacts.ts index c4a086886..c65e3530c 100644 --- a/packages/core/src/artifacts.ts +++ b/packages/core/src/artifacts.ts @@ -22,6 +22,7 @@ import { DeploymentConfig, ConfigArtifacts, NetworkConfig, + BuildInfos, } from './config/types' import { fetchNetworkConfigFromDeploymentConfig, @@ -32,7 +33,7 @@ import { } from './utils' import { ExecutionMode } from './constants' -type NetworkArtifacts = { +export type NetworkArtifacts = { executionArtifacts: { [txArtifactFileName: string]: ExecutionArtifact } @@ -268,23 +269,24 @@ export const convertEthersTransactionReceipt = ( } /** - * Makes contract deployment artifacts on a single network for a single deployment. + * Makes contract deployment artifacts on a single network for a single deployment. Mutates the + * input `artifacts` object. * - * @param previousArtifacts An object containing all previous contract deployment artifacts on the - * network for the project. + * @param artifacts An object containing all previous contract deployment artifacts on the network + * for the project. */ -const makeContractDeploymentArtifacts = async ( +export const makeContractDeploymentArtifacts = async ( merkleRoot: string, networkConfig: NetworkConfig, - deploymentConfig: DeploymentConfig, + buildInfos: BuildInfos, receipts: Array, configArtifacts: ConfigArtifacts, - previousArtifacts: { + artifacts: { [fileName: string]: ContractDeploymentArtifact | undefined }, provider: SphinxJsonRpcProvider -): Promise<{ [fileName: string]: ContractDeploymentArtifact }> => { - const isSuffixed = Object.keys(previousArtifacts).every((fileName) => +): Promise => { + const isSuffixed = Object.keys(artifacts).every((fileName) => fileName.endsWith('.json') ) if (!isSuffixed) { @@ -297,14 +299,13 @@ const makeContractDeploymentArtifacts = async ( const moduleInterface = new ethers.Interface(SphinxModuleABI) const { gitCommit, chainId } = networkConfig - const artifacts: { [fileName: string]: ContractDeploymentArtifact } = {} const numDeployments: { [fileName: string]: number | undefined } = {} for (const action of networkConfig.actionInputs) { for (const contract of action.contracts) { const { fullyQualifiedName, initCodeWithArgs, address } = contract const { artifact: compilerArtifact, buildInfoId } = configArtifacts[fullyQualifiedName] - const buildInfo = deploymentConfig.buildInfos[buildInfoId] + const buildInfo = buildInfos[buildInfoId] if (!compilerArtifact || !buildInfo) { throw new Error(`Could not find artifact for: ${fullyQualifiedName}`) @@ -393,7 +394,7 @@ const makeContractDeploymentArtifacts = async ( ? `${contractName}_${previousNumDeployments}.json` : `${contractName}.json` - const previousArtifact = previousArtifacts[fileName] + const previousArtifact = artifacts[fileName] if (previousArtifact) { // Separate the previous artifact into two components: its `history` array and the other // fields. 
@@ -416,8 +417,6 @@ const makeContractDeploymentArtifacts = async ( numDeployments[contractName] = previousNumDeployments + 1 } } - - return artifacts } export const writeDeploymentArtifacts = ( @@ -528,12 +527,11 @@ export const isContractDeploymentArtifact = ( } /** - * Make deployment artifacts for the most recent deployment in a project. + * Make deployment artifacts for the most recent deployment in a project. Mutates the input + * `artifacts` object. * * @param deployments An object containing deployment information for each network where the most - * recent deployment was executed. The `previousContractArtifacts` field contains all previous - * contract deployment artifacts on the network for the project. Note that the `fileName` keys of - * the `previousContractArtifacts` object must be suffixed with `.json`. + * recent deployment was executed. * * @returns {DeploymentArtifacts} The artifacts for the most recent deployment. */ @@ -543,23 +541,28 @@ export const makeDeploymentArtifacts = async ( deploymentConfig: DeploymentConfig receipts: Array provider: SphinxJsonRpcProvider - previousContractArtifacts: { - [fileName: string]: ContractDeploymentArtifact - } } }, merkleRoot: string, - configArtifacts: ConfigArtifacts -): Promise => { - const allNetworkArtifacts: DeploymentArtifacts['networks'] = {} - const compilerInputArtifacts: DeploymentArtifacts['compilerInputs'] = {} - for (const chainId of Object.keys(deployments)) { - const { provider, deploymentConfig, receipts, previousContractArtifacts } = - deployments[chainId] + configArtifacts: ConfigArtifacts, + artifacts: DeploymentArtifacts +): Promise => { + // We'll mutate these variables to update the existing artifacts. + const { + networks: allNetworkArtifacts, + compilerInputs: compilerInputArtifacts, + } = artifacts - const networkArtifacts: NetworkArtifacts = { - contractDeploymentArtifacts: {}, - executionArtifacts: {}, + for (const chainId of Object.keys(deployments)) { + const { provider, deploymentConfig, receipts } = deployments[chainId] + + // Define the network artifacts if it doesn't exist. Otherwise, we'll attempt to operate on an + // object that doesn't exist, leading to an error. + if (allNetworkArtifacts[chainId] === undefined) { + allNetworkArtifacts[chainId] = { + contractDeploymentArtifacts: {}, + executionArtifacts: {}, + } } const networkConfig = fetchNetworkConfigFromDeploymentConfig( @@ -568,16 +571,15 @@ export const makeDeploymentArtifacts = async ( ) // Make the contract artifacts. - const contractArtifacts = await makeContractDeploymentArtifacts( + await makeContractDeploymentArtifacts( merkleRoot, networkConfig, - deploymentConfig, + deploymentConfig.buildInfos, receipts, configArtifacts, - previousContractArtifacts, + allNetworkArtifacts[chainId].contractDeploymentArtifacts, provider ) - networkArtifacts.contractDeploymentArtifacts = contractArtifacts // Make the execution artifact. const executionArtifact = await makeExecutionArtifact( @@ -587,21 +589,15 @@ export const makeDeploymentArtifacts = async ( merkleRoot, provider ) - networkArtifacts.executionArtifacts[`${remove0x(merkleRoot)}.json`] = - executionArtifact - - allNetworkArtifacts[chainId] = networkArtifacts + allNetworkArtifacts[chainId].executionArtifacts[ + `${remove0x(merkleRoot)}.json` + ] = executionArtifact // Make the compiler input artifacts. 
for (const compilerInput of deploymentConfig.inputs) { compilerInputArtifacts[`${compilerInput.id}.json`] = compilerInput } } - - return { - networks: allNetworkArtifacts, - compilerInputs: compilerInputArtifacts, - } } const makeExecutionArtifact = async ( diff --git a/packages/core/src/utils.ts b/packages/core/src/utils.ts index f694b0056..3114dcb83 100644 --- a/packages/core/src/utils.ts +++ b/packages/core/src/utils.ts @@ -1,6 +1,8 @@ import * as fs from 'fs' import { promisify } from 'util' import { exec, spawn } from 'child_process' +import { join } from 'path' +import { existsSync } from 'fs' import yesno from 'yesno' import axios from 'axios' @@ -60,6 +62,11 @@ import { shouldUseHigherMaxGasLimit, } from './networks' import { RelayProposal, StoreDeploymentConfig } from './types' +import { + NetworkArtifacts, + isContractDeploymentArtifact, + isExecutionArtifact, +} from './artifacts' export const sphinxLog = ( logLevel: 'warning' | 'error' = 'warning', @@ -1569,3 +1576,58 @@ export const fetchNetworkConfigFromDeploymentConfig = ( return networkConfig } + +const isDirectory = (path: string): boolean => + existsSync(path) && fs.statSync(path).isDirectory() + +export const readDeploymentArtifactsForNetwork = ( + projectName: string, + chainId: BigInt, + executionMode: ExecutionMode +): NetworkArtifacts => { + const networkArtifacts: NetworkArtifacts = { + contractDeploymentArtifacts: {}, + executionArtifacts: {}, + } + + const networkArtifactDirPath = join( + `deployments`, + projectName, + getNetworkNameDirectory(chainId.toString(), executionMode) + ) + + if (!isDirectory(networkArtifactDirPath)) { + return networkArtifacts + } + + const contractArtifactFileNames = fs + .readdirSync(networkArtifactDirPath) + .filter((fileName) => fileName.endsWith('.json')) + for (const fileName of contractArtifactFileNames) { + const filePath = join(networkArtifactDirPath, fileName) + const artifact = JSON.parse(fs.readFileSync(filePath, 'utf8')) + if (isContractDeploymentArtifact(artifact)) { + networkArtifacts.contractDeploymentArtifacts[fileName] = artifact + } + } + + const executionArtifactFilePath = join(networkArtifactDirPath, `execution`) + + if (!isDirectory(executionArtifactFilePath)) { + return networkArtifacts + } + + const executionArtifactFileNames = fs + .readdirSync(executionArtifactFilePath) + .filter((fileName) => fileName.endsWith('.json')) + + for (const fileName of executionArtifactFileNames) { + const filePath = join(executionArtifactFilePath, fileName) + const artifact = JSON.parse(fs.readFileSync(filePath, 'utf8')) + if (isExecutionArtifact(artifact)) { + networkArtifacts.executionArtifacts[fileName] = artifact + } + } + + return networkArtifacts +} diff --git a/packages/plugins/src/cli/deploy.ts b/packages/plugins/src/cli/deploy.ts index db31d7dd0..d07d3d465 100644 --- a/packages/plugins/src/cli/deploy.ts +++ b/packages/plugins/src/cli/deploy.ts @@ -1,12 +1,12 @@ import { join, relative } from 'path' -import { existsSync, readFileSync, readdirSync, unlinkSync } from 'fs' +import { existsSync, readFileSync, unlinkSync } from 'fs' import { displayDeploymentTable, fundAccountMaxBalance, - getNetworkNameDirectory, getSphinxWalletPrivateKey, isFile, + readDeploymentArtifactsForNetwork, signMerkleRoot, spawnAsync, } from '@sphinx-labs/core/dist/utils' @@ -17,8 +17,6 @@ import { SphinxPreview, makeDeploymentData, makeDeploymentArtifacts, - ContractDeploymentArtifact, - isContractDeploymentArtifact, DeploymentConfig, makeDeploymentConfig, verifyDeploymentWithRetries, @@ -400,40 
+398,35 @@ export const deploy = async ( const { projectName } = networkConfig.newConfig - // Get the existing contract deployment artifacts - const contractArtifactDirPath = join( - `deployments`, + // Get the existing contract deployment artifacts and execution artifacts for the current network. + // This object will potentially be modified when we make the new deployment artifacts. + // Specifically, the `history` field of the contract deployment artifacts could be modified. Even + // though we don't currently modify the execution artifacts, we include them anyways in case we + // add logic in the future that modifies them. We don't include the compiler input artifacts + // mainly as a performance optimization and because we don't expect to modify them in the future. + const networkArtifacts = readDeploymentArtifactsForNetwork( projectName, - getNetworkNameDirectory(chainId.toString(), networkConfig.executionMode) + chainId, + executionMode ) - const artifactFileNames = existsSync(contractArtifactDirPath) - ? readdirSync(contractArtifactDirPath) - : [] - const previousContractArtifacts: { - [fileName: string]: ContractDeploymentArtifact - } = {} - for (const fileName of artifactFileNames) { - if (fileName.endsWith('.json')) { - const filePath = join(contractArtifactDirPath, fileName) - const fileContent = readFileSync(filePath, 'utf8') - const artifact = JSON.parse(fileContent) - if (isContractDeploymentArtifact(artifact)) { - previousContractArtifacts[fileName] = artifact - } - } + const deploymentArtifacts = { + networks: { + [chainId.toString()]: networkArtifacts, + }, + compilerInputs: {}, } - const deploymentArtifacts = await makeDeploymentArtifacts( + await makeDeploymentArtifacts( { [chainId.toString()]: { provider, deploymentConfig, receipts, - previousContractArtifacts, }, }, merkleTree.root, - configArtifacts + configArtifacts, + deploymentArtifacts ) spinner.succeed(`Built deployment artifacts.`) diff --git a/packages/plugins/test/mocha/artifacts.spec.ts b/packages/plugins/test/mocha/artifacts.spec.ts index 892fb91a2..eb2ff45ee 100644 --- a/packages/plugins/test/mocha/artifacts.spec.ts +++ b/packages/plugins/test/mocha/artifacts.spec.ts @@ -6,12 +6,22 @@ import { fetchChainIdForNetwork, writeDeploymentArtifacts, ParsedAccountAccess, + makeContractDeploymentArtifacts, + SphinxJsonRpcProvider, + isContractDeploymentArtifact, + makeDeploymentArtifacts, + isExecutionArtifact, } from '@sphinx-labs/core' import { ethers } from 'ethers' +import { remove0x } from '@sphinx-labs/contracts' +import sinon from 'sinon' +import { expect } from 'chai' +import * as MyContract2Artifact from '../../out/artifacts/MyContracts.sol/MyContract2.json' import { checkArtifacts, getAnvilRpcUrl, + getEmptyDeploymentArtifacts, getGnosisSafeProxyAddress, killAnvilNodes, makeDeployment, @@ -20,6 +30,28 @@ import { runDeployment, startAnvilNodes, } from './common' +import { + dummyChainId, + dummyCompilerInputArtifactFileName, + dummyBuildInfoId, + dummyContractArtifactFileName, + dummyContractName, + dummyExecutionArtifactFileName, + dummyMerkleRoot, + getDummyBuildInfos, + getDummyContractDeploymentArtifact, + getDummyDeploymentArtifacts, + getDummyEthersTransactionResponse, + getDummyNetworkConfig, +} from './dummy' +import { + getFakeActionInputWithContract, + getFakeActionSucceededReceipt, + getFakeConfigArtifacts, + getFakeDeploymentConfig, +} from './fake' +import { FoundryToml } from '../../src/foundry/types' +import { getFoundryToml } from '../../src/foundry/options' const allNetworkNames = 
['sepolia', 'optimism_sepolia'] const allChainIds = allNetworkNames.map((network) => @@ -47,30 +79,45 @@ const safeAddress = getGnosisSafeProxyAddress( // deployments in TypeScript instead of collecting the transactions from a Forge script because this // is significantly faster. describe('Artifacts', () => { + const mockProvider = new SphinxJsonRpcProvider(``) + + let foundryToml: FoundryToml + let getCodeStub: sinon.SinonStub + let getTransactionStub: sinon.SinonStub beforeEach(async () => { + getCodeStub = sinon.stub(mockProvider, 'getCode').resolves('0x11') + getTransactionStub = sinon + .stub(mockProvider, 'getTransaction') + .resolves(getDummyEthersTransactionResponse()) + // Make sure that the Anvil nodes aren't running. await killAnvilNodes(allChainIds) // Start the Anvil nodes. await startAnvilNodes(allChainIds) await rm(`deployments`, { recursive: true, force: true }) + + foundryToml = await getFoundryToml() }) afterEach(async () => { await killAnvilNodes(allChainIds) + + getCodeStub.restore() + getTransactionStub.restore() }) it('makes artifacts for the first deployment on local networks', async () => { await makeThenRunThenCheckDeployment( makeStandardDeployment(0, ExecutionMode.LocalNetworkCLI, safeAddress), - {} + getEmptyDeploymentArtifacts() ) }) it('makes artifacts for the first deployment on live networks', async () => { await makeThenRunThenCheckDeployment( makeStandardDeployment(0, ExecutionMode.Platform, safeAddress), - {} + getEmptyDeploymentArtifacts() ) }) @@ -79,11 +126,11 @@ describe('Artifacts', () => { it('makes artifacts for the second deployment on live networks', async () => { const firstArtifacts = await makeThenRunThenCheckDeployment( makeStandardDeployment(0, ExecutionMode.Platform, safeAddress), - {} + getEmptyDeploymentArtifacts() ) await makeThenRunThenCheckDeployment( makeStandardDeployment(1, ExecutionMode.Platform, safeAddress), - firstArtifacts.networks + firstArtifacts ) }) @@ -93,30 +140,156 @@ describe('Artifacts', () => { it('makes artifacts for the third deployment on live networks', async () => { const firstArtifacts = await makeThenRunThenCheckDeployment( makeStandardDeployment(0, ExecutionMode.Platform, safeAddress), - {} + getEmptyDeploymentArtifacts() ) const secondArtifacts = await makeThenRunThenCheckDeployment( makeStandardDeployment(1, ExecutionMode.Platform, safeAddress), - firstArtifacts.networks + firstArtifacts ) await makeThenRunThenCheckDeployment( makeStandardDeployment(2, ExecutionMode.Platform, safeAddress), - secondArtifacts.networks + secondArtifacts ) }) it('makes artifacts for partially executed deployment', async () => { await makeThenRunThenCheckDeployment( makeRevertingDeployment(0, ExecutionMode.Platform, safeAddress), - {} + getEmptyDeploymentArtifacts() ) }) it('makes artifacts for remotely compiled deployment', async () => { await makeThenRunThenCheckRemoteDeployment( makeStandardDeployment(0, ExecutionMode.Platform, safeAddress), - {} + getEmptyDeploymentArtifacts() + ) + }) + + // Tests a scenario where deployment artifacts are created on a chain when existing deployment + // artifacts already exist on a different chain. In this scenario, the deployment artifact + // logic must return the previous deployment artifacts in addition to the new ones. 
+ it('keeps previous deployment artifacts', async () => { + const newChainId = '123' + const newCompilerInputId = 'newCompilerInputId' + const newContractName = 'MyContract2' + const newFullyQualifiedName = 'contracts/test/MyContracts.sol:MyContract2' + const newMerkleRoot = '0x' + '42'.repeat(32) + + const deploymentConfig = await getFakeDeploymentConfig( + newChainId, + newFullyQualifiedName, + MyContract2Artifact.bytecode.object, + foundryToml.artifactFolder, + newCompilerInputId, + newMerkleRoot + ) + const receipts = [getFakeActionSucceededReceipt(newMerkleRoot)] + const artifacts = getDummyDeploymentArtifacts() + const deployment = { + [newChainId]: { + deploymentConfig, + receipts, + provider: mockProvider, + }, + } + + await makeDeploymentArtifacts( + deployment, + newMerkleRoot, + deploymentConfig.configArtifacts, + artifacts + ) + + const previousContractArtifact = + artifacts.networks[dummyChainId].contractDeploymentArtifacts[ + `${dummyContractName}.json` + ] + const newContractArtifact = + artifacts.networks[newChainId].contractDeploymentArtifacts[ + `${newContractName}.json` + ] + expect(isContractDeploymentArtifact(previousContractArtifact)).equals(true) + expect(previousContractArtifact.chainId).equals(dummyChainId) + expect(isContractDeploymentArtifact(newContractArtifact)).equals(true) + expect(newContractArtifact.chainId).equals(newChainId) + + const previousExecutionArtifact = + artifacts.networks[dummyChainId].executionArtifacts[ + dummyExecutionArtifactFileName + ] + const newExecutionArtifact = + artifacts.networks[newChainId].executionArtifacts[ + `${remove0x(newMerkleRoot)}.json` + ] + expect(isExecutionArtifact(previousExecutionArtifact)).equals(true) + expect(previousExecutionArtifact.merkleRoot).equals(dummyMerkleRoot) + expect(isExecutionArtifact(newExecutionArtifact)).equals(true) + expect(newExecutionArtifact.merkleRoot).equals(newMerkleRoot) + + const previousCompilerInput = + artifacts.compilerInputs[dummyCompilerInputArtifactFileName] + const newCompilerInput = + artifacts.compilerInputs[`${newCompilerInputId}.json`] + expect(previousCompilerInput.id).equals(dummyBuildInfoId) + expect(newCompilerInput.id).equals(newCompilerInputId) + }) +}) + +describe('Contract Deployment Artifacts', () => { + const mockProvider = new SphinxJsonRpcProvider(``) + + let foundryToml: FoundryToml + let getCodeStub: sinon.SinonStub + beforeEach(async () => { + getCodeStub = sinon.stub(mockProvider, 'getCode').resolves('0x11') + foundryToml = await getFoundryToml() + }) + + afterEach(async () => { + getCodeStub.restore() + }) + + // Tests a scenario where a contract deployment artifact is created when a contract deployment + // artifact already exists with a different contract name. In this scenario, we must return the + // previous contract deployment artifact in addition to the new one. 
+ it('keeps previous contract deployment artifact', async () => { + const contractName = 'MyContract2' + const fullyQualifiedName = 'contracts/test/MyContracts.sol:MyContract2' + const initCodeWithArgs = MyContract2Artifact.bytecode.object + + const configArtifacts = await getFakeConfigArtifacts( + [fullyQualifiedName], + foundryToml.artifactFolder ) + + const networkConfig = getDummyNetworkConfig() + networkConfig.actionInputs = [ + getFakeActionInputWithContract(fullyQualifiedName, initCodeWithArgs), + ] + + const contractArtifacts = { + [dummyContractArtifactFileName]: getDummyContractDeploymentArtifact(), + } + const receipts = [getFakeActionSucceededReceipt(dummyMerkleRoot)] + + await makeContractDeploymentArtifacts( + dummyMerkleRoot, + networkConfig, + getDummyBuildInfos(), + receipts, + configArtifacts, + contractArtifacts, + mockProvider + ) + + const previousArtifact = contractArtifacts[dummyContractArtifactFileName] + expect(isContractDeploymentArtifact(previousArtifact)).equals(true) + expect(previousArtifact.contractName).equals(dummyContractName) + + const newArtifact = contractArtifacts[`${contractName}.json`] + expect(isContractDeploymentArtifact(newArtifact)).equals(true) + expect(newArtifact.contractName).equals(contractName) }) }) @@ -125,15 +298,13 @@ const makeThenRunThenCheckDeployment = async ( merkleRootNonce: number executionMode: ExecutionMode accountAccesses: Array - expectedNumExecutionArtifacts: number expectedContractFileNames: Array }, - previousArtifacts: DeploymentArtifacts['networks'] + artifacts: DeploymentArtifacts ): Promise => { const { merkleRootNonce, accountAccesses, - expectedNumExecutionArtifacts, expectedContractFileNames, executionMode, } = deployment @@ -150,16 +321,18 @@ const makeThenRunThenCheckDeployment = async ( getAnvilRpcUrl ) - const artifacts = await runDeployment(deploymentConfig, previousArtifacts) + const previousArtifacts = structuredClone(artifacts) + + await runDeployment(deploymentConfig, artifacts) writeDeploymentArtifacts(projectName, executionMode, artifacts) checkArtifacts( projectName, deploymentConfig, + previousArtifacts, artifacts, executionMode, - expectedNumExecutionArtifacts, expectedContractFileNames ) @@ -171,15 +344,13 @@ const makeThenRunThenCheckRemoteDeployment = async ( merkleRootNonce: number executionMode: ExecutionMode accountAccesses: Array - expectedNumExecutionArtifacts: number expectedContractFileNames: Array }, - previousArtifacts: DeploymentArtifacts['networks'] -): Promise => { + artifacts: DeploymentArtifacts +): Promise => { const { merkleRootNonce, accountAccesses, - expectedNumExecutionArtifacts, expectedContractFileNames, executionMode, } = deployment @@ -196,18 +367,18 @@ const makeThenRunThenCheckRemoteDeployment = async ( getAnvilRpcUrl ) - const artifacts = await runDeployment(deploymentConfig, previousArtifacts) + const previousArtifacts = structuredClone(artifacts) + + await runDeployment(deploymentConfig, artifacts) writeDeploymentArtifacts(projectName, executionMode, artifacts) checkArtifacts( projectName, deploymentConfig, + previousArtifacts, artifacts, executionMode, - expectedNumExecutionArtifacts, expectedContractFileNames ) - - return artifacts } diff --git a/packages/plugins/test/mocha/cli/deploy.spec.ts b/packages/plugins/test/mocha/cli/deploy.spec.ts index 32a04653b..fbfbb6380 100644 --- a/packages/plugins/test/mocha/cli/deploy.spec.ts +++ b/packages/plugins/test/mocha/cli/deploy.spec.ts @@ -38,6 +38,7 @@ import { killAnvilNodes, startAnvilNodes, 
getSphinxModuleAddressFromScript, + getEmptyDeploymentArtifacts, } from '../common' import { makeMockSphinxContextForIntegrationTests } from '../mock' @@ -183,17 +184,21 @@ describe('Deploy CLI command', () => { expect(networkConfig.executionMode).equals(executionMode) - const artifacts = await makeDeploymentArtifacts( + const artifacts: DeploymentArtifacts = { + networks: {}, + compilerInputs: {}, + } + await makeDeploymentArtifacts( { [networkConfig.chainId]: { deploymentConfig, receipts, provider, - previousContractArtifacts: {}, }, }, merkleTree.root, - configArtifacts + configArtifacts, + artifacts ) await expectValidDeployment( @@ -203,7 +208,6 @@ describe('Deploy CLI command', () => { projectName, artifacts, executionMode, - 1, ['MyContract2.json'] ) }) @@ -277,17 +281,21 @@ describe('Deploy CLI command', () => { expect(networkConfig.executionMode).equals(executionMode) - const artifacts = await makeDeploymentArtifacts( + const artifacts: DeploymentArtifacts = { + networks: {}, + compilerInputs: {}, + } + await makeDeploymentArtifacts( { [networkConfig.chainId]: { deploymentConfig, receipts, provider, - previousContractArtifacts: {}, }, }, merkleTree.root, - configArtifacts + configArtifacts, + artifacts ) await expectValidDeployment( @@ -297,7 +305,6 @@ describe('Deploy CLI command', () => { projectName, artifacts, executionMode, - 1, ['MyContract2.json'] ) }) @@ -613,25 +620,29 @@ describe('Deployment Cases', () => { throw new Error(`Object(s) undefined.`) } - const artifacts = await makeDeploymentArtifacts( + const artifacts: DeploymentArtifacts = { + networks: {}, + compilerInputs: {}, + } + await makeDeploymentArtifacts( { [networkConfig.chainId]: { deploymentConfig, receipts, provider, - previousContractArtifacts: {}, }, }, merkleTree.root, - configArtifacts + configArtifacts, + artifacts ) checkArtifacts( 'Deployment_Cases_Project', deploymentConfig, + getEmptyDeploymentArtifacts(), artifacts, ExecutionMode.LocalNetworkCLI, - 1, [ 'MyContract2.json', 'MyContract2_1.json', @@ -658,7 +669,6 @@ const expectValidDeployment = async ( projectName: string, artifacts: DeploymentArtifacts, executionMode: ExecutionMode, - expectedNumExecutionArtifacts: number, expectedContractFileNames: Array ) => { const networkConfig = deploymentConfig.networkConfigs.at(0) @@ -720,9 +730,9 @@ const expectValidDeployment = async ( checkArtifacts( projectName, deploymentConfig, + getEmptyDeploymentArtifacts(), artifacts, executionMode, - expectedNumExecutionArtifacts, expectedContractFileNames ) } diff --git a/packages/plugins/test/mocha/common.ts b/packages/plugins/test/mocha/common.ts index c7684f5af..a2dfb8951 100644 --- a/packages/plugins/test/mocha/common.ts +++ b/packages/plugins/test/mocha/common.ts @@ -14,7 +14,6 @@ import { makeDeploymentData, DeploymentArtifacts, SphinxTransactionReceipt, - ContractDeploymentArtifact, getSphinxWalletPrivateKey, makeDeploymentArtifacts, isReceiptEarlier, @@ -493,7 +492,6 @@ export const makeRevertingDeployment = ( executionMode: ExecutionMode merkleRootNonce: number accountAccesses: Array - expectedNumExecutionArtifacts: number expectedContractFileNames: Array } => { // We use the Merkle root nonce as the `CREATE2` salt to ensure that we don't attempt to deploy a @@ -513,30 +511,25 @@ export const makeRevertingDeployment = ( abiEncodedConstructorArgs: '0x', }, ]) - const expectedNumExecutionArtifacts = 1 const expectedContractFileNames = ['MyContract2.json'] return { executionMode, merkleRootNonce, accountAccesses, - expectedNumExecutionArtifacts, 
expectedContractFileNames, } } export const runDeployment = async ( deploymentConfig: DeploymentConfig, - previousArtifacts: DeploymentArtifacts['networks'] -): Promise => { + previousArtifacts: DeploymentArtifacts +): Promise => { const artifactInputs: { [chainId: string]: { deploymentConfig: DeploymentConfig receipts: Array provider: SphinxJsonRpcProvider - previousContractArtifacts: { - [fileName: string]: ContractDeploymentArtifact - } } } = {} @@ -608,24 +601,19 @@ export const runDeployment = async ( receipts[0] = receipts[receipts.length - 1] receipts[receipts.length - 1] = tempReceipt - const previousContractArtifacts = - previousArtifacts.networks?.[chainId]?.contractDeploymentArtifacts ?? {} - artifactInputs[networkConfig.chainId] = { deploymentConfig, receipts, provider, - previousContractArtifacts, } } - const artifacts = await makeDeploymentArtifacts( + await makeDeploymentArtifacts( artifactInputs, merkleTree.root, - configArtifacts + configArtifacts, + previousArtifacts ) - - return artifacts } export const isSortedChronologically = ( @@ -695,7 +683,6 @@ export const makeStandardDeployment = ( executionMode: ExecutionMode merkleRootNonce: number accountAccesses: Array - expectedNumExecutionArtifacts: number expectedContractFileNames: Array } => { // The `CREATE2` salt is determined by the Merkle root nonce to ensure that we don't attempt to @@ -737,7 +724,6 @@ export const makeStandardDeployment = ( ), }, ]) - const expectedNumExecutionArtifacts = 1 const expectedContractFileNames = [ 'MyContract2.json', 'MyContract1.json', @@ -749,17 +735,23 @@ export const makeStandardDeployment = ( executionMode, merkleRootNonce, accountAccesses, - expectedNumExecutionArtifacts, expectedContractFileNames, } } +export const getEmptyDeploymentArtifacts = (): DeploymentArtifacts => { + return { + networks: {}, + compilerInputs: {}, + } +} + export const checkArtifacts = ( projectName: string, deploymentConfig: DeploymentConfig, + previousArtifacts: DeploymentArtifacts, artifacts: DeploymentArtifacts, executionMode: ExecutionMode, - expectedNumExecutionArtifacts: number, expectedContractFileNames: Array ) => { for (const chainIdStr of Object.keys(artifacts.networks)) { @@ -775,8 +767,12 @@ export const checkArtifacts = ( expect(Object.keys(contractDeploymentArtifacts).length).equals( expectedContractFileNames.length ) + const numPreviousExecutionArtifacts = getNumExecutionArtifacts( + previousArtifacts, + chainIdStr + ) expect(Object.keys(executionArtifacts).length).equals( - expectedNumExecutionArtifacts + numPreviousExecutionArtifacts + 1 ) const contractArtifactArray = Object.entries(contractDeploymentArtifacts) @@ -918,3 +914,13 @@ export const runForgeScript = async ( } return broadcast } + +export const getNumExecutionArtifacts = ( + artifacts: DeploymentArtifacts, + chainId: string +): number => { + if (artifacts.networks[chainId] === undefined) { + return 0 + } + return Object.keys(artifacts.networks[chainId].executionArtifacts).length +} diff --git a/packages/plugins/test/mocha/dummy.ts b/packages/plugins/test/mocha/dummy.ts new file mode 100644 index 000000000..37a80b321 --- /dev/null +++ b/packages/plugins/test/mocha/dummy.ts @@ -0,0 +1,287 @@ +import { Operation, SphinxMerkleTree } from '@sphinx-labs/contracts' +import { + ActionInputType, + BuildInfo, + BuildInfos, + CompilerInput, + ContractDeploymentArtifact, + DeploymentArtifacts, + ExecutionArtifact, + ExecutionMode, + NetworkConfig, + SolcInput, + SphinxJsonRpcProvider, + SphinxTransactionReceipt, +} from '@sphinx-labs/core' 
+import { ethers } from 'ethers' + +import { makeAddress } from './common' + +export const dummyChainId = '43211234' +export const dummyMerkleRoot = '0x' + 'fe'.repeat(32) +export const dummyModuleAddress = '0x' + 'df'.repeat(20) +export const dummyContractName = 'DummyContractName' +export const dummyContractArtifactFileName = `${dummyContractName}.json` +export const dummyExecutionArtifactFileName = `${dummyMerkleRoot}.json` +export const dummyBuildInfoId = `dummyBuildInfoId` +export const dummyCompilerInputArtifactFileName = `${dummyBuildInfoId}.json` + +export const getDummyEthersTransactionResponse = + (): ethers.TransactionResponse => { + const provider = new SphinxJsonRpcProvider(``) + const response = new ethers.TransactionResponse( + { + accessList: [], + blobVersionedHashes: null, + blockHash: 'dummyBlockHash', + blockNumber: 11111, + chainId: BigInt(1), + data: 'dummyData', + from: makeAddress(1), + gasLimit: BigInt(1), + gasPrice: BigInt(1), + hash: 'dummyHash', + index: 0, + maxFeePerBlobGas: BigInt(1), + maxFeePerGas: BigInt(1), + maxPriorityFeePerGas: BigInt(1), + nonce: 0, + signature: ethers.Signature.from( + '0xa617d0558818c7a479d5063987981b59d6e619332ef52249be8243572ef1086807e381afe644d9bb56b213f6e08374c893db308ac1a5ae2bf8b33bcddcb0f76a1b' + ), + to: 'dummyTo', + type: 0, + value: BigInt(0), + }, + provider + ) + + return response + } + +export const getDummySphinxTransactionReceipt = + (): SphinxTransactionReceipt => { + return { + blockHash: 'dummyBlockHash', + blockNumber: 123, + contractAddress: null, + cumulativeGasUsed: 'dummyCumulativeGasUsed', + from: 'dummyFrom', + gasPrice: 'dummyGasPrice', + gasUsed: 'dummyGasUsed', + hash: 'dummyHash', + index: 0, + logs: [ + { + address: 'dummyLogAddress', + blockHash: 'dummyLogBlockHash', + blockNumber: 123, + data: 'dummyLogData', + index: 0, + topics: ['dummyTopic1', 'dummyTopic2'], + transactionHash: 'dummyTransactionHash', + transactionIndex: 1, + }, + ], + logsBloom: 'dummyLogsBloom', + status: 1, + to: 'dummyTo', + } + } + +export const getDummyMerkleTree = (): SphinxMerkleTree => { + return { + root: dummyMerkleRoot, + leavesWithProofs: [], + } +} + +export const getDummyContractDeploymentArtifact = + (): ContractDeploymentArtifact => { + return { + _format: 'sphinx-sol-ct-artifact-1', + merkleRoot: dummyMerkleRoot, + address: 'dummyAddress', + sourceName: 'dummySourceName', + contractName: dummyContractName, + chainId: dummyChainId, + receipt: getDummySphinxTransactionReceipt(), + args: [], + solcInputHash: 'dummySolcInputHash', + abi: [], + bytecode: 'dummyBytecode', + deployedBytecode: 'dummyDeployedBytecode', + linkReferences: {}, + deployedLinkReferences: {}, + history: [], + metadata: 'dummyMetadata', + gitCommit: null, + devdoc: {}, + userdoc: {}, + } + } + +export const getDummySolcInput = (): SolcInput => { + return { + language: 'Solidity', + settings: { + optimizer: { + runs: undefined, + enabled: undefined, + details: undefined, + }, + outputSelection: {}, + }, + sources: {}, + } +} + +export const getDummyCompilerInput = (): CompilerInput => { + return { + id: dummyBuildInfoId, + solcVersion: '0.8.0', + solcLongVersion: '0.8.21+commit.d9974bed', + input: getDummySolcInput(), + } +} + +export const getDummyBuildInfo = (): BuildInfo => { + return { + id: dummyBuildInfoId, + solcVersion: '0.8.0', + solcLongVersion: '0.8.21+commit.d9974bed', + input: getDummySolcInput(), + output: { + contracts: {}, + }, + } +} + +export const getDummyBuildInfos = (): BuildInfos => { + return { [dummyBuildInfoId]: 
getDummyBuildInfo() } +} + +export const getDummyEventLog = (): SphinxTransactionReceipt['logs'][0] => { + return { + address: '0xDummyAddress', + blockHash: '0xDummyBlockHash', + blockNumber: 123, + data: '0xDummyData', + index: 1, + topics: [], + transactionHash: '0xDummyTransactionHash', + transactionIndex: 0, + } +} + +export const getDummyNetworkConfig = (): NetworkConfig => { + return { + safeAddress: '0x' + '11'.repeat(20), + moduleAddress: dummyModuleAddress, + executorAddress: '0x' + '33'.repeat(20), + safeInitData: '0x' + '44'.repeat(20), + nonce: '0', + chainId: '1', + blockGasLimit: '0', + blockNumber: '0', + actionInputs: [ + { + contracts: [], + index: '0', + actionType: ActionInputType.CALL, + decodedAction: { + referenceName: 'MockReference', + functionName: 'MockFunction', + variables: {}, + address: '0x' + '55'.repeat(20), + }, + to: '0x' + '66'.repeat(20), + value: '0', + txData: '0x', + gas: '0', + operation: Operation.Call, + requireSuccess: true, + }, + ], + newConfig: { + projectName: 'MockProject', + orgId: 'MockOrgId', + owners: [], + mainnets: [], + testnets: [], + threshold: '1', + saltNonce: '0', + }, + executionMode: ExecutionMode.LocalNetworkCLI, + initialState: { + isSafeDeployed: false, + isModuleDeployed: false, + isExecuting: false, + }, + isSystemDeployed: true, + unlabeledContracts: [], + arbitraryChain: false, + libraries: [], + gitCommit: null, + } +} + +const getDummyExecutionArtifact = (): ExecutionArtifact => { + return { + _format: 'sphinx-sol-execution-artifact-1', + transactions: [], + merkleRoot: dummyMerkleRoot, + solcInputHashes: ['dummyHash'], + safeAddress: 'dummySafeAddress', + moduleAddress: 'dummyModuleAddress', + executorAddress: 'dummyExecutorAddress', + nonce: 'dummyNonce', + chainId: 'dummyChainId', + actions: [], + sphinxConfig: { + projectName: 'dummyProjectName', + orgId: 'dummyOrgId', + owners: ['dummyOwner'], + mainnets: ['dummyMainnet'], + testnets: ['dummyTestnet'], + threshold: 'dummyThreshold', + saltNonce: 'dummySaltNonce', + }, + executionMode: ExecutionMode.LocalNetworkCLI, + initialState: { + isSafeDeployed: false, + isModuleDeployed: false, + isExecuting: false, + }, + unlabeledContracts: [], + arbitraryChain: false, + libraries: [], + gitCommit: null, + safeInitData: null, + } +} + +export const getDummyDeploymentArtifacts = (): DeploymentArtifacts => { + const { id, input, solcVersion, solcLongVersion } = getDummyBuildInfo() + + return { + networks: { + [dummyChainId]: { + contractDeploymentArtifacts: { + [dummyContractArtifactFileName]: getDummyContractDeploymentArtifact(), + }, + executionArtifacts: { + [dummyExecutionArtifactFileName]: getDummyExecutionArtifact(), + }, + }, + }, + compilerInputs: { + [dummyCompilerInputArtifactFileName]: { + id, + input, + solcLongVersion, + solcVersion, + }, + }, + } +} diff --git a/packages/plugins/test/mocha/fake.ts b/packages/plugins/test/mocha/fake.ts new file mode 100644 index 000000000..3de6790eb --- /dev/null +++ b/packages/plugins/test/mocha/fake.ts @@ -0,0 +1,128 @@ +import { + ActionInput, + ActionInputType, + ConfigArtifacts, + DeploymentConfig, + SphinxTransactionReceipt, +} from '@sphinx-labs/core' +import { Operation, SphinxModuleABI } from '@sphinx-labs/contracts' +import { EventFragment, ethers } from 'ethers' + +import { + dummyBuildInfoId, + dummyModuleAddress, + getDummyBuildInfos, + getDummyCompilerInput, + getDummyEventLog, + getDummyMerkleTree, + getDummyNetworkConfig, + getDummySphinxTransactionReceipt, +} from './dummy' +import { readContractArtifact } from 
'../../dist' + +export const getFakeActionSucceededReceipt = ( + merkleRoot: string +): SphinxTransactionReceipt => { + const moduleInterface = new ethers.Interface(SphinxModuleABI) + const actionSucceededFragment = moduleInterface.fragments + .filter(EventFragment.isFragment) + .find((fragment) => fragment.name === 'SphinxActionSucceeded') + if (!actionSucceededFragment) { + throw new Error(`Could not find event fragment. Should never happen.`) + } + + const actionIndex = 0 + const logData = moduleInterface.encodeEventLog(actionSucceededFragment, [ + merkleRoot, + actionIndex, + ]) + const dummyLog = getDummyEventLog() + dummyLog.topics = logData.topics + dummyLog.data = logData.data + dummyLog.address = dummyModuleAddress + + const receipt = getDummySphinxTransactionReceipt() + receipt.logs = [dummyLog] + return receipt +} + +export const getFakeConfigArtifacts = async ( + fullyQualifiedNames: Array, + artifactFolder: string +): Promise => { + const configArtifacts: ConfigArtifacts = {} + for (const name of fullyQualifiedNames) { + const artifact = await readContractArtifact( + name, + process.cwd(), + artifactFolder + ) + configArtifacts[name] = { + buildInfoId: dummyBuildInfoId, + artifact, + } + } + return configArtifacts +} + +export const getFakeActionInputWithContract = ( + fullyQualifiedName: string, + initCodeWithArgs: string +): ActionInput => { + return { + contracts: [ + { + address: '0x' + '22'.repeat(20), + fullyQualifiedName, + initCodeWithArgs, + }, + ], + index: '0', + actionType: ActionInputType.CALL, + decodedAction: { + referenceName: 'MockReference', + functionName: 'MockFunction', + variables: {}, + address: '0x' + '22'.repeat(20), + }, + to: '0x' + '66'.repeat(20), + value: '0', + txData: '0x', + gas: '0', + operation: Operation.Call, + requireSuccess: true, + } +} + +export const getFakeDeploymentConfig = async ( + chainId: string, + fullyQualifiedName: string, + initCodeWithArgs: string, + artifactFolder: string, + compilerInputId: string, + merkleRoot: string +): Promise => { + const networkConfig = getDummyNetworkConfig() + networkConfig.chainId = chainId.toString() + networkConfig.actionInputs = [ + getFakeActionInputWithContract(fullyQualifiedName, initCodeWithArgs), + ] + + const compilerInput = getDummyCompilerInput() + compilerInput.id = compilerInputId + + const merkleTree = getDummyMerkleTree() + merkleTree.root = merkleRoot + + return { + networkConfigs: [networkConfig], + merkleTree, + configArtifacts: await getFakeConfigArtifacts( + [fullyQualifiedName], + artifactFolder + ), + buildInfos: getDummyBuildInfos(), + inputs: [compilerInput], + version: '0', + } +} diff --git a/packages/plugins/test/mocha/mock.ts b/packages/plugins/test/mocha/mock.ts index 18cda55b7..aba49cbd0 100644 --- a/packages/plugins/test/mocha/mock.ts +++ b/packages/plugins/test/mocha/mock.ts @@ -1,20 +1,17 @@ import { - ActionInputType, BuildInfo, BuildInfos, ConfigArtifacts, - ExecutionMode, GetConfigArtifacts, - NetworkConfig, isLiveNetwork, } from '@sphinx-labs/core' import sinon from 'sinon' -import { Operation } from '@sphinx-labs/contracts' import { propose } from '../../src/cli/propose' import { deploy } from '../../src/cli/deploy' import { makeSphinxContext } from '../../src/cli/context' import { readContractArtifact } from '../../dist' +import { getDummyBuildInfo, getDummyNetworkConfig } from './dummy' /** * Make a mocked `SphinxContext` object. 
Use this function if it's safe to assume that all of @@ -43,7 +40,7 @@ export const makeMockSphinxContext = ( .stub(sphinxContext, 'buildNetworkConfigArray') .returns( Promise.resolve({ - networkConfigArray: [makeMockNetworkConfig()], + networkConfigArray: [getDummyNetworkConfig()], configArtifacts: {}, isEmpty: false, }) @@ -73,26 +70,7 @@ export const makeMockSphinxContext = ( projectRoot, artifactFolder ) - const buildInfo: BuildInfo = { - id: '0', - solcVersion: '0.8.0', - solcLongVersion: '0.8.21+commit.d9974bed', - input: { - language: 'Solidity', - settings: { - optimizer: { - runs: undefined, - enabled: undefined, - details: undefined, - }, - outputSelection: {}, - }, - sources: {}, - }, - output: { - contracts: {}, - }, - } + const buildInfo: BuildInfo = getDummyBuildInfo() buildInfos[buildInfo.id] = buildInfo configArtifacts[name] = { buildInfoId: buildInfo.id, @@ -119,58 +97,6 @@ export const makeMockSphinxContext = ( } } -const makeMockNetworkConfig = (): NetworkConfig => { - return { - safeAddress: '0x' + '11'.repeat(20), - moduleAddress: '0x' + '22'.repeat(20), - executorAddress: '0x' + '33'.repeat(20), - safeInitData: '0x' + '44'.repeat(20), - nonce: '0', - chainId: '1', - blockGasLimit: '0', - blockNumber: '0', - actionInputs: [ - { - contracts: [], - index: '0', - actionType: ActionInputType.CALL, - decodedAction: { - referenceName: 'MockReference', - functionName: 'MockFunction', - variables: {}, - address: '0x' + '55'.repeat(20), - }, - to: '0x' + '66'.repeat(20), - value: '0', - txData: '0x', - gas: '0', - operation: Operation.Call, - requireSuccess: true, - }, - ], - newConfig: { - projectName: 'MockProject', - orgId: 'MockOrgId', - owners: [], - mainnets: [], - testnets: [], - threshold: '1', - saltNonce: '0', - }, - executionMode: ExecutionMode.LocalNetworkCLI, - initialState: { - isSafeDeployed: false, - isModuleDeployed: false, - isExecuting: false, - }, - isSystemDeployed: true, - unlabeledContracts: [], - arbitraryChain: false, - libraries: [], - gitCommit: null, - } -} - /** * Make a mock `SphinxContext` to use in integration tests. This object mocks a minimal set of * functionality, such as API calls and the user confirmation prompt.
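
---

Note on the refactored artifact API (not part of the patch itself): this change replaces the old return-value flow, where callers passed `previousContractArtifacts` and received a fresh artifacts object, with a single `DeploymentArtifacts` container that `makeDeploymentArtifacts` mutates in place, so artifacts from earlier deployments and from other networks are preserved. A minimal usage sketch of the intended call sequence, mirroring the updated `deploy.ts`; it assumes it runs inside an async function and that `projectName`, `chainId`, `executionMode`, `provider`, `deploymentConfig`, `receipts`, `merkleRoot`, and `configArtifacts` are already in scope:

// Sketch only. Names in scope are assumptions taken from the surrounding `deploy` flow.

// 1. Load whatever contract deployment and execution artifacts already exist on
//    disk for this network (returns empty maps if the directory is missing).
const networkArtifacts = readDeploymentArtifactsForNetwork(
  projectName,
  chainId,
  executionMode
)

// 2. Seed the container that `makeDeploymentArtifacts` will mutate in place.
const deploymentArtifacts: DeploymentArtifacts = {
  networks: { [chainId.toString()]: networkArtifacts },
  compilerInputs: {},
}

// 3. Add the new deployment's artifacts. Previous entries are kept; existing
//    contract artifacts may only have their `history` arrays extended.
await makeDeploymentArtifacts(
  { [chainId.toString()]: { provider, deploymentConfig, receipts } },
  merkleRoot,
  configArtifacts,
  deploymentArtifacts
)

// 4. Persist the merged result.
writeDeploymentArtifacts(projectName, executionMode, deploymentArtifacts)

Because the function now mutates its input rather than returning a new object, callers that previously relied on the return value (as the tests did) instead pass in either the artifacts read from disk or an empty container such as the `getEmptyDeploymentArtifacts()` test helper introduced in this patch.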