diff --git a/packages/cli/package.json b/packages/cli/package.json index 1d4d29d5ee9b..190966cc390b 100644 --- a/packages/cli/package.json +++ b/packages/cli/package.json @@ -99,6 +99,7 @@ "@types/got": "^9.6.12", "@types/inquirer": "^9.0.3", "@types/lodash": "^4.14.192", - "@types/yargs": "^17.0.24" + "@types/yargs": "^17.0.24", + "@lodestar/test-utils": "^1.9.1" } } diff --git a/packages/cli/test/e2e/blsToExecutionchange.test.ts b/packages/cli/test/e2e/blsToExecutionchange.test.ts index 67b0a5969c8c..c36fea252db6 100644 --- a/packages/cli/test/e2e/blsToExecutionchange.test.ts +++ b/packages/cli/test/e2e/blsToExecutionchange.test.ts @@ -4,36 +4,41 @@ import {ApiError, getClient} from "@lodestar/api"; import {config} from "@lodestar/config/default"; import {interopSecretKey} from "@lodestar/state-transition"; import {toHexString} from "@chainsafe/ssz"; +import {execCliCommand, spawnCliCommand, stopChildProcess} from "@lodestar/test-utils"; import {testFilesDir} from "../utils.js"; -import {describeCliTest, execCli} from "../utils/childprocRunner.js"; -import {itDone} from "../utils/runUtils.js"; -describeCliTest("bLSToExecutionChange cmd", function ({spawnCli}) { +describe("bLSToExecutionChange cmd", function () { this.timeout("60s"); - itDone("Perform bLSToExecutionChange", async function (done) { + it("Perform bLSToExecutionChange", async () => { const restPort = 9596; - const devBnProc = spawnCli({pipeStdToParent: false, logPrefix: "dev"}, [ - // ⏎ - "dev", - `--dataDir=${path.join(testFilesDir, "dev-bls-to-execution-change")}`, - "--genesisValidators=8", - "--startValidators=0..7", - "--rest", - `--rest.port=${restPort}`, - // Speed up test to make genesis happen faster - "--params.SECONDS_PER_SLOT=2", - ]); + const devBnProc = await spawnCliCommand( + "packages/cli/bin/lodestar.js", + [ + "dev", + `--dataDir=${path.join(testFilesDir, "dev-bls-to-execution-change")}`, + "--genesisValidators=8", + "--startValidators=0..7", + "--rest", + 
`--rest.port=${restPort}`, + // Speed up test to make genesis happen faster + "--params.SECONDS_PER_SLOT=2", + ], + {pipeStdioToParent: false, logPrefix: "dev"} + ); + // Exit early if process exits devBnProc.on("exit", (code) => { if (code !== null && code > 0) { - done(Error(`devBnProc process exited with code ${code}`)); + throw new Error(`devBnProc process exited with code ${code}`); } }); const baseUrl = `http://127.0.0.1:${restPort}`; - const client = getClient({baseUrl}, {config}); + // To cleanup the event stream connection + const httpClientController = new AbortController(); + const client = getClient({baseUrl, getAbortSignal: () => httpClientController.signal}, {config}); // Wait for beacon node API to be available + genesis await retry( @@ -57,8 +62,7 @@ describeCliTest("bLSToExecutionChange cmd", function ({spawnCli}) { // 2 0xa3a32b0f8b4ddb83f1a0a853d81dd725dfe577d4f4c3db8ece52ce2b026eca84815c1a7e8e92a4 // 3 0x88c141df77cd9d8d7a71a75c826c41a9c9f03c6ee1b180f3e7852f6a280099ded351b58d66e653 - await execCli([ - // ⏎ + await execCliCommand("packages/cli/bin/lodestar.js", [ "validator", "bls-to-execution-change", "--network=dev", @@ -80,8 +84,9 @@ describeCliTest("bLSToExecutionChange cmd", function ({spawnCli}) { throw Error("Invalid message generated"); } + httpClientController.abort(); devBnProc.kill("SIGINT"); await sleep(1000); - devBnProc.kill("SIGKILL"); + await stopChildProcess(devBnProc, "SIGKILL"); }); }); diff --git a/packages/cli/test/e2e/importFromFsDirect.test.ts b/packages/cli/test/e2e/importFromFsDirect.test.ts index 828aea030a5e..f55587ced2e3 100644 --- a/packages/cli/test/e2e/importFromFsDirect.test.ts +++ b/packages/cli/test/e2e/importFromFsDirect.test.ts @@ -1,14 +1,16 @@ import fs from "node:fs"; import path from "node:path"; import {rimraf} from "rimraf"; +import {getMochaContext} from "@lodestar/test-utils/mocha"; import {testFilesDir} from "../utils.js"; -import {describeCliTest} from "../utils/childprocRunner.js"; -import 
{getAfterEachCallbacks} from "../utils/runUtils.js"; import {cachedPubkeysHex, cachedSeckeysHex} from "../utils/cachedKeys.js"; -import {expectKeys, getKeymanagerTestRunner} from "../utils/keymanagerTestRunners.js"; +import {expectKeys, startValidatorWithKeyManager} from "../utils/validator.js"; import {getKeystoresStr} from "../utils/keystores.js"; -describeCliTest("import from fs same cmd as validate", function ({spawnCli}) { +describe("import from fs same cmd as validate", function () { + const testContext = getMochaContext(this); + this.timeout("30s"); + const dataDir = path.join(testFilesDir, "import-and-validate-test"); const importFromDir = path.join(dataDir, "eth2.0_deposit_out"); const passphraseFilepath = path.join(importFromDir, "password.text"); @@ -18,9 +20,6 @@ describeCliTest("import from fs same cmd as validate", function ({spawnCli}) { rimraf.sync(importFromDir); }); - const afterEachCallbacks = getAfterEachCallbacks(); - const itKeymanagerStep = getKeymanagerTestRunner({args: {spawnCli}, afterEachCallbacks, dataDir}); - const passphrase = "AAAAAAAA0000000000"; const keyCount = 2; const pubkeys = cachedPubkeysHex.slice(0, keyCount); @@ -38,18 +37,23 @@ describeCliTest("import from fs same cmd as validate", function ({spawnCli}) { }); // Check that there are not keys loaded without adding extra args `--importKeystores` - itKeymanagerStep("run 'validator' check keys are loaded", async function (keymanagerClient) { + it("run 'validator' there are no keys loaded", async () => { + const {keymanagerClient} = await startValidatorWithKeyManager([], { + dataDir, + logPrefix: "case-1", + testContext, + }); + await expectKeys(keymanagerClient, [], "Wrong listKeys response data"); }); // Run validator with extra arguments to load keystores in same step - itKeymanagerStep( - "run 'validator' check keys are loaded", - async function (keymanagerClient) { - await expectKeys(keymanagerClient, pubkeys, "Wrong listKeys response data"); - }, - { - 
validatorCmdExtraArgs: [`--importKeystores=${importFromDir}`, `--importKeystoresPassword=${passphraseFilepath}`], - } - ); + it("run 'validator' check keys are loaded", async () => { + const {keymanagerClient} = await startValidatorWithKeyManager( + [`--importKeystores=${importFromDir}`, `--importKeystoresPassword=${passphraseFilepath}`], + {dataDir, logPrefix: "case-2", testContext} + ); + + await expectKeys(keymanagerClient, pubkeys, "Wrong listKeys response data"); + }); }); diff --git a/packages/cli/test/e2e/importFromFsPreStep.test.ts b/packages/cli/test/e2e/importFromFsPreStep.test.ts index 36af4e454934..9dd48acaa1a6 100644 --- a/packages/cli/test/e2e/importFromFsPreStep.test.ts +++ b/packages/cli/test/e2e/importFromFsPreStep.test.ts @@ -2,14 +2,17 @@ import fs from "node:fs"; import path from "node:path"; import {rimraf} from "rimraf"; import {expect} from "chai"; +import {getMochaContext} from "@lodestar/test-utils/mocha"; +import {execCliCommand} from "@lodestar/test-utils"; import {testFilesDir} from "../utils.js"; -import {describeCliTest, execCli} from "../utils/childprocRunner.js"; -import {getAfterEachCallbacks} from "../utils/runUtils.js"; import {cachedPubkeysHex, cachedSeckeysHex} from "../utils/cachedKeys.js"; -import {expectKeys, getKeymanagerTestRunner} from "../utils/keymanagerTestRunners.js"; +import {expectKeys, startValidatorWithKeyManager} from "../utils/validator.js"; import {getKeystoresStr} from "../utils/keystores.js"; -describeCliTest("import from fs then validate", function ({spawnCli}) { +describe("import from fs then validate", function () { + const testContext = getMochaContext(this); + this.timeout("30s"); + const dataDir = path.join(testFilesDir, "import-then-validate-test"); const importFromDir = path.join(dataDir, "eth2.0_deposit_out"); const passphraseFilepath = path.join(importFromDir, "password.text"); @@ -19,9 +22,6 @@ describeCliTest("import from fs then validate", function ({spawnCli}) { rimraf.sync(importFromDir); }); - 
const afterEachCallbacks = getAfterEachCallbacks(); - const itKeymanagerStep = getKeymanagerTestRunner({args: {spawnCli}, afterEachCallbacks, dataDir}); - const passphrase = "AAAAAAAA0000000000"; const keyCount = 2; const pubkeys = cachedPubkeysHex.slice(0, keyCount); @@ -37,8 +37,7 @@ describeCliTest("import from fs then validate", function ({spawnCli}) { fs.writeFileSync(path.join(importFromDir, `keystore_${i}.json`), keystoresStr[i]); } - const stdout = await execCli([ - // ⏎ + const stdout = await execCliCommand("packages/cli/bin/lodestar.js", [ "validator import", `--dataDir ${dataDir}`, `--importKeystores ${importFromDir}`, @@ -54,18 +53,16 @@ describeCliTest("import from fs then validate", function ({spawnCli}) { fs.mkdirSync(path.join(dataDir, "keystores"), {recursive: true}); fs.mkdirSync(path.join(dataDir, "secrets"), {recursive: true}); - const stdout = await execCli([ - // ⏎ - "validator list", - `--dataDir ${dataDir}`, - ]); + const stdout = await execCliCommand("packages/cli/bin/lodestar.js", ["validator list", `--dataDir ${dataDir}`]); for (let i = 0; i < keyCount; i++) { expect(stdout).includes(pubkeys[i], `stdout should include imported pubkey[${i}]`); } }); - itKeymanagerStep("run 'validator' check keys are loaded", async function (keymanagerClient) { + it("run 'validator' check keys are loaded", async function () { + const {keymanagerClient} = await startValidatorWithKeyManager([], {dataDir, testContext}); + await expectKeys(keymanagerClient, pubkeys, "Wrong listKeys response data"); }); }); diff --git a/packages/cli/test/e2e/importKeystoresFromApi.test.ts b/packages/cli/test/e2e/importKeystoresFromApi.test.ts index f88d0f4e55c8..d7bd90033c90 100644 --- a/packages/cli/test/e2e/importKeystoresFromApi.test.ts +++ b/packages/cli/test/e2e/importKeystoresFromApi.test.ts @@ -5,23 +5,24 @@ import {DeletionStatus, getClient, ImportStatus} from "@lodestar/api/keymanager" import {config} from "@lodestar/config/default"; import {Interchange} from 
"@lodestar/validator"; import {ApiError, HttpStatusCode} from "@lodestar/api"; +import {bufferStderr, spawnCliCommand} from "@lodestar/test-utils"; +import {getMochaContext} from "@lodestar/test-utils/mocha"; import {testFilesDir} from "../utils.js"; -import {bufferStderr, describeCliTest} from "../utils/childprocRunner.js"; import {cachedPubkeysHex, cachedSeckeysHex} from "../utils/cachedKeys.js"; -import {expectDeepEquals, getAfterEachCallbacks} from "../utils/runUtils.js"; -import {expectKeys, getKeymanagerTestRunner} from "../utils/keymanagerTestRunners.js"; +import {expectDeepEquals} from "../utils/runUtils.js"; +import {expectKeys, startValidatorWithKeyManager} from "../utils/validator.js"; import {getKeystoresStr} from "../utils/keystores.js"; -describeCliTest("import keystores from api", function ({spawnCli}) { +describe("import keystores from api", function () { + const testContext = getMochaContext(this); + this.timeout("30s"); + const dataDir = path.join(testFilesDir, "import-keystores-test"); before("Clean dataDir", () => { rimraf.sync(dataDir); }); - const afterEachCallbacks = getAfterEachCallbacks(); - const itKeymanagerStep = getKeymanagerTestRunner({args: {spawnCli}, afterEachCallbacks, dataDir}); - /** Generated from const sk = bls.SecretKey.fromKeygen(Buffer.alloc(32, 0xaa)); */ const passphrase = "AAAAAAAA0000000000"; const keyCount = 2; @@ -55,7 +56,8 @@ describeCliTest("import keystores from api", function ({spawnCli}) { const slashingProtectionStr = JSON.stringify(slashingProtection); - itKeymanagerStep("run 'validator' and import remote keys from API", async function (keymanagerClient) { + it("run 'validator' and import remote keys from API", async () => { + const {keymanagerClient} = await startValidatorWithKeyManager([], {dataDir, testContext}); // Produce and encrypt keystores const keystoresStr = await getKeystoresStr(passphrase, secretKeys); @@ -84,16 +86,14 @@ describeCliTest("import keystores from api", function ({spawnCli}) { ); // 
Attempt to run a second process and expect the keystore lock to throw - const vcProc2 = spawnCli({pipeStdToParent: true, logPrefix: "vc-2"}, [ - // ⏎ - "validator", - `--dataDir=${dataDir}`, - ]); + const validator = await spawnCliCommand("packages/cli/bin/lodestar.js", ["validator", "--dataDir", dataDir], { + logPrefix: "vc-2", + }); await new Promise((resolve, reject) => { // logger.error is printed to stdout, Yargs errors are printed in stderr - const vcProc2Stderr = bufferStderr(vcProc2); - vcProc2.on("exit", (code) => { + const vcProc2Stderr = bufferStderr(validator); + validator.on("exit", (code) => { if (code !== null && code > 0) { // process should exit with code > 0, and an error related to locks. Sample error: // vc 351591: ✖ Error: EEXIST: file already exists, open '/tmp/tmp-351554-dMctEAj7sJIz/import-keystores-test/keystores/0x8be678633e927aa0435addad5dcd5283fef6110d91362519cd6d43e61f6c017d724fa579cc4b2972134e050b6ba120c0/voting-keystore.json.lock' @@ -111,7 +111,9 @@ describeCliTest("import keystores from api", function ({spawnCli}) { }); }); - itKeymanagerStep("run 'validator' check keys are loaded + delete", async function (keymanagerClient) { + it("run 'validator' check keys are loaded + delete", async function () { + const {keymanagerClient} = await startValidatorWithKeyManager([], {dataDir, testContext}); + // Check that keys imported in previous it() are still there await expectKeys(keymanagerClient, pubkeys, "Wrong listKeys before deleting"); @@ -128,13 +130,16 @@ describeCliTest("import keystores from api", function ({spawnCli}) { await expectKeys(keymanagerClient, [], "Wrong listKeys after deleting"); }); - itKeymanagerStep("different process check no keys are loaded", async function (keymanagerClient) { + it("different process check no keys are loaded", async function () { + const {keymanagerClient} = await startValidatorWithKeyManager([], {dataDir, testContext}); // After deleting there should be no keys await expectKeys(keymanagerClient, 
[], "Wrong listKeys"); }); - itKeymanagerStep("reject calls without bearerToken", async function (_, {keymanagerUrl}) { - const keymanagerClientNoAuth = getClient({baseUrl: keymanagerUrl, bearerToken: undefined}, {config}); + it("reject calls without bearerToken", async function () { + await startValidatorWithKeyManager([], {dataDir, testContext}); + + const keymanagerClientNoAuth = getClient({baseUrl: "http://localhost:38011", bearerToken: undefined}, {config}); const res = await keymanagerClientNoAuth.listRemoteKeys(); expect(res.ok).to.be.false; expect(res.error?.code).to.be.eql(HttpStatusCode.UNAUTHORIZED); diff --git a/packages/cli/test/e2e/importRemoteKeysFromApi.test.ts b/packages/cli/test/e2e/importRemoteKeysFromApi.test.ts index bf3de952575f..7f36a6876fd0 100644 --- a/packages/cli/test/e2e/importRemoteKeysFromApi.test.ts +++ b/packages/cli/test/e2e/importRemoteKeysFromApi.test.ts @@ -4,27 +4,40 @@ import {expect} from "chai"; import {Api, DeleteRemoteKeyStatus, getClient, ImportRemoteKeyStatus} from "@lodestar/api/keymanager"; import {config} from "@lodestar/config/default"; import {ApiError, HttpStatusCode} from "@lodestar/api"; +import {getMochaContext} from "@lodestar/test-utils/mocha"; import {testFilesDir} from "../utils.js"; -import {describeCliTest} from "../utils/childprocRunner.js"; import {cachedPubkeysHex} from "../utils/cachedKeys.js"; -import {expectDeepEquals, getAfterEachCallbacks} from "../utils/runUtils.js"; -import {getKeymanagerTestRunner} from "../utils/keymanagerTestRunners.js"; +import {expectDeepEquals} from "../utils/runUtils.js"; +import {startValidatorWithKeyManager} from "../utils/validator.js"; + +const url = "https://remote.signer"; + +async function expectKeys(keymanagerClient: Api, expectedPubkeys: string[], message: string): Promise { + const remoteKeys = await keymanagerClient.listRemoteKeys(); + ApiError.assert(remoteKeys); + expectDeepEquals( + remoteKeys.response.data, + expectedPubkeys.map((pubkey) => ({pubkey, url, 
readonly: false})), + message + ); +} + +describe("import remoteKeys from api", function () { + const testContext = getMochaContext(this); + this.timeout("30s"); -describeCliTest("import remoteKeys from api", function ({spawnCli}) { const dataDir = path.join(testFilesDir, "import-remoteKeys-test"); before("Clean dataDir", () => { rimraf.sync(dataDir); }); - const afterEachCallbacks = getAfterEachCallbacks(); - const itKeymanagerStep = getKeymanagerTestRunner({args: {spawnCli}, afterEachCallbacks, dataDir}); - /** Generated from const sk = bls.SecretKey.fromKeygen(Buffer.alloc(32, 0xaa)); */ - const url = "https://remote.signer"; const pubkeysToAdd = [cachedPubkeysHex[0], cachedPubkeysHex[1]]; - itKeymanagerStep("run 'validator' and import remote keys from API", async function (keymanagerClient) { + it("run 'validator' and import remote keys from API", async () => { + const {keymanagerClient} = await startValidatorWithKeyManager([], {dataDir, testContext}); + // Wrap in retry since the API may not be listening yet await expectKeys(keymanagerClient, [], "Wrong listRemoteKeys before importing"); @@ -50,7 +63,8 @@ describeCliTest("import remoteKeys from api", function ({spawnCli}) { ); }); - itKeymanagerStep("run 'validator' check keys are loaded + delete", async function (keymanagerClient) { + it("run 'validator' check keys are loaded + delete", async function () { + const {keymanagerClient} = await startValidatorWithKeyManager([], {dataDir, testContext}); // Check that keys imported in previous it() are still there await expectKeys(keymanagerClient, pubkeysToAdd, "Wrong listRemoteKeys before deleting"); @@ -67,20 +81,12 @@ describeCliTest("import remoteKeys from api", function ({spawnCli}) { await expectKeys(keymanagerClient, [], "Wrong listRemoteKeys after deleting"); }); - itKeymanagerStep("reject calls without bearerToken", async function (_, {keymanagerUrl}) { + it("reject calls without bearerToken", async function () { + await startValidatorWithKeyManager([], 
{dataDir, testContext}); + const keymanagerUrl = "http://localhost:38011"; const keymanagerClientNoAuth = getClient({baseUrl: keymanagerUrl, bearerToken: undefined}, {config}); const res = await keymanagerClientNoAuth.listRemoteKeys(); expect(res.ok).to.be.false; expect(res.error?.code).to.be.eql(HttpStatusCode.UNAUTHORIZED); }); - - async function expectKeys(keymanagerClient: Api, expectedPubkeys: string[], message: string): Promise { - const remoteKeys = await keymanagerClient.listRemoteKeys(); - ApiError.assert(remoteKeys); - expectDeepEquals( - remoteKeys.response.data, - expectedPubkeys.map((pubkey) => ({pubkey, url, readonly: false})), - message - ); - } }); diff --git a/packages/cli/test/e2e/propserConfigfromKeymanager.test.ts b/packages/cli/test/e2e/propserConfigfromKeymanager.test.ts index 12118f1686c8..a57bf87ae016 100644 --- a/packages/cli/test/e2e/propserConfigfromKeymanager.test.ts +++ b/packages/cli/test/e2e/propserConfigfromKeymanager.test.ts @@ -2,14 +2,16 @@ import path from "node:path"; import {rimraf} from "rimraf"; import {Interchange} from "@lodestar/validator"; import {ApiError} from "@lodestar/api"; +import {getMochaContext} from "@lodestar/test-utils/mocha"; import {testFilesDir} from "../utils.js"; -import {describeCliTest} from "../utils/childprocRunner.js"; import {cachedPubkeysHex, cachedSeckeysHex} from "../utils/cachedKeys.js"; -import {expectDeepEquals, getAfterEachCallbacks} from "../utils/runUtils.js"; -import {getKeymanagerTestRunner} from "../utils/keymanagerTestRunners.js"; +import {expectDeepEquals} from "../utils/runUtils.js"; +import {startValidatorWithKeyManager} from "../utils/validator.js"; import {getKeystoresStr} from "../utils/keystores.js"; -describeCliTest("import keystores from api, test DefaultProposerConfig", function ({spawnCli}) { +describe("import keystores from api, test DefaultProposerConfig", function () { + this.timeout("30s"); + const testContext = getMochaContext(this); const dataDir = 
path.join(testFilesDir, "proposer-config-test"); const defaultOptions = { @@ -26,9 +28,6 @@ describeCliTest("import keystores from api, test DefaultProposerConfig", functio rimraf.sync(dataDir); }); - const afterEachCallbacks = getAfterEachCallbacks(); - const itKeymanagerStep = getKeymanagerTestRunner({args: {spawnCli}, afterEachCallbacks, dataDir}); - /** Generated from const sk = bls.SecretKey.fromKeygen(Buffer.alloc(32, 0xaa)); */ const passphrase = "AAAAAAAA0000000000"; const keyCount = 2; @@ -47,119 +46,115 @@ describeCliTest("import keystores from api, test DefaultProposerConfig", functio }; const slashingProtectionStr = JSON.stringify(slashingProtection); - itKeymanagerStep( - "1 . run 'validator' import keys from API, getdefaultfeeRecipient", - async function (keymanagerClient) { - // Produce and encrypt keystores - // Import test keys - const keystoresStr = await getKeystoresStr(passphrase, secretKeys); - await keymanagerClient.importKeystores(keystoresStr, passphrases, slashingProtectionStr); - - //////////////// Fee Recipient - - let feeRecipient0 = await keymanagerClient.listFeeRecipient(pubkeys[0]); - ApiError.assert(feeRecipient0); - expectDeepEquals( - feeRecipient0.response.data, - {pubkey: pubkeys[0], ethaddress: defaultOptions.suggestedFeeRecipient}, - "FeeRecipient Check default" - ); - - // Set feeClient to updatedOptions - ApiError.assert(await keymanagerClient.setFeeRecipient(pubkeys[0], updatedOptions.suggestedFeeRecipient)); - feeRecipient0 = await keymanagerClient.listFeeRecipient(pubkeys[0]); - ApiError.assert(feeRecipient0); - expectDeepEquals( - feeRecipient0.response.data, - {pubkey: pubkeys[0], ethaddress: updatedOptions.suggestedFeeRecipient}, - "FeeRecipient Check updated" - ); - - /////////// GasLimit - - let gasLimit0 = await keymanagerClient.getGasLimit(pubkeys[0]); - ApiError.assert(gasLimit0); - expectDeepEquals( - gasLimit0.response.data, - {pubkey: pubkeys[0], gasLimit: defaultOptions.gasLimit}, - "gasLimit Check default" - 
); - - // Set GasLimit to updatedOptions - ApiError.assert(await keymanagerClient.setGasLimit(pubkeys[0], updatedOptions.gasLimit)); - gasLimit0 = await keymanagerClient.getGasLimit(pubkeys[0]); - ApiError.assert(gasLimit0); - expectDeepEquals( - gasLimit0.response.data, - {pubkey: pubkeys[0], gasLimit: updatedOptions.gasLimit}, - "gasLimit Check updated" - ); - } - ); - - itKeymanagerStep( - "2 . run 'validator' Check last feeRecipient and gasLimit persists", - async function (keymanagerClient) { - // next time check edited feeRecipient persists - let feeRecipient0 = await keymanagerClient.listFeeRecipient(pubkeys[0]); - ApiError.assert(feeRecipient0); - expectDeepEquals( - feeRecipient0.response.data, - {pubkey: pubkeys[0], ethaddress: updatedOptions.suggestedFeeRecipient}, - "FeeRecipient Check default persists" - ); - - // after deletion feeRecipient restored to default - ApiError.assert(await keymanagerClient.deleteFeeRecipient(pubkeys[0])); - feeRecipient0 = await keymanagerClient.listFeeRecipient(pubkeys[0]); - ApiError.assert(feeRecipient0); - expectDeepEquals( - feeRecipient0.response.data, - {pubkey: pubkeys[0], ethaddress: defaultOptions.suggestedFeeRecipient}, - "FeeRecipient Check default after delete" - ); - - // gasLimit persists - let gasLimit0 = await keymanagerClient.getGasLimit(pubkeys[0]); - ApiError.assert(gasLimit0); - expectDeepEquals( - gasLimit0.response.data, - {pubkey: pubkeys[0], gasLimit: updatedOptions.gasLimit}, - "gasLimit Check updated persists" - ); - - ApiError.assert(await keymanagerClient.deleteGasLimit(pubkeys[0])); - gasLimit0 = await keymanagerClient.getGasLimit(pubkeys[0]); - ApiError.assert(gasLimit0); - expectDeepEquals( - gasLimit0.response.data, - {pubkey: pubkeys[0], gasLimit: defaultOptions.gasLimit}, - "gasLimit Check default after delete" - ); - } - ); - - itKeymanagerStep( - "3 . 
run 'validator' FeeRecipient and GasLimit should be default after delete", - async function (keymanagerClient) { - const feeRecipient0 = await keymanagerClient.listFeeRecipient(pubkeys[0]); - ApiError.assert(feeRecipient0); - expectDeepEquals( - feeRecipient0.response.data, - {pubkey: pubkeys[0], ethaddress: defaultOptions.suggestedFeeRecipient}, - "FeeRecipient Check default persists" - ); - - let gasLimit0 = await keymanagerClient.getGasLimit(pubkeys[0]); - - ApiError.assert(await keymanagerClient.deleteGasLimit(pubkeys[0])); - gasLimit0 = await keymanagerClient.getGasLimit(pubkeys[0]); - ApiError.assert(gasLimit0); - expectDeepEquals( - gasLimit0.response.data, - {pubkey: pubkeys[0], gasLimit: defaultOptions.gasLimit}, - "gasLimit Check default after delete" - ); - } - ); + it("1 . run 'validator' import keys from API, getdefaultfeeRecipient", async () => { + const {keymanagerClient} = await startValidatorWithKeyManager([], {dataDir, testContext}); + // Produce and encrypt keystores + // Import test keys + const keystoresStr = await getKeystoresStr(passphrase, secretKeys); + await keymanagerClient.importKeystores(keystoresStr, passphrases, slashingProtectionStr); + + //////////////// Fee Recipient + + let feeRecipient0 = await keymanagerClient.listFeeRecipient(pubkeys[0]); + ApiError.assert(feeRecipient0); + expectDeepEquals( + feeRecipient0.response.data, + {pubkey: pubkeys[0], ethaddress: defaultOptions.suggestedFeeRecipient}, + "FeeRecipient Check default" + ); + + // Set feeClient to updatedOptions + ApiError.assert(await keymanagerClient.setFeeRecipient(pubkeys[0], updatedOptions.suggestedFeeRecipient)); + feeRecipient0 = await keymanagerClient.listFeeRecipient(pubkeys[0]); + ApiError.assert(feeRecipient0); + expectDeepEquals( + feeRecipient0.response.data, + {pubkey: pubkeys[0], ethaddress: updatedOptions.suggestedFeeRecipient}, + "FeeRecipient Check updated" + ); + + /////////// GasLimit + + let gasLimit0 = await keymanagerClient.getGasLimit(pubkeys[0]); 
+ ApiError.assert(gasLimit0); + expectDeepEquals( + gasLimit0.response.data, + {pubkey: pubkeys[0], gasLimit: defaultOptions.gasLimit}, + "gasLimit Check default" + ); + + // Set GasLimit to updatedOptions + ApiError.assert(await keymanagerClient.setGasLimit(pubkeys[0], updatedOptions.gasLimit)); + gasLimit0 = await keymanagerClient.getGasLimit(pubkeys[0]); + ApiError.assert(gasLimit0); + expectDeepEquals( + gasLimit0.response.data, + {pubkey: pubkeys[0], gasLimit: updatedOptions.gasLimit}, + "gasLimit Check updated" + ); + }); + + it("2 . run 'validator' Check last feeRecipient and gasLimit persists", async () => { + const {keymanagerClient} = await startValidatorWithKeyManager([], {dataDir, testContext}); + + // next time check edited feeRecipient persists + let feeRecipient0 = await keymanagerClient.listFeeRecipient(pubkeys[0]); + ApiError.assert(feeRecipient0); + expectDeepEquals( + feeRecipient0.response.data, + {pubkey: pubkeys[0], ethaddress: updatedOptions.suggestedFeeRecipient}, + "FeeRecipient Check default persists" + ); + + // after deletion feeRecipient restored to default + ApiError.assert(await keymanagerClient.deleteFeeRecipient(pubkeys[0])); + feeRecipient0 = await keymanagerClient.listFeeRecipient(pubkeys[0]); + ApiError.assert(feeRecipient0); + expectDeepEquals( + feeRecipient0.response.data, + {pubkey: pubkeys[0], ethaddress: defaultOptions.suggestedFeeRecipient}, + "FeeRecipient Check default after delete" + ); + + // gasLimit persists + let gasLimit0 = await keymanagerClient.getGasLimit(pubkeys[0]); + ApiError.assert(gasLimit0); + expectDeepEquals( + gasLimit0.response.data, + {pubkey: pubkeys[0], gasLimit: updatedOptions.gasLimit}, + "gasLimit Check updated persists" + ); + + ApiError.assert(await keymanagerClient.deleteGasLimit(pubkeys[0])); + gasLimit0 = await keymanagerClient.getGasLimit(pubkeys[0]); + ApiError.assert(gasLimit0); + expectDeepEquals( + gasLimit0.response.data, + {pubkey: pubkeys[0], gasLimit: defaultOptions.gasLimit}, + 
"gasLimit Check default after delete" + ); + }); + + it("3 . run 'validator' FeeRecipient and GasLimit should be default after delete", async () => { + const {keymanagerClient} = await startValidatorWithKeyManager([], {dataDir, testContext}); + + const feeRecipient0 = await keymanagerClient.listFeeRecipient(pubkeys[0]); + ApiError.assert(feeRecipient0); + expectDeepEquals( + feeRecipient0.response.data, + {pubkey: pubkeys[0], ethaddress: defaultOptions.suggestedFeeRecipient}, + "FeeRecipient Check default persists" + ); + + let gasLimit0 = await keymanagerClient.getGasLimit(pubkeys[0]); + + ApiError.assert(await keymanagerClient.deleteGasLimit(pubkeys[0])); + gasLimit0 = await keymanagerClient.getGasLimit(pubkeys[0]); + ApiError.assert(gasLimit0); + expectDeepEquals( + gasLimit0.response.data, + {pubkey: pubkeys[0], gasLimit: defaultOptions.gasLimit}, + "gasLimit Check default after delete" + ); + }); }); diff --git a/packages/cli/test/e2e/runDevCmd.test.ts b/packages/cli/test/e2e/runDevCmd.test.ts index e1acd7ced617..69c8989f1788 100644 --- a/packages/cli/test/e2e/runDevCmd.test.ts +++ b/packages/cli/test/e2e/runDevCmd.test.ts @@ -1,32 +1,41 @@ import {ApiError, getClient} from "@lodestar/api"; import {config} from "@lodestar/config/default"; import {retry} from "@lodestar/utils"; -import {describeCliTest} from "../utils/childprocRunner.js"; -import {itDone} from "../utils/runUtils.js"; +import {spawnCliCommand} from "@lodestar/test-utils"; +import {getMochaContext} from "@lodestar/test-utils/mocha"; -describeCliTest("Run dev command", function ({spawnCli}) { - itDone("Run dev command with no --dataDir until beacon api is listening", async function (done) { +describe("Run dev command", function () { + const testContext = getMochaContext(this); + this.timeout("30s"); + + it("Run dev command with no --dataDir until beacon api is listening", async () => { const beaconPort = 39011; - const devProc = spawnCli({pipeStdToParent: false, printOnlyOnError: true, logPrefix: 
"dev"}, [ - // ⏎ - "dev", - "--reset", - "--startValidators=0..7", - `--rest.port=${beaconPort}`, - ]); + const devProc = await spawnCliCommand( + "packages/cli/bin/lodestar.js", + ["dev", "--reset", "--startValidators=0..7", `--rest.port=${beaconPort}`], + {pipeStdioToParent: true, logPrefix: "dev", testContext} + ); // Exit early if process exits devProc.on("exit", (code) => { if (code !== null && code > 0) { - done(Error(`process exited with code ${code}`)); + throw new Error(`process exited with code ${code}`); } }); const beaconUrl = `http://127.0.0.1:${beaconPort}`; - const client = getClient({baseUrl: beaconUrl}, {config}); + // To cleanup the event stream connection + const httpClientController = new AbortController(); + const client = getClient({baseUrl: beaconUrl, getAbortSignal: () => httpClientController.signal}, {config}); // Wrap in retry since the API may not be listening yet await retry(() => client.node.getHealth().then((res) => ApiError.assert(res)), {retryDelay: 1000, retries: 60}); + httpClientController.abort(); + + // The process will exit when the test finishes + // Default behavior would be the abort signal will be passed to the child process + // The earlier registered callback will consider it as an error and throw + devProc.removeAllListeners("exit"); }); }); diff --git a/packages/cli/test/e2e/validatorList.test.ts b/packages/cli/test/e2e/validatorList.test.ts index ce2d10e7b56c..ba2102f07fee 100644 --- a/packages/cli/test/e2e/validatorList.test.ts +++ b/packages/cli/test/e2e/validatorList.test.ts @@ -3,26 +3,19 @@ import fs from "node:fs"; import path from "node:path"; import {rimraf} from "rimraf"; import {expect} from "chai"; -import sinon from "sinon"; import {Keystore} from "@chainsafe/bls-keystore"; import {fromHex} from "@lodestar/utils"; +import {runCliCommand} from "@lodestar/test-utils"; +import {stubLogger} from "@lodestar/test-utils/sinon"; import {testFilesDir} from "../utils.js"; -import {getCliInMemoryRunner} from 
"../utils/inMemoryRunner.js"; +import {getLodestarCli} from "../../src/cli.js"; describe("cmds / validator", function () { - const lodestar = getCliInMemoryRunner(); - + this.timeout("30s"); + stubLogger(this, console); + const lodestar = getLodestarCli(); const dataDir = testFilesDir; - beforeEach(() => { - sinon.spy(console, "info"); - sinon.spy(console, "log"); - }); - - afterEach(() => { - sinon.restore(); - }); - before("Clean dataDir", () => { rimraf.sync(dataDir); }); @@ -41,7 +34,7 @@ describe("cmds / validator", function () { fs.writeFileSync(passphraseFilepath, passphrase); fs.writeFileSync(keystoreFilepath, keystore.stringify()); - await lodestar([ + await runCliCommand(lodestar, [ "validator import", `--dataDir ${dataDir}`, `--keystore ${keystoreFilepath}`, @@ -55,7 +48,7 @@ describe("cmds / validator", function () { fs.mkdirSync(path.join(dataDir, "keystores"), {recursive: true}); fs.mkdirSync(path.join(dataDir, "secrets"), {recursive: true}); - await lodestar(["validator list", `--dataDir ${dataDir}`]); + await runCliCommand(lodestar, ["validator list", `--dataDir ${dataDir}`], {timeoutMs: 5000}); expect(console.info).calledWith("1 local keystores"); expect(console.info).calledWith(pkHex); diff --git a/packages/cli/test/e2e/voluntaryExit.test.ts b/packages/cli/test/e2e/voluntaryExit.test.ts index aa78f143b1d1..b3a539473581 100644 --- a/packages/cli/test/e2e/voluntaryExit.test.ts +++ b/packages/cli/test/e2e/voluntaryExit.test.ts @@ -1,40 +1,48 @@ import path from "node:path"; -import {sleep, retry} from "@lodestar/utils"; +import {retry} from "@lodestar/utils"; import {ApiError, getClient} from "@lodestar/api"; import {config} from "@lodestar/config/default"; import {interopSecretKey} from "@lodestar/state-transition"; +import {spawnCliCommand, execCliCommand} from "@lodestar/test-utils"; +import {getMochaContext} from "@lodestar/test-utils/mocha"; import {testFilesDir} from "../utils.js"; -import {describeCliTest, execCli} from 
"../utils/childprocRunner.js"; -import {itDone} from "../utils/runUtils.js"; -describeCliTest("voluntaryExit cmd", function ({spawnCli}) { +describe("voluntaryExit cmd", function () { + const testContext = getMochaContext(this); this.timeout("60s"); - itDone("Perform a voluntary exit", async function (done) { + it("Perform a voluntary exit", async () => { const restPort = 9596; - const devBnProc = spawnCli({pipeStdToParent: false, logPrefix: "dev"}, [ - // ⏎ - "dev", - `--dataDir=${path.join(testFilesDir, "dev-voluntary-exit")}`, - "--genesisValidators=8", - "--startValidators=0..7", - "--rest", - `--rest.port=${restPort}`, - // Speed up test to make genesis happen faster - "--params.SECONDS_PER_SLOT=2", - // Allow voluntary exists to be valid immediately - "--params.SHARD_COMMITTEE_PERIOD=0", - ]); + const devBnProc = await spawnCliCommand( + "packages/cli/bin/lodestar.js", + [ + // ⏎ + "dev", + `--dataDir=${path.join(testFilesDir, "dev-voluntary-exit")}`, + "--genesisValidators=8", + "--startValidators=0..7", + "--rest", + `--rest.port=${restPort}`, + // Speed up test to make genesis happen faster + "--params.SECONDS_PER_SLOT=2", + // Allow voluntary exists to be valid immediately + "--params.SHARD_COMMITTEE_PERIOD=0", + ], + {pipeStdioToParent: false, logPrefix: "dev", testContext} + ); + // Exit early if process exits devBnProc.on("exit", (code) => { if (code !== null && code > 0) { - done(Error(`devBnProc process exited with code ${code}`)); + throw new Error(`devBnProc process exited with code ${code}`); } }); const baseUrl = `http://127.0.0.1:${restPort}`; - const client = getClient({baseUrl}, {config}); + // To cleanup the event stream connection + const httpClientController = new AbortController(); + const client = getClient({baseUrl, getAbortSignal: () => httpClientController.signal}, {config}); // Wait for beacon node API to be available + genesis await retry( @@ -55,16 +63,19 @@ describeCliTest("voluntaryExit cmd", function ({spawnCli}) { // 2 
0xa3a32b0f8b4ddb83f1a0a853d81dd725dfe577d4f4c3db8ece52ce2b026eca84815c1a7e8e92a4 // 3 0x88c141df77cd9d8d7a71a75c826c41a9c9f03c6ee1b180f3e7852f6a280099ded351b58d66e653 - await execCli([ - // ⏎ - "validator", - "voluntary-exit", - "--network=dev", - "--yes", - "--interopIndexes=0..3", - `--server=${baseUrl}`, - `--pubkeys=${pubkeysToExit.join(",")}`, - ]); + await execCliCommand( + "packages/cli/bin/lodestar.js", + [ + "validator", + "voluntary-exit", + "--network=dev", + "--yes", + "--interopIndexes=0..3", + `--server=${baseUrl}`, + `--pubkeys=${pubkeysToExit.join(",")}`, + ], + {pipeStdioToParent: false, logPrefix: "voluntary-exit"} + ); for (const pubkey of pubkeysToExit) { await retry( @@ -82,8 +93,7 @@ describeCliTest("voluntaryExit cmd", function ({spawnCli}) { ); } - devBnProc.kill("SIGINT"); - await sleep(1000); - devBnProc.kill("SIGKILL"); + // Disconnect the event stream for the client + httpClientController.abort(); }); }); diff --git a/packages/cli/test/utils/childprocRunner.ts b/packages/cli/test/utils/childprocRunner.ts deleted file mode 100644 index 5e3a79e5e7a4..000000000000 --- a/packages/cli/test/utils/childprocRunner.ts +++ /dev/null @@ -1,146 +0,0 @@ -import child_process from "node:child_process"; -import {shell, ShellOpts} from "./shell.js"; - -const {RUN_FROM_SRC} = process.env; - -const nodeJsBinaryPath = process.execPath; -const tsNodeBinaryPath = esmRelativePathJoin("../../../../node_modules/.bin/ts-node"); -const cliSrcScriptPath = esmRelativePathJoin("../../src/index.ts"); -const cliLibScriptPath = esmRelativePathJoin("../../lib/index.js"); - -/* eslint-disable no-console */ - -export type DescribeArgs = { - spawnCli(opts: SpawnCliOpts, args: string[]): child_process.ChildProcessWithoutNullStreams; -}; - -type SpawnCliOpts = { - ensureProcRunning?: boolean; - logPrefix?: string; - pipeStdToParent?: boolean; - printOnlyOnError?: boolean; -}; - -// eslint-disable-next-line @typescript-eslint/explicit-function-return-type -export function 
describeCliTest(testName: string, callback: (this: Mocha.Suite, args: DescribeArgs) => void) { - const afterEachCallbacks: (() => Promise | void)[] = []; - afterEach(async () => { - const errs: Error[] = []; - for (const cb of afterEachCallbacks) { - try { - await cb(); - } catch (e) { - errs.push(e as Error); - } - } - afterEachCallbacks.length = 0; // Reset array - if (errs.length > 0) throw errs[0]; - }); - - const args: DescribeArgs = { - spawnCli(opts: SpawnCliOpts, args: string[]) { - const proc = spawnCli(opts, args); - console.log(`Created process ${proc.pid}`); - - afterEachCallbacks.push(async function () { - // Capture state before killing - const killed = proc.killed; - - // Attempt to kill process both with linux tools and built-in .kill() - // Note: `kill ` does not suffice in a local Ubuntu environment. - console.log("Killing process", proc.pid); - proc.kill("SIGKILL"); - await shell(`pkill -P ${proc.pid}`).catch((e) => { - // Do not log unless on debug mode, process is probably killed already - if (process.env.DEBUG) console.error(e); - }); - - if (killed && opts?.ensureProcRunning) { - throw Error(`Process ${proc.pid} already killed`); - } - }); - - return proc; - }, - }; - - describe(testName, function () { - // Extend timeout to allow compiling from src - // TODO: Just build from src once in before - this.timeout(RUN_FROM_SRC ? "60s" : "10s"); - - callback.bind(this)(args); - }); -} - -export function spawnCli(opts: SpawnCliOpts, lodestarArgs: string[]): child_process.ChildProcessWithoutNullStreams { - let stdstr = ""; - const logPrefix = opts?.logPrefix ?? ""; - - const command = RUN_FROM_SRC - ? // ts-node --esm cli.ts - tsNodeBinaryPath - : // node cli.js - nodeJsBinaryPath; - const prefixArgs = RUN_FROM_SRC - ? 
// ts-node --esm cli.ts - ["--esm", cliSrcScriptPath, ...lodestarArgs] - : // node cli.js - [cliLibScriptPath, ...lodestarArgs]; - - const proc = child_process.spawn(command, prefixArgs); - - if (opts?.pipeStdToParent) { - proc.stdout.on("data", (chunk) => { - const str = Buffer.from(chunk).toString("utf8"); - process.stdout.write(`${logPrefix} ${proc.pid}: ${str}`); // str already contains a new line. console.log adds a new line - }); - proc.stderr.on("data", (chunk) => { - const str = Buffer.from(chunk).toString("utf8"); - process.stderr.write(`${logPrefix} ${proc.pid}: ${str}`); // str already contains a new line. console.log adds a new line - }); - } else { - proc.stdout.on("data", (chunk) => { - stdstr += Buffer.from(chunk).toString("utf8"); - }); - proc.stderr.on("data", (chunk) => { - stdstr += Buffer.from(chunk).toString("utf8"); - }); - } - - proc.on("exit", (code) => { - console.log("process exited", {code}); - if (!opts?.pipeStdToParent) { - if (!opts?.printOnlyOnError || (code !== null && code > 0)) { - console.log(stdstr); - } - } - }); - - return proc; -} - -// eslint-disable-next-line @typescript-eslint/explicit-function-return-type -export function bufferStderr(proc: child_process.ChildProcessWithoutNullStreams) { - let data = ""; - proc.stderr.on("data", (chunk) => { - data += Buffer.from(chunk).toString("utf8"); - }); - return { - read: () => data, - }; -} - -export function execCli(lodestarArgs: string[], opts?: ShellOpts): Promise { - const prefixArgs = RUN_FROM_SRC - ? 
// ts-node --esm cli.ts - [tsNodeBinaryPath, "--esm", cliSrcScriptPath] - : // node cli.js - [nodeJsBinaryPath, cliLibScriptPath]; - return shell([...prefixArgs, ...lodestarArgs], {pipeToProcess: true, ...opts}); -} - -// From https://blog.logrocket.com/alternatives-dirname-node-js-es-modules -function esmRelativePathJoin(relativePath: string): string { - return new URL(relativePath, import.meta.url).toString().replace(/^file:\/\//, ""); -} diff --git a/packages/cli/test/utils/inMemoryRunner.ts b/packages/cli/test/utils/inMemoryRunner.ts deleted file mode 100644 index 72f4ef2b66fa..000000000000 --- a/packages/cli/test/utils/inMemoryRunner.ts +++ /dev/null @@ -1,26 +0,0 @@ -import yargs from "yargs"; -import {getLodestarCli} from "../../src/cli.js"; - -export function getCliInMemoryRunner() { - return async (arg: string | readonly string[], context?: Record): Promise => { - return new Promise((resolve, reject) => { - const lodestar = getLodestarCli() as yargs.Argv; - // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access, @typescript-eslint/no-unsafe-call - lodestar - // Method to execute when a failure occurs, rather than printing the failure message. - .fail((msg, err) => { - if (err !== undefined) reject(err); - else if (msg) reject(Error(msg)); - else reject(Error("Unknown error")); - }) - .help(false) - .exitProcess(false) - .parse(Array.isArray(arg) ? arg.join(" ") : arg, context) - // Called after the completion of any command. 
handler is invoked with the result returned by the command: - .then((result: any) => { - resolve(result); - }) - .catch((e: unknown) => reject(e)); - }); - }; -} diff --git a/packages/cli/test/utils/keymanagerTestRunners.ts b/packages/cli/test/utils/keymanagerTestRunners.ts deleted file mode 100644 index ce470c93ac04..000000000000 --- a/packages/cli/test/utils/keymanagerTestRunners.ts +++ /dev/null @@ -1,87 +0,0 @@ -import {sleep, retry} from "@lodestar/utils"; -import {Api, getClient} from "@lodestar/api/keymanager"; -import {config} from "@lodestar/config/default"; -import {ApiError} from "@lodestar/api"; -import {getMockBeaconApiServer} from "./mockBeaconApiServer.js"; -import {AfterEachCallback, expectDeepEqualsUnordered, findApiToken, itDone} from "./runUtils.js"; -import {DescribeArgs} from "./childprocRunner.js"; - -type TestContext = { - args: DescribeArgs; - afterEachCallbacks: AfterEachCallback[]; - dataDir: string; -}; - -type KeymanagerStepOpts = { - validatorCmdExtraArgs?: string[]; -}; - -type KeymanagerStepCbArgs = { - keymanagerUrl: string; -}; - -export function getKeymanagerTestRunner({args: {spawnCli}, afterEachCallbacks, dataDir}: TestContext) { - return function itKeymanagerStep( - itName: string, - cb: (this: Mocha.Context, keymanagerClient: Api, args: KeymanagerStepCbArgs) => Promise, - keymanagerStepOpts?: KeymanagerStepOpts - ): void { - itDone(itName, async function (done) { - this.timeout("60s"); - - const keymanagerPort = 38011; - const beaconPort = 39011; - const keymanagerUrl = `http://localhost:${keymanagerPort}`; - const beaconUrl = `http://localhost:${beaconPort}`; - - const beaconServer = getMockBeaconApiServer({port: beaconPort}); - afterEachCallbacks.push(() => beaconServer.close()); - await beaconServer.listen(); - - const validatorProc = spawnCli({pipeStdToParent: true, logPrefix: "vc"}, [ - // ⏎ - "validator", - `--dataDir=${dataDir}`, - "--keymanager", - "--keymanager.address=localhost", - 
`--keymanager.port=${keymanagerPort}`, - `--server=${beaconUrl}`, - ...(keymanagerStepOpts?.validatorCmdExtraArgs ?? []), - ]); - // Exit early if process exits - validatorProc.on("exit", (code) => { - if (code !== null && code > 0) { - done(Error(`process exited with code ${code}`)); - } - }); - - // Wait for api-token.txt file to be written to disk and find it - const apiToken = await retry(async () => findApiToken(dataDir), {retryDelay: 500, retries: 10}); - - const keymanagerClient = getClient({baseUrl: keymanagerUrl, bearerToken: apiToken}, {config}); - - // Wrap in retry since the API may not be listening yet - await retry(() => keymanagerClient.listRemoteKeys(), {retryDelay: 500, retries: 10}); - - await cb.bind(this)(keymanagerClient, {keymanagerUrl}); - - validatorProc.kill("SIGINT"); - await sleep(1000); - validatorProc.kill("SIGKILL"); - }); - }; -} - -/** - * Query `keymanagerClient.listKeys()` API endpoint and assert that expectedPubkeys are in the response - */ -export async function expectKeys(keymanagerClient: Api, expectedPubkeys: string[], message: string): Promise { - const keys = await keymanagerClient.listKeys(); - ApiError.assert(keys); - // The order of keys isn't always deterministic so we can't use deep equal - expectDeepEqualsUnordered( - keys.response.data, - expectedPubkeys.map((pubkey) => ({validatingPubkey: pubkey, derivationPath: "", readonly: false})), - message - ); -} diff --git a/packages/cli/test/utils/runUtils.ts b/packages/cli/test/utils/runUtils.ts index 7050460be2bb..f6a9c311946b 100644 --- a/packages/cli/test/utils/runUtils.ts +++ b/packages/cli/test/utils/runUtils.ts @@ -26,45 +26,3 @@ export function expectDeepEquals(a: T, b: T, message: string): void { export function expectDeepEqualsUnordered(a: T[], b: T[], message: string): void { expect(a).to.have.deep.members(b, message); } - -export type DoneCb = (err?: Error) => void; - -/** - * Extends Mocha it() to allow BOTH: - * - Resolve / reject callback promise to end test - 
* - Use done() to end test early - */ -export function itDone(itName: string, cb: (this: Mocha.Context, done: DoneCb) => Promise): void { - it(itName, function () { - return new Promise((resolve, reject) => { - function done(err?: Error): void { - if (err) reject(err); - else resolve(); - } - cb.bind(this)(done).then(resolve, reject); - }); - }); -} - -export type AfterEachCallback = () => Promise | void; - -export function getAfterEachCallbacks(): AfterEachCallback[] { - const afterEachCallbacks: (() => Promise | void)[] = []; - - afterEach(async () => { - const errs: Error[] = []; - for (const cb of afterEachCallbacks) { - try { - await cb(); - } catch (e) { - errs.push(e as Error); - } - } - afterEachCallbacks.length = 0; // Reset array - if (errs.length > 0) { - throw errs[0]; - } - }); - - return afterEachCallbacks; -} diff --git a/packages/cli/test/utils/shell.ts b/packages/cli/test/utils/shell.ts deleted file mode 100644 index 4cd4d9473232..000000000000 --- a/packages/cli/test/utils/shell.ts +++ /dev/null @@ -1,50 +0,0 @@ -import childProcess from "node:child_process"; - -/** - * If timeout is greater than 0, the parent will send the signal - * identified by the killSignal property (the default is 'SIGTERM') - * if the child runs longer than timeout milliseconds. - */ -const defaultTimeout = 15 * 60 * 1000; // ms - -export type ShellOpts = { - timeout?: number; - maxBuffer?: number; - signal?: AbortSignal; - pipeToProcess?: boolean; -}; - -/** - * Run arbitrary commands in a shell - * If the child process exits with code > 0, rejects - */ -export async function shell(cmd: string | string[], options?: ShellOpts): Promise { - const timeout = options?.timeout ?? defaultTimeout; - const maxBuffer = options?.maxBuffer; - const cmdStr = Array.isArray(cmd) ? 
cmd.join(" ") : cmd; - - return new Promise((resolve, reject) => { - const proc = childProcess.exec(cmdStr, {timeout, maxBuffer}, (err, stdout) => { - if (err) { - reject(err); - } else { - resolve(stdout.trim()); - } - }); - - if (options?.pipeToProcess) { - proc.stdout?.pipe(process.stdout); - proc.stderr?.pipe(process.stderr); - } - - if (options?.signal) { - options.signal.addEventListener( - "abort", - () => { - proc.kill("SIGKILL"); - }, - {once: true} - ); - } - }); -} diff --git a/packages/cli/test/utils/simulation/runner/ChildProcessRunner.ts b/packages/cli/test/utils/simulation/runner/ChildProcessRunner.ts index a736359a361d..8554a2d9a3be 100644 --- a/packages/cli/test/utils/simulation/runner/ChildProcessRunner.ts +++ b/packages/cli/test/utils/simulation/runner/ChildProcessRunner.ts @@ -1,6 +1,12 @@ import {ChildProcess} from "node:child_process"; +import { + spawnChildProcess, + stopChildProcess, + ChildProcessHealthStatus, + SpawnChildProcessOptions, + ChildProcessResolve, +} from "@lodestar/test-utils"; import {Job, JobOptions, RunnerEnv, RunnerType} from "../interfaces.js"; -import {startChildProcess, stopChildProcess} from "../utils/child_process.js"; export class ChildProcessRunner implements RunnerEnv { type = RunnerType.ChildProcess as const; @@ -8,10 +14,32 @@ export class ChildProcessRunner implements RunnerEnv { create(jobOption: Omit, "children">): Job { let childProcess: ChildProcess; + const spawnOpts: SpawnChildProcessOptions = { + env: jobOption.cli.env, + pipeStdioToFile: jobOption.logs.stdoutFilePath, + logPrefix: jobOption.id, + }; + + const health = jobOption.health; + + if (health) { + spawnOpts.healthTimeoutMs = 30000; + spawnOpts.health = async (): Promise => + health() + .then((status) => { + return status.ok ? 
{healthy: true} : {healthy: false}; + }) + .catch((error) => { + return {healthy: false, message: (error as Error).message}; + }); + } else { + spawnOpts.resolveOn = ChildProcessResolve.Completion; + } + return { id: jobOption.id, start: async () => { - childProcess = await startChildProcess(jobOption); + childProcess = await spawnChildProcess(jobOption.cli.command, jobOption.cli.args, spawnOpts); }, stop: async () => { if (childProcess === undefined) { diff --git a/packages/cli/test/utils/simulation/runner/DockerRunner.ts b/packages/cli/test/utils/simulation/runner/DockerRunner.ts index 0e809d28f159..91dcb492c3eb 100644 --- a/packages/cli/test/utils/simulation/runner/DockerRunner.ts +++ b/packages/cli/test/utils/simulation/runner/DockerRunner.ts @@ -1,8 +1,15 @@ /* eslint-disable no-console */ import {ChildProcess} from "node:child_process"; import {sleep} from "@lodestar/utils"; +import { + ChildProcessHealthStatus, + SpawnChildProcessOptions, + execChildProcess, + spawnChildProcess, + stopChildProcess, + ChildProcessResolve, +} from "@lodestar/test-utils"; import {Job, JobOptions, RunnerEnv, RunnerType} from "../interfaces.js"; -import {startChildProcess, stopChildProcess} from "../utils/child_process.js"; const dockerNetworkIpRange = "192.168.0"; const dockerNetworkName = "sim-env-net"; @@ -19,15 +26,9 @@ export class DockerRunner implements RunnerEnv { async start(): Promise { try { - await startChildProcess({ - id: `create docker network '${dockerNetworkName}'`, - cli: { - command: "docker", - args: ["network", "create", "--subnet", `${dockerNetworkIpRange}.0/24`, dockerNetworkName], - }, - logs: { - stdoutFilePath: this.logFilePath, - }, + await execChildProcess(`docker network create --subnet ${dockerNetworkIpRange}.0/24 ${dockerNetworkName}`, { + logPrefix: "docker-runner", + pipeStdioToFile: this.logFilePath, }); } catch { // During multiple sim tests files the network might already exist @@ -38,15 +39,9 @@ export class DockerRunner implements RunnerEnv { 
// Wait for couple of seconds to allow docker to cleanup containers to network connections for (let i = 0; i < 5; i++) { try { - await startChildProcess({ - id: `docker network rm '${dockerNetworkName}'`, - cli: { - command: "docker", - args: ["network", "rm", dockerNetworkName], - }, - logs: { - stdoutFilePath: this.logFilePath, - }, + await execChildProcess(`docker network rm ${dockerNetworkName}`, { + logPrefix: "docker-runner", + pipeStdioToFile: this.logFilePath, }); return; } catch { @@ -94,15 +89,32 @@ export class DockerRunner implements RunnerEnv { let childProcess: ChildProcess; + const spawnOpts: SpawnChildProcessOptions = { + env: jobOption.cli.env, + pipeStdioToFile: jobOption.logs.stdoutFilePath, + logPrefix: jobOption.id, + }; + + const health = jobOption.health; + + if (health) { + spawnOpts.healthTimeoutMs = 30000; + spawnOpts.health = async (): Promise => + health() + .then((status) => { + return status.ok ? {healthy: true} : {healthy: false}; + }) + .catch((error) => { + return {healthy: false, message: (error as Error).message}; + }); + } else { + spawnOpts.resolveOn = ChildProcessResolve.Completion; + } + return { id: jobOption.id, start: async () => { - childProcess = await startChildProcess({ - id: jobOption.id, - logs: jobOption.logs, - cli: {...jobOption.cli, command: "docker", args: jobArgs}, - health: jobOption.health, - }); + childProcess = await spawnChildProcess("docker", jobArgs, spawnOpts); }, stop: async () => { if (childProcess === undefined) { diff --git a/packages/cli/test/utils/simulation/utils/child_process.ts b/packages/cli/test/utils/simulation/utils/child_process.ts deleted file mode 100644 index 094dbc988ed8..000000000000 --- a/packages/cli/test/utils/simulation/utils/child_process.ts +++ /dev/null @@ -1,97 +0,0 @@ -/* eslint-disable no-console */ -import {ChildProcess, spawn} from "node:child_process"; -import fs from "node:fs"; -import path from "node:path"; -import {JobOptions, RunnerType} from "../interfaces.js"; - 
-const healthCheckIntervalMs = 1000; -const logHealthChecksAfterMs = 2000; - -export const stopChildProcess = async ( - childProcess: ChildProcess, - signal: NodeJS.Signals | number = "SIGTERM" -): Promise => { - if (childProcess.killed || childProcess.exitCode !== null || childProcess.signalCode !== null) { - return; - } - - return new Promise((resolve, reject) => { - childProcess.once("error", reject); - childProcess.once("close", resolve); - childProcess.kill(signal); - }); -}; - -export const startChildProcess = async ( - jobOptions: Pick, "cli" | "logs" | "id" | "health"> -): Promise => { - return new Promise((resolve, reject) => { - void (async () => { - const childProcess = spawn(jobOptions.cli.command, jobOptions.cli.args, { - env: {...process.env, ...jobOptions.cli.env}, - }); - - fs.mkdirSync(path.dirname(jobOptions.logs.stdoutFilePath), {recursive: true}); - const stdoutFileStream = fs.createWriteStream(jobOptions.logs.stdoutFilePath); - childProcess.stdout?.pipe(stdoutFileStream); - childProcess.stderr?.pipe(stdoutFileStream); - - // If there is any error in running the child process, reject the promise - childProcess.on("error", reject); - - // If there is a health check, wait for it to pass - const health = jobOptions.health; - - // If there is a health check, wait for it to pass - if (health) { - const startHealthCheckMs = Date.now(); - const intervalId = setInterval(() => { - health() - .then((isHealthy) => { - if (isHealthy.ok) { - clearInterval(intervalId); - childProcess.removeAllListeners("exit"); - resolve(childProcess); - } else { - const timeSinceHealthCheckStart = Date.now() - startHealthCheckMs; - if (timeSinceHealthCheckStart > logHealthChecksAfterMs) { - console.log(`Health check unsuccessful '${jobOptions.id}' after ${timeSinceHealthCheckStart} ms`); - } - } - }) - .catch((e) => { - console.error("error on health check, health functions must never throw", e); - }); - }, healthCheckIntervalMs); - - childProcess.once("exit", (code: number) 
=> { - clearInterval(intervalId); - stdoutFileStream.close(); - reject( - new Error( - `process exited. job=${jobOptions.id}, code=${code}, command="${ - jobOptions.cli.command - } ${jobOptions.cli.args.join(" ")}"` - ) - ); - }); - } else { - // If there is no health check, resolve/reject on completion - childProcess.once("exit", (code: number) => { - stdoutFileStream.close(); - if (code > 0) { - reject( - new Error( - `process exited. job=${jobOptions.id}, code=${code}, command="${ - jobOptions.cli.command - } ${jobOptions.cli.args.join(" ")}"` - ) - ); - } else { - resolve(childProcess); - } - }); - } - })(); - }); -}; diff --git a/packages/cli/test/utils/validator.ts b/packages/cli/test/utils/validator.ts new file mode 100644 index 000000000000..e6eca55603ed --- /dev/null +++ b/packages/cli/test/utils/validator.ts @@ -0,0 +1,102 @@ +import childProcess from "node:child_process"; +import {retry} from "@lodestar/utils"; +import {Api, getClient} from "@lodestar/api/keymanager"; +import {config} from "@lodestar/config/default"; +import {ApiError} from "@lodestar/api"; +import {spawnCliCommand, gracefullyStopChildProcess} from "@lodestar/test-utils"; +import {TestContext} from "@lodestar/test-utils/mocha"; +import {getMockBeaconApiServer} from "./mockBeaconApiServer.js"; +import {expectDeepEqualsUnordered, findApiToken} from "./runUtils.js"; + +export async function startValidatorWithKeyManager( + args: string[], + { + dataDir, + logPrefix, + testContext, + }: { + dataDir: string; + testContext?: TestContext; + logPrefix?: string; + } +): Promise<{ + validator: childProcess.ChildProcessWithoutNullStreams; + stopValidator: () => Promise; + keymanagerClient: Api; +}> { + const keymanagerPort = 38011; + const beaconPort = 39011; + const keymanagerUrl = `http://localhost:${keymanagerPort}`; + const beaconUrl = `http://localhost:${beaconPort}`; + const beaconServer = getMockBeaconApiServer({port: beaconPort}); + + await beaconServer.listen(); + + const validatorProc = 
await spawnCliCommand( + "packages/cli/bin/lodestar.js", + [ + "validator", + `--dataDir=${dataDir}`, + "--keymanager", + "--keymanager.address=localhost", + `--keymanager.port=${keymanagerPort}`, + `--server=${beaconUrl}`, + ...(args ?? []), + ], + {pipeStdioToParent: true, logPrefix: logPrefix ?? "vc"} + ); + + // Exit early if process exits + validatorProc.on("exit", (code) => { + if (code !== null && code > 0) { + throw new Error(`process exited with code ${code}`); + } + }); + + // Wait for api-token.txt file to be written to disk and find it + const apiToken = await retry(async () => findApiToken(dataDir), {retryDelay: 500, retries: 10}); + const controller = new AbortController(); + const keymanagerClient = getClient( + {baseUrl: keymanagerUrl, bearerToken: apiToken, getAbortSignal: () => controller.signal}, + {config} + ); + + // Wrap in retry since the API may not be listening yet + // Remote key endpoint takes a while to be ready + await retry(() => keymanagerClient.listRemoteKeys(), {retryDelay: 500, retries: 20}); + + validatorProc.addListener("exit", () => { + controller.abort(); + }); + + const stopValidator = async (): Promise => { + validatorProc.removeAllListeners("exit"); + controller.abort(); + await beaconServer.close(); + await gracefullyStopChildProcess(validatorProc, 3000); + }; + + if (testContext) { + testContext.afterEach(stopValidator); + } + + return { + validator: validatorProc, + stopValidator, + keymanagerClient, + }; +} + +/** + * Query `keymanagerClient.listKeys()` API endpoint and assert that expectedPubkeys are in the response + */ +export async function expectKeys(keymanagerClient: Api, expectedPubkeys: string[], message: string): Promise { + const keys = await keymanagerClient.listKeys(); + ApiError.assert(keys); + // The order of keys isn't always deterministic so we can't use deep equal + expectDeepEqualsUnordered( + keys.response.data, + expectedPubkeys.map((pubkey) => ({validatingPubkey: pubkey, derivationPath: "", readonly: 
false})), + message + ); +} diff --git a/packages/prover/package.json b/packages/prover/package.json index e060e3abb28d..f4da974ff4af 100644 --- a/packages/prover/package.json +++ b/packages/prover/package.json @@ -76,13 +76,15 @@ "ethereum-cryptography": "^1.2.0", "find-up": "^6.3.0", "http-proxy": "^1.18.1", + "js-yaml": "^4.1.0", "source-map-support": "^0.5.21", - "winston": "^3.8.2", "winston-transport": "^4.5.0", + "winston": "^3.8.2", "yargs": "^17.7.1" }, "devDependencies": { "@lodestar/logger": "^1.9.1", + "@lodestar/test-utils": "^1.9.1", "@types/http-proxy": "^1.17.10", "@types/yargs": "^17.0.24", "axios": "^1.3.4", diff --git a/packages/prover/src/cli/applyPreset.ts b/packages/prover/src/cli/applyPreset.ts new file mode 100644 index 000000000000..a6a3568c5f91 --- /dev/null +++ b/packages/prover/src/cli/applyPreset.ts @@ -0,0 +1,81 @@ +// MUST import this file first before anything and not import any Lodestar code. +// +// ## Rationale +// +// Lodestar implemented PRESET / CONFIG separation to allow importing types and preset constants directly +// see https://github.com/ChainSafe/lodestar/pull/2585 +// +// However this prevents dynamic configuration changes which is exactly what the CLI required before. +// - The dev command can't apply the minimal preset dynamically +// - `--network gnosis` can't apply a different preset dynamically +// - `--network chiado` can't apply a different preset dynamically +// +// Running this file allows us to keep a static export strategy while NOT requiring users to +// set LODESTAR_PRESET manually every time. 
+ +// IMPORTANT: only import Lodestar code here which does not import any other Lodestar libraries +import {setActivePreset, presetFromJson, PresetName} from "@lodestar/params/setPreset"; +import {readFile} from "../utils/file.js"; + +const network = valueOfArg("network"); +const preset = valueOfArg("preset"); +const presetFile = valueOfArg("presetFile"); + +// Apply preset flag if present +if (preset) { + process.env.LODESTAR_PRESET = preset; +} + +// If ENV is set overrides, network (otherwise can not override network --dev in mainnet mode) +else if (process.env.LODESTAR_PRESET) { + // break +} + +// Translate network to preset +else if (network) { + if (network === "dev") { + process.env.LODESTAR_PRESET = "minimal"; + // "c-kzg" has hardcoded the mainnet value, do not use presets + // eslint-disable-next-line @typescript-eslint/naming-convention + setActivePreset(PresetName.minimal, {FIELD_ELEMENTS_PER_BLOB: 4096}); + } else if (network === "gnosis" || network === "chiado") { + process.env.LODESTAR_PRESET = "gnosis"; + } +} + +if (presetFile) { + // Override the active preset with custom values from file + // Do not modify the preset to use as a base by passing null + setActivePreset(null, presetFromJson(readFile(presetFile) ?? {})); +} + +/** + * Valid syntax + * - `--preset minimal` + * - `--preset=minimal` + */ +function valueOfArg(argName: string): string | null { + // Syntax `--preset minimal` + // process.argv = ["--preset", "minimal"]; + + { + const index = process.argv.indexOf(`--${argName}`); + if (index > -1) { + return process.argv[index + 1] ?? 
""; + } + } + + // Syntax `--preset=minimal` + { + const prefix = `--${argName}=`; + const item = process.argv.find((arg) => arg.startsWith(prefix)); + if (item) { + return item.slice(prefix.length); + } + } + + return null; +} + +// Add empty export to make this a module +export {}; diff --git a/packages/prover/src/cli/cmds/start/handler.ts b/packages/prover/src/cli/cmds/start/handler.ts index c9706602a2d9..5f13db0cfcb6 100644 --- a/packages/prover/src/cli/cmds/start/handler.ts +++ b/packages/prover/src/cli/cmds/start/handler.ts @@ -1,4 +1,6 @@ +import {ChainConfig, chainConfigFromJson} from "@lodestar/config"; import {LCTransport} from "../../../interfaces.js"; +import {readFile} from "../../../utils/file.js"; import {createVerifiedExecutionProxy, VerifiedProxyOptions} from "../../../web3_proxy.js"; import {GlobalArgs, parseGlobalArgs} from "../../options.js"; import {parseStartArgs, StartArgs} from "./options.js"; @@ -7,17 +9,19 @@ import {parseStartArgs, StartArgs} from "./options.js"; * Runs a beacon node. */ export async function proverProxyStartHandler(args: StartArgs & GlobalArgs): Promise { - const {network, logLevel} = parseGlobalArgs(args); + const {network, logLevel, paramsFile} = parseGlobalArgs(args); const opts = parseStartArgs(args); const {executionRpcUrl, port, wsCheckpoint} = opts; + const config: Partial = paramsFile ? chainConfigFromJson(readFile(paramsFile)) : {}; + const options: VerifiedProxyOptions = { logLevel, - network, executionRpcUrl, wsCheckpoint, unverifiedWhitelist: opts.unverifiedWhitelist, requestTimeout: opts.requestTimeout, + ...(network ? {network} : {config}), ...(opts.transport === LCTransport.Rest ? 
{transport: LCTransport.Rest, urls: opts.urls} : {transport: LCTransport.P2P, bootnodes: opts.bootnodes}), diff --git a/packages/prover/src/cli/index.ts b/packages/prover/src/cli/index.ts index 687c3bb301da..53a32a02eb87 100644 --- a/packages/prover/src/cli/index.ts +++ b/packages/prover/src/cli/index.ts @@ -1,6 +1,7 @@ #!/usr/bin/env node // MUST import first to apply preset from args +import "./applyPreset.js"; import {YargsError} from "../utils/errors.js"; import {getLodestarProverCli, yarg} from "./cli.js"; import "source-map-support/register.js"; diff --git a/packages/prover/src/cli/options.ts b/packages/prover/src/cli/options.ts index 3a5eb4cc49af..cb6ba1aaeca2 100644 --- a/packages/prover/src/cli/options.ts +++ b/packages/prover/src/cli/options.ts @@ -1,22 +1,35 @@ import {NetworkName, networksChainConfig} from "@lodestar/config/networks"; import {LogLevel, LogLevels} from "@lodestar/utils"; +import {ACTIVE_PRESET} from "@lodestar/params"; import {CliCommandOptions} from "../utils/command.js"; export type GlobalArgs = { network: string; logLevel: string; + presetFile?: string; + preset: string; + paramsFile: string; }; export type GlobalOptions = { logLevel: LogLevel; - network: NetworkName; -}; +} & ({paramsFile: string; network?: never} | {network: NetworkName; paramsFile?: never}); export const globalOptions: CliCommandOptions = { network: { description: "Specify the network to connect.", type: "string", - choices: Object.keys(networksChainConfig), + choices: [ + ...Object.keys(networksChainConfig), // Leave always as last network. 
The order matters for the --help printout + "dev", + ], + conflicts: ["paramsFile"], + }, + + paramsFile: { + description: "Network configuration file", + type: "string", + conflicts: ["network"], }, logLevel: { @@ -25,12 +38,32 @@ export const globalOptions: CliCommandOptions = { choices: LogLevels, default: "info", }, + + // hidden option to allow for LODESTAR_PRESET to be set + preset: { + hidden: true, + type: "string", + default: ACTIVE_PRESET, + }, + + presetFile: { + hidden: true, + description: "Preset configuration file to override the active preset with custom values", + type: "string", + }, }; export function parseGlobalArgs(args: GlobalArgs): GlobalOptions { // Remove undefined values to allow deepmerge to inject default values downstream + if (args.network) { + return { + network: args.network as NetworkName, + logLevel: args.logLevel as LogLevel, + }; + } + return { - network: args.network as NetworkName, logLevel: args.logLevel as LogLevel, + paramsFile: args.paramsFile, }; } diff --git a/packages/prover/src/utils/file.ts b/packages/prover/src/utils/file.ts new file mode 100644 index 000000000000..d236d2d5dc95 --- /dev/null +++ b/packages/prover/src/utils/file.ts @@ -0,0 +1,51 @@ +import fs from "node:fs"; +import path from "node:path"; +import yaml from "js-yaml"; +const {load, FAILSAFE_SCHEMA, Type} = yaml; + +enum FileFormat { + json = "json", + yaml = "yaml", + yml = "yml", + toml = "toml", +} + +const yamlSchema = FAILSAFE_SCHEMA.extend({ + implicit: [ + new Type("tag:yaml.org,2002:str", { + kind: "scalar", + construct: function construct(data) { + // eslint-disable-next-line @typescript-eslint/no-unsafe-return + return data !== null ? data : ""; + }, + }), + ], +}); + +/** + * Parse file contents as Json. 
+ */ +function parse(contents: string, fileFormat: FileFormat): T { + switch (fileFormat) { + case FileFormat.json: + return JSON.parse(contents) as T; + case FileFormat.yaml: + case FileFormat.yml: + return load(contents, {schema: yamlSchema}) as T; + default: + return contents as unknown as T; + } +} + +/** + * Read a JSON serializable object from a file + * + * Parse either from json, yaml, or toml + * Optional acceptedFormats object can be passed which can be an array of accepted formats, in future can be extended to include parseFn for the accepted formats + */ +export function readFile(filepath: string, acceptedFormats?: string[]): T { + const fileFormat = path.extname(filepath).substr(1); + if (acceptedFormats && !acceptedFormats.includes(fileFormat)) throw new Error(`UnsupportedFileFormat: ${filepath}`); + const contents = fs.readFileSync(filepath, "utf-8"); + return parse(contents, fileFormat as FileFormat); +} diff --git a/packages/prover/src/web3_proxy.ts b/packages/prover/src/web3_proxy.ts index c8e7af26519f..508aed45b86c 100644 --- a/packages/prover/src/web3_proxy.ts +++ b/packages/prover/src/web3_proxy.ts @@ -66,7 +66,7 @@ export function createVerifiedExecutionProxy(opts: VerifiedProxyOptions): { } { const {executionRpcUrl, requestTimeout} = opts; const signal = opts.signal ?? new AbortController().signal; - const logger = opts.logger ?? getNodeLogger({level: opts.logLevel ?? LogLevel.info}); + const logger = opts.logger ?? getNodeLogger({level: opts.logLevel ?? 
LogLevel.info, module: "prover"}); const proofProvider = ProofProvider.init({ ...opts, diff --git a/packages/prover/test/e2e/cli/cmds/start.test.ts b/packages/prover/test/e2e/cli/cmds/start.test.ts new file mode 100644 index 000000000000..941f2c4e71c6 --- /dev/null +++ b/packages/prover/test/e2e/cli/cmds/start.test.ts @@ -0,0 +1,92 @@ +import childProcess from "node:child_process"; +import {writeFile, mkdir} from "node:fs/promises"; +import path from "node:path"; +import {expect} from "chai"; +import Web3 from "web3"; +import {runCliCommand, spawnCliCommand, stopChildProcess} from "@lodestar/test-utils"; +import {sleep} from "@lodestar/utils"; +import {ChainConfig, chainConfigToJson} from "@lodestar/config"; +import {getLodestarProverCli} from "../../../../src/cli/cli.js"; +import {rpcUrl, beaconUrl, proxyPort, proxyUrl, chainId, waitForCapellaFork, config} from "../../../utils/e2e_env.js"; + +const cli = getLodestarProverCli(); + +describe("prover/start", () => { + it("should show help", async () => { + const output = await runCliCommand(cli, ["start", "--help"]); + + expect(output).contains("Show help"); + }); + + it("should fail when --executionRpcUrl is missing", async () => { + await expect(runCliCommand(cli, ["start", "--port", "8088"])).eventually.rejectedWith( + "Missing required argument: executionRpcUrl" + ); + }); + + it("should fail when --beaconUrls and --beaconBootnodes are provided together", async () => { + await expect( + runCliCommand(cli, [ + "start", + "--beaconUrls", + "http://localhost:4000", + "--beaconBootnodes", + "http://localhost:0000", + ]) + ).eventually.rejectedWith("Arguments beaconBootnodes and beaconUrls are mutually exclusive"); + }); + + it("should fail when both of --beaconUrls and --beaconBootnodes are not provided", async () => { + await expect( + runCliCommand(cli, ["start", "--port", "8088", "--executionRpcUrl", "http://localhost:3000"]) + ).eventually.rejectedWith("Either --beaconUrls or --beaconBootnodes must be provided"); 
+ }); + + describe("when started", () => { + let proc: childProcess.ChildProcess; + const paramsFilePath = path.join("/tmp", "e2e-test-env", "params.json"); + const web3: Web3 = new Web3(proxyUrl); + + before(async function () { + this.timeout(50000); + await waitForCapellaFork(); + await mkdir(path.dirname(paramsFilePath), {recursive: true}); + await writeFile(paramsFilePath, JSON.stringify(chainConfigToJson(config as ChainConfig))); + + proc = await spawnCliCommand( + "packages/prover/bin/lodestar-prover.js", + [ + "start", + "--port", + String(proxyPort as number), + "--executionRpcUrl", + rpcUrl, + "--beaconUrls", + beaconUrl, + "--preset", + "minimal", + "--paramsFile", + paramsFilePath, + ], + {runWith: "ts-node", pipeStdioToParent: true} + ); + // Give sometime to the prover to start proxy server + await sleep(3000); + }); + + after(async () => { + await stopChildProcess(proc); + }); + + it("should respond to verified calls", async () => { + const accounts = await web3.eth.getAccounts(); + + expect(accounts.length).to.be.gt(0); + await expect(web3.eth.getBalance(accounts[0])).eventually.not.null; + }); + + it("should respond to unverified calls", async () => { + await expect(web3.eth.getChainId()).eventually.eql(chainId); + }); + }); +}); diff --git a/packages/prover/test/e2e/web3_batch_request.test.ts b/packages/prover/test/e2e/web3_batch_request.test.ts index d44ee8145e4f..472c04274764 100644 --- a/packages/prover/test/e2e/web3_batch_request.test.ts +++ b/packages/prover/test/e2e/web3_batch_request.test.ts @@ -3,7 +3,7 @@ import {expect} from "chai"; import Web3 from "web3"; import {LCTransport} from "../../src/interfaces.js"; import {createVerifiedExecutionProvider} from "../../src/web3_provider.js"; -import {rpcURL, beaconUrl, config} from "../utils/e2e_env.js"; +import {rpcUrl, beaconUrl, config} from "../utils/e2e_env.js"; describe("web3_batch_requests", function () { // Give some margin to sync light client @@ -12,7 +12,7 @@ 
describe("web3_batch_requests", function () { let web3: Web3; before(() => { - const {provider} = createVerifiedExecutionProvider(new Web3.providers.HttpProvider(rpcURL), { + const {provider} = createVerifiedExecutionProvider(new Web3.providers.HttpProvider(rpcUrl), { transport: LCTransport.Rest, urls: [beaconUrl], config, diff --git a/packages/prover/test/e2e/web3_provider.test.ts b/packages/prover/test/e2e/web3_provider.test.ts index ad2982023ead..b2b4b94277d8 100644 --- a/packages/prover/test/e2e/web3_provider.test.ts +++ b/packages/prover/test/e2e/web3_provider.test.ts @@ -4,7 +4,7 @@ import Web3 from "web3"; import {ethers} from "ethers"; import {LCTransport} from "../../src/interfaces.js"; import {createVerifiedExecutionProvider} from "../../src/web3_provider.js"; -import {waitForCapellaFork, testTimeout, rpcURL, beaconUrl, config} from "../utils/e2e_env.js"; +import {waitForCapellaFork, testTimeout, rpcUrl, beaconUrl, config} from "../utils/e2e_env.js"; describe("web3_provider", function () { this.timeout(testTimeout); @@ -16,7 +16,7 @@ describe("web3_provider", function () { describe("createVerifiedExecutionProvider", () => { describe("web3", () => { it("should connect to the network and call a non-verified method", async () => { - const {provider} = createVerifiedExecutionProvider(new Web3.providers.HttpProvider(rpcURL), { + const {provider} = createVerifiedExecutionProvider(new Web3.providers.HttpProvider(rpcUrl), { transport: LCTransport.Rest, urls: [beaconUrl], config, @@ -33,7 +33,7 @@ describe("web3_provider", function () { describe("ethers", () => { it("should connect to the network and call a non-verified method", async () => { - const {provider} = createVerifiedExecutionProvider(new ethers.JsonRpcProvider(rpcURL), { + const {provider} = createVerifiedExecutionProvider(new ethers.JsonRpcProvider(rpcUrl), { transport: LCTransport.Rest, urls: [beaconUrl], config, diff --git a/packages/prover/test/utils/e2e_env.ts 
b/packages/prover/test/utils/e2e_env.ts index b746b22401fa..1968fb841090 100644 --- a/packages/prover/test/utils/e2e_env.ts +++ b/packages/prover/test/utils/e2e_env.ts @@ -1,9 +1,10 @@ -import {waitForEndpoint} from "./network.js"; +import {waitForEndpoint} from "@lodestar/test-utils"; /* eslint-disable @typescript-eslint/naming-convention */ -export const rpcURL = "http://0.0.0.0:8001"; +export const rpcUrl = "http://0.0.0.0:8001"; export const beaconUrl = "http://0.0.0.0:5001"; export const proxyPort = 8888; +export const chainId = 1234; export const proxyUrl = `http://localhost:${proxyPort}`; // Wait for at least teh capella fork to be started diff --git a/packages/prover/test/utils/network.ts b/packages/prover/test/utils/network.ts deleted file mode 100644 index e439ccca5c9c..000000000000 --- a/packages/prover/test/utils/network.ts +++ /dev/null @@ -1,19 +0,0 @@ -import {request} from "node:http"; -import {sleep} from "@lodestar/utils"; - -export async function waitForEndpoint(url: string): Promise { - // eslint-disable-next-line no-constant-condition - while (true) { - const status = await new Promise((resolve) => { - const req = request(url, {method: "GET"}, (res) => { - resolve(res.statusCode); - }); - req.end(); - }); - if (status === 200) { - break; - } else { - await sleep(1000); - } - } -} diff --git a/packages/test-utils/.mocharc.yaml b/packages/test-utils/.mocharc.yaml new file mode 100644 index 000000000000..1f15bf5929e0 --- /dev/null +++ b/packages/test-utils/.mocharc.yaml @@ -0,0 +1,4 @@ +colors: true +extension: ["ts"] +node-option: + - "loader=ts-node/esm" diff --git a/packages/test-utils/.nycrc.json b/packages/test-utils/.nycrc.json new file mode 100644 index 000000000000..69aa626339a0 --- /dev/null +++ b/packages/test-utils/.nycrc.json @@ -0,0 +1,3 @@ +{ + "extends": "../../.nycrc.json" +} diff --git a/packages/test-utils/LICENSE b/packages/test-utils/LICENSE new file mode 100644 index 000000000000..f49a4e16e68b --- /dev/null +++ 
b/packages/test-utils/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. 
For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/packages/test-utils/README.md b/packages/test-utils/README.md new file mode 100644 index 000000000000..cc7db46c0990 --- /dev/null +++ b/packages/test-utils/README.md @@ -0,0 +1,11 @@ +# lodestar-test-util + +> This package is part of [ChainSafe's Lodestar](https://lodestar.chainsafe.io) project + +Mocha / Chai and other utility to reuse across testing of other packages. 
+ +For usage see [spec tests]("https://github.com/ChainSafe/lodestar/tree/unstable/packages/beacon-node/test/spec") + +## License + +Apache-2.0 [ChainSafe Systems](https://chainsafe.io) diff --git a/packages/test-utils/package.json b/packages/test-utils/package.json new file mode 100644 index 000000000000..0045378f5d46 --- /dev/null +++ b/packages/test-utils/package.json @@ -0,0 +1,79 @@ +{ + "name": "@lodestar/test-utils", + "private": true, + "version": "1.9.1", + "description": "Test utilities reused across other packages", + "author": "ChainSafe Systems", + "license": "Apache-2.0", + "bugs": { + "url": "https://github.com/ChainSafe/lodestar/issues" + }, + "homepage": "https://github.com/ChainSafe/lodestar#readme", + "type": "module", + "exports": { + ".": { + "import": "./lib/index.js" + }, + "./sinon": { + "import": "./lib/sinon.js" + }, + "./mocha": { + "import": "./lib/mocha.js" + } + }, + "typesVersions": { + "*": { + "*": [ + "*", + "lib/*", + "lib/*/index" + ] + } + }, + "types": "lib/index.d.ts", + "files": [ + "lib/**/*.js", + "lib/**/*.js.map", + "lib/**/*.d.ts", + "*.d.ts", + "*.js" + ], + "scripts": { + "clean": "rm -rf lib && rm -f *.tsbuildinfo", + "build": "tsc -p tsconfig.build.json", + "build:release": "yarn clean && yarn build", + "build:watch": "yarn run build --watch", + "check-build": "node -e \"(async function() { await import('./lib/index.js') })()\"", + "check-types": "tsc", + "lint": "eslint --color --ext .ts src/ test/", + "lint:fix": "yarn run lint --fix", + "pretest": "yarn run check-types", + "check-readme": "typescript-docs-verifier" + }, + "repository": { + "type": "git", + "url": "git+https://github.com:ChainSafe/lodestar.git" + }, + "keywords": [ + "ethereum", + "eth-consensus", + "beacon", + "blockchain" + ], + "dependencies": { + "@lodestar/utils": "^1.9.1", + "axios": "^1.3.4", + "chai": "^4.3.7", + "mocha": "^10.2.0", + "sinon": "^15.0.3" + }, + "devDependencies": { + "@types/mocha": "^10.0.1", + "@types/yargs": "^17.0.24", + 
"yargs": "^17.7.1" + }, + "peerDependencies": { + "chai": "^4.3.7", + "mocha": "^10.2.0" + } +} diff --git a/packages/test-utils/src/childProcess.ts b/packages/test-utils/src/childProcess.ts new file mode 100644 index 000000000000..35149b1ca967 --- /dev/null +++ b/packages/test-utils/src/childProcess.ts @@ -0,0 +1,359 @@ +/* eslint-disable no-console */ +import childProcess from "node:child_process"; +import stream from "node:stream"; +import fs from "node:fs"; +import path from "node:path"; +import {sleep} from "@lodestar/utils"; +import {TestContext} from "./interfaces.js"; + +/** + * If timeout is greater than 0, the parent will send the signal + * identified by the killSignal property (the default is 'SIGTERM') + * if the child runs longer than timeout milliseconds. + */ +const defaultTimeout = 15 * 60 * 1000; // ms + +export type ExecChildProcessOptions = { + env?: Record; + pipeStdioToFile?: string; + pipeStdioToParent?: boolean; + logPrefix?: string; + timeoutMs?: number; + maxBuffer?: number; + signal?: AbortSignal; +}; + +/** + * Run arbitrary commands in a shell + * If the child process exits with code > 0, rejects + */ +export async function execChildProcess(cmd: string | string[], options?: ExecChildProcessOptions): Promise { + const {timeoutMs, maxBuffer, logPrefix, pipeStdioToParent, pipeStdioToFile} = options ?? {}; + const cmdStr = Array.isArray(cmd) ? cmd.join(" ") : cmd; + + return new Promise((resolve, reject) => { + const proc = childProcess.exec( + cmdStr, + {timeout: timeoutMs ?? 
defaultTimeout, maxBuffer, env: {...process.env, ...options?.env}}, + (err, stdout) => { + if (err) { + reject(err); + } else { + resolve(stdout.trim()); + } + } + ); + + const logPrefixStream = new stream.Transform({ + transform(chunk, _encoding, callback) { + callback(null, `${logPrefix} ${proc.pid}: ${Buffer.from(chunk).toString("utf8")}`); + }, + }); + + if (pipeStdioToParent) { + proc.stdout?.pipe(logPrefixStream).pipe(process.stdout); + proc.stderr?.pipe(logPrefixStream).pipe(process.stderr); + } + + if (pipeStdioToFile) { + fs.mkdirSync(path.dirname(pipeStdioToFile), {recursive: true}); + const stdoutFileStream = fs.createWriteStream(pipeStdioToFile); + + proc.stdout?.pipe(logPrefixStream).pipe(stdoutFileStream); + proc.stderr?.pipe(logPrefixStream).pipe(stdoutFileStream); + + proc.once("exit", (_code: number) => { + stdoutFileStream.close(); + }); + } + + if (options?.signal) { + options.signal.addEventListener( + "abort", + () => { + proc.kill("SIGKILL"); + }, + {once: true} + ); + } + }); +} + +export const stopChildProcess = async ( + childProcess: childProcess.ChildProcess, + signal: NodeJS.Signals | number = "SIGTERM" +): Promise => { + if (childProcess.killed || childProcess.exitCode !== null || childProcess.signalCode !== null) { + return; + } + + return new Promise((resolve, reject) => { + childProcess.once("error", reject); + childProcess.once("close", resolve); + childProcess.kill(signal); + }); +}; + +/** + * Gracefully stop child process by sending SIGINT signal + * + * @param childProcess - child process to gracefully stop + * @param timeoutMs - timeout to wait for child process to exit before killing + * @returns + */ +export const gracefullyStopChildProcess = async ( + childProcess: childProcess.ChildProcess, + timeoutMs = 3000 +): Promise => { + if (childProcess.killed || childProcess.exitCode !== null || childProcess.signalCode !== null) { + return; + } + + // Send signal to child process to gracefully stop + childProcess.kill("SIGINT"); + 
+ // Wait for process to exit or timeout + const result = await Promise.race([ + new Promise((resolve) => childProcess.once("exit", resolve)).then(() => "exited"), + sleep(timeoutMs).then(() => "timeout"), + ]); + + // If process is timeout kill it + if (result === "timeout") { + await stopChildProcess(childProcess, "SIGKILL"); + } +}; + +export enum ChildProcessResolve { + /** + * Resolve immediately after spawning child process + */ + Immediate, + /** + * Resolve after child process exits + */ + Completion, + /** + * Resolve after child process is healthy. Only considered when `heath` attr is set + */ + Healthy, +} + +export type ChildProcessHealthStatus = {healthy: boolean; error?: string}; + +export type SpawnChildProcessOptions = { + /** + * Environment variables to pass to child process + */ + env?: Record; + /** + * If true, pipe child process stdio to parent process + */ + pipeStdioToFile?: string; + /** + * If true, pipe child process stdio to parent process + */ + pipeStdioToParent?: boolean; + /** + * The prefix to add to child process stdio to identify it from logs + */ + logPrefix?: string; + /** + * Hide stdio from parent process and only show errors + */ + pipeOnlyError?: boolean; + /** + * Child process resolve behavior + */ + resolveOn?: ChildProcessResolve; + /** + * Timeout to wait for child process before considering it unhealthy + */ + healthTimeoutMs?: number; + /** + * Interval to check child process health + */ + healthCheckIntervalMs?: number; + /** + * Log health checks after this time + */ + logHealthChecksAfterMs?: number; + /** + * Test context to pass to child process. 
Useful for testing to close the process after test case + */ + testContext?: TestContext; + /** + * Abort signal to stop child process + */ + signal?: AbortSignal; + /** + * If health attribute defined we will consider resolveOn = ChildProcessResolve.Healthy + */ + health?: () => Promise<{healthy: boolean; error?: string}>; +}; + +const defaultStartOpts = { + env: {}, + pipeStdToParent: false, + pipeOnlyError: false, + logPrefix: "", + healthCheckIntervalMs: 1000, + logHealthChecksAfterMs: 2000, + resolveOn: ChildProcessResolve.Immediate, +}; + +/** + * Spawn child process and return it + * + * @param command - command to run in child process relative to mono-repo root + * @param args - command arguments + * @param opts - options + * @returns + */ +export async function spawnChildProcess( + command: string, + args: string[], + opts?: Partial +): Promise { + const options = {...defaultStartOpts, ...opts}; + const {env, pipeStdioToFile, pipeStdioToParent, logPrefix, pipeOnlyError, signal} = options; + const {health, resolveOn, healthCheckIntervalMs, logHealthChecksAfterMs, healthTimeoutMs, testContext} = options; + + return new Promise((resolve, reject) => { + void (async () => { + const proc = childProcess.spawn(command, args, { + env: {...process.env, ...env}, + }); + + const getLogPrefixStream = (): stream.Transform => + new stream.Transform({ + transform(chunk, _encoding, callback) { + callback(null, `[${logPrefix}] [${proc.pid}]: ${Buffer.from(chunk).toString("utf8")}`); + }, + }); + + if (testContext) { + testContext.afterEach(async () => { + proc.kill("SIGINT"); + await sleep(1000, signal); + await stopChildProcess(proc); + }); + } + + if (signal) { + signal.addEventListener( + "abort", + () => { + proc.kill("SIGKILL"); + }, + {once: true} + ); + } + + if (pipeStdioToFile) { + fs.mkdirSync(path.dirname(pipeStdioToFile), {recursive: true}); + const stdoutFileStream = fs.createWriteStream(pipeStdioToFile); + + 
proc.stdout.pipe(getLogPrefixStream()).pipe(stdoutFileStream); + proc.stderr.pipe(getLogPrefixStream()).pipe(stdoutFileStream); + + proc.once("exit", (_code: number) => { + stdoutFileStream.close(); + }); + } + + if (pipeStdioToParent) { + proc.stdout.pipe(getLogPrefixStream()).pipe(process.stdout); + proc.stderr.pipe(getLogPrefixStream()).pipe(process.stderr); + } + + if (!pipeStdioToParent && pipeOnlyError) { + // If want to see only errors then show it on the output stream of main process + proc.stderr.pipe(getLogPrefixStream()).pipe(process.stdout); + } + + // If there is any error in running the child process, reject the promise + proc.on("error", reject); + + if (!health && resolveOn === ChildProcessResolve.Immediate) { + return resolve(proc); + } + + if (!health && resolveOn === ChildProcessResolve.Completion) { + proc.once("exit", (code: number) => { + if (code > 0) { + reject(new Error(`process exited. pid=${proc.pid}, code=${code}, command="${command} ${args.join(" ")}"`)); + } else { + resolve(proc); + } + }); + + return; + } + + // If there is a health check, wait for it to pass + if (health) { + const startHealthCheckMs = Date.now(); + const intervalId = setInterval(() => { + health() + .then((isHealthy) => { + if (isHealthy.healthy) { + clearInterval(intervalId); + clearTimeout(healthTimeoutId); + proc.removeAllListeners("exit"); + resolve(proc); + } else { + const timeSinceHealthCheckStart = Date.now() - startHealthCheckMs; + if (timeSinceHealthCheckStart > logHealthChecksAfterMs) { + console.log( + `Health check unsuccessful. logPrefix=${logPrefix} pid=${proc.pid} timeSinceHealthCheckStart=${timeSinceHealthCheckStart}` + ); + } + } + }) + .catch((e) => { + console.error("error on health check, health functions must never throw", e); + }); + }, healthCheckIntervalMs); + + const healthTimeoutId = setTimeout(() => { + clearTimeout(healthTimeoutId); + + if (intervalId !== undefined) { + reject( + new Error( + `Health check timeout. 
logPrefix=${logPrefix} pid=${proc.pid} healthTimeoutMs=${healthTimeoutMs}` + ) + ); + } + }, healthTimeoutMs); + + proc.once("exit", (code: number) => { + if (healthTimeoutId !== undefined) return; + + clearInterval(intervalId); + clearTimeout(healthTimeoutId); + + reject( + new Error( + `process exited before healthy. logPrefix=${logPrefix} pid=${ + proc.pid + } healthTimeoutMs=${healthTimeoutMs} code=${code} command="${command} ${args.join(" ")}"` + ) + ); + }); + } + })(); + }); +} + +export function bufferStderr(proc: childProcess.ChildProcessWithoutNullStreams): {read: () => string} { + let data = ""; + proc.stderr.on("data", (chunk) => { + data += Buffer.from(chunk).toString("utf8"); + }); + + return { + read: () => data, + }; +} diff --git a/packages/test-utils/src/cli.ts b/packages/test-utils/src/cli.ts new file mode 100644 index 000000000000..13689da5bea9 --- /dev/null +++ b/packages/test-utils/src/cli.ts @@ -0,0 +1,91 @@ +import childProcess from "node:child_process"; +import type {Argv} from "yargs"; +import {wrapTimeout} from "./timeout.js"; +import {nodeJsBinaryPath, repoRootPath} from "./path.js"; +import { + ExecChildProcessOptions, + SpawnChildProcessOptions, + execChildProcess, + spawnChildProcess, +} from "./childProcess.js"; + +// We need to make it easy for the user to pass the args for the CLI +// yargs treat `["--preset minimal"] as a single arg, so we need to split it ["--preset", "minimal"] +function parseArgs(args: string[]): string[] { + return args.map((a) => a.split(" ")).flat(); +} + +type CommandRunOptions = { + timeoutMs: number; +}; + +/** + * Run the cli command inside the main process from the Yargs object + */ +export async function runCliCommand( + cli: Argv, + args: string[], + opts: CommandRunOptions = {timeoutMs: 1000} +): Promise { + return wrapTimeout( + // eslint-disable-next-line no-async-promise-executor + new Promise(async (resolve, reject) => { + await cli.parseAsync(parseArgs(args), {}, (err, _argv, output) => { + if 
(err) return reject(err);
+
+      resolve(output);
+    });
+    }),
+    opts.timeoutMs
+  );
+}
+
+/**
+ * Exec a command in bash script mode. Useful for short-running commands
+ *
+ * @param command - The command should be relative to mono-repo root
+ * @param args - Arguments to pass to the command
+ * @param opts - Exec options, plus `runWith` to select the runtime ("node" or "ts-node")
+ * @returns The stdout of the completed command
+ */
+export function execCliCommand(
+  command: string,
+  args: string[],
+  opts?: ExecChildProcessOptions & {runWith?: "node" | "ts-node"}
+): Promise<string> {
+  const commandPrefixed = nodeJsBinaryPath;
+
+  const argsPrefixed =
+    opts?.runWith === "ts-node"
+      ? // node --loader ts-node/esm cli.ts
+        ["--loader", "ts-node/esm", repoRootPath(command), ...args]
+      : // node cli.js
+        [repoRootPath(command), ...args];
+
+  return execChildProcess([commandPrefixed, ...parseArgs(argsPrefixed)], opts);
+}
+
+/**
+ * Spawn a process and keep it running
+ *
+ * @param command - The command should be relative to mono-repo root
+ * @param args - Arguments to pass to the command
+ * @param opts - Spawn options, plus `runWith` to select the runtime ("node" or "ts-node")
+ * @returns The spawned child process handle
+ */
+export async function spawnCliCommand(
+  command: string,
+  args: string[],
+  opts?: SpawnChildProcessOptions & {runWith?: "node" | "ts-node"}
+): Promise<childProcess.ChildProcessWithoutNullStreams> {
+  const commandPrefixed = nodeJsBinaryPath;
+
+  const argsPrefixed =
+    opts?.runWith === "ts-node"
+      ?
// node --loader ts-node/esm cli.ts
+        ["--loader", "ts-node/esm", repoRootPath(command), ...args]
+      : // node cli.js
+        [repoRootPath(command), ...args];
+
+  return spawnChildProcess(commandPrefixed, parseArgs(argsPrefixed), opts);
+}
diff --git a/packages/test-utils/src/http.ts b/packages/test-utils/src/http.ts
new file mode 100644
index 000000000000..b4dd16390483
--- /dev/null
+++ b/packages/test-utils/src/http.ts
@@ -0,0 +1,55 @@
+import axios from "axios";
+import {sleep} from "@lodestar/utils";
+
+type Method = "GET" | "POST" | "PUT";
+
+/**
+ * Return the status code of a request for given url and method
+ */
+export async function getReqStatus(url: string, method: Method = "GET"): Promise<number> {
+  const res = await axios.request({url, method});
+  return res.status;
+}
+
+/**
+ * Get the response body of a request for given url and method
+ */
+export async function getRespBody<T = unknown>(
+  url: string,
+  method: Method = "GET",
+  data?: Record<string, unknown>
+): Promise<T> {
+  const res = await axios.request({url, method, data});
+  return res.data as T;
+}
+
+/**
+ * Match the status code of a request for given url and method
+ */
+export async function matchReqStatus(url: string, code: number, method: Method = "GET"): Promise<boolean> {
+  return (await getReqStatus(url, method)) === code;
+}
+
+/**
+ * Check that a request for given url and method returns a success (2xx) status code
+ */
+export async function matchReqSuccess(url: string, method: Method = "GET"): Promise<boolean> {
+  const status = await getReqStatus(url, method);
+  return status >= 200 && status < 300;
+}
+
+/**
+ * Wait for a given endpoint to return a given status code
+ */
+export async function waitForEndpoint(url: string, statusCode = 200): Promise<void> {
+  // eslint-disable-next-line no-constant-condition
+  while (true) {
+    const status = await getReqStatus(url);
+
+    if (status === statusCode) {
+      break;
+    }
+
+    await sleep(1000);
+  }
+}
diff --git a/packages/test-utils/src/index.ts b/packages/test-utils/src/index.ts
new file mode 100644
index
000000000000..84f0efe0f587
--- /dev/null
+++ b/packages/test-utils/src/index.ts
@@ -0,0 +1,6 @@
+export * from "./cli.js";
+export * from "./childProcess.js";
+export * from "./path.js";
+export * from "./timeout.js";
+export * from "./http.js";
+export * from "./interfaces.js";
diff --git a/packages/test-utils/src/interfaces.ts b/packages/test-utils/src/interfaces.ts
new file mode 100644
index 000000000000..25e254eb28a9
--- /dev/null
+++ b/packages/test-utils/src/interfaces.ts
@@ -0,0 +1,5 @@
+export interface TestContext {
+  afterEach: (cb: () => Promise<void> | void) => void;
+  beforeEach: (cb: () => Promise<void> | void) => void;
+  afterAll: (cb: () => Promise<void> | void) => void;
+}
diff --git a/packages/test-utils/src/mocha.ts b/packages/test-utils/src/mocha.ts
new file mode 100644
index 000000000000..12c2741f67b9
--- /dev/null
+++ b/packages/test-utils/src/mocha.ts
@@ -0,0 +1,45 @@
+import type {Suite} from "mocha";
+import {TestContext} from "./interfaces.js";
+export {TestContext} from "./interfaces.js";
+
+/**
+ * Create a Mocha context object that can be used to register callbacks that will be executed
+ */
+export function getMochaContext(suite: Suite): TestContext {
+  const afterEachCallbacks: (() => Promise<void> | void)[] = [];
+  const beforeEachCallbacks: (() => Promise<void> | void)[] = [];
+  const afterAllCallbacks: (() => Promise<void> | void)[] = [];
+
+  const context: TestContext = {
+    afterEach: (cb) => afterEachCallbacks.push(cb),
+    beforeEach: (cb) => beforeEachCallbacks.push(cb),
+    afterAll: (cb) => afterAllCallbacks.push(cb),
+  };
+
+  const callbacks = [afterEachCallbacks, beforeEachCallbacks, afterAllCallbacks];
+  const hooks = [suite.afterEach, suite.beforeEach, suite.afterAll];
+
+  for (const [index, cbs] of callbacks.entries()) {
+    const hook = hooks[index].bind(suite);
+
+    hook(async function mochaHook() {
+      // Add increased timeout for that hook
+      this.timeout(10000);
+
+      const errs: Error[] = [];
+      for (const cb of cbs) {
+        try {
+          await cb();
+        } catch (e) {
errs.push(e as Error); + } + } + cbs.length = 0; // Reset array + if (errs.length > 0) { + throw errs[0]; + } + }); + } + + return context; +} diff --git a/packages/test-utils/src/path.ts b/packages/test-utils/src/path.ts new file mode 100644 index 000000000000..24b10d272e95 --- /dev/null +++ b/packages/test-utils/src/path.ts @@ -0,0 +1,21 @@ +import path from "node:path"; + +/** + * Return the absolute path to a file relative to the current file + * From https://blog.logrocket.com/alternatives-dirname-node-js-es-modules + */ +export function esmRelativePathResolve(relativePath: string): string { + return new URL(relativePath, import.meta.url).toString().replace(/^file:\/\//, ""); +} + +/** + * Return the path to the root of the repo + */ +export function repoRootPath(fileDirPath: string): string { + return path.join(esmRelativePathResolve("../../../"), fileDirPath); +} + +/** + * Path to the node binary + */ +export const nodeJsBinaryPath = "node"; diff --git a/packages/test-utils/src/sinon.ts b/packages/test-utils/src/sinon.ts new file mode 100644 index 000000000000..9c75dd171248 --- /dev/null +++ b/packages/test-utils/src/sinon.ts @@ -0,0 +1,23 @@ +import {SinonSpy, spy} from "sinon"; + +type Callback = () => void; +type Handler = (cb: Callback) => void; + +/** + * Stub the logger methods + */ +export function stubLogger(context: {beforeEach: Handler; afterEach: Handler}, logger = console): void { + context.beforeEach(() => { + spy(logger, "info"); + spy(logger, "log"); + spy(logger, "warn"); + spy(logger, "error"); + }); + + context.afterEach(() => { + (logger.info as SinonSpy).restore(); + (logger.log as SinonSpy).restore(); + (logger.warn as SinonSpy).restore(); + (logger.error as SinonSpy).restore(); + }); +} diff --git a/packages/test-utils/src/timeout.ts b/packages/test-utils/src/timeout.ts new file mode 100644 index 000000000000..77e716c0691c --- /dev/null +++ b/packages/test-utils/src/timeout.ts @@ -0,0 +1,17 @@ +import {sleep} from "@lodestar/utils"; + 
+/**
+ * Wrap a promise with a timeout
+ */
+export function wrapTimeout<T>(
+  p: Promise<T>,
+  timeoutMs: number,
+  opts?: {timeoutMsg?: string; signal?: AbortSignal}
+): Promise<T> {
+  return Promise.race([
+    p,
+    sleep(timeoutMs, opts?.signal).then(() => {
+      throw new Error(opts?.timeoutMsg ?? `Promise timeout after ${timeoutMs}ms.`);
+    }),
+  ]) as Promise<T>;
+}
diff --git a/packages/test-utils/tsconfig.build.json b/packages/test-utils/tsconfig.build.json
new file mode 100644
index 000000000000..3c66d5619616
--- /dev/null
+++ b/packages/test-utils/tsconfig.build.json
@@ -0,0 +1,8 @@
+{
+  "extends": "../../tsconfig.build.json",
+  "include": ["src"],
+  "compilerOptions": {
+    "outDir": "./lib",
+    "typeRoots": ["../../node_modules/@types", "./node_modules/@types", "../../types"]
+  }
+}
diff --git a/packages/test-utils/tsconfig.json b/packages/test-utils/tsconfig.json
new file mode 100644
index 000000000000..f81823701532
--- /dev/null
+++ b/packages/test-utils/tsconfig.json
@@ -0,0 +1,6 @@
+{
+  "extends": "../../tsconfig.json",
+  "compilerOptions": {
+    "typeRoots": ["../../node_modules/@types", "./node_modules/@types", "../../types"]
+  }
+}