diff --git a/test/e2e/commands/account.test.ts b/test/e2e/commands/account.test.ts index e1dab0ec2..9094bec3c 100644 --- a/test/e2e/commands/account.test.ts +++ b/test/e2e/commands/account.test.ts @@ -13,7 +13,6 @@ * See the License for the specific language governing permissions and * limitations under the License. * - * @mocha-environment steps */ import { it, describe, after, before } from 'mocha' import { expect } from 'chai' @@ -22,7 +21,7 @@ import { AccountId, PrivateKey } from '@hashgraph/sdk' import { constants } from '../../../src/core/index.ts' import * as version from '../../../version.ts' import { - bootstrapNetwork, + e2eTestSuite, getDefaultArgv, HEDERA_PLATFORM_VERSION_TAG, TEST_CLUSTER, @@ -35,242 +34,244 @@ import { MINUTES, SECONDS } from '../../../src/core/constants.ts' const defaultTimeout = 20 * SECONDS -describe('AccountCommand', async () => { - const testName = 'account-cmd-e2e' - const namespace = testName - const testSystemAccounts = [[3, 5]] - const argv = getDefaultArgv() - argv[flags.namespace.name] = namespace - argv[flags.releaseTag.name] = HEDERA_PLATFORM_VERSION_TAG - argv[flags.nodeAliasesUnparsed.name] = 'node1' - argv[flags.generateGossipKeys.name] = true - argv[flags.generateTlsKeys.name] = true - argv[flags.clusterName.name] = TEST_CLUSTER - argv[flags.soloChartVersion.name] = version.SOLO_CHART_VERSION - // set the env variable SOLO_CHARTS_DIR if developer wants to use local Solo charts - argv[flags.chartDirectory.name] = process.env.SOLO_CHARTS_DIR ?? 
undefined - const bootstrapResp = await bootstrapNetwork(testName, argv) - const accountCmd = new AccountCommand(bootstrapResp.opts, testSystemAccounts) - bootstrapResp.cmd.accountCmd = accountCmd - const k8 = bootstrapResp.opts.k8 - const accountManager = bootstrapResp.opts.accountManager - const configManager = bootstrapResp.opts.configManager - const nodeCmd = bootstrapResp.cmd.nodeCmd - - after(async function () { - this.timeout(3 * MINUTES) - - await getNodeLogs(k8, namespace) - await k8.deleteNamespace(namespace) - await accountManager.close() - await nodeCmd.close() - }) - - describe('node proxies should be UP', () => { - for (const nodeAlias of argv[flags.nodeAliasesUnparsed.name].split(',')) { - it(`proxy should be UP: ${nodeAlias} `, async () => { - await k8.waitForPodReady( - [`app=haproxy-${nodeAlias}`, 'solo.hedera.com/type=haproxy'], - 1, 300, 2 * SECONDS) - }).timeout(30 * SECONDS) - } - }) - - describe('account init command', () => { - it('should succeed with init command', async () => { - const status = await accountCmd.init(argv) - expect(status).to.be.ok - }).timeout(3 * MINUTES) - - describe('special accounts should have new keys', () => { - const genesisKey = PrivateKey.fromStringED25519(constants.GENESIS_KEY) - const realm = constants.HEDERA_NODE_ACCOUNT_ID_START.realm - const shard = constants.HEDERA_NODE_ACCOUNT_ID_START.shard - - before(async function () { - this.timeout(20 * SECONDS) - await accountManager.loadNodeClient(namespace) - }) - - after(async function () { - this.timeout(20 * SECONDS) - await accountManager.close() - }) - - for (const [start, end] of testSystemAccounts) { - for (let i = start; i <= end; i++) { - it(`account ${i} should not have genesis key`, async () => { - expect(accountManager._nodeClient).not.to.be.null +const testName = 'account-cmd-e2e' +const namespace = testName +const testSystemAccounts = [[3, 5]] +const argv = getDefaultArgv() +argv[flags.namespace.name] = namespace +argv[flags.releaseTag.name] = 
HEDERA_PLATFORM_VERSION_TAG +argv[flags.nodeAliasesUnparsed.name] = 'node1' +argv[flags.generateGossipKeys.name] = true +argv[flags.generateTlsKeys.name] = true +argv[flags.clusterName.name] = TEST_CLUSTER +argv[flags.soloChartVersion.name] = version.SOLO_CHART_VERSION +// set the env variable SOLO_CHARTS_DIR if developer wants to use local Solo charts +argv[flags.chartDirectory.name] = process.env.SOLO_CHARTS_DIR ?? undefined + +e2eTestSuite(testName, argv, undefined, undefined, undefined, undefined, undefined, undefined, true, (bootstrapResp) => { + describe('AccountCommand', async () => { + const accountCmd = new AccountCommand(bootstrapResp.opts, testSystemAccounts) + bootstrapResp.cmd.accountCmd = accountCmd + const k8 = bootstrapResp.opts.k8 + const accountManager = bootstrapResp.opts.accountManager + const configManager = bootstrapResp.opts.configManager + const nodeCmd = bootstrapResp.cmd.nodeCmd + + after(async function () { + this.timeout(3 * MINUTES) + + await getNodeLogs(k8, namespace) + await k8.deleteNamespace(namespace) + await accountManager.close() + await nodeCmd.close() + }) - const accountId = `${realm}.${shard}.${i}` - nodeCmd.logger.info(`Fetching account keys: accountId ${accountId}`) - const keys = await accountManager.getAccountKeys(accountId) - nodeCmd.logger.info(`Fetched account keys: accountId ${accountId}`) + describe('node proxies should be UP', () => { + for (const nodeAlias of argv[flags.nodeAliasesUnparsed.name].split(',')) { + it(`proxy should be UP: ${nodeAlias} `, async () => { + await k8.waitForPodReady( + [`app=haproxy-${nodeAlias}`, 'solo.hedera.com/type=haproxy'], + 1, 300, 2 * SECONDS) + }).timeout(30 * SECONDS) + } + }) - expect(keys.length).not.to.equal(0) - expect(keys[0].toString()).not.to.equal(genesisKey.toString()) - }).timeout(20 * SECONDS) + describe('account init command', () => { + it('should succeed with init command', async () => { + const status = await accountCmd.init(argv) + expect(status).to.be.ok + 
}).timeout(3 * MINUTES) + + describe('special accounts should have new keys', () => { + const genesisKey = PrivateKey.fromStringED25519(constants.GENESIS_KEY) + const realm = constants.HEDERA_NODE_ACCOUNT_ID_START.realm + const shard = constants.HEDERA_NODE_ACCOUNT_ID_START.shard + + before(async function () { + this.timeout(20 * SECONDS) + await accountManager.loadNodeClient(namespace) + }) + + after(async function () { + this.timeout(20 * SECONDS) + await accountManager.close() + }) + + for (const [start, end] of testSystemAccounts) { + for (let i = start; i <= end; i++) { + it(`account ${i} should not have genesis key`, async () => { + expect(accountManager._nodeClient).not.to.be.null + + const accountId = `${realm}.${shard}.${i}` + nodeCmd.logger.info(`Fetching account keys: accountId ${accountId}`) + const keys = await accountManager.getAccountKeys(accountId) + nodeCmd.logger.info(`Fetched account keys: accountId ${accountId}`) + + expect(keys.length).not.to.equal(0) + expect(keys[0].toString()).not.to.equal(genesisKey.toString()) + }).timeout(20 * SECONDS) + } } - } + }) }) - }) - describe('account create/update command', () => { - let accountId1: string, accountId2: string + describe('account create/update command', () => { + let accountId1: string, accountId2: string - it('should create account with no options', async () => { - try { - argv[flags.amount.name] = 200 - await expect(accountCmd.create(argv)).to.eventually.be.ok + it('should create account with no options', async () => { + try { + argv[flags.amount.name] = 200 + await expect(accountCmd.create(argv)).to.eventually.be.ok - // @ts-ignore to access the private property - const accountInfo = accountCmd.accountInfo + // @ts-ignore to access the private property + const accountInfo = accountCmd.accountInfo - expect(accountInfo).not.to.be.null - expect(accountInfo.accountId).not.to.be.null + expect(accountInfo).not.to.be.null + expect(accountInfo.accountId).not.to.be.null - accountId1 = 
accountInfo.accountId + accountId1 = accountInfo.accountId - expect(accountInfo.privateKey).not.to.be.null - expect(accountInfo.publicKey).not.to.be.null - expect(accountInfo.balance).to.equal(configManager.getFlag(flags.amount)) - } catch (e) { - testLogger.showUserError(e) - expect.fail() - } - }).timeout(40 * SECONDS) - - it('should create account with private key and hbar amount options', async () => { - try { - argv[flags.privateKey.name] = constants.GENESIS_KEY - argv[flags.amount.name] = 777 - configManager.update(argv, true) - - await expect(accountCmd.create(argv)).to.eventually.be.ok - - // @ts-ignore to access the private property - const accountInfo = accountCmd.accountInfo - expect(accountInfo).not.to.be.null - expect(accountInfo.accountId).not.to.be.null - accountId2 = accountInfo.accountId - expect(accountInfo.privateKey.toString()).to.equal(constants.GENESIS_KEY) - expect(accountInfo.publicKey).not.to.be.null - expect(accountInfo.balance).to.equal(configManager.getFlag(flags.amount)) - } catch (e) { - testLogger.showUserError(e) - expect.fail() - } - }).timeout(defaultTimeout) - - it('should update account-1', async () => { - try { - argv[flags.amount.name] = 0 - argv[flags.accountId.name] = accountId1 - configManager.update(argv, true) - - await expect(accountCmd.update(argv)).to.eventually.be.ok - - // @ts-ignore to access the private property - const accountInfo = accountCmd.accountInfo - expect(accountInfo).not.to.be.null - expect(accountInfo.accountId).to.equal(argv[flags.accountId.name]) - expect(accountInfo.privateKey).to.be.undefined - expect(accountInfo.publicKey).not.to.be.null - expect(accountInfo.balance).to.equal(200) - } catch (e) { - testLogger.showUserError(e) - expect.fail() - } - }).timeout(defaultTimeout) - - it('should update account-2 with accountId, amount, new private key, and standard out options', async () => { - try { - argv[flags.accountId.name] = accountId2 - argv[flags.privateKey.name] = constants.GENESIS_KEY - 
argv[flags.amount.name] = 333 - configManager.update(argv, true) - - await expect(accountCmd.update(argv)).to.eventually.be.ok - - // @ts-ignore to access the private property - const accountInfo = accountCmd.accountInfo - expect(accountInfo).not.to.be.null - expect(accountInfo.accountId).to.equal(argv[flags.accountId.name]) - expect(accountInfo.privateKey).to.be.undefined - expect(accountInfo.publicKey).not.to.be.null - expect(accountInfo.balance).to.equal(1_110) - } catch (e) { - testLogger.showUserError(e) - expect.fail() - } - }).timeout(defaultTimeout) - - it('should be able to get account-1', async () => { - try { - argv[flags.accountId.name] = accountId1 - configManager.update(argv, true) - - await expect(accountCmd.get(argv)).to.eventually.be.ok - // @ts-ignore to access the private property - const accountInfo = accountCmd.accountInfo - expect(accountInfo).not.to.be.null - expect(accountInfo.accountId).to.equal(argv[flags.accountId.name]) - expect(accountInfo.privateKey).to.be.undefined - expect(accountInfo.publicKey).to.be.ok - expect(accountInfo.balance).to.equal(200) - } catch (e) { - testLogger.showUserError(e) - expect.fail() - } - }).timeout(defaultTimeout) - - it('should be able to get account-2', async () => { - try { - argv[flags.accountId.name] = accountId2 - configManager.update(argv, true) - - await expect(accountCmd.get(argv)).to.eventually.be.ok - // @ts-ignore to access the private property - const accountInfo = accountCmd.accountInfo - expect(accountInfo).not.to.be.null - expect(accountInfo.accountId).to.equal(argv[flags.accountId.name]) - expect(accountInfo.privateKey).to.be.undefined - expect(accountInfo.publicKey).to.be.ok - expect(accountInfo.balance).to.equal(1_110) - } catch (e) { - testLogger.showUserError(e) - expect.fail() - } - }).timeout(defaultTimeout) - - it('should create account with ecdsa private key and set alias', async () => { - const ecdsaPrivateKey = PrivateKey.generateECDSA() - - try { - 
argv[flags.ecdsaPrivateKey.name] = ecdsaPrivateKey.toString() - argv[flags.setAlias.name] = true - configManager.update(argv, true) - - await expect(accountCmd.create(argv)).to.eventually.be.ok - - // @ts-ignore to access the private property - const newAccountInfo = accountCmd.accountInfo - expect(newAccountInfo).not.to.be.null - expect(newAccountInfo.accountId).not.to.be.null - expect(newAccountInfo.privateKey.toString()).to.equal(ecdsaPrivateKey.toString()) - expect(newAccountInfo.publicKey.toString()).to.equal(ecdsaPrivateKey.publicKey.toString()) - expect(newAccountInfo.balance).to.be.greaterThan(0) - - const accountId = AccountId.fromString(newAccountInfo.accountId) - expect(newAccountInfo.accountAlias).to.equal(`${accountId.realm}.${accountId.shard}.${ecdsaPrivateKey.publicKey.toEvmAddress()}`) - - await accountManager.loadNodeClient(namespace) - const accountAliasInfo = await accountManager.accountInfoQuery(newAccountInfo.accountAlias) - expect(accountAliasInfo).not.to.be.null - } catch (e) { - testLogger.showUserError(e) - expect.fail() - } - }).timeout(defaultTimeout) + expect(accountInfo.privateKey).not.to.be.null + expect(accountInfo.publicKey).not.to.be.null + expect(accountInfo.balance).to.equal(configManager.getFlag(flags.amount)) + } catch (e) { + testLogger.showUserError(e) + expect.fail() + } + }).timeout(40 * SECONDS) + + it('should create account with private key and hbar amount options', async () => { + try { + argv[flags.privateKey.name] = constants.GENESIS_KEY + argv[flags.amount.name] = 777 + configManager.update(argv, true) + + await expect(accountCmd.create(argv)).to.eventually.be.ok + + // @ts-ignore to access the private property + const accountInfo = accountCmd.accountInfo + expect(accountInfo).not.to.be.null + expect(accountInfo.accountId).not.to.be.null + accountId2 = accountInfo.accountId + expect(accountInfo.privateKey.toString()).to.equal(constants.GENESIS_KEY) + expect(accountInfo.publicKey).not.to.be.null + 
expect(accountInfo.balance).to.equal(configManager.getFlag(flags.amount)) + } catch (e) { + testLogger.showUserError(e) + expect.fail() + } + }).timeout(defaultTimeout) + + it('should update account-1', async () => { + try { + argv[flags.amount.name] = 0 + argv[flags.accountId.name] = accountId1 + configManager.update(argv, true) + + await expect(accountCmd.update(argv)).to.eventually.be.ok + + // @ts-ignore to access the private property + const accountInfo = accountCmd.accountInfo + expect(accountInfo).not.to.be.null + expect(accountInfo.accountId).to.equal(argv[flags.accountId.name]) + expect(accountInfo.privateKey).to.be.undefined + expect(accountInfo.publicKey).not.to.be.null + expect(accountInfo.balance).to.equal(200) + } catch (e) { + testLogger.showUserError(e) + expect.fail() + } + }).timeout(defaultTimeout) + + it('should update account-2 with accountId, amount, new private key, and standard out options', async () => { + try { + argv[flags.accountId.name] = accountId2 + argv[flags.privateKey.name] = constants.GENESIS_KEY + argv[flags.amount.name] = 333 + configManager.update(argv, true) + + await expect(accountCmd.update(argv)).to.eventually.be.ok + + // @ts-ignore to access the private property + const accountInfo = accountCmd.accountInfo + expect(accountInfo).not.to.be.null + expect(accountInfo.accountId).to.equal(argv[flags.accountId.name]) + expect(accountInfo.privateKey).to.be.undefined + expect(accountInfo.publicKey).not.to.be.null + expect(accountInfo.balance).to.equal(1_110) + } catch (e) { + testLogger.showUserError(e) + expect.fail() + } + }).timeout(defaultTimeout) + + it('should be able to get account-1', async () => { + try { + argv[flags.accountId.name] = accountId1 + configManager.update(argv, true) + + await expect(accountCmd.get(argv)).to.eventually.be.ok + // @ts-ignore to access the private property + const accountInfo = accountCmd.accountInfo + expect(accountInfo).not.to.be.null + 
expect(accountInfo.accountId).to.equal(argv[flags.accountId.name]) + expect(accountInfo.privateKey).to.be.undefined + expect(accountInfo.publicKey).to.be.ok + expect(accountInfo.balance).to.equal(200) + } catch (e) { + testLogger.showUserError(e) + expect.fail() + } + }).timeout(defaultTimeout) + + it('should be able to get account-2', async () => { + try { + argv[flags.accountId.name] = accountId2 + configManager.update(argv, true) + + await expect(accountCmd.get(argv)).to.eventually.be.ok + // @ts-ignore to access the private property + const accountInfo = accountCmd.accountInfo + expect(accountInfo).not.to.be.null + expect(accountInfo.accountId).to.equal(argv[flags.accountId.name]) + expect(accountInfo.privateKey).to.be.undefined + expect(accountInfo.publicKey).to.be.ok + expect(accountInfo.balance).to.equal(1_110) + } catch (e) { + testLogger.showUserError(e) + expect.fail() + } + }).timeout(defaultTimeout) + + it('should create account with ecdsa private key and set alias', async () => { + const ecdsaPrivateKey = PrivateKey.generateECDSA() + + try { + argv[flags.ecdsaPrivateKey.name] = ecdsaPrivateKey.toString() + argv[flags.setAlias.name] = true + configManager.update(argv, true) + + await expect(accountCmd.create(argv)).to.eventually.be.ok + + // @ts-ignore to access the private property + const newAccountInfo = accountCmd.accountInfo + expect(newAccountInfo).not.to.be.null + expect(newAccountInfo.accountId).not.to.be.null + expect(newAccountInfo.privateKey.toString()).to.equal(ecdsaPrivateKey.toString()) + expect(newAccountInfo.publicKey.toString()).to.equal(ecdsaPrivateKey.publicKey.toString()) + expect(newAccountInfo.balance).to.be.greaterThan(0) + + const accountId = AccountId.fromString(newAccountInfo.accountId) + expect(newAccountInfo.accountAlias).to.equal(`${accountId.realm}.${accountId.shard}.${ecdsaPrivateKey.publicKey.toEvmAddress()}`) + + await accountManager.loadNodeClient(namespace) + const accountAliasInfo = await 
accountManager.accountInfoQuery(newAccountInfo.accountAlias) + expect(accountAliasInfo).not.to.be.null + } catch (e) { + testLogger.showUserError(e) + expect.fail() + } + }).timeout(defaultTimeout) + }) }) }) diff --git a/test/e2e/commands/mirror_node.test.ts b/test/e2e/commands/mirror_node.test.ts index f3b0cfb1a..7084edc27 100644 --- a/test/e2e/commands/mirror_node.test.ts +++ b/test/e2e/commands/mirror_node.test.ts @@ -13,7 +13,6 @@ * See the License for the specific language governing permissions and * limitations under the License. * - * @mocha-environment steps */ import { it, describe, after, before, afterEach } from 'mocha' import { expect } from 'chai' @@ -22,7 +21,7 @@ import { flags } from '../../../src/commands/index.ts' import { accountCreationShouldSucceed, balanceQueryShouldSucceed, - bootstrapNetwork, + e2eTestSuite, getDefaultArgv, HEDERA_PLATFORM_VERSION_TAG, TEST_CLUSTER @@ -36,195 +35,196 @@ import * as http from 'http' import { MINUTES, SECONDS } from '../../../src/core/constants.ts' import type { PodName } from '../../../src/types/aliases.ts' -describe('MirrorNodeCommand', async () => { - const testName = 'mirror-cmd-e2e' - const namespace = testName - const argv = getDefaultArgv() - argv[flags.namespace.name] = namespace - argv[flags.releaseTag.name] = HEDERA_PLATFORM_VERSION_TAG - - argv[flags.nodeAliasesUnparsed.name] = 'node1' // use a single node to reduce resource during e2e tests - argv[flags.generateGossipKeys.name] = true - argv[flags.generateTlsKeys.name] = true - argv[flags.clusterName.name] = TEST_CLUSTER - argv[flags.soloChartVersion.name] = version.SOLO_CHART_VERSION - argv[flags.force.name] = true - argv[flags.relayReleaseTag.name] = flags.relayReleaseTag.definition.defaultValue - // set the env variable SOLO_CHARTS_DIR if developer wants to use local Solo charts - argv[flags.chartDirectory.name] = process.env.SOLO_CHARTS_DIR ?? 
undefined - argv[flags.quiet.name] = true - - const bootstrapResp = await bootstrapNetwork(testName, argv) - const k8 = bootstrapResp.opts.k8 - const mirrorNodeCmd = new MirrorNodeCommand(bootstrapResp.opts) - const downloader = new core.PackageDownloader(mirrorNodeCmd.logger) - const accountManager = bootstrapResp.opts.accountManager - - const testMessage = 'Mirror node test message' - let portForwarder = null - let newTopicId = null - - before(() => { - bootstrapResp.opts.logger.showUser(`------------------------- START: ${testName} ----------------------------`) - }) +const testName = 'mirror-cmd-e2e' +const namespace = testName +const argv = getDefaultArgv() +argv[flags.namespace.name] = namespace +argv[flags.releaseTag.name] = HEDERA_PLATFORM_VERSION_TAG + +argv[flags.nodeAliasesUnparsed.name] = 'node1' // use a single node to reduce resource during e2e tests +argv[flags.generateGossipKeys.name] = true +argv[flags.generateTlsKeys.name] = true +argv[flags.clusterName.name] = TEST_CLUSTER +argv[flags.soloChartVersion.name] = version.SOLO_CHART_VERSION +argv[flags.force.name] = true +argv[flags.relayReleaseTag.name] = flags.relayReleaseTag.definition.defaultValue +// set the env variable SOLO_CHARTS_DIR if developer wants to use local Solo charts +argv[flags.chartDirectory.name] = process.env.SOLO_CHARTS_DIR ?? 
undefined +argv[flags.quiet.name] = true + +e2eTestSuite(testName, argv, undefined, undefined, undefined, undefined, undefined, undefined, true, (bootstrapResp) => { + describe('MirrorNodeCommand', async () => { + const k8 = bootstrapResp.opts.k8 + const mirrorNodeCmd = new MirrorNodeCommand(bootstrapResp.opts) + const downloader = new core.PackageDownloader(mirrorNodeCmd.logger) + const accountManager = bootstrapResp.opts.accountManager + + const testMessage = 'Mirror node test message' + let portForwarder = null + let newTopicId = null + + before(() => { + bootstrapResp.opts.logger.showUser(`------------------------- START: ${testName} ----------------------------`) + }) + + after(async function () { + this.timeout(3 * MINUTES) + + await getNodeLogs(k8, namespace) + await k8.deleteNamespace(namespace) + await accountManager.close() + + bootstrapResp.opts.logger.showUser(`------------------------- END: ${testName} ----------------------------`) + }) + + // give a few ticks so that connections can close + afterEach(async () => await sleep(500)) + + balanceQueryShouldSucceed(accountManager, mirrorNodeCmd, namespace) + + it('mirror node deploy should success', async () => { + try { + await expect(mirrorNodeCmd.deploy(argv)).to.eventually.be.ok + } catch (e) { + mirrorNodeCmd.logger.showUserError(e) + expect.fail() + } - after(async function () { - this.timeout(3 * MINUTES) + expect(mirrorNodeCmd.getUnusedConfigs(MirrorNodeCommand.DEPLOY_CONFIGS_NAME)).to.deep.equal([ + flags.hederaExplorerTlsHostName.constName, + flags.hederaExplorerTlsLoadBalancerIp.constName, + flags.profileFile.constName, + flags.profileName.constName, + flags.quiet.constName, + flags.tlsClusterIssuerType.constName + ]) + }).timeout(10 * MINUTES) + + it('mirror node API should be running', async () => { + await accountManager.loadNodeClient(namespace) + try { + // find hedera explorer pod + const pods = await k8.getPodsByLabel(['app.kubernetes.io/name=hedera-explorer']) + const explorerPod = 
pods[0] + + portForwarder = await k8.portForward(explorerPod.metadata.name as PodName, 8_080, 8_080) + await sleep(2 * SECONDS) - await getNodeLogs(k8, namespace) - await k8.deleteNamespace(namespace) - await accountManager.close() + // check if mirror node api server is running + const apiURL = 'http://127.0.0.1:8080/api/v1/transactions' + await expect(downloader.urlExists(apiURL)).to.eventually.be.ok + await sleep(2 * SECONDS) + } catch (e) { + mirrorNodeCmd.logger.showUserError(e) + expect.fail() + } + }).timeout(MINUTES) - bootstrapResp.opts.logger.showUser(`------------------------- END: ${testName} ----------------------------`) - }) + it('Explorer GUI should be running', async () => { + try { + const guiURL = 'http://127.0.0.1:8080/localnet/dashboard' + await expect(downloader.urlExists(guiURL)).to.eventually.be.ok + await sleep(2 * SECONDS) - // give a few ticks so that connections can close - afterEach(async () => await sleep(500)) - - balanceQueryShouldSucceed(accountManager, mirrorNodeCmd, namespace) - - it('mirror node deploy should success', async () => { - try { - await expect(mirrorNodeCmd.deploy(argv)).to.eventually.be.ok - } catch (e) { - mirrorNodeCmd.logger.showUserError(e) - expect.fail() - } - - expect(mirrorNodeCmd.getUnusedConfigs(MirrorNodeCommand.DEPLOY_CONFIGS_NAME)).to.deep.equal([ - flags.hederaExplorerTlsHostName.constName, - flags.hederaExplorerTlsLoadBalancerIp.constName, - flags.profileFile.constName, - flags.profileName.constName, - flags.quiet.constName, - flags.tlsClusterIssuerType.constName - ]) - }).timeout(10 * MINUTES) - - it('mirror node API should be running', async () => { - await accountManager.loadNodeClient(namespace) - try { - // find hedera explorer pod - const pods = await k8.getPodsByLabel(['app.kubernetes.io/name=hedera-explorer']) - const explorerPod = pods[0] - - portForwarder = await k8.portForward(explorerPod.metadata.name as PodName, 8_080, 8_080) - await sleep(2 * SECONDS) - - // check if mirror node api 
server is running - const apiURL = 'http://127.0.0.1:8080/api/v1/transactions' - await expect(downloader.urlExists(apiURL)).to.eventually.be.ok - await sleep(2 * SECONDS) - } catch (e) { - mirrorNodeCmd.logger.showUserError(e) - expect.fail() - } - }).timeout(MINUTES) - - it('Explorer GUI should be running', async () => { - try { - const guiURL = 'http://127.0.0.1:8080/localnet/dashboard' - await expect(downloader.urlExists(guiURL)).to.eventually.be.ok - await sleep(2 * SECONDS) - - mirrorNodeCmd.logger.debug('mirror node API and explorer GUI are running') - } catch (e) { - mirrorNodeCmd.logger.showUserError(e) - expect.fail() - } - }).timeout(MINUTES) - - it('Create topic and submit message should success', async () => { - try { - // Create a new public topic and submit a message - const txResponse = await new TopicCreateTransaction().execute(accountManager._nodeClient) - const receipt = await txResponse.getReceipt(accountManager._nodeClient) - newTopicId = receipt.topicId - mirrorNodeCmd.logger.debug(`Newly created topic ID is: ${newTopicId}`) - - const submitResponse = await new TopicMessageSubmitTransaction({ - topicId: newTopicId, - message: testMessage - }).execute(accountManager._nodeClient) - - const submitReceipt = await submitResponse.getReceipt(accountManager._nodeClient) - expect(submitReceipt.status).to.deep.equal(Status.Success) - } catch (e) { - mirrorNodeCmd.logger.showUserError(e) - expect.fail() - } - }).timeout(MINUTES) - - // trigger some extra transactions to trigger MirrorNode to fetch the transactions - accountCreationShouldSucceed(accountManager, mirrorNodeCmd, namespace) - accountCreationShouldSucceed(accountManager, mirrorNodeCmd, namespace) - - it('Check submit message result should success', async () => { - try { - const queryURL = `http://localhost:8080/api/v1/topics/${newTopicId}/messages` - let received = false - let receivedMessage = '' - - // wait until the transaction reached consensus and retrievable from the mirror node API - 
while (!received) { - const req = http.request(queryURL, - { method: 'GET', timeout: 100, headers: { Connection: 'close' } }, - (res) => { - res.setEncoding('utf8') - res.on('data', (chunk) => { - // convert chunk to json object - const obj = JSON.parse(chunk) - if (obj.messages.length === 0) { - mirrorNodeCmd.logger.debug('No messages yet') - } else { - // convert message from base64 to utf-8 - const base64 = obj.messages[0].message - const buff = Buffer.from(base64, 'base64') - receivedMessage = buff.toString('utf-8') - mirrorNodeCmd.logger.debug(`Received message: ${receivedMessage}`) - received = true - } - }) + mirrorNodeCmd.logger.debug('mirror node API and explorer GUI are running') + } catch (e) { + mirrorNodeCmd.logger.showUserError(e) + expect.fail() + } + }).timeout(MINUTES) + + it('Create topic and submit message should success', async () => { + try { + // Create a new public topic and submit a message + const txResponse = await new TopicCreateTransaction().execute(accountManager._nodeClient) + const receipt = await txResponse.getReceipt(accountManager._nodeClient) + newTopicId = receipt.topicId + mirrorNodeCmd.logger.debug(`Newly created topic ID is: ${newTopicId}`) + + const submitResponse = await new TopicMessageSubmitTransaction({ + topicId: newTopicId, + message: testMessage + }).execute(accountManager._nodeClient) + + const submitReceipt = await submitResponse.getReceipt(accountManager._nodeClient) + expect(submitReceipt.status).to.deep.equal(Status.Success) + } catch (e) { + mirrorNodeCmd.logger.showUserError(e) + expect.fail() + } + }).timeout(MINUTES) + + // trigger some extra transactions to trigger MirrorNode to fetch the transactions + accountCreationShouldSucceed(accountManager, mirrorNodeCmd, namespace) + accountCreationShouldSucceed(accountManager, mirrorNodeCmd, namespace) + + it('Check submit message result should success', async () => { + try { + const queryURL = `http://localhost:8080/api/v1/topics/${newTopicId}/messages` + let 
received = false + let receivedMessage = '' + + // wait until the transaction reached consensus and retrievable from the mirror node API + while (!received) { + const req = http.request(queryURL, + { method: 'GET', timeout: 100, headers: { Connection: 'close' } }, + (res) => { + res.setEncoding('utf8') + res.on('data', (chunk) => { + // convert chunk to json object + const obj = JSON.parse(chunk) + if (obj.messages.length === 0) { + mirrorNodeCmd.logger.debug('No messages yet') + } else { + // convert message from base64 to utf-8 + const base64 = obj.messages[0].message + const buff = Buffer.from(base64, 'base64') + receivedMessage = buff.toString('utf-8') + mirrorNodeCmd.logger.debug(`Received message: ${receivedMessage}`) + received = true + } + }) + }) + req.on('error', (e) => { + mirrorNodeCmd.logger.debug(`problem with request: ${e.message}`) }) - req.on('error', (e) => { - mirrorNodeCmd.logger.debug(`problem with request: ${e.message}`) - }) - req.end() // make the request - await sleep(2 * SECONDS) + req.end() // make the request + await sleep(2 * SECONDS) + } + await sleep(SECONDS) + expect(receivedMessage).to.equal(testMessage) + await k8.stopPortForward(portForwarder) + } catch (e) { + mirrorNodeCmd.logger.showUserError(e) + expect.fail() + } + }).timeout(5 * MINUTES) + + it('mirror node destroy should success', async () => { + try { + await expect(mirrorNodeCmd.destroy(argv)).to.eventually.be.ok + } catch (e) { + mirrorNodeCmd.logger.showUserError(e) + expect.fail() } - await sleep(SECONDS) - expect(receivedMessage).to.equal(testMessage) - await k8.stopPortForward(portForwarder) - } catch (e) { - mirrorNodeCmd.logger.showUserError(e) - expect.fail() - } - }).timeout(5 * MINUTES) - - it('mirror node destroy should success', async () => { - try { - await expect(mirrorNodeCmd.destroy(argv)).to.eventually.be.ok - } catch (e) { - mirrorNodeCmd.logger.showUserError(e) - expect.fail() - } - }).timeout(MINUTES) - - it('should apply the mirror node version from 
the --mirror-node-version flag', async () => { - const mirrorNodeVersion = '0.111.1' - const customArgv = { [flags.mirrorNodeVersion.constName]: mirrorNodeVersion, ...argv } - - const valuesArg = await mirrorNodeCmd.prepareValuesArg(customArgv) - - expect(valuesArg).to.contain(`--set global.image.tag=${mirrorNodeVersion}`) - }).timeout(5 * SECONDS) - - it('should not apply the mirror node version from the --mirror-node-version flag if left empty', async () => { - const mirrorNodeVersion = '' - const customArgv = { [flags.mirrorNodeVersion.constName]: mirrorNodeVersion, ...argv } - - const valuesArg = await mirrorNodeCmd.prepareValuesArg(customArgv) - - expect(valuesArg).not.to.contain('--set global.image.tag=') - }).timeout(5 * SECONDS) + }).timeout(MINUTES) + + it('should apply the mirror node version from the --mirror-node-version flag', async () => { + const mirrorNodeVersion = '0.111.1' + const customArgv = { [flags.mirrorNodeVersion.constName]: mirrorNodeVersion, ...argv } + + const valuesArg = await mirrorNodeCmd.prepareValuesArg(customArgv) + + expect(valuesArg).to.contain(`--set global.image.tag=${mirrorNodeVersion}`) + }).timeout(5 * SECONDS) + + it('should not apply the mirror node version from the --mirror-node-version flag if left empty', async () => { + const mirrorNodeVersion = '' + const customArgv = { [flags.mirrorNodeVersion.constName]: mirrorNodeVersion, ...argv } + + const valuesArg = await mirrorNodeCmd.prepareValuesArg(customArgv) + + expect(valuesArg).not.to.contain('--set global.image.tag=') + }).timeout(5 * SECONDS) + }) }) diff --git a/test/e2e/commands/network.test.ts b/test/e2e/commands/network.test.ts index a052c11f2..50cafe7a8 100644 --- a/test/e2e/commands/network.test.ts +++ b/test/e2e/commands/network.test.ts @@ -13,7 +13,6 @@ * See the License for the specific language governing permissions and * limitations under the License. 
* - * @mocha-environment steps */ import { it, describe, after, before } from 'mocha' import { expect } from 'chai' diff --git a/test/e2e/commands/node_add.test.ts b/test/e2e/commands/node_add.test.ts index ca7a3a588..2950f3351 100644 --- a/test/e2e/commands/node_add.test.ts +++ b/test/e2e/commands/node_add.test.ts @@ -13,14 +13,9 @@ * See the License for the specific language governing permissions and * limitations under the License. * - * @mocha-environment steps */ -import { describe } from 'mocha' - import { testNodeAdd } from '../../test_add.ts' import { MINUTES } from '../../../src/core/constants.ts' -describe('Node add with released hedera', () => { - const localBuildPath = '' - testNodeAdd(localBuildPath) -}).timeout(3 * MINUTES) +const localBuildPath = '' +testNodeAdd(localBuildPath, 'Node add with released hedera', 3 * MINUTES) diff --git a/test/e2e/commands/node_add_local.test.ts b/test/e2e/commands/node_add_local.test.ts index cddfbc8ed..96afcc79e 100644 --- a/test/e2e/commands/node_add_local.test.ts +++ b/test/e2e/commands/node_add_local.test.ts @@ -13,7 +13,6 @@ * See the License for the specific language governing permissions and * limitations under the License. * - * @mocha-environment steps */ import { describe } from 'mocha' diff --git a/test/e2e/commands/node_delete.test.ts b/test/e2e/commands/node_delete.test.ts index 4239f2991..616fec7d5 100644 --- a/test/e2e/commands/node_delete.test.ts +++ b/test/e2e/commands/node_delete.test.ts @@ -13,7 +13,6 @@ * See the License for the specific language governing permissions and * limitations under the License. 
* - * @mocha-environment steps */ import { it, describe, after } from 'mocha' import { expect } from 'chai' @@ -22,7 +21,7 @@ import { flags } from '../../../src/commands/index.ts' import { accountCreationShouldSucceed, balanceQueryShouldSucceed, - bootstrapNetwork, + e2eTestSuite, getDefaultArgv, HEDERA_PLATFORM_VERSION_TAG } from '../../test_util.ts' @@ -32,63 +31,61 @@ import { HEDERA_HAPI_PATH, MINUTES, ROOT_CONTAINER } from '../../../src/core/con import fs from 'fs' import type { PodName } from '../../../src/types/aliases.ts' -describe('Node delete', async () => { - const namespace = 'node-delete' - const nodeAlias = 'node1' - const argv = getDefaultArgv() - argv[flags.nodeAliasesUnparsed.name] = 'node1,node2,node3,node4' - argv[flags.nodeAlias.name] = nodeAlias - argv[flags.generateGossipKeys.name] = true - argv[flags.generateTlsKeys.name] = true - argv[flags.persistentVolumeClaims.name] = true - // set the env variable SOLO_CHARTS_DIR if developer wants to use local Solo charts - argv[flags.chartDirectory.name] = process.env.SOLO_CHARTS_DIR ?? undefined - argv[flags.releaseTag.name] = HEDERA_PLATFORM_VERSION_TAG - argv[flags.namespace.name] = namespace - argv[flags.quiet.name] = true - const bootstrapResp = await bootstrapNetwork(namespace, argv) - const nodeCmd = bootstrapResp.cmd.nodeCmd - const accountCmd = bootstrapResp.cmd.accountCmd - const k8 = bootstrapResp.opts.k8 +const namespace = 'node-delete' +const nodeAlias = 'node1' +const argv = getDefaultArgv() +argv[flags.nodeAliasesUnparsed.name] = 'node1,node2,node3,node4' +argv[flags.nodeAlias.name] = nodeAlias +argv[flags.generateGossipKeys.name] = true +argv[flags.generateTlsKeys.name] = true +argv[flags.persistentVolumeClaims.name] = true +// set the env variable SOLO_CHARTS_DIR if developer wants to use local Solo charts +argv[flags.chartDirectory.name] = process.env.SOLO_CHARTS_DIR ?? 
undefined +argv[flags.releaseTag.name] = HEDERA_PLATFORM_VERSION_TAG +argv[flags.namespace.name] = namespace +argv[flags.quiet.name] = true +e2eTestSuite(namespace, argv, undefined, undefined, undefined, undefined, undefined, undefined, true, (bootstrapResp) => { + describe('Node delete', async () => { + const nodeCmd = bootstrapResp.cmd.nodeCmd + const accountCmd = bootstrapResp.cmd.accountCmd + const k8 = bootstrapResp.opts.k8 - after(async function () { - this.timeout(10 * MINUTES) - await getNodeLogs(k8, namespace) - await k8.deleteNamespace(namespace) - }) + after(async function () { + this.timeout(10 * MINUTES) + await getNodeLogs(k8, namespace) + await k8.deleteNamespace(namespace) + }) - it('should succeed with init command', async () => { - const status = await accountCmd.init(argv) - expect(status).to.be.ok - }).timeout(8 * MINUTES) + it('should succeed with init command', async () => { + const status = await accountCmd.init(argv) + expect(status).to.be.ok + }).timeout(8 * MINUTES) - it('should delete a node from the network successfully', async () => { - await nodeCmd.delete(argv) - expect(nodeCmd.getUnusedConfigs(NodeCommand.DELETE_CONFIGS_NAME)).to.deep.equal([ - flags.app.constName, - flags.devMode.constName, - flags.endpointType.constName, - flags.quiet.constName - ]) + it('should delete a node from the network successfully', async () => { + await nodeCmd.delete(argv) + expect(nodeCmd.getUnusedConfigs(NodeCommand.DELETE_CONFIGS_NAME)).to.deep.equal([ + flags.app.constName, + flags.devMode.constName, + flags.endpointType.constName, + flags.quiet.constName + ]) - // @ts-ignore in order to access the private member - await nodeCmd.accountManager.close() - }).timeout(10 * MINUTES) + await bootstrapResp.opts.accountManager.close() + }).timeout(10 * MINUTES) - // @ts-ignore in order to access the private member - balanceQueryShouldSucceed(nodeCmd.accountManager, nodeCmd, namespace) + balanceQueryShouldSucceed(bootstrapResp.opts.accountManager, nodeCmd, 
namespace) - // @ts-ignore in order to access the private member - accountCreationShouldSucceed(nodeCmd.accountManager, nodeCmd, namespace) + accountCreationShouldSucceed(bootstrapResp.opts.accountManager, nodeCmd, namespace) - it('config.txt should no longer contain removed node alias name', async () => { - // read config.txt file from first node, read config.txt line by line, it should not contain value of nodeAlias - const pods = await k8.getPodsByLabel(['solo.hedera.com/type=network-node']) - const podName = pods[0].metadata.name as PodName - const tmpDir = getTmpDir() - await k8.copyFrom(podName, ROOT_CONTAINER, `${HEDERA_HAPI_PATH}/config.txt`, tmpDir) - const configTxt = fs.readFileSync(`${tmpDir}/config.txt`, 'utf8') - console.log('config.txt:', configTxt) - expect(configTxt).not.to.contain(nodeAlias) - }).timeout(10 * MINUTES) + it('config.txt should no longer contain removed node alias name', async () => { + // read config.txt file from first node, read config.txt line by line, it should not contain value of nodeAlias + const pods = await k8.getPodsByLabel(['solo.hedera.com/type=network-node']) + const podName = pods[0].metadata.name as PodName + const tmpDir = getTmpDir() + await k8.copyFrom(podName, ROOT_CONTAINER, `${HEDERA_HAPI_PATH}/config.txt`, tmpDir) + const configTxt = fs.readFileSync(`${tmpDir}/config.txt`, 'utf8') + console.log('config.txt:', configTxt) + expect(configTxt).not.to.contain(nodeAlias) + }).timeout(10 * MINUTES) + }) }) diff --git a/test/e2e/commands/node_local_hedera.test.ts b/test/e2e/commands/node_local_hedera.test.ts index ec1bf8878..8fe9f3176 100644 --- a/test/e2e/commands/node_local_hedera.test.ts +++ b/test/e2e/commands/node_local_hedera.test.ts @@ -14,11 +14,11 @@ * limitations under the License. 
* */ -import { describe, after } from 'mocha' +import { describe } from 'mocha' import { flags } from '../../../src/commands/index.ts' import { - bootstrapNetwork, + e2eTestSuite, getDefaultArgv, TEST_CLUSTER } from '../../test_util.ts' @@ -26,29 +26,28 @@ import { getNodeLogs } from '../../../src/core/helpers.ts' import { MINUTES } from '../../../src/core/constants.ts' import type { K8 } from '../../../src/core/index.ts' -describe('Node local build', () => { - const LOCAL_HEDERA = 'local-hedera-app' - const argv = getDefaultArgv() - argv[flags.nodeAliasesUnparsed.name] = 'node1,node2,node3' - argv[flags.generateGossipKeys.name] = true - argv[flags.generateTlsKeys.name] = true - argv[flags.clusterName.name] = TEST_CLUSTER - // set the env variable SOLO_CHARTS_DIR if developer wants to use local Solo charts - argv[flags.chartDirectory.name] = process.env.SOLO_CHARTS_DIR ?? undefined - argv[flags.quiet.name] = true +const LOCAL_HEDERA = 'local-hedera-app' +const argv = getDefaultArgv() +argv[flags.nodeAliasesUnparsed.name] = 'node1,node2,node3' +argv[flags.generateGossipKeys.name] = true +argv[flags.generateTlsKeys.name] = true +argv[flags.clusterName.name] = TEST_CLUSTER +// set the env variable SOLO_CHARTS_DIR if developer wants to use local Solo charts +argv[flags.chartDirectory.name] = process.env.SOLO_CHARTS_DIR ?? 
undefined +argv[flags.quiet.name] = true - let hederaK8: K8 - after(async function () { - this.timeout(10 * MINUTES) - await getNodeLogs(hederaK8, LOCAL_HEDERA) - await hederaK8.deleteNamespace(LOCAL_HEDERA) - }) +let hederaK8: K8 +console.log('Starting local build for Hedera app') +argv[flags.localBuildPath.name] = 'node1=../hedera-services/hedera-node/data/,../hedera-services/hedera-node/data,node3=../hedera-services/hedera-node/data' +argv[flags.namespace.name] = LOCAL_HEDERA - describe('Node for hedera app should start successfully', async () => { - console.log('Starting local build for Hedera app') - argv[flags.localBuildPath.name] = 'node1=../hedera-services/hedera-node/data/,../hedera-services/hedera-node/data,node3=../hedera-services/hedera-node/data' - argv[flags.namespace.name] = LOCAL_HEDERA - const bootstrapResp = await bootstrapNetwork(LOCAL_HEDERA, argv) +e2eTestSuite(LOCAL_HEDERA, argv, undefined, undefined, undefined, undefined, undefined, undefined, true, (bootstrapResp) => { + describe('Node for hedera app should have started successfully', () => { hederaK8 = bootstrapResp.opts.k8 + + it('get the logs and delete the namespace', async function () { + await getNodeLogs(hederaK8, LOCAL_HEDERA) + await hederaK8.deleteNamespace(LOCAL_HEDERA) + }).timeout(10 * MINUTES) }) }) diff --git a/test/e2e/commands/node_local_ptt.test.ts b/test/e2e/commands/node_local_ptt.test.ts index fc9af7fdc..f5a15c574 100644 --- a/test/e2e/commands/node_local_ptt.test.ts +++ b/test/e2e/commands/node_local_ptt.test.ts @@ -13,42 +13,36 @@ * See the License for the specific language governing permissions and * limitations under the License. 
* - * @mocha-environment steps */ -import { describe, after } from 'mocha' +import { describe, } from 'mocha' import { flags } from '../../../src/commands/index.ts' -import { bootstrapNetwork, getDefaultArgv, TEST_CLUSTER } from '../../test_util.ts' +import { e2eTestSuite, getDefaultArgv, TEST_CLUSTER } from '../../test_util.ts' import { getNodeLogs } from '../../../src/core/helpers.ts' import { MINUTES } from '../../../src/core/constants.ts' -import type { K8 } from '../../../src/core/index.ts' -describe('Node local build', () => { - const LOCAL_PTT = 'local-ptt-app' - const argv = getDefaultArgv() - argv[flags.nodeAliasesUnparsed.name] = 'node1,node2,node3' - argv[flags.generateGossipKeys.name] = true - argv[flags.generateTlsKeys.name] = true - argv[flags.clusterName.name] = TEST_CLUSTER - // set the env variable SOLO_CHARTS_DIR if developer wants to use local Solo charts - argv[flags.chartDirectory.name] = process.env.SOLO_CHARTS_DIR ?? undefined - argv[flags.quiet.name] = true +const LOCAL_PTT = 'local-ptt-app' +const argv = getDefaultArgv() +argv[flags.nodeAliasesUnparsed.name] = 'node1,node2,node3' +argv[flags.generateGossipKeys.name] = true +argv[flags.generateTlsKeys.name] = true +argv[flags.clusterName.name] = TEST_CLUSTER +// set the env variable SOLO_CHARTS_DIR if developer wants to use local Solo charts +argv[flags.chartDirectory.name] = process.env.SOLO_CHARTS_DIR ?? 
undefined +argv[flags.quiet.name] = true +console.log('Starting local build for Platform app') +argv[flags.localBuildPath.name] = '../hedera-services/platform-sdk/sdk/data,node1=../hedera-services/platform-sdk/sdk/data,node2=../hedera-services/platform-sdk/sdk/data' +argv[flags.app.name] = 'PlatformTestingTool.jar' +argv[flags.appConfig.name] = '../hedera-services/platform-sdk/platform-apps/tests/PlatformTestingTool/src/main/resources/FCMFCQ-Basic-2.5k-5m.json' +argv[flags.namespace.name] = LOCAL_PTT - let pttK8: K8 - after(async function () { - this.timeout(2 * MINUTES) +e2eTestSuite(LOCAL_PTT, argv, undefined, undefined, undefined, undefined, undefined, undefined, true, (bootstrapResp) => { + describe('Node for platform app should start successfully', () => { + const pttK8 = bootstrapResp.opts.k8 - await getNodeLogs(pttK8, LOCAL_PTT) - await pttK8.deleteNamespace(LOCAL_PTT) - }) - - describe('Node for platform app should start successfully', async () => { - console.log('Starting local build for Platform app') - argv[flags.localBuildPath.name] = '../hedera-services/platform-sdk/sdk/data,node1=../hedera-services/platform-sdk/sdk/data,node2=../hedera-services/platform-sdk/sdk/data' - argv[flags.app.name] = 'PlatformTestingTool.jar' - argv[flags.appConfig.name] = '../hedera-services/platform-sdk/platform-apps/tests/PlatformTestingTool/src/main/resources/FCMFCQ-Basic-2.5k-5m.json' - argv[flags.namespace.name] = LOCAL_PTT - const bootstrapResp = await bootstrapNetwork(LOCAL_PTT, argv) - pttK8 = bootstrapResp.opts.k8 + it('get the logs and delete the namespace',async function () { + await getNodeLogs(pttK8, LOCAL_PTT) + await pttK8.deleteNamespace(LOCAL_PTT) + }).timeout(2 * MINUTES) }) }) diff --git a/test/e2e/commands/node_pem_kill.test.ts b/test/e2e/commands/node_pem_kill.test.ts index 9bb72d35e..2a1374f2f 100644 --- a/test/e2e/commands/node_pem_kill.test.ts +++ b/test/e2e/commands/node_pem_kill.test.ts @@ -13,12 +13,7 @@ * See the License for the specific language 
governing permissions and * limitations under the License. * - * @mocha-environment steps */ -import { describe } from 'mocha' - import { e2eNodeKeyRefreshTest } from '../e2e_node_util.ts' -describe('NodeCommand', () => { - e2eNodeKeyRefreshTest('node-cmd-e2e-pem-kill', 'kill') -}) +e2eNodeKeyRefreshTest('node-cmd-e2e-pem-kill', 'kill') diff --git a/test/e2e/commands/node_pem_stop.test.ts b/test/e2e/commands/node_pem_stop.test.ts index 98656ddc2..b09d9e08c 100644 --- a/test/e2e/commands/node_pem_stop.test.ts +++ b/test/e2e/commands/node_pem_stop.test.ts @@ -13,12 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. * - * @mocha-environment steps */ -import { describe } from 'mocha' - import { e2eNodeKeyRefreshTest } from '../e2e_node_util.ts' -describe('NodeCommand', () => { - e2eNodeKeyRefreshTest('node-cmd-e2e-pem-stop', 'stop') -}) +e2eNodeKeyRefreshTest('node-cmd-e2e-pem-stop', 'stop') diff --git a/test/e2e/commands/node_update.test.ts b/test/e2e/commands/node_update.test.ts index 8498f722e..1da33f175 100644 --- a/test/e2e/commands/node_update.test.ts +++ b/test/e2e/commands/node_update.test.ts @@ -13,7 +13,6 @@ * See the License for the specific language governing permissions and * limitations under the License. 
* - * @mocha-environment steps */ import { it, describe, after } from 'mocha' import { expect } from 'chai' @@ -23,7 +22,7 @@ import { constants } from '../../../src/core/index.ts' import { accountCreationShouldSucceed, balanceQueryShouldSucceed, - bootstrapNetwork, + e2eTestSuite, getDefaultArgv, getNodeAliasesPrivateKeysHash, getTmpDir, HEDERA_PLATFORM_VERSION_TAG } from '../../test_util.ts' @@ -33,116 +32,110 @@ import { HEDERA_HAPI_PATH, MINUTES, ROOT_CONTAINER } from '../../../src/core/con import fs from 'fs' import type { PodName } from '../../../src/types/aliases.ts' -describe('Node update', async () => { - const defaultTimeout = 2 * MINUTES - const namespace = 'node-update' - const updateNodeId = 'node2' - const newAccountId = '0.0.7' - const argv = getDefaultArgv() - argv[flags.nodeAliasesUnparsed.name] = 'node1,node2,node3' - argv[flags.nodeAlias.name] = updateNodeId - - argv[flags.newAccountNumber.name] = newAccountId - argv[flags.newAdminKey.name] = '302e020100300506032b6570042204200cde8d512569610f184b8b399e91e46899805c6171f7c2b8666d2a417bcc66c2' - - argv[flags.generateGossipKeys.name] = true - argv[flags.generateTlsKeys.name] = true - // set the env variable SOLO_CHARTS_DIR if developer wants to use local Solo charts - argv[flags.chartDirectory.name] = process.env.SOLO_CHARTS_DIR ?? 
undefined - argv[flags.releaseTag.name] = HEDERA_PLATFORM_VERSION_TAG - argv[flags.namespace.name] = namespace - argv[flags.persistentVolumeClaims.name] = true - argv[flags.quiet.name] = true - const bootstrapResp = await bootstrapNetwork(namespace, argv) - const nodeCmd = bootstrapResp.cmd.nodeCmd - const accountCmd = bootstrapResp.cmd.accountCmd - const k8 = bootstrapResp.opts.k8 - let existingServiceMap - let existingNodeIdsPrivateKeysHash - - after(async function () { - this.timeout(10 * MINUTES) - - await getNodeLogs(k8, namespace) - await nodeCmd.stop(argv) - await k8.deleteNamespace(namespace) - }) - - it('cache current version of private keys', async () => { - // @ts-ignore - existingServiceMap = await nodeCmd.accountManager.getNodeServiceMap(namespace) - existingNodeIdsPrivateKeysHash = await getNodeAliasesPrivateKeysHash(existingServiceMap, namespace, k8, getTmpDir()) - }).timeout(defaultTimeout) - - it('should succeed with init command', async () => { - const status = await accountCmd.init(argv) - expect(status).to.be.ok - }).timeout(8 * MINUTES) - - it('should update a new node property successfully', async () => { - // generate gossip and tls keys for the updated node - const tmpDir = getTmpDir() - - // @ts-ignore - const signingKey = await nodeCmd.keyManager.generateSigningKey(updateNodeId) - // @ts-ignore - const signingKeyFiles = await nodeCmd.keyManager.storeSigningKey(updateNodeId, signingKey, tmpDir) - nodeCmd.logger.debug(`generated test gossip signing keys for node ${updateNodeId} : ${signingKeyFiles.certificateFile}`) - argv[flags.gossipPublicKey.name] = signingKeyFiles.certificateFile - argv[flags.gossipPrivateKey.name] = signingKeyFiles.privateKeyFile - - // @ts-ignore - const tlsKey = await nodeCmd.keyManager.generateGrpcTLSKey(updateNodeId) - // @ts-ignore - const tlsKeyFiles = await nodeCmd.keyManager.storeTLSKey(updateNodeId, tlsKey, tmpDir) - nodeCmd.logger.debug(`generated test TLS keys for node ${updateNodeId} : 
${tlsKeyFiles.certificateFile}`) - argv[flags.tlsPublicKey.name] = tlsKeyFiles.certificateFile - argv[flags.tlsPrivateKey.name] = tlsKeyFiles.privateKeyFile - - await nodeCmd.update(argv) - expect(nodeCmd.getUnusedConfigs(NodeCommand.UPDATE_CONFIGS_NAME)).to.deep.equal([ - flags.app.constName, - flags.devMode.constName, - flags.quiet.constName - ]) - // @ts-ignore - await nodeCmd.accountManager.close() - }).timeout(30 * MINUTES) - - // @ts-ignore - balanceQueryShouldSucceed(nodeCmd.accountManager, nodeCmd, namespace) - - // @ts-ignore - accountCreationShouldSucceed(nodeCmd.accountManager, nodeCmd, namespace) - - it('signing key and tls key should not match previous one', async () => { - const currentNodeIdsPrivateKeysHash = await getNodeAliasesPrivateKeysHash(existingServiceMap, namespace, k8, getTmpDir()) - - for (const [nodeAlias, existingKeyHashMap] of existingNodeIdsPrivateKeysHash.entries()) { - const currentNodeKeyHashMap = currentNodeIdsPrivateKeysHash.get(nodeAlias) - - for (const [keyFileName, existingKeyHash] of existingKeyHashMap.entries()) { - if (nodeAlias === updateNodeId && - (keyFileName.startsWith(constants.SIGNING_KEY_PREFIX) || keyFileName.startsWith('hedera'))) { - expect(`${nodeAlias}:${keyFileName}:${currentNodeKeyHashMap.get(keyFileName)}`).not.to.equal( - `${nodeAlias}:${keyFileName}:${existingKeyHash}`) - } else { - expect(`${nodeAlias}:${keyFileName}:${currentNodeKeyHashMap.get(keyFileName)}`).to.equal( - `${nodeAlias}:${keyFileName}:${existingKeyHash}`) +const defaultTimeout = 2 * MINUTES +const namespace = 'node-update' +const updateNodeId = 'node2' +const newAccountId = '0.0.7' +const argv = getDefaultArgv() +argv[flags.nodeAliasesUnparsed.name] = 'node1,node2,node3' +argv[flags.nodeAlias.name] = updateNodeId + +argv[flags.newAccountNumber.name] = newAccountId +argv[flags.newAdminKey.name] = '302e020100300506032b6570042204200cde8d512569610f184b8b399e91e46899805c6171f7c2b8666d2a417bcc66c2' + +argv[flags.generateGossipKeys.name] = true 
+argv[flags.generateTlsKeys.name] = true +// set the env variable SOLO_CHARTS_DIR if developer wants to use local Solo charts +argv[flags.chartDirectory.name] = process.env.SOLO_CHARTS_DIR ?? undefined +argv[flags.releaseTag.name] = HEDERA_PLATFORM_VERSION_TAG +argv[flags.namespace.name] = namespace +argv[flags.persistentVolumeClaims.name] = true +argv[flags.quiet.name] = true + +e2eTestSuite(namespace, argv, undefined, undefined, undefined, undefined, undefined, undefined, true, (bootstrapResp) => { + describe('Node update', async () => { + const nodeCmd = bootstrapResp.cmd.nodeCmd + const accountCmd = bootstrapResp.cmd.accountCmd + const k8 = bootstrapResp.opts.k8 + let existingServiceMap + let existingNodeIdsPrivateKeysHash + + after(async function () { + this.timeout(10 * MINUTES) + + await getNodeLogs(k8, namespace) + await nodeCmd.stop(argv) + await k8.deleteNamespace(namespace) + }) + + it('cache current version of private keys', async () => { + existingServiceMap = await bootstrapResp.opts.accountManager.getNodeServiceMap(namespace) + existingNodeIdsPrivateKeysHash = await getNodeAliasesPrivateKeysHash(existingServiceMap, namespace, k8, getTmpDir()) + }).timeout(defaultTimeout) + + it('should succeed with init command', async () => { + const status = await accountCmd.init(argv) + expect(status).to.be.ok + }).timeout(8 * MINUTES) + + it('should update a new node property successfully', async () => { + // generate gossip and tls keys for the updated node + const tmpDir = getTmpDir() + + const signingKey = await bootstrapResp.opts.keyManager.generateSigningKey(updateNodeId) + const signingKeyFiles = await bootstrapResp.opts.keyManager.storeSigningKey(updateNodeId, signingKey, tmpDir) + nodeCmd.logger.debug(`generated test gossip signing keys for node ${updateNodeId} : ${signingKeyFiles.certificateFile}`) + argv[flags.gossipPublicKey.name] = signingKeyFiles.certificateFile + argv[flags.gossipPrivateKey.name] = signingKeyFiles.privateKeyFile + + const tlsKey = 
await bootstrapResp.opts.keyManager.generateGrpcTLSKey(updateNodeId) + const tlsKeyFiles = await bootstrapResp.opts.keyManager.storeTLSKey(updateNodeId, tlsKey, tmpDir) + nodeCmd.logger.debug(`generated test TLS keys for node ${updateNodeId} : ${tlsKeyFiles.certificateFile}`) + argv[flags.tlsPublicKey.name] = tlsKeyFiles.certificateFile + argv[flags.tlsPrivateKey.name] = tlsKeyFiles.privateKeyFile + + await nodeCmd.update(argv) + expect(nodeCmd.getUnusedConfigs(NodeCommand.UPDATE_CONFIGS_NAME)).to.deep.equal([ + flags.app.constName, + flags.devMode.constName, + flags.quiet.constName + ]) + await bootstrapResp.opts.accountManager.close() + }).timeout(30 * MINUTES) + + balanceQueryShouldSucceed(bootstrapResp.opts.accountManager, nodeCmd, namespace) + + accountCreationShouldSucceed(bootstrapResp.opts.accountManager, nodeCmd, namespace) + + it('signing key and tls key should not match previous one', async () => { + const currentNodeIdsPrivateKeysHash = await getNodeAliasesPrivateKeysHash(existingServiceMap, namespace, k8, getTmpDir()) + + for (const [nodeAlias, existingKeyHashMap] of existingNodeIdsPrivateKeysHash.entries()) { + const currentNodeKeyHashMap = currentNodeIdsPrivateKeysHash.get(nodeAlias) + + for (const [keyFileName, existingKeyHash] of existingKeyHashMap.entries()) { + if (nodeAlias === updateNodeId && + (keyFileName.startsWith(constants.SIGNING_KEY_PREFIX) || keyFileName.startsWith('hedera'))) { + expect(`${nodeAlias}:${keyFileName}:${currentNodeKeyHashMap.get(keyFileName)}`).not.to.equal( + `${nodeAlias}:${keyFileName}:${existingKeyHash}`) + } else { + expect(`${nodeAlias}:${keyFileName}:${currentNodeKeyHashMap.get(keyFileName)}`).to.equal( + `${nodeAlias}:${keyFileName}:${existingKeyHash}`) + } } } - } - }).timeout(defaultTimeout) - - it('config.txt should be changed with new account id', async () => { - // read config.txt file from first node, read config.txt line by line, it should not contain value of newAccountId - const pods = await 
k8.getPodsByLabel(['solo.hedera.com/type=network-node']) - const podName = pods[0].metadata.name as PodName - const tmpDir = getTmpDir() - await k8.copyFrom(podName, ROOT_CONTAINER, `${HEDERA_HAPI_PATH}/config.txt`, tmpDir) - const configTxt = fs.readFileSync(`${tmpDir}/config.txt`, 'utf8') - console.log('config.txt:', configTxt) - - expect(configTxt).to.contain(newAccountId) - }).timeout(10 * MINUTES) + }).timeout(defaultTimeout) + + it('config.txt should be changed with new account id', async () => { + // read config.txt file from first node, read config.txt line by line, it should not contain value of newAccountId + const pods = await k8.getPodsByLabel(['solo.hedera.com/type=network-node']) + const podName = pods[0].metadata.name as PodName + const tmpDir = getTmpDir() + await k8.copyFrom(podName, ROOT_CONTAINER, `${HEDERA_HAPI_PATH}/config.txt`, tmpDir) + const configTxt = fs.readFileSync(`${tmpDir}/config.txt`, 'utf8') + console.log('config.txt:', configTxt) + + expect(configTxt).to.contain(newAccountId) + }).timeout(10 * MINUTES) + }) }) diff --git a/test/e2e/commands/node_upgrade.test.ts b/test/e2e/commands/node_upgrade.test.ts index d41f4a73a..8c3546602 100644 --- a/test/e2e/commands/node_upgrade.test.ts +++ b/test/e2e/commands/node_upgrade.test.ts @@ -13,74 +13,76 @@ * See the License for the specific language governing permissions and * limitations under the License. 
* - * @mocha-environment steps */ import { it, describe, after } from 'mocha' import { expect } from 'chai' import { flags } from '../../../src/commands/index.ts' import { - bootstrapNetwork, + e2eTestSuite, getDefaultArgv, HEDERA_PLATFORM_VERSION_TAG } from '../../test_util.ts' import { getNodeLogs } from '../../../src/core/helpers.ts' -import { PREPARE_UPGRADE_CONFIGS_NAME, DOWNLOAD_GENERATED_FILES_CONFIGS_NAME } from '../../../src/commands/node/configs.ts' +import { + PREPARE_UPGRADE_CONFIGS_NAME, + DOWNLOAD_GENERATED_FILES_CONFIGS_NAME +} from '../../../src/commands/node/configs.ts' import { MINUTES } from '../../../src/core/constants.ts' -describe('Node upgrade', async () => { - const namespace = 'node-upgrade' - const argv = getDefaultArgv() - argv[flags.nodeAliasesUnparsed.name] = 'node1,node2,node3' - argv[flags.generateGossipKeys.name] = true - argv[flags.generateTlsKeys.name] = true - argv[flags.persistentVolumeClaims.name] = true - // set the env variable SOLO_CHARTS_DIR if developer wants to use local solo charts - argv[flags.chartDirectory.name] = process.env.SOLO_CHARTS_DIR ? process.env.SOLO_CHARTS_DIR : undefined - argv[flags.releaseTag.name] = HEDERA_PLATFORM_VERSION_TAG - argv[flags.namespace.name] = namespace +const namespace = 'node-upgrade' +const argv = getDefaultArgv() +argv[flags.nodeAliasesUnparsed.name] = 'node1,node2,node3' +argv[flags.generateGossipKeys.name] = true +argv[flags.generateTlsKeys.name] = true +argv[flags.persistentVolumeClaims.name] = true +// set the env variable SOLO_CHARTS_DIR if developer wants to use local solo charts +argv[flags.chartDirectory.name] = process.env.SOLO_CHARTS_DIR ? 
process.env.SOLO_CHARTS_DIR : undefined +argv[flags.releaseTag.name] = HEDERA_PLATFORM_VERSION_TAG +argv[flags.namespace.name] = namespace - const upgradeArgv = getDefaultArgv() +const upgradeArgv = getDefaultArgv() - const bootstrapResp = await bootstrapNetwork(namespace, argv) - const nodeCmd = bootstrapResp.cmd.nodeCmd - const accountCmd = bootstrapResp.cmd.accountCmd - const k8 = bootstrapResp.opts.k8 +e2eTestSuite(namespace, argv, undefined, undefined, undefined, undefined, undefined, undefined, true, (bootstrapResp) => { + describe('Node upgrade', async () => { + const nodeCmd = bootstrapResp.cmd.nodeCmd + const accountCmd = bootstrapResp.cmd.accountCmd + const k8 = bootstrapResp.opts.k8 - after(async function () { - this.timeout(10 * MINUTES) + after(async function () { + this.timeout(10 * MINUTES) - await getNodeLogs(k8, namespace) - await k8.deleteNamespace(namespace) - }) + await getNodeLogs(k8, namespace) + await k8.deleteNamespace(namespace) + }) - it('should succeed with init command', async () => { - const status = await accountCmd.init(argv) - expect(status).to.be.ok - }).timeout(8 * MINUTES) + it('should succeed with init command', async () => { + const status = await accountCmd.init(argv) + expect(status).to.be.ok + }).timeout(8 * MINUTES) - it('should prepare network upgrade successfully', async () => { - await nodeCmd.prepareUpgrade(upgradeArgv) - expect(nodeCmd.getUnusedConfigs(PREPARE_UPGRADE_CONFIGS_NAME)).to.deep.equal([ - flags.devMode.constName - ]) - }).timeout(5 * MINUTES) + it('should prepare network upgrade successfully', async () => { + await nodeCmd.prepareUpgrade(upgradeArgv) + expect(nodeCmd.getUnusedConfigs(PREPARE_UPGRADE_CONFIGS_NAME)).to.deep.equal([ + flags.devMode.constName + ]) + }).timeout(5 * MINUTES) - it('should download generated files successfully', async () => { - await nodeCmd.downloadGeneratedFiles(upgradeArgv) - expect(nodeCmd.getUnusedConfigs(DOWNLOAD_GENERATED_FILES_CONFIGS_NAME)).to.deep.equal([ - 
flags.devMode.constName, - 'allNodeAliases' - ]) - }).timeout(5 * MINUTES) + it('should download generated files successfully', async () => { + await nodeCmd.downloadGeneratedFiles(upgradeArgv) + expect(nodeCmd.getUnusedConfigs(DOWNLOAD_GENERATED_FILES_CONFIGS_NAME)).to.deep.equal([ + flags.devMode.constName, + 'allNodeAliases' + ]) + }).timeout(5 * MINUTES) - it('should upgrade all nodes on the network successfully', async () => { - await nodeCmd.freezeUpgrade(upgradeArgv) - expect(nodeCmd.getUnusedConfigs(PREPARE_UPGRADE_CONFIGS_NAME)).to.deep.equal([ - flags.devMode.constName - ]) + it('should upgrade all nodes on the network successfully', async () => { + await nodeCmd.freezeUpgrade(upgradeArgv) + expect(nodeCmd.getUnusedConfigs(PREPARE_UPGRADE_CONFIGS_NAME)).to.deep.equal([ + flags.devMode.constName + ]) - // @ts-ignore in order to access the private member - await nodeCmd.accountManager.close() - }).timeout(5 * MINUTES) + await bootstrapResp.opts.accountManager.close() + }).timeout(5 * MINUTES) + }) }) diff --git a/test/e2e/commands/relay.test.ts b/test/e2e/commands/relay.test.ts index 78a6773b1..e73536a58 100644 --- a/test/e2e/commands/relay.test.ts +++ b/test/e2e/commands/relay.test.ts @@ -19,40 +19,40 @@ import { expect } from 'chai' import each from 'mocha-each' import { flags } from '../../../src/commands/index.ts' -import { bootstrapNetwork, getDefaultArgv, HEDERA_PLATFORM_VERSION_TAG, TEST_CLUSTER } from '../../test_util.ts' +import { e2eTestSuite, getDefaultArgv, HEDERA_PLATFORM_VERSION_TAG, TEST_CLUSTER } from '../../test_util.ts' import * as version from '../../../version.ts' import { getNodeLogs, sleep } from '../../../src/core/helpers.ts' import { RelayCommand } from '../../../src/commands/relay.ts' import { MINUTES } from '../../../src/core/constants.ts' -describe('RelayCommand', async () => { - const testName = 'relay-cmd-e2e' - const namespace = testName - const argv = getDefaultArgv() - argv[flags.namespace.name] = namespace - 
argv[flags.releaseTag.name] = HEDERA_PLATFORM_VERSION_TAG - argv[flags.nodeAliasesUnparsed.name] = 'node1,node2' - argv[flags.generateGossipKeys.name] = true - argv[flags.generateTlsKeys.name] = true - argv[flags.clusterName.name] = TEST_CLUSTER - argv[flags.soloChartVersion.name] = version.SOLO_CHART_VERSION - argv[flags.force.name] = true - argv[flags.relayReleaseTag.name] = flags.relayReleaseTag.definition.defaultValue - argv[flags.quiet.name] = true +const testName = 'relay-cmd-e2e' +const namespace = testName +const argv = getDefaultArgv() +argv[flags.namespace.name] = namespace +argv[flags.releaseTag.name] = HEDERA_PLATFORM_VERSION_TAG +argv[flags.nodeAliasesUnparsed.name] = 'node1,node2' +argv[flags.generateGossipKeys.name] = true +argv[flags.generateTlsKeys.name] = true +argv[flags.clusterName.name] = TEST_CLUSTER +argv[flags.soloChartVersion.name] = version.SOLO_CHART_VERSION +argv[flags.force.name] = true +argv[flags.relayReleaseTag.name] = flags.relayReleaseTag.definition.defaultValue +argv[flags.quiet.name] = true - const bootstrapResp = await bootstrapNetwork(testName, argv) - const k8 = bootstrapResp.opts.k8 - const configManager = bootstrapResp.opts.configManager - const relayCmd = new RelayCommand(bootstrapResp.opts) +e2eTestSuite(testName, argv, undefined, undefined, undefined, undefined, undefined, undefined, true, (bootstrapResp) => { + describe('RelayCommand', async () => { + const k8 = bootstrapResp.opts.k8 + const configManager = bootstrapResp.opts.configManager + const relayCmd = new RelayCommand(bootstrapResp.opts) - after(async () => { - await getNodeLogs(k8, namespace) - await k8.deleteNamespace(namespace) - }) + after(async () => { + await getNodeLogs(k8, namespace) + await k8.deleteNamespace(namespace) + }) - afterEach(async () => await sleep(5)) + afterEach(async () => await sleep(5)) - each(['node1', 'node1,node2']) + each(['node1', 'node1,node2']) .it('relay deploy and destroy should work with $value', async function (relayNodes) { 
this.timeout(5 * MINUTES) @@ -81,4 +81,5 @@ describe('RelayCommand', async () => { expect.fail() } }) + }) }) diff --git a/test/e2e/commands/separate_node_add.test.ts b/test/e2e/commands/separate_node_add.test.ts index fb29da8fb..c5d0fb161 100644 --- a/test/e2e/commands/separate_node_add.test.ts +++ b/test/e2e/commands/separate_node_add.test.ts @@ -13,7 +13,6 @@ * See the License for the specific language governing permissions and * limitations under the License. * - * @mocha-environment steps */ import { it, describe, after } from 'mocha' import { expect } from 'chai' @@ -22,7 +21,7 @@ import { flags } from '../../../src/commands/index.ts' import { accountCreationShouldSucceed, balanceQueryShouldSucceed, - bootstrapNetwork, + e2eTestSuite, getDefaultArgv, getNodeAliasesPrivateKeysHash, getTmpDir, HEDERA_PLATFORM_VERSION_TAG @@ -31,96 +30,97 @@ import { getNodeLogs } from '../../../src/core/helpers.ts' import { NodeCommand } from '../../../src/commands/node.ts' import { MINUTES } from '../../../src/core/constants.ts' -describe('Node add via separated commands should success', async () => { - const defaultTimeout = 2 * MINUTES - const namespace = 'node-add-separated' - const argv = getDefaultArgv() - argv[flags.nodeAliasesUnparsed.name] = 'node1,node2,node3' - argv[flags.generateGossipKeys.name] = true - argv[flags.generateTlsKeys.name] = true - // set the env variable SOLO_CHARTS_DIR if developer wants to use local Solo charts - argv[flags.chartDirectory.name] = process.env.SOLO_CHARTS_DIR ?? 
undefined - argv[flags.releaseTag.name] = HEDERA_PLATFORM_VERSION_TAG - argv[flags.namespace.name] = namespace - argv[flags.force.name] = true - argv[flags.persistentVolumeClaims.name] = true - argv[flags.quiet.name] = true - - const argvPrepare = Object.assign({}, argv) - - const tempDir = 'contextDir' - argvPrepare[flags.outputDir.name] = tempDir - - const argvExecute = getDefaultArgv() - argvExecute[flags.inputDir.name] = tempDir - - const bootstrapResp = await bootstrapNetwork(namespace, argv) - const nodeCmd = bootstrapResp.cmd.nodeCmd - const accountCmd = bootstrapResp.cmd.accountCmd - const networkCmd = bootstrapResp.cmd.networkCmd - const k8 = bootstrapResp.opts.k8 - let existingServiceMap - let existingNodeIdsPrivateKeysHash - - after(async function () { - this.timeout(10 * MINUTES) - - await getNodeLogs(k8, namespace) - // @ts-ignore - await nodeCmd.accountManager.close() - await nodeCmd.stop(argv) - await networkCmd.destroy(argv) - await k8.deleteNamespace(namespace) - }) +const defaultTimeout = 2 * MINUTES +const namespace = 'node-add-separated' +const argv = getDefaultArgv() +argv[flags.nodeAliasesUnparsed.name] = 'node1,node2,node3' +argv[flags.generateGossipKeys.name] = true +argv[flags.generateTlsKeys.name] = true +// set the env variable SOLO_CHARTS_DIR if developer wants to use local Solo charts +argv[flags.chartDirectory.name] = process.env.SOLO_CHARTS_DIR ?? 
undefined +argv[flags.releaseTag.name] = HEDERA_PLATFORM_VERSION_TAG +argv[flags.namespace.name] = namespace +argv[flags.force.name] = true +argv[flags.persistentVolumeClaims.name] = true +argv[flags.quiet.name] = true + +const argvPrepare = Object.assign({}, argv) + +const tempDir = 'contextDir' +argvPrepare[flags.outputDir.name] = tempDir + +const argvExecute = getDefaultArgv() +argvExecute[flags.inputDir.name] = tempDir + +e2eTestSuite(namespace, argv, undefined, undefined, undefined, undefined, undefined, undefined, true, (bootstrapResp) => { + describe('Node add via separated commands should success', async () => { + const nodeCmd = bootstrapResp.cmd.nodeCmd + const accountCmd = bootstrapResp.cmd.accountCmd + const networkCmd = bootstrapResp.cmd.networkCmd + const k8 = bootstrapResp.opts.k8 + let existingServiceMap + let existingNodeIdsPrivateKeysHash + + after(async function () { + this.timeout(10 * MINUTES) + + await getNodeLogs(k8, namespace) + // @ts-ignore + await nodeCmd.accountManager.close() + await nodeCmd.stop(argv) + await networkCmd.destroy(argv) + await k8.deleteNamespace(namespace) + }) + + it('cache current version of private keys', async () => { + // @ts-ignore + existingServiceMap = await nodeCmd.accountManager.getNodeServiceMap(namespace) + existingNodeIdsPrivateKeysHash = await getNodeAliasesPrivateKeysHash(existingServiceMap, namespace, k8, getTmpDir()) + }).timeout(defaultTimeout) + + it('should succeed with init command', async () => { + const status = await accountCmd.init(argv) + expect(status).to.be.ok + }).timeout(8 * MINUTES) + + it('should add a new node to the network via the segregated commands successfully', async () => { + await nodeCmd.addPrepare(argvPrepare) + await nodeCmd.addSubmitTransactions(argvExecute) + await nodeCmd.addExecute(argvExecute) + expect(nodeCmd.getUnusedConfigs(NodeCommand.ADD_CONFIGS_NAME)).to.deep.equal([ + flags.app.constName, + flags.chainId.constName, + flags.devMode.constName, + 
flags.generateGossipKeys.constName, + flags.generateTlsKeys.constName, + flags.gossipEndpoints.constName, + flags.grpcEndpoints.constName, + flags.quiet.constName, + flags.adminKey.constName, + 'curDate', + 'freezeAdminPrivateKey' + ]) + await bootstrapResp.opts.accountManager.close() + }).timeout(12 * MINUTES) - it('cache current version of private keys', async () => { - // @ts-ignore - existingServiceMap = await nodeCmd.accountManager.getNodeServiceMap(namespace) - existingNodeIdsPrivateKeysHash = await getNodeAliasesPrivateKeysHash(existingServiceMap, namespace, k8, getTmpDir()) - }).timeout(defaultTimeout) - - it('should succeed with init command', async () => { - const status = await accountCmd.init(argv) - expect(status).to.be.ok - }).timeout(8 * MINUTES) - - it('should add a new node to the network via the segregated commands successfully', async () => { - await nodeCmd.addPrepare(argvPrepare) - await nodeCmd.addSubmitTransactions(argvExecute) - await nodeCmd.addExecute(argvExecute) - expect(nodeCmd.getUnusedConfigs(NodeCommand.ADD_CONFIGS_NAME)).to.deep.equal([ - flags.app.constName, - flags.chainId.constName, - flags.devMode.constName, - flags.generateGossipKeys.constName, - flags.generateTlsKeys.constName, - flags.gossipEndpoints.constName, - flags.grpcEndpoints.constName, - flags.quiet.constName, - flags.adminKey.constName, - 'curDate', - 'freezeAdminPrivateKey' - ]) // @ts-ignore - await nodeCmd.accountManager.close() - }).timeout(12 * MINUTES) - - // @ts-ignore - balanceQueryShouldSucceed(nodeCmd.accountManager, nodeCmd, namespace) + balanceQueryShouldSucceed(bootstrapResp.opts.accountManager, nodeCmd, namespace) - // @ts-ignore - accountCreationShouldSucceed(nodeCmd.accountManager, nodeCmd, namespace) + // @ts-ignore + accountCreationShouldSucceed(bootstrapResp.opts.accountManager, nodeCmd, namespace) - it('existing nodes private keys should not have changed', async () => { - const currentNodeIdsPrivateKeysHash = await 
getNodeAliasesPrivateKeysHash(existingServiceMap, namespace, k8, getTmpDir()) + it('existing nodes private keys should not have changed', async () => { + const currentNodeIdsPrivateKeysHash = await getNodeAliasesPrivateKeysHash(existingServiceMap, namespace, k8, getTmpDir()) - for (const [nodeAlias, existingKeyHashMap] of existingNodeIdsPrivateKeysHash.entries()) { - const currentNodeKeyHashMap = currentNodeIdsPrivateKeysHash.get(nodeAlias) + for (const [nodeAlias, existingKeyHashMap] of existingNodeIdsPrivateKeysHash.entries()) { + const currentNodeKeyHashMap = currentNodeIdsPrivateKeysHash.get(nodeAlias) - for (const [keyFileName, existingKeyHash] of existingKeyHashMap.entries()) { - expect(`${nodeAlias}:${keyFileName}:${currentNodeKeyHashMap.get(keyFileName)}`).to.equal( - `${nodeAlias}:${keyFileName}:${existingKeyHash}`) + for (const [keyFileName, existingKeyHash] of existingKeyHashMap.entries()) { + expect(`${nodeAlias}:${keyFileName}:${currentNodeKeyHashMap.get(keyFileName)}`).to.equal( + `${nodeAlias}:${keyFileName}:${existingKeyHash}`) + } } - } - }).timeout(defaultTimeout) -}).timeout(3 * MINUTES) + }).timeout(defaultTimeout) + }).timeout(3 * MINUTES) +}) + diff --git a/test/e2e/commands/separate_node_delete.test.ts b/test/e2e/commands/separate_node_delete.test.ts index 2081aa52f..ee87c6e0b 100644 --- a/test/e2e/commands/separate_node_delete.test.ts +++ b/test/e2e/commands/separate_node_delete.test.ts @@ -13,7 +13,6 @@ * See the License for the specific language governing permissions and * limitations under the License. 
* - * @mocha-environment steps */ import { it, describe, after } from 'mocha' import { expect } from 'chai' @@ -22,7 +21,7 @@ import { flags } from '../../../src/commands/index.ts' import { accountCreationShouldSucceed, balanceQueryShouldSucceed, - bootstrapNetwork, + e2eTestSuite, getDefaultArgv, HEDERA_PLATFORM_VERSION_TAG } from '../../test_util.ts' @@ -32,75 +31,74 @@ import { HEDERA_HAPI_PATH, MINUTES, ROOT_CONTAINER } from '../../../src/core/con import fs from 'fs' import type { NodeAlias, PodName } from '../../../src/types/aliases.ts' -describe('Node delete via separated commands', async () => { - const namespace = 'node-delete-separate' - const nodeAlias = 'node1' as NodeAlias - const argv = getDefaultArgv() - argv[flags.nodeAliasesUnparsed.name] = 'node1,node2,node3,node4' - argv[flags.nodeAlias.name] = nodeAlias - argv[flags.generateGossipKeys.name] = true - argv[flags.generateTlsKeys.name] = true - argv[flags.persistentVolumeClaims.name] = true - // set the env variable SOLO_CHARTS_DIR if developer wants to use local Solo charts - argv[flags.chartDirectory.name] = process.env.SOLO_CHARTS_DIR ?? undefined - argv[flags.releaseTag.name] = HEDERA_PLATFORM_VERSION_TAG - argv[flags.namespace.name] = namespace +const namespace = 'node-delete-separate' +const nodeAlias = 'node1' as NodeAlias +const argv = getDefaultArgv() +argv[flags.nodeAliasesUnparsed.name] = 'node1,node2,node3,node4' +argv[flags.nodeAlias.name] = nodeAlias +argv[flags.generateGossipKeys.name] = true +argv[flags.generateTlsKeys.name] = true +argv[flags.persistentVolumeClaims.name] = true +// set the env variable SOLO_CHARTS_DIR if developer wants to use local Solo charts +argv[flags.chartDirectory.name] = process.env.SOLO_CHARTS_DIR ?? 
undefined +argv[flags.releaseTag.name] = HEDERA_PLATFORM_VERSION_TAG +argv[flags.namespace.name] = namespace - const tempDir = 'contextDir' - const argvPrepare = Object.assign({}, argv) - argvPrepare[flags.outputDir.name] = tempDir +const tempDir = 'contextDir' +const argvPrepare = Object.assign({}, argv) +argvPrepare[flags.outputDir.name] = tempDir - const argvExecute = getDefaultArgv() - argvExecute[flags.inputDir.name] = tempDir +const argvExecute = getDefaultArgv() +argvExecute[flags.inputDir.name] = tempDir - const bootstrapResp = await bootstrapNetwork(namespace, argv) - const nodeCmd = bootstrapResp.cmd.nodeCmd - const accountCmd = bootstrapResp.cmd.accountCmd - const k8 = bootstrapResp.opts.k8 +e2eTestSuite(namespace, argv, undefined, undefined, undefined, undefined, undefined, undefined, true, (bootstrapResp) => { + describe('Node delete via separated commands', async () => { + const nodeCmd = bootstrapResp.cmd.nodeCmd + const accountCmd = bootstrapResp.cmd.accountCmd + const k8 = bootstrapResp.opts.k8 - after(async function () { - this.timeout(10 * MINUTES) + after(async function () { + this.timeout(10 * MINUTES) - await getNodeLogs(k8, namespace) - await k8.deleteNamespace(namespace) - }) + await getNodeLogs(k8, namespace) + await k8.deleteNamespace(namespace) + }) - it('should succeed with init command', async () => { - const status = await accountCmd.init(argv) - expect(status).to.be.ok - }).timeout(8 * MINUTES) + it('should succeed with init command', async () => { + const status = await accountCmd.init(argv) + expect(status).to.be.ok + }).timeout(8 * MINUTES) - it('should delete a node from the network successfully', async () => { - await nodeCmd.deletePrepare(argvPrepare) - await nodeCmd.deleteSubmitTransactions(argvExecute) - await nodeCmd.deleteExecute(argvExecute) - expect(nodeCmd.getUnusedConfigs(NodeCommand.DELETE_CONFIGS_NAME)).to.deep.equal([ - flags.app.constName, - flags.devMode.constName, - flags.endpointType.constName, - 
flags.quiet.constName, - flags.adminKey.constName, - 'freezeAdminPrivateKey' - ]) + it('should delete a node from the network successfully', async () => { + await nodeCmd.deletePrepare(argvPrepare) + await nodeCmd.deleteSubmitTransactions(argvExecute) + await nodeCmd.deleteExecute(argvExecute) + expect(nodeCmd.getUnusedConfigs(NodeCommand.DELETE_CONFIGS_NAME)).to.deep.equal([ + flags.app.constName, + flags.devMode.constName, + flags.endpointType.constName, + flags.quiet.constName, + flags.adminKey.constName, + 'freezeAdminPrivateKey' + ]) - // @ts-ignore - await nodeCmd.accountManager.close() - }).timeout(10 * MINUTES) + await bootstrapResp.opts.accountManager.close() + }).timeout(10 * MINUTES) - // @ts-ignore - balanceQueryShouldSucceed(nodeCmd.accountManager, nodeCmd, namespace) + balanceQueryShouldSucceed(bootstrapResp.opts.accountManager, nodeCmd, namespace) - // @ts-ignore - accountCreationShouldSucceed(nodeCmd.accountManager, nodeCmd, namespace) + accountCreationShouldSucceed(bootstrapResp.opts.accountManager, nodeCmd, namespace) - it('config.txt should no longer contain removed nodeAlias', async () => { - // read config.txt file from first node, read config.txt line by line, it should not contain value of nodeAlias - const pods = await k8.getPodsByLabel(['solo.hedera.com/type=network-node']) - const podName = pods[0].metadata.name as PodName - const tmpDir = getTmpDir() - await k8.copyFrom(podName, ROOT_CONTAINER, `${HEDERA_HAPI_PATH}/config.txt`, tmpDir) - const configTxt = fs.readFileSync(`${tmpDir}/config.txt`, 'utf8') - console.log('config.txt:', configTxt) - expect(configTxt).not.to.contain(nodeAlias) - }).timeout(10 * MINUTES) + it('config.txt should no longer contain removed nodeAlias', async () => { + // read config.txt file from first node, read config.txt line by line, it should not contain value of nodeAlias + const pods = await k8.getPodsByLabel(['solo.hedera.com/type=network-node']) + const podName = pods[0].metadata.name as PodName + const 
tmpDir = getTmpDir() + await k8.copyFrom(podName, ROOT_CONTAINER, `${HEDERA_HAPI_PATH}/config.txt`, tmpDir) + const configTxt = fs.readFileSync(`${tmpDir}/config.txt`, 'utf8') + console.log('config.txt:', configTxt) + expect(configTxt).not.to.contain(nodeAlias) + }).timeout(10 * MINUTES) + }) }) + diff --git a/test/e2e/e2e_node_util.ts b/test/e2e/e2e_node_util.ts index fe0643760..38352361f 100644 --- a/test/e2e/e2e_node_util.ts +++ b/test/e2e/e2e_node_util.ts @@ -13,7 +13,6 @@ * See the License for the specific language governing permissions and * limitations under the License. * - * @mocha-environment steps */ import { it, describe, after, before, afterEach } from 'mocha' import { expect } from 'chai' @@ -22,7 +21,7 @@ import { flags } from '../../src/commands/index.ts' import { accountCreationShouldSucceed, balanceQueryShouldSucceed, - bootstrapNetwork, + e2eTestSuite, getDefaultArgv, getTestConfigManager, HEDERA_PLATFORM_VERSION_TAG, @@ -37,67 +36,62 @@ import type { ListrTaskWrapper } from 'listr2' import type { K8 } from '../../src/core/index.ts' export function e2eNodeKeyRefreshTest (testName: string, mode: string, releaseTag = HEDERA_PLATFORM_VERSION_TAG) { - const defaultTimeout = 2 * MINUTES - - describe(`NodeCommand [testName ${testName}, mode ${mode}, release ${releaseTag}]`, async () => { - const namespace = testName - const argv = getDefaultArgv() - argv[flags.namespace.name] = namespace - argv[flags.releaseTag.name] = releaseTag - argv[flags.nodeAliasesUnparsed.name] = 'node1,node2,node3' - argv[flags.generateGossipKeys.name] = true - argv[flags.generateTlsKeys.name] = true - argv[flags.clusterName.name] = TEST_CLUSTER - argv[flags.devMode.name] = true - // set the env variable SOLO_CHARTS_DIR if developer wants to use local Solo charts - argv[flags.chartDirectory.name] = process.env.SOLO_CHARTS_DIR ?? 
undefined - argv[flags.quiet.name] = true - - const bootstrapResp = await bootstrapNetwork(testName, argv) - const accountManager = bootstrapResp.opts.accountManager - const k8 = bootstrapResp.opts.k8 - const nodeCmd = bootstrapResp.cmd.nodeCmd - - afterEach(async function () { - this.timeout(defaultTimeout) - - await nodeCmd.close() - await accountManager.close() - }) + const namespace = testName + const argv = getDefaultArgv() + argv[flags.namespace.name] = namespace + argv[flags.releaseTag.name] = releaseTag + argv[flags.nodeAliasesUnparsed.name] = 'node1,node2,node3' + argv[flags.generateGossipKeys.name] = true + argv[flags.generateTlsKeys.name] = true + argv[flags.clusterName.name] = TEST_CLUSTER + argv[flags.devMode.name] = true + // set the env variable SOLO_CHARTS_DIR if developer wants to use local Solo charts + argv[flags.chartDirectory.name] = process.env.SOLO_CHARTS_DIR ?? undefined + argv[flags.quiet.name] = true + + e2eTestSuite(testName, argv, undefined, undefined, undefined, undefined, undefined, undefined, true, (bootstrapResp) => { + const defaultTimeout = 2 * MINUTES + + describe(`NodeCommand [testName ${testName}, mode ${mode}, release ${releaseTag}]`, async () => { + const accountManager = bootstrapResp.opts.accountManager + const k8 = bootstrapResp.opts.k8 + const nodeCmd = bootstrapResp.cmd.nodeCmd + + afterEach(async function () { + this.timeout(defaultTimeout) + + await nodeCmd.close() + await accountManager.close() + }) - after(async function () { - this.timeout(10 * MINUTES) + after(async function () { + this.timeout(10 * MINUTES) - await getNodeLogs(k8, namespace) - await k8.deleteNamespace(namespace) - }) + await getNodeLogs(k8, namespace) + await k8.deleteNamespace(namespace) + }) - describe( - `Node should have started successfully [mode ${mode}, release ${releaseTag}]`, - () => { + describe(`Node should have started successfully [mode ${mode}, release ${releaseTag}]`, () => { balanceQueryShouldSucceed(accountManager, nodeCmd, 
namespace) accountCreationShouldSucceed(accountManager, nodeCmd, namespace) - it(`Node Proxy should be UP [mode ${mode}, release ${releaseTag}`, - async () => { - try { - await expect(k8.waitForPodReady( + it(`Node Proxy should be UP [mode ${mode}, release ${releaseTag}`, async () => { + try { + await expect(k8.waitForPodReady( ['app=haproxy-node1', 'solo.hedera.com/type=haproxy'], 1, 300, 1000)).to.eventually.be.ok - } catch (e) { - nodeCmd.logger.showUserError(e) - expect.fail() - } finally { - await nodeCmd.close() - } - }).timeout(defaultTimeout) + } catch (e) { + nodeCmd.logger.showUserError(e) + expect.fail() + } finally { + await nodeCmd.close() + } + }).timeout(defaultTimeout) }) - describe( - `Node should refresh successfully [mode ${mode}, release ${releaseTag}]`, - () => { + describe(`Node should refresh successfully [mode ${mode}, release ${releaseTag}]`, () => { const nodeAlias = 'node1' before(async function () { @@ -106,7 +100,7 @@ export function e2eNodeKeyRefreshTest (testName: string, mode: string, releaseTa const podName = await nodeRefreshTestSetup(argv, testName, k8, nodeAlias) if (mode === 'kill') { const resp = await k8.kubeClient.deleteNamespacedPod(podName, - namespace) + namespace) expect(resp.response.statusCode).to.equal(200) await sleep(20 * SECONDS) // sleep to wait for pod to finish terminating } else if (mode === 'stop') { @@ -128,72 +122,73 @@ export function e2eNodeKeyRefreshTest (testName: string, mode: string, releaseTa accountCreationShouldSucceed(accountManager, nodeCmd, namespace) }) - function nodePodShouldBeRunning (nodeCmd: NodeCommand, namespace: string, nodeAlias: NodeAlias) { - it(`${nodeAlias} should be running`, async () => { - try { - // @ts-ignore to access tasks which is a private property - await expect(nodeCmd.tasks.checkNetworkNodePod(namespace, - nodeAlias)).to.eventually.be.ok - } catch (e) { - nodeCmd.logger.showUserError(e) - expect.fail() - } finally { - await nodeCmd.close() - } - }).timeout(defaultTimeout) 
- } - - function nodeRefreshShouldSucceed (nodeAlias: NodeAlias, nodeCmd: NodeCommand, argv: Record) { - it(`${nodeAlias} refresh should succeed`, async () => { - try { - await expect(nodeCmd.refresh(argv)).to.eventually.be.ok - expect(nodeCmd.getUnusedConfigs( - NodeCommand.REFRESH_CONFIGS_NAME)).to.deep.equal([ - flags.devMode.constName, - flags.quiet.constName - ]) - } catch (e) { - nodeCmd.logger.showUserError(e) - expect.fail() - } finally { - await nodeCmd.close() - await sleep(10 * SECONDS) // sleep to wait for node to finish starting - } - }).timeout(20 * MINUTES) - } - - function nodeShouldNotBeActive (nodeCmd: NodeCommand, nodeAlias: NodeAlias) { - it(`${nodeAlias} should not be ACTIVE`, async () => { - expect(2) - try { - await expect( - nodeCmd.checkNetworkNodeActiveness(namespace, nodeAlias, { title: '' } as ListrTaskWrapper, - '', 44, undefined, 15) - ).to.be.rejected - } catch (e) { - expect(e).not.to.be.null - } finally { - await nodeCmd.close() + function nodePodShouldBeRunning (nodeCmd: NodeCommand, namespace: string, nodeAlias: NodeAlias) { + it(`${nodeAlias} should be running`, async () => { + try { + // @ts-ignore to access tasks which is a private property + await expect(nodeCmd.tasks.checkNetworkNodePod(namespace, + nodeAlias)).to.eventually.be.ok + } catch (e) { + nodeCmd.logger.showUserError(e) + expect.fail() + } finally { + await nodeCmd.close() + } + }).timeout(defaultTimeout) + } + + function nodeRefreshShouldSucceed (nodeAlias: NodeAlias, nodeCmd: NodeCommand, argv: Record) { + it(`${nodeAlias} refresh should succeed`, async () => { + try { + await expect(nodeCmd.refresh(argv)).to.eventually.be.ok + expect(nodeCmd.getUnusedConfigs( + NodeCommand.REFRESH_CONFIGS_NAME)).to.deep.equal([ + flags.devMode.constName, + flags.quiet.constName + ]) + } catch (e) { + nodeCmd.logger.showUserError(e) + expect.fail() + } finally { + await nodeCmd.close() + await sleep(10 * SECONDS) // sleep to wait for node to finish starting + } + }).timeout(20 * 
MINUTES) + } + + function nodeShouldNotBeActive (nodeCmd: NodeCommand, nodeAlias: NodeAlias) { + it(`${nodeAlias} should not be ACTIVE`, async () => { + expect(2) + try { + await expect( + nodeCmd.checkNetworkNodeActiveness(namespace, nodeAlias, { title: '' } as ListrTaskWrapper, + '', 44, undefined, 15) + ).to.be.rejected + } catch (e) { + expect(e).not.to.be.null + } finally { + await nodeCmd.close() + } + }).timeout(defaultTimeout) + } + + async function nodeRefreshTestSetup (argv: Record, testName: string, k8: K8, nodeAliases: string) { + argv[flags.nodeAliasesUnparsed.name] = nodeAliases + const configManager = getTestConfigManager(`${testName}-solo.yaml`) + configManager.update(argv, true) + + const podArray = await k8.getPodsByLabel( + [`app=network-${nodeAliases}`, + 'solo.hedera.com/type=network-node']) + + if (podArray.length > 0) { + const podName = podArray[0].metadata.name + k8.logger.info(`nodeRefreshTestSetup: podName: ${podName}`) + return podName } - }).timeout(defaultTimeout) - } - - async function nodeRefreshTestSetup (argv: Record, testName: string, k8: K8, nodeAliases: string) { - argv[flags.nodeAliasesUnparsed.name] = nodeAliases - const configManager = getTestConfigManager(`${testName}-solo.yaml`) - configManager.update(argv, true) - - const podArray = await k8.getPodsByLabel( - [`app=network-${nodeAliases}`, - 'solo.hedera.com/type=network-node']) - - if (podArray.length > 0) { - const podName = podArray[0].metadata.name - k8.logger.info(`nodeRefreshTestSetup: podName: ${podName}`) - return podName - } throw new Error(`pod for ${nodeAliases} not found`) - - } + + } + }) }) } diff --git a/test/e2e/integration/core/account_manager.test.ts b/test/e2e/integration/core/account_manager.test.ts index fcd4391e3..0828f2758 100644 --- a/test/e2e/integration/core/account_manager.test.ts +++ b/test/e2e/integration/core/account_manager.test.ts @@ -18,65 +18,67 @@ import { it, describe, after } from 'mocha' import { expect } from 'chai' import { flags } 
from '../../../../src/commands/index.ts' -import { bootstrapNetwork, getDefaultArgv, TEST_CLUSTER } from '../../../test_util.ts' +import { e2eTestSuite, getDefaultArgv, TEST_CLUSTER } from '../../../test_util.ts' import * as version from '../../../../version.ts' import { MINUTES } from '../../../../src/core/constants.ts' import type { PodName } from '../../../../src/types/aliases.ts' -describe('AccountManager', async () => { - const namespace = 'account-mngr-e2e' - const argv = getDefaultArgv() - argv[flags.namespace.name] = namespace - argv[flags.nodeAliasesUnparsed.name] = 'node1' - argv[flags.clusterName.name] = TEST_CLUSTER - argv[flags.soloChartVersion.name] = version.SOLO_CHART_VERSION - argv[flags.generateGossipKeys.name] = true - argv[flags.generateTlsKeys.name] = true - // set the env variable SOLO_CHARTS_DIR if developer wants to use local Solo charts - argv[flags.chartDirectory.name] = process.env.SOLO_CHARTS_DIR ?? undefined - const bootstrapResp = await bootstrapNetwork(namespace, argv, undefined, undefined, undefined, undefined, undefined, undefined, false) - const k8 = bootstrapResp.opts.k8 - const accountManager = bootstrapResp.opts.accountManager - const configManager = bootstrapResp.opts.configManager +const namespace = 'account-mngr-e2e' +const argv = getDefaultArgv() +argv[flags.namespace.name] = namespace +argv[flags.nodeAliasesUnparsed.name] = 'node1' +argv[flags.clusterName.name] = TEST_CLUSTER +argv[flags.soloChartVersion.name] = version.SOLO_CHART_VERSION +argv[flags.generateGossipKeys.name] = true +argv[flags.generateTlsKeys.name] = true +// set the env variable SOLO_CHARTS_DIR if developer wants to use local Solo charts +argv[flags.chartDirectory.name] = process.env.SOLO_CHARTS_DIR ?? 
undefined - after(async function () { - this.timeout(3 * MINUTES) +e2eTestSuite(namespace, argv, undefined, undefined, undefined, undefined, undefined, undefined, false, (bootstrapResp) => { + describe('AccountManager', async () => { + const k8 = bootstrapResp.opts.k8 + const accountManager = bootstrapResp.opts.accountManager + const configManager = bootstrapResp.opts.configManager - await k8.deleteNamespace(namespace) - await accountManager.close() - }) + after(async function () { + this.timeout(3 * MINUTES) - it('should be able to stop port forwards', async () => { - await accountManager.close() - const localHost = '127.0.0.1' + await k8.deleteNamespace(namespace) + await accountManager.close() + }) - const podName = 'minio-console' as PodName // use a svc that is less likely to be used by other tests - const podPort = 9_090 - const localPort = 19_090 + it('should be able to stop port forwards', async () => { + await accountManager.close() + const localHost = '127.0.0.1' - // @ts-ignore - expect(accountManager._portForwards, 'starting accountManager port forwards lengths should be zero').to.have.lengthOf(0) + const podName = 'minio-console' as PodName // use a svc that is less likely to be used by other tests + const podPort = 9_090 + const localPort = 19_090 - // ports should be opened - // @ts-ignore - accountManager._portForwards.push(await k8.portForward(podName, localPort, podPort)) - const status = await k8.testConnection(localHost, localPort) - expect(status, 'test connection status should be true').to.be.ok + // @ts-ignore + expect(accountManager._portForwards, 'starting accountManager port forwards lengths should be zero').to.have.lengthOf(0) - // ports should be closed - await accountManager.close() - try { - await k8.testConnection(localHost, localPort) - } catch (e) { - expect(e.message, 'expect failed test connection').to.include(`failed to connect to '${localHost}:${localPort}'`) - } - // @ts-ignore - expect(accountManager._portForwards, 'expect 
that the closed account manager should have no port forwards').to.have.lengthOf(0) - }) + // ports should be opened + // @ts-ignore + accountManager._portForwards.push(await k8.portForward(podName, localPort, podPort)) + const status = await k8.testConnection(localHost, localPort) + expect(status, 'test connection status should be true').to.be.ok + + // ports should be closed + await accountManager.close() + try { + await k8.testConnection(localHost, localPort) + } catch (e) { + expect(e.message, 'expect failed test connection').to.include(`failed to connect to '${localHost}:${localPort}'`) + } + // @ts-ignore + expect(accountManager._portForwards, 'expect that the closed account manager should have no port forwards').to.have.lengthOf(0) + }) - it('should be able to load a new client', async () => { - await accountManager.loadNodeClient(configManager.getFlag(flags.namespace)) - expect(accountManager._nodeClient).not.to.be.null - await accountManager.close() + it('should be able to load a new client', async () => { + await accountManager.loadNodeClient(configManager.getFlag(flags.namespace)) + expect(accountManager._nodeClient).not.to.be.null + await accountManager.close() + }) }) }) diff --git a/test/e2e/integration/core/platform_installer_e2e.test.ts b/test/e2e/integration/core/platform_installer_e2e.test.ts index 42db45064..ba1ca07bb 100644 --- a/test/e2e/integration/core/platform_installer_e2e.test.ts +++ b/test/e2e/integration/core/platform_installer_e2e.test.ts @@ -21,7 +21,7 @@ import { constants } from '../../../../src/core/index.ts' import * as fs from 'fs' import { - bootstrapNetwork, + e2eTestSuite, getDefaultArgv, getTestCacheDir, TEST_CLUSTER, @@ -33,74 +33,77 @@ import { MINUTES, SECONDS } from '../../../../src/core/constants.ts' const defaultTimeout = 20 * SECONDS -describe('PackageInstallerE2E', async () => { - const namespace = 'pkg-installer-e2e' - const argv = getDefaultArgv() - const testCacheDir = getTestCacheDir() - argv[flags.cacheDir.name] = 
testCacheDir - argv[flags.namespace.name] = namespace - argv[flags.nodeAliasesUnparsed.name] = 'node1' - argv[flags.clusterName.name] = TEST_CLUSTER - argv[flags.soloChartVersion.name] = version.SOLO_CHART_VERSION - argv[flags.generateGossipKeys.name] = true - argv[flags.generateTlsKeys.name] = true - // set the env variable SOLO_CHARTS_DIR if developer wants to use local Solo charts - argv[flags.chartDirectory.name] = process.env.SOLO_CHARTS_DIR ?? undefined - const bootstrapResp = await bootstrapNetwork(namespace, argv, undefined, undefined, undefined, undefined, undefined, undefined, false) - const k8 = bootstrapResp.opts.k8 - const accountManager = bootstrapResp.opts.accountManager - const configManager = bootstrapResp.opts.configManager - const installer = bootstrapResp.opts.platformInstaller - const podName = 'network-node1-0' - const packageVersion = 'v0.42.5' +const namespace = 'pkg-installer-e2e' +const argv = getDefaultArgv() +const testCacheDir = getTestCacheDir() +argv[flags.cacheDir.name] = testCacheDir +argv[flags.namespace.name] = namespace +argv[flags.nodeAliasesUnparsed.name] = 'node1' +argv[flags.clusterName.name] = TEST_CLUSTER +argv[flags.soloChartVersion.name] = version.SOLO_CHART_VERSION +argv[flags.generateGossipKeys.name] = true +argv[flags.generateTlsKeys.name] = true +// set the env variable SOLO_CHARTS_DIR if developer wants to use local Solo charts +argv[flags.chartDirectory.name] = process.env.SOLO_CHARTS_DIR ?? 
undefined - after(async function () { - this.timeout(3 * MINUTES) +e2eTestSuite(namespace, argv, undefined, undefined, undefined, undefined, undefined, undefined, false, (bootstrapResp) => { + describe('PackageInstallerE2E', async () => { + const k8 = bootstrapResp.opts.k8 + const accountManager = bootstrapResp.opts.accountManager + const configManager = bootstrapResp.opts.configManager + const installer = bootstrapResp.opts.platformInstaller + const podName = 'network-node1-0' + const packageVersion = 'v0.42.5' - await k8.deleteNamespace(namespace) - await accountManager.close() - }) + after(async function () { + this.timeout(3 * MINUTES) - before(function () { - this.timeout(defaultTimeout) + await k8.deleteNamespace(namespace) + await accountManager.close() + }) - if (!fs.existsSync(testCacheDir)) { - fs.mkdirSync(testCacheDir) - } - configManager.load() - }) + before(function () { + this.timeout(defaultTimeout) - describe('fetchPlatform', () => { - it('should fail with invalid pod', async () => { - try { - // @ts-ignore - await installer.fetchPlatform('', packageVersion) - throw new Error() - } catch (e) { - expect(e.message).to.include('podName is required') + if (!fs.existsSync(testCacheDir)) { + fs.mkdirSync(testCacheDir) } + configManager.load() + }) - try { - // @ts-ignore - await installer.fetchPlatform('INVALID', packageVersion) - } catch (e) { - expect(e.message).to.include('failed to extract platform code in this pod') - } - }).timeout(defaultTimeout) + describe('fetchPlatform', () => { + it('should fail with invalid pod', async () => { + try { + // @ts-ignore + await installer.fetchPlatform('', packageVersion) + throw new Error() // fail-safe, should not reach here + } catch (e) { + expect(e.message).to.include('podName is required') + } - it('should fail with invalid tag', async () => { - try { - await installer.fetchPlatform(podName, 'INVALID') - throw new Error() - } catch (e) { - expect(e.message).to.include('curl: (22) The requested URL returned 
error: 404') - } - }).timeout(defaultTimeout) + try { + // @ts-ignore + await installer.fetchPlatform('INVALID', packageVersion) + throw new Error() // fail-safe, should not reach here + } catch (e) { + expect(e.message).to.include('failed to extract platform code in this pod') + } + }).timeout(defaultTimeout) + + it('should fail with invalid tag', async () => { + try { + await installer.fetchPlatform(podName, 'INVALID') + throw new Error() // fail-safe, should not reach here + } catch (e) { + expect(e.message).to.include('curl: (22) The requested URL returned error: 404') + } + }).timeout(defaultTimeout) - it('should succeed with valid tag and pod', async () => { - await expect(installer.fetchPlatform(podName, packageVersion)).to.eventually.be.ok - const outputs = await k8.execContainer(podName, constants.ROOT_CONTAINER, `ls -la ${constants.HEDERA_HAPI_PATH}`) - testLogger.showUser(outputs) - }).timeout(MINUTES) + it('should succeed with valid tag and pod', async () => { + await expect(installer.fetchPlatform(podName, packageVersion)).to.eventually.be.ok + const outputs = await k8.execContainer(podName, constants.ROOT_CONTAINER, `ls -la ${constants.HEDERA_HAPI_PATH}`) + testLogger.showUser(outputs) + }).timeout(MINUTES) + }) }) }) diff --git a/test/test_add.ts b/test/test_add.ts index 48d02b20e..677185a27 100644 --- a/test/test_add.ts +++ b/test/test_add.ts @@ -13,7 +13,6 @@ * See the License for the specific language governing permissions and * limitations under the License. 
* - * @mocha-environment steps */ import { expect } from 'chai' import { describe, it, after } from 'mocha' @@ -21,7 +20,7 @@ import { describe, it, after } from 'mocha' import { accountCreationShouldSucceed, balanceQueryShouldSucceed, - bootstrapNetwork, + e2eTestSuite, getDefaultArgv, getNodeAliasesPrivateKeysHash, getTmpDir, @@ -34,83 +33,80 @@ import { MINUTES } from '../src/core/constants.ts' import type { NodeAlias } from '../src/types/aliases.ts' import type { NetworkNodeServices } from '../src/core/network_node_services.ts' -export function testNodeAdd (localBuildPath: string) { - describe('Node add should success', async () => { - const suffix = localBuildPath.substring(0, 5) - const defaultTimeout = 2 * MINUTES - const namespace = 'node-add' + suffix - const argv = getDefaultArgv() - argv[flags.nodeAliasesUnparsed.name] = 'node1,node2,node3' - argv[flags.generateGossipKeys.name] = true - argv[flags.generateTlsKeys.name] = true - // set the env variable SOLO_CHARTS_DIR if developer wants to use local Solo charts - argv[flags.chartDirectory.name] = process.env.SOLO_CHARTS_DIR ?? 
undefined - argv[flags.releaseTag.name] = HEDERA_PLATFORM_VERSION_TAG - argv[flags.namespace.name] = namespace - argv[flags.force.name] = true - argv[flags.persistentVolumeClaims.name] = true - argv[flags.localBuildPath.name] = localBuildPath - argv[flags.quiet.name] = true +const defaultTimeout = 2 * MINUTES - const bootstrapResp = await bootstrapNetwork(namespace, argv) - const nodeCmd = bootstrapResp.cmd.nodeCmd - const accountCmd = bootstrapResp.cmd.accountCmd - const networkCmd = bootstrapResp.cmd.networkCmd - const k8 = bootstrapResp.opts.k8 - let existingServiceMap: Map - let existingNodeIdsPrivateKeysHash: Map> +export function testNodeAdd (localBuildPath: string, testDescription: string = 'Node add should success', timeout: number = defaultTimeout): void { + const suffix = localBuildPath.substring(0, 5) + const namespace = 'node-add' + suffix + const argv = getDefaultArgv() + argv[flags.nodeAliasesUnparsed.name] = 'node1,node2,node3' + argv[flags.generateGossipKeys.name] = true + argv[flags.generateTlsKeys.name] = true + // set the env variable SOLO_CHARTS_DIR if developer wants to use local Solo charts + argv[flags.chartDirectory.name] = process.env.SOLO_CHARTS_DIR ?? 
undefined + argv[flags.releaseTag.name] = HEDERA_PLATFORM_VERSION_TAG + argv[flags.namespace.name] = namespace + argv[flags.force.name] = true + argv[flags.persistentVolumeClaims.name] = true + argv[flags.localBuildPath.name] = localBuildPath + argv[flags.quiet.name] = true - after(async function () { - this.timeout(10 * MINUTES) + e2eTestSuite(namespace, argv, undefined, undefined, undefined, undefined, undefined, undefined, true, (bootstrapResp) => { + describe(testDescription, async () => { + const nodeCmd = bootstrapResp.cmd.nodeCmd + const accountCmd = bootstrapResp.cmd.accountCmd + const networkCmd = bootstrapResp.cmd.networkCmd + const k8 = bootstrapResp.opts.k8 + let existingServiceMap: Map + let existingNodeIdsPrivateKeysHash: Map> - await getNodeLogs(k8, namespace) - // @ts-ignore: Accessing private property for test purposes - await nodeCmd.accountManager.close() - await nodeCmd.stop(argv) - await networkCmd.destroy(argv) - await k8.deleteNamespace(namespace) - }) + after(async function () { + this.timeout(10 * MINUTES) + + await getNodeLogs(k8, namespace) + await bootstrapResp.opts.accountManager.close() + await nodeCmd.stop(argv) + await networkCmd.destroy(argv) + await k8.deleteNamespace(namespace) + }) - it('cache current version of private keys', async () => { - // @ts-ignore: Accessing private property for test purposes - existingServiceMap = await nodeCmd.accountManager.getNodeServiceMap(namespace) - existingNodeIdsPrivateKeysHash = await getNodeAliasesPrivateKeysHash(existingServiceMap, namespace, k8, getTmpDir()) - }).timeout(defaultTimeout) + it('cache current version of private keys', async () => { + existingServiceMap = await bootstrapResp.opts.accountManager.getNodeServiceMap(namespace) + existingNodeIdsPrivateKeysHash = await getNodeAliasesPrivateKeysHash(existingServiceMap, namespace, k8, getTmpDir()) + }).timeout(defaultTimeout) - it('should succeed with init command', async () => { - await 
expect(accountCmd.init(argv)).to.eventually.be.ok - }).timeout(8 * MINUTES) + it('should succeed with init command', async () => { + await expect(accountCmd.init(argv)).to.eventually.be.ok + }).timeout(8 * MINUTES) - it('should add a new node to the network successfully', async () => { - await nodeCmd.add(argv) - expect(nodeCmd.getUnusedConfigs(NodeCommand.ADD_CONFIGS_NAME)).to.deep.equal([ - flags.app.constName, - flags.chainId.constName, - flags.devMode.constName, - flags.quiet.constName, - flags.adminKey.constName - ]) - // @ts-ignore: Accessing private property for test purposes - await nodeCmd.accountManager.close() - }).timeout(12 * MINUTES) + it('should add a new node to the network successfully', async () => { + await nodeCmd.add(argv) + expect(nodeCmd.getUnusedConfigs(NodeCommand.ADD_CONFIGS_NAME)).to.deep.equal([ + flags.app.constName, + flags.chainId.constName, + flags.devMode.constName, + flags.quiet.constName, + flags.adminKey.constName + ]) + await bootstrapResp.opts.accountManager.close() + }).timeout(12 * MINUTES) - // @ts-ignore: Accessing private property for test purposes - balanceQueryShouldSucceed(nodeCmd.accountManager, nodeCmd, namespace) + balanceQueryShouldSucceed(bootstrapResp.opts.accountManager, nodeCmd, namespace) - // @ts-ignore: Accessing private property for test purposes - accountCreationShouldSucceed(nodeCmd.accountManager, nodeCmd, namespace) + accountCreationShouldSucceed(bootstrapResp.opts.accountManager, nodeCmd, namespace) - it('existing nodes private keys should not have changed', async () => { - const currentNodeIdsPrivateKeysHash = await getNodeAliasesPrivateKeysHash(existingServiceMap, namespace, k8, getTmpDir()) + it('existing nodes private keys should not have changed', async () => { + const currentNodeIdsPrivateKeysHash = await getNodeAliasesPrivateKeysHash(existingServiceMap, namespace, k8, getTmpDir()) - for (const [nodeAlias, existingKeyHashMap] of existingNodeIdsPrivateKeysHash.entries()) { - const 
currentNodeKeyHashMap = currentNodeIdsPrivateKeysHash.get(nodeAlias) + for (const [nodeAlias, existingKeyHashMap] of existingNodeIdsPrivateKeysHash.entries()) { + const currentNodeKeyHashMap = currentNodeIdsPrivateKeysHash.get(nodeAlias) - for (const [keyFileName, existingKeyHash] of existingKeyHashMap.entries()) { - expect(`${nodeAlias}:${keyFileName}:${currentNodeKeyHashMap.get(keyFileName)}`).to.deep.equal( - `${nodeAlias}:${keyFileName}:${existingKeyHash}`) + for (const [keyFileName, existingKeyHash] of existingKeyHashMap.entries()) { + expect(`${nodeAlias}:${keyFileName}:${currentNodeKeyHashMap.get(keyFileName)}`).to.deep.equal( + `${nodeAlias}:${keyFileName}:${existingKeyHash}`) + } } - } - }).timeout(defaultTimeout) + }).timeout(timeout) + }) }) } diff --git a/test/test_util.ts b/test/test_util.ts index e79fd2d61..85abc0a37 100644 --- a/test/test_util.ts +++ b/test/test_util.ts @@ -13,7 +13,6 @@ * See the License for the specific language governing permissions and * limitations under the License. 
* - * @mocha-environment steps */ import 'chai-as-promised' @@ -32,7 +31,7 @@ import { DependencyManager, HelmDependencyManager } from '../src/core/dependency_managers/index.ts' -import { sleep } from '../src/core/helpers.ts' +import { getNodeLogs, sleep } from '../src/core/helpers.ts' import { ChartManager, ConfigManager, @@ -128,14 +127,14 @@ interface BootstrapResponse { /** Initialize common test variables */ export function bootstrapTestVariables ( - testName: string, - argv: any, - k8Arg: K8 | null = null, - initCmdArg: InitCommand | null = null, - clusterCmdArg: ClusterCommand | null = null, - networkCmdArg: NetworkCommand | null = null, - nodeCmdArg: NodeCommand | null = null, - accountCmdArg: AccountCommand | null = null + testName: string, + argv: any, + k8Arg: K8 | null = null, + initCmdArg: InitCommand | null = null, + clusterCmdArg: ClusterCommand | null = null, + networkCmdArg: NetworkCommand | null = null, + nodeCmdArg: NodeCommand | null = null, + accountCmdArg: AccountCommand | null = null ): BootstrapResponse { const namespace: string = argv[flags.namespace.name] || 'bootstrap-ns' const cacheDir: string = argv[flags.cacheDir.name] || getTestCacheDir(testName) @@ -187,17 +186,19 @@ export function bootstrapTestVariables ( } } -/** Bootstrap network in a given namespace */ -export function bootstrapNetwork ( - testName: string, - argv: Record, - k8Arg: K8 | null = null, - initCmdArg: InitCommand | null = null, - clusterCmdArg: ClusterCommand | null = null, - networkCmdArg: NetworkCommand | null = null, - nodeCmdArg: NodeCommand | null = null, - accountCmdArg: AccountCommand | null = null, - startNodes = true +/** Bootstrap network in a given namespace, then run the test call back providing the bootstrap response */ +export function e2eTestSuite ( + testName: string, + argv: Record, + k8Arg: K8 | null = null, + initCmdArg: InitCommand | null = null, + clusterCmdArg: ClusterCommand | null = null, + networkCmdArg: NetworkCommand | null = null, + 
nodeCmdArg: NodeCommand | null = null, + accountCmdArg: AccountCommand | null = null, + startNodes = true, + testsCallBack: (bootstrapResp: BootstrapResponse) => void = () => { + } ) { const bootstrapResp = bootstrapTestVariables(testName, argv, k8Arg, initCmdArg, clusterCmdArg, networkCmdArg, nodeCmdArg, accountCmdArg) const namespace = bootstrapResp.namespace @@ -208,15 +209,18 @@ export function bootstrapNetwork ( const nodeCmd = bootstrapResp.cmd.nodeCmd const chartManager = bootstrapResp.opts.chartManager - return new Promise((resolve) => { + describe(`E2E Test Suite for '${testName}'`, function () { + this.bail(true) // stop on first failure, nothing else will matter if network doesn't come up correctly + describe(`Bootstrap network for test [release ${argv[flags.releaseTag.name]}]`, () => { before(() => { bootstrapResp.opts.logger.showUser(`------------------------- START: bootstrap (${testName}) ----------------------------`) }) - after(() => { + after(async function () { + this.timeout(3 * MINUTES) + await getNodeLogs(k8, namespace) bootstrapResp.opts.logger.showUser(`------------------------- END: bootstrap (${testName}) ----------------------------`) - resolve(bootstrapResp) }) it('should cleanup previous deployment', async () => { @@ -288,6 +292,10 @@ export function bootstrapNetwork ( }).timeout(30 * MINUTES) } }) + + describe(testName, () => { + testsCallBack(bootstrapResp) + }) }) } @@ -299,8 +307,8 @@ export function balanceQueryShouldSucceed (accountManager: AccountManager, cmd: expect(accountManager._nodeClient).not.to.be.null const balance = await new AccountBalanceQuery() - .setAccountId(accountManager._nodeClient.getOperator().accountId) - .execute(accountManager._nodeClient) + .setAccountId(accountManager._nodeClient.getOperator().accountId) + .execute(accountManager._nodeClient) expect(balance.hbars).not.be.null } catch (e) { @@ -320,9 +328,9 @@ export function accountCreationShouldSucceed (accountManager: AccountManager, no const amount = 
100 const newAccount = await new AccountCreateTransaction() - .setKey(privateKey) - .setInitialBalance(Hbar.from(amount, HbarUnit.Hbar)) - .execute(accountManager._nodeClient) + .setKey(privateKey) + .setInitialBalance(Hbar.from(amount, HbarUnit.Hbar)) + .execute(accountManager._nodeClient) // Get the new account ID const getReceipt = await newAccount.getReceipt(accountManager._nodeClient) @@ -362,10 +370,10 @@ export async function getNodeAliasesPrivateKeysHash (networkNodeServicesMap: Map async function addKeyHashToMap (k8: K8, nodeAlias: NodeAlias, keyDir: string, uniqueNodeDestDir: string, keyHashMap: Map, privateKeyFileName: string) { await k8.copyFrom( - Templates.renderNetworkPodName(nodeAlias), - ROOT_CONTAINER, - path.join(keyDir, privateKeyFileName), - uniqueNodeDestDir) + Templates.renderNetworkPodName(nodeAlias), + ROOT_CONTAINER, + path.join(keyDir, privateKeyFileName), + uniqueNodeDestDir) const keyBytes = fs.readFileSync(path.join(uniqueNodeDestDir, privateKeyFileName)) const keyString = keyBytes.toString() keyHashMap.set(privateKeyFileName, crypto.createHash('sha256').update(keyString).digest('base64'))