Skip to content

Commit b45a8a3

Browse files
committed
updates for addressing errors
Signed-off-by: Jeromy Cannon <[email protected]>
1 parent 9129890 commit b45a8a3

File tree

3 files changed

+59
-19
lines changed

src/commands/node.mjs

Lines changed: 36 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -107,7 +107,8 @@ export class NodeCommand extends BaseCommand {
107107
try {
108108
const output = await this.k8.execContainer(podName, constants.ROOT_CONTAINER, ['tail', '-10', logfilePath])
109109
if (output && output.indexOf('Terminating Netty') < 0 && // make sure we are not at the beginning of a restart
110-
output.indexOf(`Now current platform status = ${status}`) > 0) {
110+
(output.indexOf(`Now current platform status = ${status}`) > 0 ||
111+
output.indexOf(`is ${status}`) > 0)) { // 'is ACTIVE' is for newer versions, first seen in v0.49.0
111112
this.logger.debug(`Node ${nodeId} is ${status} [ attempt: ${attempt}/${maxAttempt}]`)
112113
isActive = true
113114
break
@@ -693,33 +694,51 @@ export class NodeCommand extends BaseCommand {
693694
* @param delay the delay between attempts
694695
* @returns {Promise<boolean>} true if the proxy is up
695696
*/
696-
async checkNetworkNodeProxyUp (nodeId, localPort, maxAttempts = 20, delay = 5000) {
697-
const podArray = await this.k8.getPodsByLabel([`app=haproxy-${nodeId}`, 'fullstack.hedera.com/type=haproxy'])
697+
async checkNetworkNodeProxyUp (nodeId, localPort, maxAttempts = 30, delay = 2000) {
698+
const podLabels = [`app=haproxy-${nodeId}`, 'fullstack.hedera.com/type=haproxy']
699+
let podArray = await this.k8.getPodsByLabel(podLabels)
698700

699701
let attempts = 0
702+
let status = null
700703
if (podArray.length > 0) {
701-
const podName = podArray[0].metadata.name
702-
this._portForwards.push(await this.k8.portForward(podName, localPort, 5555))
704+
let podName = podArray[0].metadata.name
705+
let portForwarder = null
706+
703707
try {
704-
await this.k8.testConnection('localhost', localPort)
705-
} catch (e) {
706-
throw new FullstackTestingError(`failed to create port forward for '${nodeId}' proxy on port ${localPort}`, e)
707-
}
708+
while (attempts < maxAttempts) {
709+
if (attempts === 0) {
710+
portForwarder = await this.k8.portForward(podName, localPort, 5555)
711+
await this.k8.testConnection('localhost', localPort)
712+
} else if (attempts % 5 === 0) {
713+
this.logger.debug(`Recycling proxy ${podName} [attempt: ${attempts}/${maxAttempts}]`)
714+
await this.k8.stopPortForward(portForwarder)
715+
await this.k8.recyclePodByLabels(podLabels, 50)
716+
podArray = await this.k8.getPodsByLabel(podLabels)
717+
podName = podArray[0].metadata.name
718+
portForwarder = await this.k8.portForward(podName, localPort, 5555)
719+
await this.k8.testConnection('localhost', localPort)
720+
}
708721

709-
while (attempts < maxAttempts) {
710-
try {
711-
const status = await this.getNodeProxyStatus(`http://localhost:${localPort}/v2/services/haproxy/stats/native?type=backend`)
722+
status = await this.getNodeProxyStatus(`http://localhost:${localPort}/v2/services/haproxy/stats/native?type=backend`)
712723
if (status === 'UP') {
713-
this.logger.debug(`Proxy ${podName} is UP. [attempt: ${attempts}/${maxAttempts}]`)
714-
return true
724+
break
715725
}
716726

717-
attempts++
718727
this.logger.debug(`Proxy ${podName} is not UP. Checking again in ${delay}ms ... [attempt: ${attempts}/${maxAttempts}]`)
728+
attempts++
719729
await sleep(delay)
720-
} catch (e) {
721-
throw new FullstackTestingError(`failed to create port forward for '${nodeId}' proxy on port ${localPort}`, e)
722730
}
731+
} catch (e) {
732+
throw new FullstackTestingError(`failed to check proxy for '${nodeId}' on port ${localPort}: ${e.message}`, e)
733+
} finally {
734+
if (portForwarder !== null) {
735+
this._portForwards.push(portForwarder)
736+
}
737+
}
738+
739+
if (status === 'UP') {
740+
this.logger.debug(`Proxy ${podName} is UP. [attempt: ${attempts}/${maxAttempts}]`)
741+
return true
723742
}
724743
}
725744

src/core/k8.mjs

Lines changed: 22 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -26,6 +26,7 @@ import * as tar from 'tar'
2626
import { v4 as uuid4 } from 'uuid'
2727
import { V1ObjectMeta, V1Secret } from '@kubernetes/client-node'
2828
import { constants } from './index.mjs'
29+
import { sleep } from './helpers.mjs'
2930

3031
/**
3132
* A kubernetes API wrapper class providing custom functionalities required by solo
@@ -727,6 +728,27 @@ export class K8 {
727728
})
728729
}
729730

731+
async recyclePodByLabels (podLabels, maxAttempts = 50) {
732+
const podArray = await this.getPodsByLabel(podLabels)
733+
for (const pod of podArray) {
734+
const podName = pod.metadata.name
735+
await this.kubeClient.deleteNamespacedPod(podName, this.configManager.getFlag(flags.namespace))
736+
}
737+
738+
let attempts = 0
739+
while (attempts++ < maxAttempts) {
740+
const status = await this.waitForPod(constants.POD_STATUS_RUNNING, podLabels)
741+
if (status) {
742+
const newPods = await this.getPodsByLabel(podLabels)
743+
if (newPods.length === podArray.length) return newPods
744+
}
745+
746+
await sleep(2000)
747+
}
748+
749+
throw new FullstackTestingError(`pods are not running after deletion with labels [${podLabels.join(',')}]`)
750+
}
751+
730752
/**
731753
* Wait for pod
732754
* @param status phase of the pod

test/e2e/commands/node.test.mjs

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -48,14 +48,13 @@ describe.each([
4848
argv[flags.generateGossipKeys.name] = true
4949
argv[flags.generateTlsKeys.name] = true
5050
argv[flags.clusterName.name] = TEST_CLUSTER
51-
argv[flags.chartDirectory.name] = 'charts' // TODO remove before merging PR, this allows pulling local charts
5251
const bootstrapResp = bootstrapNetwork(testName, argv)
5352
const accountManager = bootstrapResp.opts.accountManager
5453
const k8 = bootstrapResp.opts.k8
5554
const nodeCmd = bootstrapResp.cmd.nodeCmd
5655

5756
afterAll(async () => {
58-
await k8.deleteNamespace(namespace) // TODO uncomment this line before merging PR
57+
await k8.deleteNamespace(namespace)
5958
await accountManager.close()
6059
})
6160

0 commit comments

Comments
 (0)