Skip to content

Commit cd38519

Browse files
Authored by Pearl1594 (Pearl Dsilva) and shwstppr
tests: Fix k8s test failures on VMware (#4896)
This PR fixes the k8s test failures noticed on VMware.
Co-authored-by: Pearl Dsilva <pearl.dsilva@shapeblue.com>
Co-authored-by: Abhishek Kumar <abhishek.mrt22@gmail.com>
1 parent 2622856 commit cd38519

2 files changed

Lines changed: 63 additions & 44 deletions

File tree

plugins/integrations/kubernetes-service/src/main/java/com/cloud/kubernetes/cluster/actionworkers/KubernetesClusterResourceModifierActionWorker.java

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -307,7 +307,6 @@ protected void startKubernetesVM(final UserVm vm) throws ManagementServerExcepti
307307
Field f = startVm.getClass().getDeclaredField("id");
308308
f.setAccessible(true);
309309
f.set(startVm, vm.getId());
310-
resizeNodeVolume(vm);
311310
userVmService.startVirtualMachine(startVm);
312311
if (LOGGER.isInfoEnabled()) {
313312
LOGGER.info(String.format("Started VM : %s in the Kubernetes cluster : %s", vm.getDisplayName(), kubernetesCluster.getName()));

test/integration/smoke/test_kubernetes_clusters.py

Lines changed: 63 additions & 43 deletions
Original file line numberDiff line numberDiff line change
@@ -37,6 +37,7 @@
3737
from marvin.lib.base import (Template,
3838
ServiceOffering,
3939
Account,
40+
StoragePool,
4041
Configurations)
4142
from marvin.lib.utils import (cleanup_resources,
4243
validateList,
@@ -81,7 +82,7 @@ def setUpClass(cls):
8182
"cloud.kubernetes.service.enabled",
8283
"true")
8384
cls.restartServer()
84-
85+
cls.updateVmwareSettings(False)
8586
cls.cks_template = None
8687
cls.initial_configuration_cks_template_name = None
8788
cls.cks_service_offering = None
@@ -120,12 +121,13 @@ def setUpClass(cls):
120121
(cls.services["cks_kubernetes_versions"]["1.16.3"]["semanticversion"], cls.services["cks_kubernetes_versions"]["1.16.3"]["url"], e))
121122

122123
if cls.setup_failed == False:
123-
cls.cks_template = cls.getKubernetesTemplate()
124+
cls.cks_template, existAlready = cls.getKubernetesTemplate()
124125
if cls.cks_template == FAILED:
125126
assert False, "getKubernetesTemplate() failed to return template for hypervisor %s" % cls.hypervisor
126127
cls.setup_failed = True
127128
else:
128-
cls._cleanup.append(cls.cks_template)
129+
if not existAlready:
130+
cls._cleanup.append(cls.cks_template)
129131

130132
if cls.setup_failed == False:
131133
cls.initial_configuration_cks_template_name = Configurations.list(cls.apiclient,
@@ -162,8 +164,6 @@ def tearDownClass(cls):
162164
cls.debug("Error: Exception during cleanup for added Kubernetes supported versions: %s" % e)
163165
try:
164166
# Restore original CKS template
165-
if cls.cks_template != None:
166-
cls.cks_template.delete(cls.apiclient)
167167
if cls.hypervisorNotSupported == False and cls.initial_configuration_cks_template_name != None:
168168
Configurations.update(cls.apiclient,
169169
cls.cks_template_name_key,
@@ -176,13 +176,33 @@ def tearDownClass(cls):
176176
"false")
177177
cls.restartServer()
178178

179+
cls.updateVmwareSettings(True)
180+
179181
cleanup_resources(cls.apiclient, cls._cleanup)
180182
except Exception as e:
181183
raise Exception("Warning: Exception during cleanup : %s" % e)
182184
if version_delete_failed == True:
183185
raise Exception("Warning: Exception during cleanup, unable to delete Kubernetes supported versions")
184186
return
185187

188+
@classmethod
189+
def updateVmwareSettings(cls, tearDown):
190+
value = "false"
191+
if not tearDown:
192+
value = "true"
193+
if cls.hypervisor.lower() == 'vmware':
194+
Configurations.update(cls.apiclient,
195+
"vmware.create.full.clone",
196+
value)
197+
allStoragePools = StoragePool.list(
198+
cls.apiclient
199+
)
200+
for pool in allStoragePools:
201+
Configurations.update(cls.apiclient,
202+
storageid=pool.id,
203+
name="vmware.create.full.clone",
204+
value=value)
205+
186206
@classmethod
187207
def restartServer(cls):
188208
"""Restart management server"""
@@ -227,7 +247,7 @@ def getKubernetesTemplate(cls, cks_templates=None):
227247

228248
if hypervisor not in cks_templates.keys():
229249
cls.debug("Provided hypervisor has no CKS template")
230-
return FAILED
250+
return FAILED, False
231251

232252
cks_template = cks_templates[hypervisor]
233253

@@ -244,13 +264,13 @@ def getKubernetesTemplate(cls, cks_templates=None):
244264
details = [{"keyboard": "us"}]
245265
template = Template.register(cls.apiclient, cks_template, zoneid=cls.zone.id, hypervisor=hypervisor.lower(), randomize_name=False, details=details)
246266
template.download(cls.apiclient)
247-
return template
267+
return template, False
248268

249269
for template in templates:
250270
if template.isready and template.ispublic:
251-
return Template(template.__dict__)
271+
return Template(template.__dict__), True
252272

253-
return FAILED
273+
return FAILED, False
254274

255275
@classmethod
256276
def waitForKubernetesSupportedVersionIsoReadyState(cls, version_id, retries=30, interval=60):
@@ -313,38 +333,7 @@ def tearDown(self):
313333

314334
@attr(tags=["advanced", "smoke"], required_hardware="true")
315335
@skipTestIf("hypervisorNotSupported")
316-
def test_01_deploy_kubernetes_cluster(self):
317-
"""Test to deploy a new Kubernetes cluster
318-
319-
# Validate the following:
320-
# 1. createKubernetesCluster should return valid info for new cluster
321-
# 2. The Cloud Database contains the valid information
322-
# 3. stopKubernetesCluster should stop the cluster
323-
"""
324-
if self.setup_failed == True:
325-
self.fail("Setup incomplete")
326-
global k8s_cluster
327-
k8s_cluster = self.getValidKubernetesCluster()
328-
329-
self.debug("Kubernetes cluster with ID: %s successfully deployed, now stopping it" % k8s_cluster.id)
330-
331-
self.stopAndVerifyKubernetesCluster(k8s_cluster.id)
332-
333-
self.debug("Kubernetes cluster with ID: %s successfully stopped, now starting it again" % k8s_cluster.id)
334-
335-
try:
336-
k8s_cluster = self.startKubernetesCluster(k8s_cluster.id)
337-
except Exception as e:
338-
self.deleteKubernetesClusterAndVerify(k8s_cluster.id, False, True)
339-
self.fail("Failed to start Kubernetes cluster due to: %s" % e)
340-
341-
self.verifyKubernetesClusterState(k8s_cluster, 'Running')
342-
343-
return
344-
345-
@attr(tags=["advanced", "smoke"], required_hardware="true")
346-
@skipTestIf("hypervisorNotSupported")
347-
def test_02_invalid_upgrade_kubernetes_cluster(self):
336+
def test_01_invalid_upgrade_kubernetes_cluster(self):
348337
"""Test to check for failure while tying to upgrade a Kubernetes cluster to a lower version
349338
350339
# Validate the following:
@@ -364,12 +353,13 @@ def test_02_invalid_upgrade_kubernetes_cluster(self):
364353
self.fail("Kubernetes cluster upgraded to a lower Kubernetes supported version. Must be an error.")
365354
except Exception as e:
366355
self.debug("Upgrading Kubernetes cluster with invalid Kubernetes supported version check successful, API failure: %s" % e)
356+
self.deleteKubernetesClusterAndVerify(k8s_cluster.id, False, True)
367357

368358
return
369359

370360
@attr(tags=["advanced", "smoke"], required_hardware="true")
371361
@skipTestIf("hypervisorNotSupported")
372-
def test_03_deploy_and_upgrade_kubernetes_cluster(self):
362+
def test_02_deploy_and_upgrade_kubernetes_cluster(self):
373363
"""Test to deploy a new Kubernetes cluster and upgrade it to newer version
374364
375365
# Validate the following:
@@ -395,7 +385,7 @@ def test_03_deploy_and_upgrade_kubernetes_cluster(self):
395385

396386
@attr(tags=["advanced", "smoke"], required_hardware="true")
397387
@skipTestIf("hypervisorNotSupported")
398-
def test_04_deploy_and_scale_kubernetes_cluster(self):
388+
def test_03_deploy_and_scale_kubernetes_cluster(self):
399389
"""Test to deploy a new Kubernetes cluster and check for failure while tying to scale it
400390
401391
# Validate the following:
@@ -431,6 +421,36 @@ def test_04_deploy_and_scale_kubernetes_cluster(self):
431421

432422
return
433423

424+
@attr(tags=["advanced", "smoke"], required_hardware="true")
425+
@skipTestIf("hypervisorNotSupported")
426+
def test_04_basic_lifecycle_kubernetes_cluster(self):
427+
"""Test to deploy a new Kubernetes cluster
428+
429+
# Validate the following:
430+
# 1. createKubernetesCluster should return valid info for new cluster
431+
# 2. The Cloud Database contains the valid information
432+
# 3. stopKubernetesCluster should stop the cluster
433+
"""
434+
if self.setup_failed == True:
435+
self.fail("Setup incomplete")
436+
global k8s_cluster
437+
k8s_cluster = self.getValidKubernetesCluster()
438+
439+
self.debug("Kubernetes cluster with ID: %s successfully deployed, now stopping it" % k8s_cluster.id)
440+
441+
self.stopAndVerifyKubernetesCluster(k8s_cluster.id)
442+
443+
self.debug("Kubernetes cluster with ID: %s successfully stopped, now starting it again" % k8s_cluster.id)
444+
445+
try:
446+
k8s_cluster = self.startKubernetesCluster(k8s_cluster.id)
447+
except Exception as e:
448+
self.deleteKubernetesClusterAndVerify(k8s_cluster.id, False, True)
449+
self.fail("Failed to start Kubernetes cluster due to: %s" % e)
450+
451+
self.verifyKubernetesClusterState(k8s_cluster, 'Running')
452+
return
453+
434454
@attr(tags=["advanced", "smoke"], required_hardware="true")
435455
@skipTestIf("hypervisorNotSupported")
436456
def test_05_delete_kubernetes_cluster(self):

0 commit comments

Comments (0)