From 1c01e4943d0f2229ceec1e78608011b9e5be7112 Mon Sep 17 00:00:00 2001
From: Gayathri M
Date: Mon, 21 Oct 2024 16:06:01 +0530
Subject: [PATCH 1/2] IBM Cloud node stop and start added

---
 ocs_ci/ocs/platform_nodes.py | 22 +++++++++++++++++
 ocs_ci/utility/ibmcloud.py   | 47 ++++++++++++++++++++++++++++++++++++
 2 files changed, 69 insertions(+)

diff --git a/ocs_ci/ocs/platform_nodes.py b/ocs_ci/ocs/platform_nodes.py
index 2a7dc2a5937..afe3a972079 100644
--- a/ocs_ci/ocs/platform_nodes.py
+++ b/ocs_ci/ocs/platform_nodes.py
@@ -2744,6 +2744,28 @@ def __init__(self):
         super(IBMCloud, self).__init__()
         self.ibmcloud = ibmcloud.IBMCloud()
 
+    def stop_nodes(self, nodes, wait=True):
+        """
+        Stop nodes on IBM Cloud
+
+        Args:
+            nodes (list): The OCS objects of the nodes
+            wait (bool): True for waiting the instances to stop, False otherwise
+
+        """
+        self.ibmcloud.start_nodes(nodes, wait=True)
+
+    def start_nodes(self, nodes, wait=True):
+        """
+        Start nodes on IBM Cloud
+
+        Args:
+            nodes (list): The OCS objects of the nodes
+            wait (bool): True for waiting the instances to start, False otherwise
+
+        """
+        self.ibmcloud.start_nodes(nodes, wait=wait)
+
     def restart_nodes(self, nodes, timeout=900, wait=True):
         """
         Restart all the ibmcloud vm instances
diff --git a/ocs_ci/utility/ibmcloud.py b/ocs_ci/utility/ibmcloud.py
index 44b0093f1c2..6d3f6d0c28b 100644
--- a/ocs_ci/utility/ibmcloud.py
+++ b/ocs_ci/utility/ibmcloud.py
@@ -25,6 +25,7 @@
     NodeHasNoAttachedVolume,
     TimeoutExpiredError,
 )
+from ocs_ci.ocs.node import wait_for_nodes_status
 from ocs_ci.ocs.resources.ocs import OCS
 from ocs_ci.utility import version as util_version
 from ocs_ci.utility.utils import get_infra_id, get_ocp_version, run_cmd, TimeoutSampler
@@ -337,6 +338,52 @@ class IBMCloud(object):
     Wrapper for Ibm Cloud
     """
 
+    def start_nodes(self, nodes, wait=True):
+        """
+        Start nodes on IBM Cloud.
+
+        Args:
+            nodes (list): The OCS objects of the nodes
+            wait (bool): True for waiting the instances to start, False otherwise
+
+        """
+        if not nodes:
+            raise ValueError("No nodes found to start")
+
+        node_names = [n.name for n in nodes]
+        self.ibmcloud.restart_nodes(nodes)
+
+        if wait:
+            # When the node is reachable then the node reaches status Ready.
+            logger.info(f"Waiting for nodes: {node_names} to reach ready state")
+            wait_for_nodes_status(
+                node_names=node_names, status=constants.NODE_READY, timeout=180, sleep=5
+            )
+
+    def stop_nodes(self, nodes, wait=True):
+        """
+        Stop nodes on IBM Cloud
+
+        Args:
+            nodes (list): The OCS objects of the nodes
+            wait (bool): True for waiting the instances to stop, False otherwise
+
+        """
+        if not nodes:
+            raise ValueError("No nodes found to stop")
+
+        cmd = "oc debug node/{} -- chroot /host shutdown"
+        node_names = [n.name for n in nodes]
+        for node in node_names:
+            run_cmd(cmd.format(node))
+
+        if wait:
+            # When the node is shut down and unreachable it reaches status NotReady.
+            logger.info(f"Waiting for nodes: {node_names} to reach not ready state")
+            wait_for_nodes_status(
+                node_names, constants.NODE_NOT_READY, timeout=180, sleep=5
+            )
+
     def restart_nodes(self, nodes, timeout=900, wait=True):
         """
         Reboot the nodes on IBM Cloud.
From 2a401ae67a4634ab6a019033d86e1ca70b9a5a63 Mon Sep 17 00:00:00 2001
From: Gayathri M
Date: Tue, 22 Oct 2024 20:42:32 +0530
Subject: [PATCH 2/2] test fix on IBMCloud

---
 ocs_ci/ocs/platform_nodes.py                            | 2 +-
 ocs_ci/utility/ibmcloud.py                              | 8 ++++++--
 tests/functional/object/mcg/ui/test_mcg_ui.py           | 2 ++
 tests/functional/object/mcg/ui/test_namespace_store.py  | 7 ++++++-
 .../storageclass/test_delete_rbd_pool_attached_to_sc.py | 2 ++
 .../nodes/test_check_pods_status_after_node_failure.py  | 2 --
 6 files changed, 17 insertions(+), 6 deletions(-)

diff --git a/ocs_ci/ocs/platform_nodes.py b/ocs_ci/ocs/platform_nodes.py
index afe3a972079..43a2c16f830 100644
--- a/ocs_ci/ocs/platform_nodes.py
+++ b/ocs_ci/ocs/platform_nodes.py
@@ -2753,7 +2753,7 @@ def stop_nodes(self, nodes, wait=True):
         wait (bool): True for waiting the instances to stop, False otherwise
 
         """
-        self.ibmcloud.start_nodes(nodes, wait=True)
+        self.ibmcloud.stop_nodes(nodes, wait=wait)
 
     def start_nodes(self, nodes, wait=True):
         """
diff --git a/ocs_ci/utility/ibmcloud.py b/ocs_ci/utility/ibmcloud.py
index 6d3f6d0c28b..4b80c8ca334 100644
--- a/ocs_ci/utility/ibmcloud.py
+++ b/ocs_ci/utility/ibmcloud.py
@@ -345,13 +345,15 @@ def start_nodes(self, nodes, wait=True):
         Args:
             nodes (list): The OCS objects of the nodes
             wait (bool): True for waiting the instances to start, False otherwise
-
+
+        Raises:
+            ValueError if the list of nodes is empty
         """
         if not nodes:
             raise ValueError("No nodes found to start")
 
         node_names = [n.name for n in nodes]
-        self.ibmcloud.restart_nodes(nodes)
+        self.restart_nodes(nodes)
 
         if wait:
             # When the node is reachable then the node reaches status Ready.
@@ -368,6 +370,8 @@ def stop_nodes(self, nodes, wait=True):
             nodes (list): The OCS objects of the nodes
             wait (bool): True for waiting the instances to stop, False otherwise
 
+        Raises:
+            ValueError if the list of nodes is empty
         """
         if not nodes:
             raise ValueError("No nodes found to stop")
diff --git a/tests/functional/object/mcg/ui/test_mcg_ui.py b/tests/functional/object/mcg/ui/test_mcg_ui.py
index 8e078af2128..a4a3ee40637 100644
--- a/tests/functional/object/mcg/ui/test_mcg_ui.py
+++ b/tests/functional/object/mcg/ui/test_mcg_ui.py
@@ -7,6 +7,7 @@
     black_squad,
     runs_on_provider,
     mcg,
+    skipif_ibm_cloud_managed,
 )
 from ocs_ci.ocs import constants
 from ocs_ci.helpers.helpers import create_unique_resource_name
@@ -58,6 +59,7 @@ def teardown(self):
     @tier1
     @runs_on_provider
     @skipif_disconnected_cluster
+    @skipif_ibm_cloud_managed
     @pytest.mark.parametrize(
         argnames=["kind", "provider", "region"],
         argvalues=[
diff --git a/tests/functional/object/mcg/ui/test_namespace_store.py b/tests/functional/object/mcg/ui/test_namespace_store.py
index 206d08cfa59..38c19fdbd22 100644
--- a/tests/functional/object/mcg/ui/test_namespace_store.py
+++ b/tests/functional/object/mcg/ui/test_namespace_store.py
@@ -4,7 +4,11 @@
 from ocs_ci.ocs.ocp import OCP
 from ocs_ci.ocs import constants
 from ocs_ci.framework import config
-from ocs_ci.framework.pytest_customization.marks import black_squad, runs_on_provider
+from ocs_ci.framework.pytest_customization.marks import (
+    black_squad,
+    runs_on_provider,
+    skipif_ibm_cloud_managed,
+)
 from ocs_ci.framework.testlib import tier1, ui, polarion_id
 from ocs_ci.ocs.ui.mcg_ui import NamespaceStoreUI
 from ocs_ci.ocs.resources.namespacestore import NamespaceStore
@@ -32,6 +36,7 @@ def teardown(self):
     @ui
     @tier1
     @runs_on_provider
+    @skipif_ibm_cloud_managed
     @pytest.mark.bugzilla("2158922")
     @polarion_id("OCS-5125")
     def test_create_namespace_store_ui(self, setup_ui_class_factory, pvc_factory):
diff --git
a/tests/functional/storageclass/test_delete_rbd_pool_attached_to_sc.py b/tests/functional/storageclass/test_delete_rbd_pool_attached_to_sc.py index 57293b125fe..251ebdd6be1 100644 --- a/tests/functional/storageclass/test_delete_rbd_pool_attached_to_sc.py +++ b/tests/functional/storageclass/test_delete_rbd_pool_attached_to_sc.py @@ -11,6 +11,7 @@ ignore_resource_not_found_error_label, tier1, green_squad, + skipif_ibm_cloud_managed, ) from ocs_ci.framework.testlib import ManageTest from ocs_ci.ocs import constants @@ -184,6 +185,7 @@ def test_delete_rbd_pool_associated_with_sc( @tier1 @skipif_external_mode + @skipif_ibm_cloud_managed @pytest.mark.parametrize( argnames=["replica", "compression", "volume_binding_mode", "pvc_status"], argvalues=[ diff --git a/tests/functional/z_cluster/nodes/test_check_pods_status_after_node_failure.py b/tests/functional/z_cluster/nodes/test_check_pods_status_after_node_failure.py index 58e2e85eb3b..d299f0f80ed 100644 --- a/tests/functional/z_cluster/nodes/test_check_pods_status_after_node_failure.py +++ b/tests/functional/z_cluster/nodes/test_check_pods_status_after_node_failure.py @@ -7,7 +7,6 @@ ManageTest, tier4a, ignore_leftovers, - skipif_ibm_cloud, skipif_managed_service, skipif_hci_provider_and_client, skipif_external_mode, @@ -141,7 +140,6 @@ def finalizer(): request.addfinalizer(finalizer) - @skipif_ibm_cloud def test_check_pods_status_after_node_failure(self, nodes, node_restart_teardown): """ Test check pods status after a node failure event.