Merge "For test VM validation resource were added."
diff --git a/tempest/api/compute/admin/test_servers_on_multinodes.py b/tempest/api/compute/admin/test_servers_on_multinodes.py
index b5ee9b1..c5d5b19 100644
--- a/tempest/api/compute/admin/test_servers_on_multinodes.py
+++ b/tempest/api/compute/admin/test_servers_on_multinodes.py
@@ -150,6 +150,15 @@
         compute.shelve_server(self.servers_client, server['id'],
                               force_shelve_offload=True)
 
+        # Work around https://bugs.launchpad.net/nova/+bug/2045785
+        # This can be removed when ^ is fixed.
+        def _check_server_host_is_none():
+            server_details = self.os_admin.servers_client.show_server(
+                server['id'])
+            self.assertIsNone(server_details['server']['OS-EXT-SRV-ATTR:host'])
+
+        self.wait_for(_check_server_host_is_none)
+
         self.os_admin.servers_client.unshelve_server(
             server['id'],
             body={'unshelve': {'host': host}}
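
The workaround above polls the admin view of the server until nova clears the
OS-EXT-SRV-ATTR:host field after the shelve offload. As a minimal sketch of that
polling pattern, assuming a hypothetical generic helper (this is not tempest's
actual wait_for implementation; timeout and interval values are illustrative):

    import time

    def poll_until(condition, timeout=300, interval=1):
        # Re-run condition() until it stops raising AssertionError,
        # or re-raise once the timeout expires.
        start = time.time()
        while True:
            try:
                return condition()
            except AssertionError:
                if time.time() - start > timeout:
                    raise
                time.sleep(interval)

Used as poll_until(_check_server_host_is_none), this keeps re-checking the
server record until the host attribute is reported as None.
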
diff --git a/tempest/lib/common/http.py b/tempest/lib/common/http.py
index d163968..5bdcecd 100644
--- a/tempest/lib/common/http.py
+++ b/tempest/lib/common/http.py
@@ -60,6 +60,14 @@
             retry = urllib3.util.Retry(redirect=False)
         r = super(ClosingProxyHttp, self).request(method, url, retries=retry,
                                                   *args, **new_kwargs)
+
+        # Clearing the pool is necessary to free memory that holds certificates
+        # loaded by the HTTPConnection class in urllib3. This line can be
+        # removed once we require a newer version of urllib3 (e.g., 2.2.3) that
+        # does not retain certificates in memory for each HTTPConnection
+        # managed by the PoolManager.
+        self.clear()
+
         if not kwargs.get('preload_content', True):
             # This means we asked urllib3 for streaming content, so we
             # need to return the raw response and not read any data yet
@@ -114,6 +122,14 @@
             retry = urllib3.util.Retry(redirect=False)
         r = super(ClosingHttp, self).request(method, url, retries=retry,
                                              *args, **new_kwargs)
+
+        # Clearing the pool is necessary to free memory that holds certificates
+        # loaded by the HTTPConnection class in urllib3. This line can be
+        # removed once we require a newer version of urllib3 (e.g., 2.2.3) that
+        # does not retain certificates in memory for each HTTPConnection
+        # managed by the PoolManager.
+        self.clear()
+
         if not kwargs.get('preload_content', True):
             # This means we asked urllib3 for streaming content, so we
             # need to return the raw response and not read any data yet
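
For a standalone picture of what the added self.clear() call does, here is a
minimal sketch against urllib3 directly (ClosingProxyHttp and ClosingHttp both
wrap urllib3 pool managers; example.com is only a placeholder URL):

    import urllib3

    # A PoolManager keeps one connection pool per host, and each pooled
    # HTTPS connection can hold loaded certificate data in memory.
    pool = urllib3.PoolManager(cert_reqs='CERT_REQUIRED')
    resp = pool.request('GET', 'https://example.com/')
    resp.release_conn()

    # clear() empties the pools and closes the connections, releasing
    # that per-connection state.
    pool.clear()
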
diff --git a/tempest/scenario/manager.py b/tempest/scenario/manager.py
index 369efcc..be2b2d6 100644
--- a/tempest/scenario/manager.py
+++ b/tempest/scenario/manager.py
@@ -923,6 +923,19 @@
         if not isinstance(exc, lib_exc.SSHTimeout):
             LOG.debug('Network information on a devstack host')
 
+    def get_snapshot_id(self, bdms):
+        if isinstance(bdms, str):
+            bdms = json.loads(bdms)
+        snapshot_id = None
+        for bdm in bdms:
+            # Look for the block device mapping that actually has a
+            # snapshot. If the server has an ephemeral or swap disk, its
+            # block device mapping will be present with snapshot_id = None.
+            if 'snapshot_id' in bdm and bdm['snapshot_id'] is not None:
+                snapshot_id = bdm['snapshot_id']
+                break
+        return snapshot_id
+
     def create_server_snapshot(self, server, name=None, **kwargs):
         """Creates server snapshot"""
         # Glance client
@@ -949,20 +962,19 @@
         snapshot_image = _image_client.show_image(image_id)
         image_props = snapshot_image
 
-        bdm = image_props.get('block_device_mapping')
-        if bdm:
-            bdm = json.loads(bdm)
-            if bdm and 'snapshot_id' in bdm[0]:
-                snapshot_id = bdm[0]['snapshot_id']
-                self.addCleanup(
-                    self.snapshots_client.wait_for_resource_deletion,
-                    snapshot_id)
-                self.addCleanup(test_utils.call_and_ignore_notfound_exc,
-                                self.snapshots_client.delete_snapshot,
-                                snapshot_id)
-                waiters.wait_for_volume_resource_status(self.snapshots_client,
-                                                        snapshot_id,
-                                                        'available')
+        bdms = image_props.get('block_device_mapping')
+        if bdms:
+            snapshot_id = self.get_snapshot_id(bdms)
+            self.assertIsNotNone(snapshot_id)
+            self.addCleanup(
+                self.snapshots_client.wait_for_resource_deletion,
+                snapshot_id)
+            self.addCleanup(test_utils.call_and_ignore_notfound_exc,
+                            self.snapshots_client.delete_snapshot,
+                            snapshot_id)
+            waiters.wait_for_volume_resource_status(
+                self.snapshots_client, snapshot_id, 'available')
+
         image_name = snapshot_image['name']
         self.assertEqual(name, image_name)
         LOG.debug("Created snapshot image %s for server %s",
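
To show how the new helper behaves, here is a self-contained sketch with a
standalone copy of get_snapshot_id and a hand-written block_device_mapping
value (the snapshot id is purely illustrative):

    import json

    def get_snapshot_id(bdms):
        # Standalone copy of the manager.py helper, for illustration only.
        if isinstance(bdms, str):
            bdms = json.loads(bdms)
        for bdm in bdms:
            if bdm.get('snapshot_id') is not None:
                return bdm['snapshot_id']
        return None

    # Swap/ephemeral entries carry snapshot_id = None and are skipped.
    bdms = json.dumps([
        {'guest_format': 'swap', 'snapshot_id': None},
        {'boot_index': 0, 'snapshot_id': 'example-snapshot-id'},
    ])
    assert get_snapshot_id(bdms) == 'example-snapshot-id'
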
diff --git a/tempest/scenario/test_volume_boot_pattern.py b/tempest/scenario/test_volume_boot_pattern.py
index 5e28ecd..febc2f6 100644
--- a/tempest/scenario/test_volume_boot_pattern.py
+++ b/tempest/scenario/test_volume_boot_pattern.py
@@ -11,7 +11,6 @@
 #    under the License.
 
 from oslo_log import log as logging
-from oslo_serialization import jsonutils as json
 import testtools
 
 from tempest.common import utils
@@ -245,8 +244,7 @@
         bdms = image.get('block_device_mapping')
         if not bdms:
             bdms = image['properties']['block_device_mapping']
-        bdms = json.loads(bdms)
-        snapshot_id = bdms[0]['snapshot_id']
+        snapshot_id = self.get_snapshot_id(bdms)
         self._delete_snapshot(snapshot_id)
 
         # Now, delete the first server which will also delete the first
diff --git a/test-requirements.txt b/test-requirements.txt
index bd4d772..b925921 100644
--- a/test-requirements.txt
+++ b/test-requirements.txt
@@ -1,4 +1,4 @@
-hacking>=6.1.0,<6.2.0
+hacking>=7.0.0,<7.1.0
 coverage!=4.4,>=4.0 # Apache-2.0
 oslotest>=3.2.0 # Apache-2.0
 flake8-import-order>=0.18.0,<0.19.0 # LGPLv3
diff --git a/tox.ini b/tox.ini
index d9d2bad..0fbc252 100644
--- a/tox.ini
+++ b/tox.ini
@@ -387,14 +387,14 @@
 [testenv:pep8]
 deps =
     {[testenv]deps}
-    autopep8
+    autopep8>=2.1.0
 commands =
     autopep8 --exit-code --max-line-length=79 --experimental --diff -r tempest setup.py
     flake8 {posargs}
     check-uuid
 
 [testenv:autopep8]
-deps = autopep8
+deps = autopep8>=2.1.0
 commands =
     {toxinidir}/tools/format.sh
 
diff --git a/zuul.d/integrated-gate.yaml b/zuul.d/integrated-gate.yaml
index fb08297..a9aa384 100644
--- a/zuul.d/integrated-gate.yaml
+++ b/zuul.d/integrated-gate.yaml
@@ -255,10 +255,10 @@
 - job:
     name: tempest-multinode-full-py3
     parent: tempest-multinode-full-base
-    nodeset: openstack-two-node-jammy
-    # This job runs on ubuntu Jammy and after unmaintained/zed.
+    nodeset: openstack-two-node-noble
+    # This job runs on Ubuntu Noble from 2025.1 onwards.
     branches:
-      regex: ^.*/(victoria|wallaby|xena|yoga|zed)$
+      regex: ^.*/(victoria|wallaby|xena|yoga|zed|2023.1|2023.2|2024.1|2024.2)$
       negate: true
     vars:
       # NOTE(gmann): Default concurrency is higher (number of cpu -2) which
@@ -267,8 +267,6 @@
       # oom issue, setting the concurrency to 4 in this job.
       tempest_concurrency: 4
       tempest_set_src_dest_host: true
-      devstack_localrc:
-        USE_PYTHON3: true
       devstack_plugins:
         neutron: https://opendev.org/openstack/neutron
       devstack_services:
@@ -277,8 +275,6 @@
         br-int-flows: true
     group-vars:
       subnode:
-        devstack_localrc:
-          USE_PYTHON3: true
         devstack_services:
           br-ex-tcpdump: true
           br-int-flows: true
@@ -344,6 +340,27 @@
         devstack_localrc:
           ENABLE_VOLUME_MULTIATTACH: true
 
+# TODO(gmann): As per the 2025.1 testing runtime, we need to run at least
+# one job set on Jammy. These Jammy jobs can be removed in the next
+# cycle (2025.2).
+- job:
+    name: tempest-full-ubuntu-jammy
+    description: This is the tempest-full python3 job on Ubuntu Jammy (22.04)
+    parent: tempest-full-py3
+    nodeset: openstack-single-node-jammy
+
+- job:
+    name: tempest-multinode-full-ubuntu-jammy
+    description: This is the tempest-multinode-full-py3 job on Ubuntu Jammy (22.04)
+    parent: tempest-multinode-full-py3
+    nodeset: openstack-two-node-jammy
+
+- job:
+    name: tempest-extra-tests-ubuntu-jammy
+    description: This is the tempest-extra-tests python3 job on Ubuntu Jammy (22.04)
+    parent: tempest-extra-tests
+    nodeset: openstack-single-node-jammy
+
 - job:
     name: tempest-cinder-v2-api
     parent: devstack-tempest
diff --git a/zuul.d/project.yaml b/zuul.d/project.yaml
index 240af53..2f21c2d 100644
--- a/zuul.d/project.yaml
+++ b/zuul.d/project.yaml
@@ -27,6 +27,14 @@
               - ^.gitignore$
               - ^.gitreview$
               - ^.mailmap$
+        # NOTE(gmann): Running jobs on Jammy as per the additional testing
+        # for the 2025.1 cycle; these can be removed in the 2025.2 cycle.
+        - tempest-full-ubuntu-jammy:
+            irrelevant-files: *tempest-irrelevant-files
+        - tempest-multinode-full-ubuntu-jammy:
+            irrelevant-files: *tempest-irrelevant-files
+        - tempest-extra-tests-ubuntu-jammy:
+            irrelevant-files: *tempest-irrelevant-files
         - tempest-extra-tests:
             irrelevant-files: *tempest-irrelevant-files
         - glance-multistore-cinder-import:
@@ -43,10 +51,7 @@
             irrelevant-files: *tempest-irrelevant-files
         - tempest-multinode-full-py3:
             irrelevant-files: *tempest-irrelevant-files
-        # TODO(gmann): make it non voting once failure is fixed in plugin
-        # https://zuul.opendev.org/t/openstack/build/2c2ed82bd12948b98d9ea7e2ebe625a8
         - tempest-tox-plugin-sanity-check:
-            voting: false
             irrelevant-files: &tempest-irrelevant-files-2
               - ^.*\.rst$
               - ^doc/.*$
@@ -157,6 +162,14 @@
             irrelevant-files: *tempest-irrelevant-files
         - ironic-tempest-bios-ipmi-direct-tinyipa:
             irrelevant-files: *tempest-irrelevant-files
+        # NOTE(gmann): Running jobs on Jammy as per the additional testing
+        # for the 2025.1 cycle; these can be removed in the 2025.2 cycle.
+        - tempest-full-ubuntu-jammy:
+            irrelevant-files: *tempest-irrelevant-files
+        - tempest-multinode-full-ubuntu-jammy:
+            irrelevant-files: *tempest-irrelevant-files
+        - tempest-extra-tests-ubuntu-jammy:
+            irrelevant-files: *tempest-irrelevant-files
     experimental:
       jobs:
         - nova-multi-cell
diff --git a/zuul.d/stable-jobs.yaml b/zuul.d/stable-jobs.yaml
index efa771e..5785ec6 100644
--- a/zuul.d/stable-jobs.yaml
+++ b/zuul.d/stable-jobs.yaml
@@ -90,6 +90,26 @@
 - job:
     name: tempest-multinode-full-py3
     parent: tempest-multinode-full
+    nodeset: openstack-two-node-jammy
+    # This job runs on Jammy and is supposed to run until 2024.2.
+    branches:
+      - ^.*/2023.1
+      - ^.*/2023.2
+      - ^.*/2024.1
+      - ^.*/2024.2
+    vars:
+      devstack_plugins:
+        neutron: https://opendev.org/openstack/neutron
+      devstack_services:
+        neutron-trunk: true
+    group-vars:
+      subnode:
+        devstack_localrc:
+          USE_PYTHON3: true
+
+- job:
+    name: tempest-multinode-full-py3
+    parent: tempest-multinode-full
     nodeset: openstack-two-node-focal
     # This job runs on Focal and supposed to run until unmaintained/zed.
     branches:
diff --git a/zuul.d/tempest-specific.yaml b/zuul.d/tempest-specific.yaml
index 296682e..deb4157 100644
--- a/zuul.d/tempest-specific.yaml
+++ b/zuul.d/tempest-specific.yaml
@@ -76,7 +76,7 @@
     parent: tox
     description: |
       Run tempest plugin sanity check script using tox.
-    nodeset: ubuntu-jammy
+    nodeset: ubuntu-noble
     vars:
       tox_envlist: plugin-sanity-check
     timeout: 5000