If the tempest.api.compute.servers.test_server_actions.ServerActionsTestJSON.test_lock_unlock_server test case fails, the server is not deleted by tearDown:
[root@vm-tempest tempest]# ./run_tempest.sh id-80a8094c-211e-440a-ab88-9e59d556c7ee
running=OS_STDOUT_CAPTURE=${OS_STDOUT_CAPTURE:-1} \
OS_STDERR_CAPTURE=${OS_STDERR_CAPTURE:-1} \
OS_TEST_TIMEOUT=${OS_TEST_TIMEOUT:-500} \
OS_TEST_LOCK_PATH=${OS_TEST_LOCK_PATH:-${TMPDIR:-'/tmp'}} \
${PYTHON:-python} -m subunit.run discover -t ${OS_TOP_LEVEL:-./} ${OS_TEST_PATH:-./tempest/test_discover} --list
running=OS_STDOUT_CAPTURE=${OS_STDOUT_CAPTURE:-1} \
OS_STDERR_CAPTURE=${OS_STDERR_CAPTURE:-1} \
OS_TEST_TIMEOUT=${OS_TEST_TIMEOUT:-500} \
OS_TEST_LOCK_PATH=${OS_TEST_LOCK_PATH:-${TMPDIR:-'/tmp'}} \
${PYTHON:-python} -m subunit.run discover -t ${OS_TOP_LEVEL:-./} ${OS_TEST_PATH:-./tempest/test_discover} --load-list /tmp/tmp8tQqxV
2015-06-25 08:25:34.296 29055 ERROR tempest.api.compute.base [-] Request timed out
Details: (ServerActionsTestJSON:tearDown) Server ca76589a-ce88-459a-a387-e8bfda54fa39 failed to reach ACTIVE status and task state "None" within the required time (180 s). Current status: SHUTOFF. Current task state: None.
2015-06-25 08:25:34.296 29055 ERROR tempest.api.compute.base Traceback (most recent call last):
2015-06-25 08:25:34.296 29055 ERROR tempest.api.compute.base File "/opt/tempest/tempest/api/compute/base.py", line 145, in server_check_teardown
2015-06-25 08:25:34.296 29055 ERROR tempest.api.compute.base 'ACTIVE')
2015-06-25 08:25:34.296 29055 ERROR tempest.api.compute.base File "/opt/tempest/tempest/services/compute/json/servers_client.py", line 182, in wait_for_server_status
2015-06-25 08:25:34.296 29055 ERROR tempest.api.compute.base ready_wait=ready_wait)
2015-06-25 08:25:34.296 29055 ERROR tempest.api.compute.base File "/opt/tempest/tempest/common/waiters.py", line 94, in wait_for_server_status
2015-06-25 08:25:34.296 29055 ERROR tempest.api.compute.base raise exceptions.TimeoutException(message)
2015-06-25 08:25:34.296 29055 ERROR tempest.api.compute.base TimeoutException: Request timed out
2015-06-25 08:25:34.296 29055 ERROR tempest.api.compute.base Details: (ServerActionsTestJSON:tearDown) Server ca76589a-ce88-459a-a387-e8bfda54fa39 failed to reach ACTIVE status and task state "None" within the required time (180 s). Current status: SHUTOFF. Current task state: None.
2015-06-25 08:25:34.296 29055 ERROR tempest.api.compute.base
tempest.api.compute.servers.test_server_actions.ServerActionsTestJSON
test_lock_unlock_server[id-80a8094c-211e-440a-ab88-9e59d556c7ee] FAIL
2015-06-25 08:25:34.703 29055 ERROR tempest.api.compute.base [-] Deleting server ca76589a-ce88-459a-a387-e8bfda54fa39 failed
2015-06-25 08:25:34.703 29055 ERROR tempest.api.compute.base Traceback (most recent call last):
2015-06-25 08:25:34.703 29055 ERROR tempest.api.compute.base File "/opt/tempest/tempest/api/compute/base.py", line 118, in clear_servers
2015-06-25 08:25:34.703 29055 ERROR tempest.api.compute.base cls.servers_client.delete_server(server['id'])
2015-06-25 08:25:34.703 29055 ERROR tempest.api.compute.base File "/opt/tempest/tempest/services/compute/json/servers_client.py", line 148, in delete_server
2015-06-25 08:25:34.703 29055 ERROR tempest.api.compute.base resp, body = self.delete("servers/%s" % str(server_id))
2015-06-25 08:25:34.703 29055 ERROR tempest.api.compute.base File "/opt/tempest/.venv/lib/python2.7/site-packages/tempest_lib/common/rest_client.py", line 287, in delete
2015-06-25 08:25:34.703 29055 ERROR tempest.api.compute.base return self.request('DELETE', url, extra_headers, headers, body)
2015-06-25 08:25:34.703 29055 ERROR tempest.api.compute.base File "/opt/tempest/.venv/lib/python2.7/site-packages/tempest_lib/common/rest_client.py", line 636, in request
2015-06-25 08:25:34.703 29055 ERROR tempest.api.compute.base resp, resp_body)
2015-06-25 08:25:34.703 29055 ERROR tempest.api.compute.base File "/opt/tempest/.venv/lib/python2.7/site-packages/tempest_lib/common/rest_client.py", line 698, in _error_checker
2015-06-25 08:25:34.703 29055 ERROR tempest.api.compute.base raise exceptions.Conflict(resp_body)
2015-06-25 08:25:34.703 29055 ERROR tempest.api.compute.base Conflict: An object with that identifier already exists
2015-06-25 08:25:34.703 29055 ERROR tempest.api.compute.base Details: {u'message': u'Instance ca76589a-ce88-459a-a387-e8bfda54fa39 is locked', u'code': 409}
2015-06-25 08:25:34.703 29055 ERROR tempest.api.compute.base
2015-06-25 08:28:35.593 29055 ERROR tempest.api.compute.base [-] Waiting for deletion of server ca76589a-ce88-459a-a387-e8bfda54fa39 failed
2015-06-25 08:28:35.593 29055 ERROR tempest.api.compute.base Traceback (most recent call last):
2015-06-25 08:28:35.593 29055 ERROR tempest.api.compute.base File "/opt/tempest/tempest/api/compute/base.py", line 128, in clear_servers
2015-06-25 08:28:35.593 29055 ERROR tempest.api.compute.base cls.servers_client.wait_for_server_termination(server['id'])
2015-06-25 08:28:35.593 29055 ERROR tempest.api.compute.base File "/opt/tempest/tempest/services/compute/json/servers_client.py", line 198, in wait_for_server_termination
2015-06-25 08:28:35.593 29055 ERROR tempest.api.compute.base raise exceptions.TimeoutException
2015-06-25 08:28:35.593 29055 ERROR tempest.api.compute.base TimeoutException: Request timed out
2015-06-25 08:28:35.593 29055 ERROR tempest.api.compute.base
Slowest 1 tests took 183.32 secs:
tempest.api.compute.servers.test_server_actions.ServerActionsTestJSON
test_lock_unlock_server[id-80a8094c-211e-440a-ab88-9e59d556c7ee] 183.32
======================================================================
FAIL: tempest.api.compute.servers.test_server_actions.ServerActionsTestJSON.test_lock_unlock_server[id-80a8094c-211e-440a-ab88-9e59d556c7ee]
----------------------------------------------------------------------
Traceback (most recent call last):
testtools.testresult.real._StringException: Empty attachments:
stderr
stdout
pythonlogging:'': {{{
2015-06-25 08:25:34,296 29055 ERROR [tempest.api.compute.base] Request timed out
Details: (ServerActionsTestJSON:tearDown) Server ca76589a-ce88-459a-a387-e8bfda54fa39 failed to reach ACTIVE status and task state "None" within the required time (180 s). Current status: SHUTOFF. Current task state: None.
2015-06-25 08:25:34.296 29055 ERROR tempest.api.compute.base Traceback (most recent call last):
2015-06-25 08:25:34.296 29055 ERROR tempest.api.compute.base File "/opt/tempest/tempest/api/compute/base.py", line 145, in server_check_teardown
2015-06-25 08:25:34.296 29055 ERROR tempest.api.compute.base 'ACTIVE')
2015-06-25 08:25:34.296 29055 ERROR tempest.api.compute.base File "/opt/tempest/tempest/services/compute/json/servers_client.py", line 182, in wait_for_server_status
2015-06-25 08:25:34.296 29055 ERROR tempest.api.compute.base ready_wait=ready_wait)
2015-06-25 08:25:34.296 29055 ERROR tempest.api.compute.base File "/opt/tempest/tempest/common/waiters.py", line 94, in wait_for_server_status
2015-06-25 08:25:34.296 29055 ERROR tempest.api.compute.base raise exceptions.TimeoutException(message)
2015-06-25 08:25:34.296 29055 ERROR tempest.api.compute.base TimeoutException: Request timed out
2015-06-25 08:25:34.296 29055 ERROR tempest.api.compute.base Details: (ServerActionsTestJSON:tearDown) Server ca76589a-ce88-459a-a387-e8bfda54fa39 failed to reach ACTIVE status and task state "None" within the required time (180 s). Current status: SHUTOFF. Current task state: None.
2015-06-25 08:25:34.296 29055 ERROR tempest.api.compute.base
}}}
traceback-1: {{{
Traceback (most recent call last):
File "/opt/tempest/tempest/api/compute/servers/test_server_actions.py", line 54, in tearDown
self.server_check_teardown()
File "/opt/tempest/tempest/api/compute/base.py", line 148, in server_check_teardown
cls.servers_client.delete_server(cls.server_id)
File "/opt/tempest/tempest/services/compute/json/servers_client.py", line 148, in delete_server
resp, body = self.delete("servers/%s" % str(server_id))
File "/opt/tempest/.venv/lib/python2.7/site-packages/tempest_lib/common/rest_client.py", line 287, in delete
return self.request('DELETE', url, extra_headers, headers, body)
File "/opt/tempest/.venv/lib/python2.7/site-packages/tempest_lib/common/rest_client.py", line 636, in request
resp, resp_body)
File "/opt/tempest/.venv/lib/python2.7/site-packages/tempest_lib/common/rest_client.py", line 698, in _error_checker
raise exceptions.Conflict(resp_body)
tempest_lib.exceptions.Conflict: An object with that identifier already exists
Details: {u'message': u'Instance ca76589a-ce88-459a-a387-e8bfda54fa39 is locked', u'code': 409}
}}}
Traceback (most recent call last):
File "/opt/tempest/tempest/api/compute/servers/test_server_actions.py", line 457, in test_lock_unlock_server
self.servers_client.stop, self.server_id)
File "/opt/tempest/.venv/lib/python2.7/site-packages/testtools/testcase.py", line 422, in assertRaises
self.assertThat(our_callable, matcher)
File "/opt/tempest/.venv/lib/python2.7/site-packages/testtools/testcase.py", line 435, in assertThat
raise mismatch_error
testtools.matchers._impl.MismatchError: <bound method ServersClientJSON.stop of <tempest.services.compute.json.servers_client.ServersClientJSON object at 0x4785450>> returned {}
Ran 1 test in 382.704s
FAILED (failures=1)
[root@vm-tempest tempest]#
[root@controller-0 wrsroot(keystone_admin)]# nova list --all-tenants
+--------------------------------------+-------------------------------------------+---------+------------+-------------+---------------------------+
| ID | Name | Status | Task State | Power State | Networks |
+--------------------------------------+-------------------------------------------+---------+------------+-------------+---------------------------+
| ca76589a-ce88-459a-a387-e8bfda54fa39 | tempest.common.compute-instance-957176313 | SHUTOFF | - | Shutdown | vm-mgmt-net0=192.168.1.32 |
+--------------------------------------+-------------------------------------------+---------+------------+-------------+---------------------------+
[root@controller-0 wrsroot(keystone_admin)]#
A possible fix is to register an addCleanup handler that unlocks the server, so that teardown can delete it even when the test fails mid-way.
Yes — if the server gets stuck in a locked state, then teardown is unable to delete it (the DELETE request returns 409 Conflict, "Instance ... is locked").