This time it happened on different nodes. Check the command output below; the OVS log is attached.

[wrsroot@controller-0 tmp(keystone_admin)]$ system host-list
+----+--------------+-------------+----------------+-------------+--------------+
| id | hostname     | personality | administrative | operational | availability |
+----+--------------+-------------+----------------+-------------+--------------+
| 1  | controller-0 | controller  | unlocked       | enabled     | available    |
| 2  | controller-1 | controller  | unlocked       | enabled     | available    |
| 3  | storage-0    | storage     | unlocked       | enabled     | available    |
| 4  | storage-1    | storage     | unlocked       | enabled     | available    |
| 5  | compute-0    | worker      | unlocked       | enabled     | available    |
| 6  | compute-1    | worker      | unlocked       | enabled     | available    |
| 7  | compute-2    | worker      | unlocked       | disabled    | offline      |
| 8  | compute-3    | worker      | unlocked       | enabled     | available    |
| 9  | compute-4    | worker      | unlocked       | enabled     | available    |
+----+--------------+-------------+----------------+-------------+--------------+
[wrsroot@controller-0 tmp(keystone_admin)]$

On compute-2:

compute-2:~$ sudo find /sys -name "free_hugepages*"
Password:
/sys/devices/system/node/node0/hugepages/hugepages-1048576kB/free_hugepages
/sys/devices/system/node/node0/hugepages/hugepages-2048kB/free_hugepages
/sys/devices/system/node/node1/hugepages/hugepages-1048576kB/free_hugepages
/sys/devices/system/node/node1/hugepages/hugepages-2048kB/free_hugepages
/sys/kernel/mm/hugepages/hugepages-1048576kB/free_hugepages
/sys/kernel/mm/hugepages/hugepages-2048kB/free_hugepages
compute-2:~$ sudo find /sys -name "free_hugepages" | xargs -L1 grep -E "^"
0
0
0
0
0
0
compute-2:~$ mount | grep -i huge
cgroup on /sys/fs/cgroup/hugetlb type cgroup (rw,nosuid,nodev,noexec,relatime,hugetlb)
none on /dev/huge-1048576kB type hugetlbfs (rw,relatime,pagesize=1048576kB)
none on /mnt/huge-2048kB type hugetlbfs (rw,relatime,pagesize=2048kB)
none on /dev/huge-2048kB type hugetlbfs (rw,relatime,pagesize=2048kB)
none on /dev/hugepages type hugetlbfs (rw,relatime,pagesize=2M)
none on /mnt/huge-1048576kB type hugetlbfs (rw,relatime,pagesize=1048576kB)
compute-2:~$ sudo echo 3 > sudo tee /sys/devices/system/node/node0/hugepages/hugepages-1048576kB/free_hugepages
compute-2:~$ sudo echo 3 > sudo tee /sys/devices/system/node/node1/hugepages/hugepages-1048576kB/free_hugepages
compute-2:~$ sudo echo 5000 > sudo tee /sys/devices/system/node/node0/hugepages/hugepages-2048kB/free_hugepages
compute-2:~$ sudo echo 5000 > sudo tee /sys/devices/system/node/node1/hugepages/hugepages-2048kB/free_hugepages
compute-2:~$ sudo cat /sys/devices/system/node/node0/hugepages/hugepages-1048576kB/free_hugepages
0
compute-2:~$ sudo cat /sys/devices/system/node/node1/hugepages/hugepages-1048576kB/free_hugepages
0
compute-2:~$ sudo cat /sys/devices/system/node/node0/hugepages/hugepages-2048kB/free_hugepages
0
compute-2:~$ sudo cat /sys/devices/system/node/node1/hugepages/hugepages-2048kB/free_hugepages
0
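Note on the write attempts above: free_hugepages is a read-only counter maintained by the kernel, and a command of the form "sudo echo 3 > sudo tee <path>" has its redirection done by the unprivileged shell, so it writes into a local file named "sudo" and never touches the sysfs entry. As a minimal sketch (same paths and counts as above, assuming the node still has enough free memory to back the pools), runtime provisioning would instead go through nr_hugepages; 1 GiB pages in particular may need to be reserved on the kernel command line (hugepagesz=1G hugepages=N), since late allocation can fail once memory is fragmented:

# writable knob is nr_hugepages, not free_hugepages; pipe through sudo tee
echo 3    | sudo tee /sys/devices/system/node/node0/hugepages/hugepages-1048576kB/nr_hugepages
echo 3    | sudo tee /sys/devices/system/node/node1/hugepages/hugepages-1048576kB/nr_hugepages
echo 5000 | sudo tee /sys/devices/system/node/node0/hugepages/hugepages-2048kB/nr_hugepages
echo 5000 | sudo tee /sys/devices/system/node/node1/hugepages/hugepages-2048kB/nr_hugepages
grep -i huge /proc/meminfo   # HugePages_Total / HugePages_Free should then be non-zero

This is only to verify that hugepages can be allocated on this node at all; the persistent sizing is expected to come from the platform configuration.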
compute-2:~$ systemctl status ovsdb-server
● ovsdb-server.service - Open vSwitch Database Unit
   Loaded: loaded (/usr/lib/systemd/system/ovsdb-server.service; static; vendor preset: disabled)
   Active: active (running) since Tue 2019-05-21 13:41:38 UTC; 37min ago
  Process: 36956 ExecStop=/usr/share/openvswitch/scripts/ovs-ctl --no-ovs-vswitchd stop (code=exited, status=0/SUCCESS)
  Process: 36985 ExecStart=/usr/share/openvswitch/scripts/ovs-ctl --no-ovs-vswitchd --no-monitor --system-id=random ${OVSUSER} start $OPTIONS (code=exited, status=0/SUCCESS)
  Process: 36980 ExecStartPre=/bin/sh -c rm -f /run/openvswitch/useropts; if [ "$${OVS_USER_ID/:*/}" != "root" ]; then /usr/bin/echo "OVSUSER=--ovs-user=${OVS_USER_ID}" > /run/openvswitch/useropts; fi (code=exited, status=0/SUCCESS)
  Process: 36978 ExecStartPre=/usr/bin/chown ${OVS_USER_ID} /var/run/openvswitch /var/log/openvswitch (code=exited, status=0/SUCCESS)
 Main PID: 37021 (ovsdb-server)
    Tasks: 1
   Memory: 1.2M
   CGroup: /system.slice/ovsdb-server.service
           └─37021 ovsdb-server /etc/openvswitch/conf.db -vconsole:emer -vsyslog:err -vfile:info --remote=punix:/var/run/openvswitch/db.sock --private-key=db:Open_vSwitch,SSL,private_key --certificate=d...

compute-2:~$ systemctl status ovs-vswitchd
● ovs-vswitchd.service - Open vSwitch Forwarding Unit
   Loaded: loaded (/usr/lib/systemd/system/ovs-vswitchd.service; static; vendor preset: disabled)
   Active: failed (Result: start-limit) since Tue 2019-05-21 13:41:43 UTC; 38min ago
  Process: 36931 ExecStop=/usr/share/openvswitch/scripts/ovs-ctl --no-ovsdb-server stop (code=exited, status=0/SUCCESS)
  Process: 37401 ExecStart=/usr/share/openvswitch/scripts/ovs-ctl --no-ovsdb-server --no-monitor --system-id=random ${OVSUSER} start $OPTIONS (code=exited, status=1/FAILURE)
 Main PID: 36710 (code=dumped, signal=ABRT)

compute-2:~$ sudo systemctl restart ovs-vswitchd
Job for ovs-vswitchd.service failed because the control process exited with error code.
See "systemctl status ovs-vswitchd.service" and "journalctl -xe" for details.

compute-2:~$ systemctl status ovs-vswitchd
● ovs-vswitchd.service - Open vSwitch Forwarding Unit
   Loaded: loaded (/usr/lib/systemd/system/ovs-vswitchd.service; static; vendor preset: disabled)
   Active: failed (Result: start-limit) since Tue 2019-05-21 14:20:08 UTC; 32s ago
  Process: 36931 ExecStop=/usr/share/openvswitch/scripts/ovs-ctl --no-ovsdb-server stop (code=exited, status=0/SUCCESS)
  Process: 61311 ExecStart=/usr/share/openvswitch/scripts/ovs-ctl --no-ovsdb-server --no-monitor --system-id=random ${OVSUSER} start $OPTIONS (code=exited, status=1/FAILURE)
 Main PID: 36710 (code=dumped, signal=ABRT)
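Since ovs-vswitchd keeps dumping core (SIGABRT) and then trips its start limit, the abort reason should be in its own log rather than in the unit status. A sketch of the checks that would narrow this down on compute-2 (the other_config keys such as dpdk-init and dpdk-socket-mem are the usual OVS-DPDK settings and are assumed here, not confirmed from this deployment):

journalctl -u ovs-vswitchd --no-pager | tail -n 100
sudo tail -n 100 /var/log/openvswitch/ovs-vswitchd.log
ovs-vsctl get Open_vSwitch . other_config        # look for dpdk-init, dpdk-socket-mem, dpdk-lcore-mask
cat /sys/devices/system/node/node*/hugepages/hugepages-*/nr_hugepages   # pools are all empty per the output above
sudo systemctl reset-failed ovs-vswitchd         # clear the start-limit state before retrying
sudo systemctl restart ovs-vswitchd

With DPDK enabled and the hugepage pools empty, the DPDK memory initialization in ovs-vswitchd would be expected to fail at startup, which would match the repeated abort, but the attached OVS log should confirm the actual error.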