ansible-playbook [core 2.12.6]
  config file = /etc/ansible/ansible.cfg
  configured module search path = ['/root/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
  ansible python module location = /usr/lib/python3.9/site-packages/ansible
  ansible collection location = /tmp/tmpu1heti3n
  executable location = /usr/bin/ansible-playbook
  python version = 3.9.13 (main, May 18 2022, 00:00:00) [GCC 11.3.1 20220421 (Red Hat 11.3.1-2)]
  jinja version = 2.11.3
  libyaml = True
Using /etc/ansible/ansible.cfg as config file
Skipping callback 'debug', as we already have a stdout callback.
Skipping callback 'default', as we already have a stdout callback.
Skipping callback 'minimal', as we already have a stdout callback.
Skipping callback 'oneline', as we already have a stdout callback.

PLAYBOOK: centos-8_setup.yml ***************************************************
1 plays in /cache/centos-8_setup.yml

PLAY [Enable HA repos] *********************************************************
META: ran handlers

TASK [Enable HA repos] *********************************************************
task path: /cache/centos-8_setup.yml:5
Thursday 21 July 2022 10:06:03 +0000 (0:00:00.019) 0:00:00.019 *********
changed: [/cache/centos-8.qcow2] => {
    "ansible_facts": {
        "discovered_interpreter_python": "/usr/libexec/platform-python"
    },
    "changed": true,
    "cmd": [
        "dnf",
        "config-manager",
        "--set-enabled",
        "ha"
    ],
    "delta": "0:00:00.376320",
    "end": "2022-07-21 10:06:04.162669",
    "rc": 0,
    "start": "2022-07-21 10:06:03.786349"
}
META: ran handlers
META: ran handlers

PLAY RECAP *********************************************************************
/cache/centos-8.qcow2 : ok=1 changed=1 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0

Thursday 21 July 2022 10:06:04 +0000 (0:00:00.981) 0:00:01.001 *********
===============================================================================
Enable HA repos --------------------------------------------------------- 0.98s
/cache/centos-8_setup.yml:5 ---------------------------------------------------

PLAYBOOK: tests_create_thinp_then_remove_nvme_generated.yml ********************
2 plays in /tmp/tmpa3egnbq5/tests/tests_create_thinp_then_remove_nvme_generated.yml

PLAY [all] *********************************************************************

TASK [Gathering Facts] *********************************************************
task path: /tmp/tmpa3egnbq5/tests/tests_create_thinp_then_remove_nvme_generated.yml:3
Thursday 21 July 2022 10:06:04 +0000 (0:00:00.017) 0:00:01.018 *********
ok: [/cache/centos-8.qcow2]
META: ran handlers

TASK [set disk interface for test] *********************************************
task path: /tmp/tmpa3egnbq5/tests/tests_create_thinp_then_remove_nvme_generated.yml:7
Thursday 21 July 2022 10:06:05 +0000 (0:00:01.108) 0:00:02.126 *********
ok: [/cache/centos-8.qcow2] => {
    "ansible_facts": {
        "storage_test_use_interface": "nvme"
    },
    "changed": false
}
META: ran handlers
META: ran handlers

PLAY [all] *********************************************************************

TASK [Gathering Facts] *********************************************************
task path: /tmp/tmpa3egnbq5/tests/tests_create_thinp_then_remove.yml:2
Thursday 21 July 2022 10:06:05 +0000 (0:00:00.044) 0:00:02.171 *********
ok: [/cache/centos-8.qcow2]
META: ran handlers

TASK [include_role : linux-system-roles.storage] *******************************
task path: /tmp/tmpa3egnbq5/tests/tests_create_thinp_then_remove.yml:14
Thursday 21 July 2022 10:06:06 +0000 (0:00:00.854) 0:00:03.026 *********

TASK
[linux-system-roles.storage : set platform/version specific variables] **** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main.yml:2 Thursday 21 July 2022 10:06:06 +0000 (0:00:00.034) 0:00:03.060 ********* included: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/set_vars.yml for /cache/centos-8.qcow2 TASK [linux-system-roles.storage : Ensure ansible_facts used by role] ********** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:2 Thursday 21 July 2022 10:06:06 +0000 (0:00:00.030) 0:00:03.091 ********* ok: [/cache/centos-8.qcow2] TASK [linux-system-roles.storage : Set platform/version specific variables] **** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:8 Thursday 21 July 2022 10:06:07 +0000 (0:00:00.906) 0:00:03.998 ********* skipping: [/cache/centos-8.qcow2] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [/cache/centos-8.qcow2] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [/cache/centos-8.qcow2] => (item=CentOS_8.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "vdo", "kmod-kvdo", "xfsprogs" ] }, "ansible_included_var_files": [ "/tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } ok: [/cache/centos-8.qcow2] => (item=CentOS_8.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "vdo", "kmod-kvdo", "xfsprogs" ] }, "ansible_included_var_files": [ "/tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } TASK [linux-system-roles.storage : define an empty list of pools to be used in testing] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main.yml:5 Thursday 21 July 2022 10:06:07 +0000 (0:00:00.064) 0:00:04.063 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [linux-system-roles.storage : define an empty list of volumes to be used in testing] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main.yml:9 Thursday 21 July 2022 10:06:07 +0000 (0:00:00.029) 0:00:04.093 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [linux-system-roles.storage : include the appropriate provider tasks] ***** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main.yml:13 Thursday 21 July 2022 10:06:07 +0000 (0:00:00.030) 0:00:04.123 ********* redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount included: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml for /cache/centos-8.qcow2 TASK [linux-system-roles.storage : get a list of rpm packages installed on host machine] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:2 Thursday 21 July 2022 10:06:07 +0000 (0:00:00.046) 0:00:04.169 ********* skipping: 
[/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : make sure blivet is available] ************** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:7 Thursday 21 July 2022 10:06:07 +0000 (0:00:00.017) 0:00:04.187 ********* changed: [/cache/centos-8.qcow2] => { "changed": true, "rc": 0, "results": [ "Installed: ndctl-71.1-4.el8.x86_64", "Installed: libblockdev-loop-2.24-8.el8.x86_64", "Installed: ndctl-libs-71.1-4.el8.x86_64", "Installed: libblockdev-lvm-2.24-8.el8.x86_64", "Installed: lvm2-libs-8:2.03.14-2.el8.x86_64", "Installed: device-mapper-multipath-libs-0.8.4-20.el8.x86_64", "Installed: nss-softokn-freebl-3.79.0-5.el8.x86_64", "Installed: kernel-modules-4.18.0-408.el8.x86_64", "Installed: daxctl-libs-71.1-4.el8.x86_64", "Installed: libblockdev-mdraid-2.24-8.el8.x86_64", "Installed: libblockdev-mpath-2.24-8.el8.x86_64", "Installed: device-mapper-persistent-data-0.9.0-7.el8.x86_64", "Installed: nss-sysinit-3.79.0-5.el8.x86_64", "Installed: libblockdev-nvdimm-2.24-8.el8.x86_64", "Installed: userspace-rcu-0.10.1-4.el8.x86_64", "Installed: libblockdev-part-2.24-8.el8.x86_64", "Installed: nss-util-3.79.0-5.el8.x86_64", "Installed: libblockdev-swap-2.24-8.el8.x86_64", "Installed: vdo-6.2.6.14-14.el8.x86_64", "Installed: libblockdev-utils-2.24-8.el8.x86_64", "Installed: linux-firmware-20220713-109.gitdfa29317.el8.noarch", "Installed: nspr-4.34.0-3.el8.x86_64", "Installed: mdadm-4.2-3.el8.x86_64", "Installed: python3-pyparted-1:3.11.7-4.el8.x86_64", "Installed: device-mapper-event-8:1.02.181-2.el8.x86_64", "Installed: libbytesize-1.4-3.el8.x86_64", "Installed: libblockdev-2.24-8.el8.x86_64", "Installed: nss-3.79.0-5.el8.x86_64", "Installed: python3-blivet-1:3.4.0-12.el8.noarch", "Installed: kernel-core-4.18.0-408.el8.x86_64", "Installed: libblockdev-crypto-2.24-8.el8.x86_64", "Installed: device-mapper-event-libs-8:1.02.181-2.el8.x86_64", "Installed: lsof-4.93.2-1.el8.x86_64", "Installed: libblockdev-dm-2.24-8.el8.x86_64", "Installed: python3-blockdev-2.24-8.el8.x86_64", "Installed: volume_key-libs-0.3.11-5.el8.x86_64", "Installed: blivet-data-1:3.4.0-12.el8.noarch", "Installed: python3-bytesize-1.4-3.el8.x86_64", "Installed: lvm2-8:2.03.14-2.el8.x86_64", "Installed: libblockdev-fs-2.24-8.el8.x86_64", "Installed: kmod-kvdo-6.2.6.14-84.el8.x86_64", "Installed: libblockdev-kbd-2.24-8.el8.x86_64", "Installed: nss-softokn-3.79.0-5.el8.x86_64", "Installed: libaio-0.3.112-1.el8.x86_64", "Installed: device-mapper-multipath-0.8.4-20.el8.x86_64" ] } TASK [linux-system-roles.storage : show storage_pools] ************************* task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:14 Thursday 21 July 2022 10:08:37 +0000 (0:02:30.355) 0:02:34.542 ********* ok: [/cache/centos-8.qcow2] => { "storage_pools": "VARIABLE IS NOT DEFINED!: 'storage_pools' is undefined" } TASK [linux-system-roles.storage : show storage_volumes] *********************** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:19 Thursday 21 July 2022 10:08:37 +0000 (0:00:00.034) 0:02:34.577 ********* ok: [/cache/centos-8.qcow2] => { "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined" } TASK [linux-system-roles.storage : get required packages] ********************** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:24 Thursday 21 July 2022 10:08:37 +0000 (0:00:00.044) 0:02:34.621 
********* ok: [/cache/centos-8.qcow2] => { "actions": [], "changed": false, "crypts": [], "leaves": [], "mounts": [], "packages": [], "pools": [], "volumes": [] } TASK [linux-system-roles.storage : enable copr repositories if needed] ********* task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:37 Thursday 21 July 2022 10:08:38 +0000 (0:00:00.736) 0:02:35.358 ********* included: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml for /cache/centos-8.qcow2 TASK [linux-system-roles.storage : check if the COPR support packages should be installed] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml:2 Thursday 21 July 2022 10:08:38 +0000 (0:00:00.041) 0:02:35.400 ********* TASK [linux-system-roles.storage : make sure COPR support packages are present] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml:13 Thursday 21 July 2022 10:08:38 +0000 (0:00:00.031) 0:02:35.431 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : enable COPRs] ******************************* task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml:18 Thursday 21 July 2022 10:08:38 +0000 (0:00:00.035) 0:02:35.467 ********* TASK [linux-system-roles.storage : make sure required packages are installed] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:44 Thursday 21 July 2022 10:08:38 +0000 (0:00:00.031) 0:02:35.498 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [linux-system-roles.storage : get service facts] ************************** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:51 Thursday 21 July 2022 10:08:40 +0000 (0:00:01.932) 0:02:37.431 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "blivet.service": { "name": "blivet.service", "source": "systemd", "state": "inactive", "status": "static" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "chrony-dnssrv@.service": { "name": "chrony-dnssrv@.service", "source": "systemd", "state": "unknown", "status": "static" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, 
"cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cockpit-motd.service": { "name": "cockpit-motd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cockpit-wsinstance-http.service": { "name": "cockpit-wsinstance-http.service", "source": "systemd", "state": "inactive", "status": "static" }, "cockpit-wsinstance-https-factory@.service": { "name": "cockpit-wsinstance-https-factory@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cockpit-wsinstance-https@.service": { "name": "cockpit-wsinstance-https@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cockpit.service": { "name": "cockpit.service", "source": "systemd", "state": "inactive", "status": "static" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cpupower.service": { "name": "cpupower.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "static" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "dbus-org.freedesktop.portable1.service": { "name": "dbus-org.freedesktop.portable1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "running", "status": "static" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", 
"status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fcoe.service": { "name": "fcoe.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "halt-local.service": { "name": "halt-local.service", "source": "systemd", "state": "inactive", "status": "static" }, "import-state.service": { "name": "import-state.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "iscsi-shutdown.service": { "name": "iscsi-shutdown.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iscsi.service": { "name": "iscsi.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iscsid.service": { "name": "iscsid.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "loadmodules.service": { "name": "loadmodules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "lvm2-activation-early.service": { "name": "lvm2-activation-early.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "lvm2-activation.service": { "name": "lvm2-activation.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": 
"stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "lvm2-pvscan@.service": { "name": "lvm2-pvscan@.service", "source": "systemd", "state": "unknown", "status": "static" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "mdadm-grow-continue@.service": { "name": "mdadm-grow-continue@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdadm-last-resort@.service": { "name": "mdadm-last-resort@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdcheck_continue.service": { "name": "mdcheck_continue.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdcheck_start.service": { "name": "mdcheck_start.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmon@.service": { "name": "mdmon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdmonitor-oneshot.service": { "name": "mdmonitor-oneshot.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmonitor.service": { "name": "mdmonitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "messagebus.service": { "name": "messagebus.service", "source": "systemd", "state": "active", "status": "static" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "multipathd.service": { "name": "multipathd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "ndctl-monitor.service": { "name": "ndctl-monitor.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-convert.service": { "name": "nfs-convert.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "oddjobd.service": { "name": "oddjobd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "packagekit-offline-update.service": { "name": "packagekit-offline-update.service", "source": "systemd", 
"state": "inactive", "status": "static" }, "packagekit.service": { "name": "packagekit.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "running", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon.service": { "name": "quotaon.service", "source": "systemd", "state": "inactive", "status": "static" }, "rbdmap.service": { "name": "rbdmap.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rdisc.service": { "name": "rdisc.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "rngd-wake-threshold.service": { "name": "rngd-wake-threshold.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "run-r01e426a27b1a47e0af6cce35281644cc.service": { "name": "run-r01e426a27b1a47e0af6cce35281644cc.service", "source": "systemd", "state": "running", "status": "transient" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd.service": { "name": "sshd.service", 
"source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "running", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "active", "status": "enabled" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-resume@.service": { "name": "systemd-hibernate-resume@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": 
"systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-portabled.service": { "name": "systemd-portabled.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-quotacheck.service": { "name": "systemd-quotacheck.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-resolved.service": { "name": "systemd-resolved.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "masked" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", 
"state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "tcsd.service": { "name": "tcsd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "teamd@.service": { "name": "teamd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "timedatex.service": { "name": "timedatex.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "tuned.service": { "name": "tuned.service", "source": "systemd", "state": "running", "status": "enabled" }, "unbound-anchor.service": { "name": "unbound-anchor.service", "source": "systemd", "state": "stopped", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "vdo-start-by-dev@.service": { "name": "vdo-start-by-dev@.service", "source": "systemd", "state": "unknown", "status": "static" }, "vdo.service": { "name": "vdo.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "yppasswdd.service": { "name": "yppasswdd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ypserv.service": { "name": "ypserv.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ypxfrd.service": { "name": "ypxfrd.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [linux-system-roles.storage : Set storage_cryptsetup_services] ************ task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:58 Thursday 21 July 2022 10:08:42 +0000 (0:00:01.758) 0:02:39.190 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_cryptsetup_services": [] }, "changed": false } TASK [linux-system-roles.storage : Mask the systemd cryptsetup services] ******* task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:71 Thursday 21 July 2022 10:08:42 +0000 (0:00:00.067) 0:02:39.258 ********* TASK [linux-system-roles.storage : manage the pools and volumes to match the specified state] *** task path: 
/tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:77 Thursday 21 July 2022 10:08:42 +0000 (0:00:00.026) 0:02:39.284 ********* ok: [/cache/centos-8.qcow2] => { "actions": [], "changed": false, "crypts": [], "leaves": [], "mounts": [], "packages": [], "pools": [], "volumes": [] } TASK [linux-system-roles.storage : Workaround for udev issue on some platforms] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:91 Thursday 21 July 2022 10:08:43 +0000 (0:00:00.551) 0:02:39.836 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Unmask the systemd cryptsetup services] ***** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:103 Thursday 21 July 2022 10:08:43 +0000 (0:00:00.039) 0:02:39.875 ********* TASK [linux-system-roles.storage : show blivet_output] ************************* task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:109 Thursday 21 July 2022 10:08:43 +0000 (0:00:00.020) 0:02:39.896 ********* ok: [/cache/centos-8.qcow2] => { "blivet_output": { "actions": [], "changed": false, "crypts": [], "failed": false, "leaves": [], "mounts": [], "packages": [], "pools": [], "volumes": [] } } TASK [linux-system-roles.storage : set the list of pools for test verification] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:114 Thursday 21 July 2022 10:08:43 +0000 (0:00:00.033) 0:02:39.929 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [linux-system-roles.storage : set the list of volumes for test verification] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:118 Thursday 21 July 2022 10:08:43 +0000 (0:00:00.033) 0:02:39.963 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [linux-system-roles.storage : remove obsolete mounts] ********************* task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:134 Thursday 21 July 2022 10:08:43 +0000 (0:00:00.076) 0:02:40.039 ********* TASK [linux-system-roles.storage : tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:146 Thursday 21 July 2022 10:08:43 +0000 (0:00:00.037) 0:02:40.076 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : set up new/current mounts] ****************** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:151 Thursday 21 July 2022 10:08:43 +0000 (0:00:00.022) 0:02:40.099 ********* TASK [linux-system-roles.storage : tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:163 Thursday 21 July 2022 10:08:43 +0000 (0:00:00.034) 0:02:40.134 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : retrieve facts for the /etc/crypttab file] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:171 Thursday 21 July 2022 10:08:43 +0000 (0:00:00.023) 0:02:40.157 ********* ok: [/cache/centos-8.qcow2] => { "changed": 
false, "stat": { "atime": 1658398003.7492578, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1643141385.117, "dev": 64513, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 135, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0600", "mtime": 1643141019.537, "nlink": 1, "path": "/etc/crypttab", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": "3147672035", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [linux-system-roles.storage : manage /etc/crypttab to account for changes we just made] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:176 Thursday 21 July 2022 10:08:43 +0000 (0:00:00.520) 0:02:40.677 ********* TASK [linux-system-roles.storage : Update facts] ******************************* task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:198 Thursday 21 July 2022 10:08:43 +0000 (0:00:00.020) 0:02:40.698 ********* ok: [/cache/centos-8.qcow2] META: role_complete for /cache/centos-8.qcow2 TASK [include_tasks] *********************************************************** task path: /tmp/tmpa3egnbq5/tests/tests_create_thinp_then_remove.yml:17 Thursday 21 July 2022 10:08:44 +0000 (0:00:00.977) 0:02:41.676 ********* included: /tmp/tmpa3egnbq5/tests/get_unused_disk.yml for /cache/centos-8.qcow2 TASK [Find unused disks in the system] ***************************************** task path: /tmp/tmpa3egnbq5/tests/get_unused_disk.yml:2 Thursday 21 July 2022 10:08:45 +0000 (0:00:00.035) 0:02:41.711 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ] } TASK [Set unused_disks if necessary] ******************************************* task path: /tmp/tmpa3egnbq5/tests/get_unused_disk.yml:9 Thursday 21 July 2022 10:08:45 +0000 (0:00:00.504) 0:02:42.216 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "unused_disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ] }, "changed": false } TASK [Exit playbook when there's not enough unused disks in the system] ******** task path: /tmp/tmpa3egnbq5/tests/get_unused_disk.yml:14 Thursday 21 July 2022 10:08:45 +0000 (0:00:00.036) 0:02:42.253 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Print unused disks] ****************************************************** task path: /tmp/tmpa3egnbq5/tests/get_unused_disk.yml:19 Thursday 21 July 2022 10:08:45 +0000 (0:00:00.036) 0:02:42.289 ********* ok: [/cache/centos-8.qcow2] => { "unused_disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ] } TASK [Create a thinpool device] ************************************************ task path: /tmp/tmpa3egnbq5/tests/tests_create_thinp_then_remove.yml:21 Thursday 21 July 2022 10:08:45 +0000 (0:00:00.033) 0:02:42.323 ********* TASK [linux-system-roles.storage : set platform/version specific variables] **** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main.yml:2 Thursday 21 July 2022 10:08:45 +0000 (0:00:00.041) 0:02:42.364 ********* included: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/set_vars.yml for /cache/centos-8.qcow2 TASK 
[linux-system-roles.storage : Ensure ansible_facts used by role] ********** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:2 Thursday 21 July 2022 10:08:45 +0000 (0:00:00.032) 0:02:42.397 ********* ok: [/cache/centos-8.qcow2] TASK [linux-system-roles.storage : Set platform/version specific variables] **** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:8 Thursday 21 July 2022 10:08:46 +0000 (0:00:00.512) 0:02:42.910 ********* skipping: [/cache/centos-8.qcow2] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [/cache/centos-8.qcow2] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [/cache/centos-8.qcow2] => (item=CentOS_8.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "vdo", "kmod-kvdo", "xfsprogs" ] }, "ansible_included_var_files": [ "/tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } ok: [/cache/centos-8.qcow2] => (item=CentOS_8.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "vdo", "kmod-kvdo", "xfsprogs" ] }, "ansible_included_var_files": [ "/tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } TASK [linux-system-roles.storage : define an empty list of pools to be used in testing] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main.yml:5 Thursday 21 July 2022 10:08:46 +0000 (0:00:00.071) 0:02:42.981 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [linux-system-roles.storage : define an empty list of volumes to be used in testing] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main.yml:9 Thursday 21 July 2022 10:08:46 +0000 (0:00:00.032) 0:02:43.014 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [linux-system-roles.storage : include the appropriate provider tasks] ***** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main.yml:13 Thursday 21 July 2022 10:08:46 +0000 (0:00:00.034) 0:02:43.048 ********* redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount included: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml for /cache/centos-8.qcow2 TASK [linux-system-roles.storage : get a list of rpm packages installed on host machine] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:2 Thursday 21 July 2022 10:08:46 +0000 (0:00:00.046) 0:02:43.095 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : make sure blivet is available] ************** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:7 Thursday 21 July 2022 10:08:46 +0000 (0:00:00.020) 0:02:43.115 ********* ok: 
[/cache/centos-8.qcow2] => { "changed": false, "rc": 0, "results": [] }
MSG: Nothing to do

TASK [linux-system-roles.storage : show storage_pools] *************************
task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:14
Thursday 21 July 2022 10:08:48 +0000 (0:00:01.923) 0:02:45.039 *********
ok: [/cache/centos-8.qcow2] => {
    "storage_pools": [
        {
            "disks": [
                "nvme0n1",
                "nvme1n1",
                "nvme2n1"
            ],
            "name": "vg1",
            "state": "present",
            "type": "lvm",
            "volumes": [
                {
                    "mount_point": "/opt/test1",
                    "name": "lv1",
                    "size": "3g",
                    "thin": true,
                    "thin_pool_name": "tpool1",
                    "thin_pool_size": "10g"
                }
            ]
        }
    ]
}

TASK [linux-system-roles.storage : show storage_volumes] ***********************
task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:19
Thursday 21 July 2022 10:08:48 +0000 (0:00:00.098) 0:02:45.138 *********
ok: [/cache/centos-8.qcow2] => {
    "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined"
}

TASK [linux-system-roles.storage : get required packages] **********************
task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:24
Thursday 21 July 2022 10:08:48 +0000 (0:00:00.036) 0:02:45.174 *********
ok: [/cache/centos-8.qcow2] => {
    "actions": [],
    "changed": false,
    "crypts": [],
    "leaves": [],
    "mounts": [],
    "packages": [
        "lvm2"
    ],
    "pools": [],
    "volumes": []
}

TASK [linux-system-roles.storage : enable copr repositories if needed] *********
task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:37
Thursday 21 July 2022 10:08:49 +0000 (0:00:00.997) 0:02:46.172 *********
included: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml for /cache/centos-8.qcow2

TASK [linux-system-roles.storage : check if the COPR support packages should be installed] ***
task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml:2
Thursday 21 July 2022 10:08:49 +0000 (0:00:00.045) 0:02:46.218 *********

TASK [linux-system-roles.storage : make sure COPR support packages are present] ***
task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml:13
Thursday 21 July 2022 10:08:49 +0000 (0:00:00.035) 0:02:46.253 *********
skipping: [/cache/centos-8.qcow2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [linux-system-roles.storage : enable COPRs] *******************************
task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml:18
Thursday 21 July 2022 10:08:49 +0000 (0:00:00.039) 0:02:46.293 *********

TASK [linux-system-roles.storage : make sure required packages are installed] ***
task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:44
Thursday 21 July 2022 10:08:49 +0000 (0:00:00.035) 0:02:46.328 *********
ok: [/cache/centos-8.qcow2] => {
    "changed": false,
    "rc": 0,
    "results": []
}
MSG: Nothing to do

TASK [linux-system-roles.storage : get service facts] **************************
task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:51
Thursday 21 July 2022 10:08:51 +0000 (0:00:01.842) 0:02:48.171 *********
ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped",
"status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "blivet.service": { "name": "blivet.service", "source": "systemd", "state": "inactive", "status": "static" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "chrony-dnssrv@.service": { "name": "chrony-dnssrv@.service", "source": "systemd", "state": "unknown", "status": "static" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cockpit-motd.service": { "name": "cockpit-motd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cockpit-wsinstance-http.service": { "name": "cockpit-wsinstance-http.service", "source": "systemd", "state": "inactive", "status": "static" }, "cockpit-wsinstance-https-factory@.service": { "name": "cockpit-wsinstance-https-factory@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cockpit-wsinstance-https@.service": { "name": "cockpit-wsinstance-https@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cockpit.service": { "name": "cockpit.service", "source": "systemd", "state": "inactive", "status": "static" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cpupower.service": { "name": "cpupower.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "static" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "dbus-org.freedesktop.portable1.service": { "name": "dbus-org.freedesktop.portable1.service", "source": "systemd", "state": "inactive", "status": "static" }, 
"dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "running", "status": "static" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fcoe.service": { "name": "fcoe.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "halt-local.service": { "name": "halt-local.service", "source": "systemd", "state": "inactive", "status": "static" }, "import-state.service": { "name": "import-state.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "iscsi-shutdown.service": { "name": "iscsi-shutdown.service", "source": 
"systemd", "state": "stopped", "status": "not-found" }, "iscsi.service": { "name": "iscsi.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iscsid.service": { "name": "iscsid.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "loadmodules.service": { "name": "loadmodules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "lvm2-activation-early.service": { "name": "lvm2-activation-early.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "lvm2-activation.service": { "name": "lvm2-activation.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "lvm2-pvscan@.service": { "name": "lvm2-pvscan@.service", "source": "systemd", "state": "unknown", "status": "static" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "mdadm-grow-continue@.service": { "name": "mdadm-grow-continue@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdadm-last-resort@.service": { "name": "mdadm-last-resort@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdcheck_continue.service": { "name": "mdcheck_continue.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdcheck_start.service": { "name": "mdcheck_start.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmon@.service": { "name": "mdmon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdmonitor-oneshot.service": { "name": "mdmonitor-oneshot.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmonitor.service": { "name": "mdmonitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "messagebus.service": { "name": "messagebus.service", "source": "systemd", "state": "active", "status": "static" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "multipathd.service": { "name": "multipathd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "ndctl-monitor.service": { "name": "ndctl-monitor.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-convert.service": { "name": "nfs-convert.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": 
"nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "oddjobd.service": { "name": "oddjobd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "packagekit-offline-update.service": { "name": "packagekit-offline-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "packagekit.service": { "name": "packagekit.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "running", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon.service": { "name": "quotaon.service", "source": "systemd", "state": "inactive", "status": "static" }, "rbdmap.service": { "name": "rbdmap.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rdisc.service": { "name": "rdisc.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "rngd-wake-threshold.service": { "name": "rngd-wake-threshold.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", 
"status": "static" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "running", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "active", "status": "enabled" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", 
"source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-resume@.service": { "name": "systemd-hibernate-resume@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-portabled.service": { "name": "systemd-portabled.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-quotacheck.service": { "name": "systemd-quotacheck.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-resolved.service": { "name": "systemd-resolved.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, 
"systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "masked" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "tcsd.service": { "name": "tcsd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "teamd@.service": { "name": "teamd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "timedatex.service": { "name": "timedatex.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "tuned.service": { "name": "tuned.service", "source": "systemd", "state": "running", "status": "enabled" }, "unbound-anchor.service": { "name": "unbound-anchor.service", "source": "systemd", "state": "stopped", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "vdo-start-by-dev@.service": { "name": "vdo-start-by-dev@.service", "source": "systemd", "state": "unknown", "status": "static" }, "vdo.service": { "name": "vdo.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "yppasswdd.service": { "name": "yppasswdd.service", "source": "systemd", "state": "stopped", 
"status": "not-found" }, "ypserv.service": { "name": "ypserv.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ypxfrd.service": { "name": "ypxfrd.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [linux-system-roles.storage : Set storage_cryptsetup_services] ************ task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:58 Thursday 21 July 2022 10:08:53 +0000 (0:00:01.636) 0:02:49.807 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_cryptsetup_services": [] }, "changed": false } TASK [linux-system-roles.storage : Mask the systemd cryptsetup services] ******* task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:71 Thursday 21 July 2022 10:08:53 +0000 (0:00:00.057) 0:02:49.865 ********* TASK [linux-system-roles.storage : manage the pools and volumes to match the specified state] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:77 Thursday 21 July 2022 10:08:53 +0000 (0:00:00.022) 0:02:49.887 ********* changed: [/cache/centos-8.qcow2] => { "actions": [ { "action": "create format", "device": "/dev/nvme2n1", "fs_type": "disklabel" }, { "action": "create device", "device": "/dev/nvme2n1p1", "fs_type": null }, { "action": "create format", "device": "/dev/nvme2n1p1", "fs_type": "lvmpv" }, { "action": "create format", "device": "/dev/nvme1n1", "fs_type": "disklabel" }, { "action": "create device", "device": "/dev/nvme1n1p1", "fs_type": null }, { "action": "create format", "device": "/dev/nvme1n1p1", "fs_type": "lvmpv" }, { "action": "create format", "device": "/dev/nvme0n1", "fs_type": "disklabel" }, { "action": "create device", "device": "/dev/nvme0n1p1", "fs_type": null }, { "action": "create format", "device": "/dev/nvme0n1p1", "fs_type": "lvmpv" }, { "action": "create device", "device": "/dev/vg1", "fs_type": null }, { "action": "create device", "device": "/dev/mapper/vg1-tpool1", "fs_type": null }, { "action": "create device", "device": "/dev/mapper/vg1-lv1", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/vg1-lv1", "fs_type": "xfs" } ], "changed": true, "crypts": [], "leaves": [ "/dev/sr0", "/dev/vda1", "/dev/sda", "/dev/sdb", "/dev/sdc", "/dev/vdb", "/dev/vdc", "/dev/vdd", "/dev/mapper/vg1-lv1" ], "mounts": [ { "dump": 0, "fstype": "xfs", "opts": "defaults", "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/vg1-lv1", "state": "mounted" } ], "packages": [ "lvm2", "xfsprogs" ], "pools": [ { "disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "vg1", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/vg1-lv1", "_kernel_device": "/dev/dm-4", "_mount_id": "/dev/mapper/vg1-lv1", "_raw_device": "/dev/mapper/vg1-lv1", "_raw_kernel_device": "/dev/dm-4", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 
0, "mount_device_identifier": "uuid", "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "name": "lv1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "size": "3g", "state": "present", "thin": true, "thin_pool_name": "tpool1", "thin_pool_size": "10g", "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } TASK [linux-system-roles.storage : Workaround for udev issue on some platforms] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:91 Thursday 21 July 2022 10:08:56 +0000 (0:00:03.367) 0:02:53.255 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Unmask the systemd cryptsetup services] ***** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:103 Thursday 21 July 2022 10:08:56 +0000 (0:00:00.037) 0:02:53.292 ********* TASK [linux-system-roles.storage : show blivet_output] ************************* task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:109 Thursday 21 July 2022 10:08:56 +0000 (0:00:00.022) 0:02:53.315 ********* ok: [/cache/centos-8.qcow2] => { "blivet_output": { "actions": [ { "action": "create format", "device": "/dev/nvme2n1", "fs_type": "disklabel" }, { "action": "create device", "device": "/dev/nvme2n1p1", "fs_type": null }, { "action": "create format", "device": "/dev/nvme2n1p1", "fs_type": "lvmpv" }, { "action": "create format", "device": "/dev/nvme1n1", "fs_type": "disklabel" }, { "action": "create device", "device": "/dev/nvme1n1p1", "fs_type": null }, { "action": "create format", "device": "/dev/nvme1n1p1", "fs_type": "lvmpv" }, { "action": "create format", "device": "/dev/nvme0n1", "fs_type": "disklabel" }, { "action": "create device", "device": "/dev/nvme0n1p1", "fs_type": null }, { "action": "create format", "device": "/dev/nvme0n1p1", "fs_type": "lvmpv" }, { "action": "create device", "device": "/dev/vg1", "fs_type": null }, { "action": "create device", "device": "/dev/mapper/vg1-tpool1", "fs_type": null }, { "action": "create device", "device": "/dev/mapper/vg1-lv1", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/vg1-lv1", "fs_type": "xfs" } ], "changed": true, "crypts": [], "failed": false, "leaves": [ "/dev/sr0", "/dev/vda1", "/dev/sda", "/dev/sdb", "/dev/sdc", "/dev/vdb", "/dev/vdc", "/dev/vdd", "/dev/mapper/vg1-lv1" ], "mounts": [ { "dump": 0, "fstype": "xfs", "opts": "defaults", "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/vg1-lv1", "state": "mounted" } ], "packages": [ "lvm2", "xfsprogs" ], "pools": [ { "disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "vg1", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/vg1-lv1", "_kernel_device": "/dev/dm-4", "_mount_id": "/dev/mapper/vg1-lv1", "_raw_device": "/dev/mapper/vg1-lv1", "_raw_kernel_device": "/dev/dm-4", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, 
"encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "name": "lv1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "size": "3g", "state": "present", "thin": true, "thin_pool_name": "tpool1", "thin_pool_size": "10g", "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } } TASK [linux-system-roles.storage : set the list of pools for test verification] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:114 Thursday 21 July 2022 10:08:56 +0000 (0:00:00.044) 0:02:53.359 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_pools_list": [ { "disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "vg1", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/vg1-lv1", "_kernel_device": "/dev/dm-4", "_mount_id": "/dev/mapper/vg1-lv1", "_raw_device": "/dev/mapper/vg1-lv1", "_raw_kernel_device": "/dev/dm-4", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "name": "lv1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "size": "3g", "state": "present", "thin": true, "thin_pool_name": "tpool1", "thin_pool_size": "10g", "type": "lvm", "vdo_pool_size": null } ] } ] }, "changed": false } TASK [linux-system-roles.storage : set the list of volumes for test verification] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:118 Thursday 21 July 2022 10:08:56 +0000 (0:00:00.071) 0:02:53.431 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [linux-system-roles.storage : remove obsolete mounts] ********************* task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:134 Thursday 21 July 2022 10:08:56 +0000 (0:00:00.068) 0:02:53.499 ********* TASK [linux-system-roles.storage : tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:146 Thursday 21 July 2022 10:08:56 +0000 (0:00:00.039) 0:02:53.538 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "name": null, "status": {} } TASK [linux-system-roles.storage : set up new/current mounts] ****************** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:151 Thursday 21 July 2022 10:08:57 +0000 (0:00:00.933) 
0:02:54.472 ********* redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount changed: [/cache/centos-8.qcow2] => (item={'src': '/dev/mapper/vg1-lv1', 'path': '/opt/test1', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted'}) => { "ansible_loop_var": "mount_info", "backup_file": "", "boot": "yes", "changed": true, "dump": "0", "fstab": "/etc/fstab", "fstype": "xfs", "mount_info": { "dump": 0, "fstype": "xfs", "opts": "defaults", "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/vg1-lv1", "state": "mounted" }, "name": "/opt/test1", "opts": "defaults", "passno": "0", "src": "/dev/mapper/vg1-lv1" } TASK [linux-system-roles.storage : tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:163 Thursday 21 July 2022 10:08:58 +0000 (0:00:00.549) 0:02:55.021 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "name": null, "status": {} } TASK [linux-system-roles.storage : retrieve facts for the /etc/crypttab file] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:171 Thursday 21 July 2022 10:08:58 +0000 (0:00:00.648) 0:02:55.670 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "stat": { "atime": 1658398003.7492578, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1643141385.117, "dev": 64513, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 135, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0600", "mtime": 1643141019.537, "nlink": 1, "path": "/etc/crypttab", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": "3147672035", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [linux-system-roles.storage : manage /etc/crypttab to account for changes we just made] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:176 Thursday 21 July 2022 10:08:59 +0000 (0:00:00.370) 0:02:56.041 ********* TASK [linux-system-roles.storage : Update facts] ******************************* task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:198 Thursday 21 July 2022 10:08:59 +0000 (0:00:00.021) 0:02:56.062 ********* ok: [/cache/centos-8.qcow2] META: role_complete for /cache/centos-8.qcow2 TASK [include_tasks] *********************************************************** task path: /tmp/tmpa3egnbq5/tests/tests_create_thinp_then_remove.yml:38 Thursday 21 July 2022 10:09:00 +0000 (0:00:00.995) 0:02:57.058 ********* included: /tmp/tmpa3egnbq5/tests/verify-role-results.yml for /cache/centos-8.qcow2 TASK [Print out pool information] ********************************************** task path: /tmp/tmpa3egnbq5/tests/verify-role-results.yml:1 Thursday 21 July 2022 10:09:00 +0000 (0:00:00.046) 0:02:57.104 ********* ok: [/cache/centos-8.qcow2] => { "_storage_pools_list": [ { "disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "vg1", "raid_chunk_size": null, "raid_device_count": null, "raid_level": 
null, "raid_metadata_version": null, "raid_spare_count": null, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/vg1-lv1", "_kernel_device": "/dev/dm-4", "_mount_id": "/dev/mapper/vg1-lv1", "_raw_device": "/dev/mapper/vg1-lv1", "_raw_kernel_device": "/dev/dm-4", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "name": "lv1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "size": "3g", "state": "present", "thin": true, "thin_pool_name": "tpool1", "thin_pool_size": "10g", "type": "lvm", "vdo_pool_size": null } ] } ] } TASK [Print out volume information] ******************************************** task path: /tmp/tmpa3egnbq5/tests/verify-role-results.yml:6 Thursday 21 July 2022 10:09:00 +0000 (0:00:00.089) 0:02:57.194 ********* skipping: [/cache/centos-8.qcow2] => {} TASK [Collect info about the volumes.] ***************************************** task path: /tmp/tmpa3egnbq5/tests/verify-role-results.yml:14 Thursday 21 July 2022 10:09:00 +0000 (0:00:00.039) 0:02:57.234 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "info": { "/dev/mapper/vg1-lv1": { "fstype": "xfs", "label": "", "name": "/dev/mapper/vg1-lv1", "size": "3G", "type": "lvm", "uuid": "c574fe3b-c0cb-4bb1-85fe-deeef0a957b9" }, "/dev/mapper/vg1-tpool1": { "fstype": "", "label": "", "name": "/dev/mapper/vg1-tpool1", "size": "10G", "type": "lvm", "uuid": "" }, "/dev/mapper/vg1-tpool1-tpool": { "fstype": "", "label": "", "name": "/dev/mapper/vg1-tpool1-tpool", "size": "10G", "type": "lvm", "uuid": "" }, "/dev/mapper/vg1-tpool1_tdata": { "fstype": "", "label": "", "name": "/dev/mapper/vg1-tpool1_tdata", "size": "10G", "type": "lvm", "uuid": "" }, "/dev/mapper/vg1-tpool1_tmeta": { "fstype": "", "label": "", "name": "/dev/mapper/vg1-tpool1_tmeta", "size": "12M", "type": "lvm", "uuid": "" }, "/dev/nvme0n1": { "fstype": "", "label": "", "name": "/dev/nvme0n1", "size": "10G", "type": "disk", "uuid": "" }, "/dev/nvme0n1p1": { "fstype": "LVM2_member", "label": "", "name": "/dev/nvme0n1p1", "size": "10G", "type": "partition", "uuid": "xLu0vZ-nH9w-K1e2-A6yJ-EaoZ-FOkB-aLS539" }, "/dev/nvme1n1": { "fstype": "", "label": "", "name": "/dev/nvme1n1", "size": "10G", "type": "disk", "uuid": "" }, "/dev/nvme1n1p1": { "fstype": "LVM2_member", "label": "", "name": "/dev/nvme1n1p1", "size": "10G", "type": "partition", "uuid": "Jjw50w-Whgv-fxb5-adBQ-h5Qy-h8fG-52EFuy" }, "/dev/nvme2n1": { "fstype": "", "label": "", "name": "/dev/nvme2n1", "size": "10G", "type": "disk", "uuid": "" }, "/dev/nvme2n1p1": { "fstype": "LVM2_member", "label": "", "name": "/dev/nvme2n1p1", "size": "10G", "type": "partition", "uuid": "2PzQCY-FE6q-TI2r-KVCh-wUMp-axXy-16asvy" }, "/dev/sda": { "fstype": "", "label": "", "name": "/dev/sda", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdb": { "fstype": "", "label": "", "name": "/dev/sdb", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdc": { "fstype": "", "label": "", "name": "/dev/sdc", "size": "10G", "type": "disk", "uuid": "" }, 
"/dev/sr0": { "fstype": "iso9660", "label": "cidata", "name": "/dev/sr0", "size": "364K", "type": "rom", "uuid": "2022-07-21-10-05-42-00" }, "/dev/vda": { "fstype": "", "label": "", "name": "/dev/vda", "size": "10G", "type": "disk", "uuid": "" }, "/dev/vda1": { "fstype": "xfs", "label": "", "name": "/dev/vda1", "size": "10G", "type": "partition", "uuid": "395b9844-e404-4857-afbb-c6edccaf72f3" }, "/dev/vdb": { "fstype": "", "label": "", "name": "/dev/vdb", "size": "10G", "type": "disk", "uuid": "" }, "/dev/vdc": { "fstype": "", "label": "", "name": "/dev/vdc", "size": "10G", "type": "disk", "uuid": "" }, "/dev/vdd": { "fstype": "", "label": "", "name": "/dev/vdd", "size": "10G", "type": "disk", "uuid": "" } } } TASK [Read the /etc/fstab file for volume existence] *************************** task path: /tmp/tmpa3egnbq5/tests/verify-role-results.yml:19 Thursday 21 July 2022 10:09:01 +0000 (0:00:00.539) 0:02:57.773 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "cmd": [ "cat", "/etc/fstab" ], "delta": "0:00:00.002900", "end": "2022-07-21 10:09:01.317882", "rc": 0, "start": "2022-07-21 10:09:01.314982" } STDOUT: # # /etc/fstab # Created by anaconda on Tue Jan 25 20:03:39 2022 # # Accessible filesystems, by reference, are maintained under '/dev/disk/'. # See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info. # # After editing this file, run 'systemctl daemon-reload' to update systemd # units generated from this file. # UUID=395b9844-e404-4857-afbb-c6edccaf72f3 / xfs defaults 0 0 /dev/mapper/vg1-lv1 /opt/test1 xfs defaults 0 0 TASK [Read the /etc/crypttab file] ********************************************* task path: /tmp/tmpa3egnbq5/tests/verify-role-results.yml:24 Thursday 21 July 2022 10:09:01 +0000 (0:00:00.384) 0:02:58.157 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "cmd": [ "cat", "/etc/crypttab" ], "delta": "0:00:00.002793", "end": "2022-07-21 10:09:01.731131", "failed_when_result": false, "rc": 0, "start": "2022-07-21 10:09:01.728338" } TASK [Verify the volumes listed in storage_pools were correctly managed] ******* task path: /tmp/tmpa3egnbq5/tests/verify-role-results.yml:33 Thursday 21 July 2022 10:09:01 +0000 (0:00:00.412) 0:02:58.569 ********* included: /tmp/tmpa3egnbq5/tests/test-verify-pool.yml for /cache/centos-8.qcow2 => (item={'disks': ['nvme0n1', 'nvme1n1', 'nvme2n1'], 'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'name': 'vg1', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'state': 'present', 'type': 'lvm', 'volumes': [{'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'name': 'lv1', 'raid_level': None, 'size': '3g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'thin_pool_name': 'tpool1', 'thin_pool_size': '10g', 'thin': True, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/vg1-lv1', 
'_raw_device': '/dev/mapper/vg1-lv1', '_mount_id': '/dev/mapper/vg1-lv1', '_kernel_device': '/dev/dm-4', '_raw_kernel_device': '/dev/dm-4'}], 'raid_chunk_size': None}) TASK [Set _storage_pool_tests] ************************************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-pool.yml:5 Thursday 21 July 2022 10:09:01 +0000 (0:00:00.097) 0:02:58.666 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_pool_tests": [ "members", "volumes" ] }, "changed": false } TASK [include_tasks] *********************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-pool.yml:18 Thursday 21 July 2022 10:09:01 +0000 (0:00:00.035) 0:02:58.702 ********* included: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml for /cache/centos-8.qcow2 => (item=members) included: /tmp/tmpa3egnbq5/tests/test-verify-pool-volumes.yml for /cache/centos-8.qcow2 => (item=volumes) TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml:1 Thursday 21 July 2022 10:09:02 +0000 (0:00:00.049) 0:02:58.751 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_expected_pv_count": "3", "_storage_test_pool_pvs_lvm": [ "/dev/nvme0n1p1", "/dev/nvme1n1p1", "/dev/nvme2n1p1" ] }, "changed": false } TASK [Get the canonical device path for each member device] ******************** task path: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml:10 Thursday 21 July 2022 10:09:02 +0000 (0:00:00.055) 0:02:58.807 ********* ok: [/cache/centos-8.qcow2] => (item=/dev/nvme0n1p1) => { "ansible_loop_var": "pv", "changed": false, "device": "/dev/nvme0n1p1", "pv": "/dev/nvme0n1p1" } ok: [/cache/centos-8.qcow2] => (item=/dev/nvme1n1p1) => { "ansible_loop_var": "pv", "changed": false, "device": "/dev/nvme1n1p1", "pv": "/dev/nvme1n1p1" } ok: [/cache/centos-8.qcow2] => (item=/dev/nvme2n1p1) => { "ansible_loop_var": "pv", "changed": false, "device": "/dev/nvme2n1p1", "pv": "/dev/nvme2n1p1" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml:19 Thursday 21 July 2022 10:09:03 +0000 (0:00:01.188) 0:02:59.996 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "__pvs_lvm_len": "3" }, "changed": false } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml:23 Thursday 21 July 2022 10:09:03 +0000 (0:00:00.050) 0:03:00.047 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_pool_pvs": [ "/dev/nvme0n1p1", "/dev/nvme1n1p1", "/dev/nvme2n1p1" ] }, "changed": false } TASK [Verify PV count] ********************************************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml:27 Thursday 21 July 2022 10:09:03 +0000 (0:00:00.050) 0:03:00.097 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml:34 Thursday 21 July 2022 10:09:03 +0000 (0:00:00.051) 0:03:00.149 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_expected_pv_type": "disk" }, "changed": false } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml:38 Thursday 21 July 2022 10:09:03 +0000 (0:00:00.039) 
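The PV-count and member-path checks above work from the pool data returned by the role rather than by querying LVM again. For an independent spot-check of the same condition on the managed host, an ad-hoc task list along these lines would do (an illustration only, not part of the test suite; the host pattern and task names are made up):

# Illustration only: confirm vg1 really has the three expected member PVs.
- hosts: all
  tasks:
    - name: List the physical volumes backing vg1
      ansible.builtin.command:
        cmd: pvs --noheadings -o pv_name --select vg_name=vg1
      register: vg1_pvs
      changed_when: false

    - name: Expect exactly three member PVs
      ansible.builtin.assert:
        that:
          - vg1_pvs.stdout_lines | length == 3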
0:03:00.188 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_expected_pv_type": "partition" }, "changed": false } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml:42 Thursday 21 July 2022 10:09:03 +0000 (0:00:00.051) 0:03:00.240 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check the type of each PV] *********************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml:46 Thursday 21 July 2022 10:09:03 +0000 (0:00:00.024) 0:03:00.265 ********* ok: [/cache/centos-8.qcow2] => (item=/dev/nvme0n1p1) => { "ansible_loop_var": "pv", "changed": false, "pv": "/dev/nvme0n1p1" } MSG: All assertions passed ok: [/cache/centos-8.qcow2] => (item=/dev/nvme1n1p1) => { "ansible_loop_var": "pv", "changed": false, "pv": "/dev/nvme1n1p1" } MSG: All assertions passed ok: [/cache/centos-8.qcow2] => (item=/dev/nvme2n1p1) => { "ansible_loop_var": "pv", "changed": false, "pv": "/dev/nvme2n1p1" } MSG: All assertions passed TASK [Check MD RAID] *********************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml:56 Thursday 21 July 2022 10:09:03 +0000 (0:00:00.069) 0:03:00.334 ********* included: /tmp/tmpa3egnbq5/tests/verify-pool-md.yml for /cache/centos-8.qcow2 TASK [get information about RAID] ********************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-md.yml:6 Thursday 21 July 2022 10:09:03 +0000 (0:00:00.043) 0:03:00.378 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-md.yml:12 Thursday 21 July 2022 10:09:03 +0000 (0:00:00.025) 0:03:00.403 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-md.yml:16 Thursday 21 July 2022 10:09:03 +0000 (0:00:00.025) 0:03:00.429 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-md.yml:20 Thursday 21 July 2022 10:09:03 +0000 (0:00:00.025) 0:03:00.454 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [check RAID active devices count] ***************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-md.yml:24 Thursday 21 July 2022 10:09:03 +0000 (0:00:00.025) 0:03:00.479 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [check RAID spare devices count] ****************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-md.yml:30 Thursday 21 July 2022 10:09:03 +0000 (0:00:00.025) 0:03:00.504 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [check RAID metadata version] ********************************************* task path: /tmp/tmpa3egnbq5/tests/verify-pool-md.yml:36 Thursday 21 July 2022 10:09:03 +0000 (0:00:00.026) 0:03:00.531 ********* 
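The "Check the type of each PV" assertions above expect every member to be a partition, since the role partitions each NVMe disk before turning it into a PV; the test itself takes the device type from the block-device info gathered earlier in the run. A rough manual equivalent for a single member, again only an assumed sketch rather than what the test executes:

# Sketch: verify one pool member is a partition rather than a whole disk.
- hosts: all
  tasks:
    - name: Query the block device type of the first pool member
      ansible.builtin.command:
        cmd: lsblk -dno TYPE /dev/nvme0n1p1
      register: pv_type
      changed_when: false
      failed_when: pv_type.stdout | trim != 'part'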
skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-md.yml:44 Thursday 21 July 2022 10:09:03 +0000 (0:00:00.025) 0:03:00.556 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_md_active_devices_re": null, "storage_test_md_metadata_version_re": null, "storage_test_md_spare_devices_re": null }, "changed": false } TASK [Check LVM RAID] ********************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml:59 Thursday 21 July 2022 10:09:03 +0000 (0:00:00.069) 0:03:00.625 ********* included: /tmp/tmpa3egnbq5/tests/verify-pool-members-lvmraid.yml for /cache/centos-8.qcow2 TASK [Validate pool member LVM RAID settings] ********************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-members-lvmraid.yml:1 Thursday 21 July 2022 10:09:03 +0000 (0:00:00.075) 0:03:00.700 ********* included: /tmp/tmpa3egnbq5/tests/verify-pool-member-lvmraid.yml for /cache/centos-8.qcow2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'name': 'lv1', 'raid_level': None, 'size': '3g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'thin_pool_name': 'tpool1', 'thin_pool_size': '10g', 'thin': True, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/vg1-lv1', '_raw_device': '/dev/mapper/vg1-lv1', '_mount_id': '/dev/mapper/vg1-lv1', '_kernel_device': '/dev/dm-4', '_raw_kernel_device': '/dev/dm-4'}) TASK [Get information about LVM RAID] ****************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-lvmraid.yml:3 Thursday 21 July 2022 10:09:04 +0000 (0:00:00.044) 0:03:00.745 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check that volume is LVM RAID] ******************************************* task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-lvmraid.yml:8 Thursday 21 July 2022 10:09:04 +0000 (0:00:00.031) 0:03:00.776 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-lvmraid.yml:12 Thursday 21 July 2022 10:09:04 +0000 (0:00:00.030) 0:03:00.806 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check Thin Pools] ******************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml:62 Thursday 21 July 2022 10:09:04 +0000 (0:00:00.029) 0:03:00.836 ********* included: /tmp/tmpa3egnbq5/tests/verify-pool-members-thin.yml for /cache/centos-8.qcow2 TASK [Validate pool member thinpool settings] ********************************** task path: 
/tmp/tmpa3egnbq5/tests/verify-pool-members-thin.yml:1 Thursday 21 July 2022 10:09:04 +0000 (0:00:00.045) 0:03:00.881 ********* included: /tmp/tmpa3egnbq5/tests/verify-pool-member-thin.yml for /cache/centos-8.qcow2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'name': 'lv1', 'raid_level': None, 'size': '3g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'thin_pool_name': 'tpool1', 'thin_pool_size': '10g', 'thin': True, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/vg1-lv1', '_raw_device': '/dev/mapper/vg1-lv1', '_mount_id': '/dev/mapper/vg1-lv1', '_kernel_device': '/dev/dm-4', '_raw_kernel_device': '/dev/dm-4'}) TASK [Get information about thinpool] ****************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-thin.yml:3 Thursday 21 July 2022 10:09:04 +0000 (0:00:00.044) 0:03:00.926 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "cmd": [ "lvs", "--noheading", "-o", "pool_lv", "--select", "lv_name=lv1&&segtype=thin", "vg1" ], "delta": "0:00:00.046627", "end": "2022-07-21 10:09:04.508735", "rc": 0, "start": "2022-07-21 10:09:04.462108" } STDOUT: tpool1 TASK [Check that volume is in correct thinpool (when thinp name is provided)] *** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-thin.yml:8 Thursday 21 July 2022 10:09:04 +0000 (0:00:00.426) 0:03:01.353 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Check that volume is in thinpool (when thinp name is not provided)] ****** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-thin.yml:13 Thursday 21 July 2022 10:09:04 +0000 (0:00:00.058) 0:03:01.412 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-thin.yml:17 Thursday 21 July 2022 10:09:04 +0000 (0:00:00.053) 0:03:01.465 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_lvmraid_status": null }, "changed": false } TASK [Check member encryption] ************************************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml:65 Thursday 21 July 2022 10:09:04 +0000 (0:00:00.042) 0:03:01.508 ********* included: /tmp/tmpa3egnbq5/tests/verify-pool-members-encryption.yml for /cache/centos-8.qcow2 TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-members-encryption.yml:4 Thursday 21 July 2022 10:09:04 +0000 (0:00:00.048) 0:03:01.556 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Validate pool member LUKS settings] ************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-members-encryption.yml:8 Thursday 21 July 2022 10:09:04 +0000 (0:00:00.048) 
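The thin-pool verification above reduces to the single lvs call shown in its output: it asks LVM which pool lv1 sits in, selecting only thin segments, and asserts the answer is tpool1. Wrapped as a standalone check (the command, volume, and expected pool name are taken from this run; the surrounding play is an assumed sketch):

# Sketch: replay the thin-pool membership check this test performs.
- hosts: all
  tasks:
    - name: Ask LVM which thin pool lv1 belongs to
      ansible.builtin.command:
        cmd: lvs --noheadings -o pool_lv --select 'lv_name=lv1&&segtype=thin' vg1
      register: lv1_pool
      changed_when: false

    - name: lv1 should live in tpool1, as requested via thin_pool_name
      ansible.builtin.assert:
        that:
          - lv1_pool.stdout | trim == 'tpool1'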
0:03:01.605 ********* skipping: [/cache/centos-8.qcow2] => (item=/dev/nvme0n1p1) => { "_storage_test_pool_member_path": "/dev/nvme0n1p1", "ansible_loop_var": "_storage_test_pool_member_path", "changed": false, "skip_reason": "Conditional result was False" } skipping: [/cache/centos-8.qcow2] => (item=/dev/nvme1n1p1) => { "_storage_test_pool_member_path": "/dev/nvme1n1p1", "ansible_loop_var": "_storage_test_pool_member_path", "changed": false, "skip_reason": "Conditional result was False" } skipping: [/cache/centos-8.qcow2] => (item=/dev/nvme2n1p1) => { "_storage_test_pool_member_path": "/dev/nvme2n1p1", "ansible_loop_var": "_storage_test_pool_member_path", "changed": false, "skip_reason": "Conditional result was False" } TASK [Validate pool member crypttab entries] *********************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-members-encryption.yml:15 Thursday 21 July 2022 10:09:04 +0000 (0:00:00.033) 0:03:01.638 ********* included: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml for /cache/centos-8.qcow2 => (item=/dev/nvme0n1p1) included: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml for /cache/centos-8.qcow2 => (item=/dev/nvme1n1p1) included: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml for /cache/centos-8.qcow2 => (item=/dev/nvme2n1p1) TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml:1 Thursday 21 July 2022 10:09:04 +0000 (0:00:00.053) 0:03:01.692 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_crypttab_entries": [] }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml:6 Thursday 21 July 2022 10:09:05 +0000 (0:00:00.051) 0:03:01.743 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml:11 Thursday 21 July 2022 10:09:05 +0000 (0:00:00.049) 0:03:01.793 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml:17 Thursday 21 July 2022 10:09:05 +0000 (0:00:00.037) 0:03:01.831 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml:23 Thursday 21 July 2022 10:09:05 +0000 (0:00:00.037) 0:03:01.868 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml:29 Thursday 21 July 2022 10:09:05 +0000 (0:00:00.040) 0:03:01.908 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_crypttab_entries": null }, "changed": false } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml:1 Thursday 21 July 2022 10:09:05 +0000 (0:00:00.035) 0:03:01.944 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { 
"_storage_test_crypttab_entries": [] }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml:6 Thursday 21 July 2022 10:09:05 +0000 (0:00:00.050) 0:03:01.994 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml:11 Thursday 21 July 2022 10:09:05 +0000 (0:00:00.053) 0:03:02.047 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml:17 Thursday 21 July 2022 10:09:05 +0000 (0:00:00.042) 0:03:02.090 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml:23 Thursday 21 July 2022 10:09:05 +0000 (0:00:00.038) 0:03:02.128 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml:29 Thursday 21 July 2022 10:09:05 +0000 (0:00:00.037) 0:03:02.166 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_crypttab_entries": null }, "changed": false } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml:1 Thursday 21 July 2022 10:09:05 +0000 (0:00:00.039) 0:03:02.205 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_crypttab_entries": [] }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml:6 Thursday 21 July 2022 10:09:05 +0000 (0:00:00.086) 0:03:02.292 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml:11 Thursday 21 July 2022 10:09:05 +0000 (0:00:00.083) 0:03:02.375 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml:17 Thursday 21 July 2022 10:09:05 +0000 (0:00:00.095) 0:03:02.471 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml:23 Thursday 21 July 2022 10:09:05 +0000 (0:00:00.038) 0:03:02.509 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml:29 Thursday 21 July 2022 10:09:05 +0000 (0:00:00.037) 0:03:02.546 ********* ok: [/cache/centos-8.qcow2] => { 
"ansible_facts": { "_storage_test_crypttab_entries": null }, "changed": false } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-members-encryption.yml:22 Thursday 21 July 2022 10:09:05 +0000 (0:00:00.050) 0:03:02.596 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_crypttab_key_file": null }, "changed": false } TASK [Check VDO] *************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml:68 Thursday 21 July 2022 10:09:05 +0000 (0:00:00.038) 0:03:02.635 ********* included: /tmp/tmpa3egnbq5/tests/verify-pool-members-vdo.yml for /cache/centos-8.qcow2 TASK [Validate pool member VDO settings] *************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-members-vdo.yml:1 Thursday 21 July 2022 10:09:05 +0000 (0:00:00.049) 0:03:02.684 ********* included: /tmp/tmpa3egnbq5/tests/verify-pool-member-vdo.yml for /cache/centos-8.qcow2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'name': 'lv1', 'raid_level': None, 'size': '3g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'thin_pool_name': 'tpool1', 'thin_pool_size': '10g', 'thin': True, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/vg1-lv1', '_raw_device': '/dev/mapper/vg1-lv1', '_mount_id': '/dev/mapper/vg1-lv1', '_kernel_device': '/dev/dm-4', '_raw_kernel_device': '/dev/dm-4'}) TASK [get information about VDO deduplication] ********************************* task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-vdo.yml:3 Thursday 21 July 2022 10:09:06 +0000 (0:00:00.046) 0:03:02.730 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-vdo.yml:8 Thursday 21 July 2022 10:09:06 +0000 (0:00:00.024) 0:03:02.755 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [assert] ****************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-vdo.yml:11 Thursday 21 July 2022 10:09:06 +0000 (0:00:00.025) 0:03:02.781 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [assert] ****************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-vdo.yml:16 Thursday 21 July 2022 10:09:06 +0000 (0:00:00.025) 0:03:02.806 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-vdo.yml:21 Thursday 21 July 2022 10:09:06 +0000 
(0:00:00.023) 0:03:02.830 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [assert] ****************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-vdo.yml:24 Thursday 21 July 2022 10:09:06 +0000 (0:00:00.023) 0:03:02.854 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [assert] ****************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-vdo.yml:29 Thursday 21 July 2022 10:09:06 +0000 (0:00:00.023) 0:03:02.878 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-vdo.yml:39 Thursday 21 July 2022 10:09:06 +0000 (0:00:00.024) 0:03:02.903 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_vdo_status": null }, "changed": false } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml:71 Thursday 21 July 2022 10:09:06 +0000 (0:00:00.035) 0:03:02.938 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "__pvs_lvm_len": null, "_storage_test_expected_pv_count": null, "_storage_test_expected_pv_type": null, "_storage_test_pool_pvs": [], "_storage_test_pool_pvs_lvm": [] }, "changed": false } TASK [verify the volumes] ****************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-pool-volumes.yml:3 Thursday 21 July 2022 10:09:06 +0000 (0:00:00.035) 0:03:02.973 ********* included: /tmp/tmpa3egnbq5/tests/test-verify-volume.yml for /cache/centos-8.qcow2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'name': 'lv1', 'raid_level': None, 'size': '3g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'thin_pool_name': 'tpool1', 'thin_pool_size': '10g', 'thin': True, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/vg1-lv1', '_raw_device': '/dev/mapper/vg1-lv1', '_mount_id': '/dev/mapper/vg1-lv1', '_kernel_device': '/dev/dm-4', '_raw_kernel_device': '/dev/dm-4'}) TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume.yml:2 Thursday 21 July 2022 10:09:06 +0000 (0:00:00.043) 0:03:03.017 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_volume_present": true, "_storage_volume_tests": [ "mount", "fstab", "fs", "device", "encryption", "md", "size", "cache" ] }, "changed": false } TASK [include_tasks] *********************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume.yml:10 Thursday 21 July 2022 10:09:06 +0000 (0:00:00.048) 0:03:03.066 ********* included: 
/tmp/tmpa3egnbq5/tests/test-verify-volume-mount.yml for /cache/centos-8.qcow2 => (item=mount) included: /tmp/tmpa3egnbq5/tests/test-verify-volume-fstab.yml for /cache/centos-8.qcow2 => (item=fstab) included: /tmp/tmpa3egnbq5/tests/test-verify-volume-fs.yml for /cache/centos-8.qcow2 => (item=fs) included: /tmp/tmpa3egnbq5/tests/test-verify-volume-device.yml for /cache/centos-8.qcow2 => (item=device) included: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml for /cache/centos-8.qcow2 => (item=encryption) included: /tmp/tmpa3egnbq5/tests/test-verify-volume-md.yml for /cache/centos-8.qcow2 => (item=md) included: /tmp/tmpa3egnbq5/tests/test-verify-volume-size.yml for /cache/centos-8.qcow2 => (item=size) included: /tmp/tmpa3egnbq5/tests/test-verify-volume-cache.yml for /cache/centos-8.qcow2 => (item=cache) TASK [Get expected mount device based on device type] ************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-mount.yml:6 Thursday 21 July 2022 10:09:06 +0000 (0:00:00.082) 0:03:03.148 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_device_path": "/dev/mapper/vg1-lv1" }, "changed": false } TASK [Set some facts] ********************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-mount.yml:14 Thursday 21 July 2022 10:09:06 +0000 (0:00:00.053) 0:03:03.201 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_mount_device_matches": [ { "block_available": 770083, "block_size": 4096, "block_total": 783872, "block_used": 13789, "device": "/dev/mapper/vg1-lv1", "fstype": "xfs", "inode_available": 1572861, "inode_total": 1572864, "inode_used": 3, "mount": "/opt/test1", "options": "rw,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=64k,sunit=128,swidth=128,noquota", "size_available": 3154259968, "size_total": 3210739712, "uuid": "c574fe3b-c0cb-4bb1-85fe-deeef0a957b9" } ], "storage_test_mount_expected_match_count": "1", "storage_test_mount_point_matches": [ { "block_available": 770083, "block_size": 4096, "block_total": 783872, "block_used": 13789, "device": "/dev/mapper/vg1-lv1", "fstype": "xfs", "inode_available": 1572861, "inode_total": 1572864, "inode_used": 3, "mount": "/opt/test1", "options": "rw,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=64k,sunit=128,swidth=128,noquota", "size_available": 3154259968, "size_total": 3210739712, "uuid": "c574fe3b-c0cb-4bb1-85fe-deeef0a957b9" } ], "storage_test_swap_expected_matches": "0" }, "changed": false } TASK [Verify the current mount state by device] ******************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-mount.yml:28 Thursday 21 July 2022 10:09:06 +0000 (0:00:00.057) 0:03:03.259 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Verify the current mount state by mount point] *************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-mount.yml:37 Thursday 21 July 2022 10:09:06 +0000 (0:00:00.054) 0:03:03.313 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Verify the mount fs type] ************************************************ task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-mount.yml:45 Thursday 21 July 2022 10:09:06 +0000 (0:00:00.052) 0:03:03.366 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [command] ***************************************************************** task path: 
/tmp/tmpa3egnbq5/tests/test-verify-volume-mount.yml:54 Thursday 21 July 2022 10:09:06 +0000 (0:00:00.052) 0:03:03.418 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Gather swap info] ******************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-mount.yml:58 Thursday 21 July 2022 10:09:06 +0000 (0:00:00.025) 0:03:03.443 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify swap status] ****************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-mount.yml:63 Thursday 21 July 2022 10:09:06 +0000 (0:00:00.025) 0:03:03.468 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Unset facts] ************************************************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-mount.yml:75 Thursday 21 July 2022 10:09:06 +0000 (0:00:00.026) 0:03:03.495 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_mount_device_matches": null, "storage_test_mount_expected_match_count": null, "storage_test_mount_point_matches": null, "storage_test_swap_expected_matches": null, "storage_test_swaps": null, "storage_test_sys_node": null }, "changed": false } TASK [Set some variables for fstab checking] *********************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-fstab.yml:2 Thursday 21 July 2022 10:09:06 +0000 (0:00:00.037) 0:03:03.532 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": "1", "storage_test_fstab_expected_mount_options_matches": "1", "storage_test_fstab_expected_mount_point_matches": "1", "storage_test_fstab_id_matches": [ "/dev/mapper/vg1-lv1 " ], "storage_test_fstab_mount_options_matches": [ " /opt/test1 xfs defaults " ], "storage_test_fstab_mount_point_matches": [ " /opt/test1 " ] }, "changed": false } TASK [Verify that the device identifier appears in /etc/fstab] ***************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-fstab.yml:25 Thursday 21 July 2022 10:09:06 +0000 (0:00:00.081) 0:03:03.613 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Verify the fstab mount point] ******************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-fstab.yml:32 Thursday 21 July 2022 10:09:06 +0000 (0:00:00.051) 0:03:03.665 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Verify mount_options] **************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-fstab.yml:39 Thursday 21 July 2022 10:09:07 +0000 (0:00:00.055) 0:03:03.720 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clean up variables] ****************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-fstab.yml:49 Thursday 21 July 2022 10:09:07 +0000 (0:00:00.037) 0:03:03.757 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": null, "storage_test_fstab_expected_mount_options_matches": null, "storage_test_fstab_expected_mount_point_matches": null, "storage_test_fstab_id_matches": null, "storage_test_fstab_mount_options_matches": null, 
"storage_test_fstab_mount_point_matches": null }, "changed": false } TASK [Verify fs type] ********************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-fs.yml:4 Thursday 21 July 2022 10:09:07 +0000 (0:00:00.120) 0:03:03.878 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Verify fs label] ********************************************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-fs.yml:10 Thursday 21 July 2022 10:09:07 +0000 (0:00:00.038) 0:03:03.916 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [See whether the device node is present] ********************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-device.yml:4 Thursday 21 July 2022 10:09:07 +0000 (0:00:00.039) 0:03:03.955 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "stat": { "atime": 1658398136.3502579, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1658398136.3502579, "dev": 6, "device_type": 64772, "executable": false, "exists": true, "gid": 6, "gr_name": "disk", "inode": 103369, "isblk": true, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/symlink", "mode": "0660", "mtime": 1658398136.3502579, "nlink": 1, "path": "/dev/mapper/vg1-lv1", "pw_name": "root", "readable": true, "rgrp": true, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": null, "wgrp": true, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [Verify the presence/absence of the device node] ************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-device.yml:10 Thursday 21 July 2022 10:09:07 +0000 (0:00:00.384) 0:03:04.339 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Make sure we got info about this volume] ********************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-device.yml:18 Thursday 21 July 2022 10:09:07 +0000 (0:00:00.039) 0:03:04.379 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [(1/2) Process volume type (set initial value)] *************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-device.yml:24 Thursday 21 July 2022 10:09:07 +0000 (0:00:00.038) 0:03:04.417 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "st_volume_type": "lvm" }, "changed": false } TASK [(2/2) Process volume type (get RAID value)] ****************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-device.yml:28 Thursday 21 July 2022 10:09:07 +0000 (0:00:00.036) 0:03:04.453 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the volume's device type] ***************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-device.yml:33 Thursday 21 July 2022 10:09:07 +0000 (0:00:00.023) 0:03:04.477 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Stat the LUKS device, if encrypted] ************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:3 Thursday 21 July 2022 10:09:07 +0000 (0:00:00.041) 0:03:04.518 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result 
was False" } TASK [Ensure cryptsetup is present] ******************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:10 Thursday 21 July 2022 10:09:07 +0000 (0:00:00.025) 0:03:04.544 ********* changed: [/cache/centos-8.qcow2] => { "changed": true, "rc": 0, "results": [ "Installed: cryptsetup-libs-2.3.7-2.el8.x86_64", "Installed: cryptsetup-2.3.7-2.el8.x86_64", "Removed: cryptsetup-libs-2.3.3-4.el8.x86_64" ] } TASK [Collect LUKS info for this volume] *************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:15 Thursday 21 July 2022 10:09:11 +0000 (0:00:03.366) 0:03:07.910 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the presence/absence of the LUKS device node] ********************* task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:21 Thursday 21 July 2022 10:09:11 +0000 (0:00:00.033) 0:03:07.944 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that the raw device is the same as the device if not encrypted] *** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:30 Thursday 21 July 2022 10:09:11 +0000 (0:00:00.030) 0:03:07.975 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Make sure we got info about the LUKS volume if encrypted] **************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:38 Thursday 21 July 2022 10:09:11 +0000 (0:00:00.054) 0:03:08.029 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the LUKS volume's device type if encrypted] *********************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:44 Thursday 21 July 2022 10:09:11 +0000 (0:00:00.026) 0:03:08.055 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS version] ****************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:49 Thursday 21 July 2022 10:09:11 +0000 (0:00:00.025) 0:03:08.081 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS key size] ***************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:55 Thursday 21 July 2022 10:09:11 +0000 (0:00:00.027) 0:03:08.109 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS cipher] ******************************************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:61 Thursday 21 July 2022 10:09:11 +0000 (0:00:00.028) 0:03:08.138 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:67 Thursday 21 July 2022 10:09:11 +0000 (0:00:00.026) 0:03:08.164 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_crypttab_entries": [], "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Check for /etc/crypttab entry] 
******************************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:74 Thursday 21 July 2022 10:09:11 +0000 (0:00:00.055) 0:03:08.219 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:79 Thursday 21 July 2022 10:09:11 +0000 (0:00:00.051) 0:03:08.271 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:85 Thursday 21 July 2022 10:09:11 +0000 (0:00:00.039) 0:03:08.310 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:91 Thursday 21 July 2022 10:09:11 +0000 (0:00:00.038) 0:03:08.349 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:97 Thursday 21 July 2022 10:09:11 +0000 (0:00:00.039) 0:03:08.389 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_expected_crypttab_entries": null, "_storage_test_expected_crypttab_key_file": null }, "changed": false } TASK [get information about RAID] ********************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-md.yml:7 Thursday 21 July 2022 10:09:11 +0000 (0:00:00.035) 0:03:08.425 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-md.yml:13 Thursday 21 July 2022 10:09:11 +0000 (0:00:00.039) 0:03:08.465 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-md.yml:17 Thursday 21 July 2022 10:09:11 +0000 (0:00:00.041) 0:03:08.506 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-md.yml:21 Thursday 21 July 2022 10:09:11 +0000 (0:00:00.042) 0:03:08.548 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [check RAID active devices count] ***************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-md.yml:25 Thursday 21 July 2022 10:09:11 +0000 (0:00:00.039) 0:03:08.588 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [check RAID spare devices count] ****************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-md.yml:31 Thursday 21 July 2022 10:09:11 +0000 (0:00:00.038) 0:03:08.626 ********* skipping: [/cache/centos-8.qcow2] 
=> { "changed": false, "skip_reason": "Conditional result was False" } TASK [check RAID metadata version] ********************************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-md.yml:37 Thursday 21 July 2022 10:09:11 +0000 (0:00:00.038) 0:03:08.665 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [parse the actual size of the volume] ************************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-size.yml:3 Thursday 21 July 2022 10:09:11 +0000 (0:00:00.041) 0:03:08.706 ********* ok: [/cache/centos-8.qcow2] => { "bytes": 3221225472, "changed": false, "lvm": "3g", "parted": "3GiB", "size": "3 GiB" } TASK [parse the requested size of the volume] ********************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-size.yml:9 Thursday 21 July 2022 10:09:12 +0000 (0:00:00.514) 0:03:09.221 ********* ok: [/cache/centos-8.qcow2] => { "bytes": 3221225472, "changed": false, "lvm": "3g", "parted": "3GiB", "size": "3 GiB" } TASK [Establish base value for expected size] ********************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-size.yml:15 Thursday 21 July 2022 10:09:12 +0000 (0:00:00.388) 0:03:09.610 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_expected_size": "3221225472" }, "changed": false } TASK [debug] ******************************************************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-size.yml:20 Thursday 21 July 2022 10:09:12 +0000 (0:00:00.051) 0:03:09.661 ********* ok: [/cache/centos-8.qcow2] => { "storage_test_expected_size": "3221225472" } TASK [debug] ******************************************************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-size.yml:25 Thursday 21 July 2022 10:09:12 +0000 (0:00:00.037) 0:03:09.699 ********* skipping: [/cache/centos-8.qcow2] => {} TASK [debug] ******************************************************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-size.yml:28 Thursday 21 July 2022 10:09:13 +0000 (0:00:00.038) 0:03:09.737 ********* skipping: [/cache/centos-8.qcow2] => {} TASK [Get the size of parent/pool device] ************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-size.yml:31 Thursday 21 July 2022 10:09:13 +0000 (0:00:00.042) 0:03:09.779 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [debug] ******************************************************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-size.yml:36 Thursday 21 July 2022 10:09:13 +0000 (0:00:00.038) 0:03:09.818 ********* skipping: [/cache/centos-8.qcow2] => {} TASK [Calculate the expected size based on pool size and percentage value] ***** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-size.yml:39 Thursday 21 July 2022 10:09:13 +0000 (0:00:00.037) 0:03:09.855 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [debug] ******************************************************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-size.yml:44 Thursday 21 July 2022 10:09:13 +0000 (0:00:00.037) 0:03:09.893 ********* ok: [/cache/centos-8.qcow2] => { "storage_test_actual_size": { "bytes": 3221225472, "changed": false, "failed": false, "lvm": "3g", "parted": "3GiB", "size": "3 GiB" } } TASK [debug] 
******************************************************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-size.yml:47 Thursday 21 July 2022 10:09:13 +0000 (0:00:00.040) 0:03:09.934 ********* ok: [/cache/centos-8.qcow2] => { "storage_test_expected_size": "3221225472" } TASK [assert] ****************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-size.yml:50 Thursday 21 July 2022 10:09:13 +0000 (0:00:00.036) 0:03:09.971 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Get information about the LV] ******************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-cache.yml:6 Thursday 21 July 2022 10:09:13 +0000 (0:00:00.054) 0:03:10.025 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "cmd": [ "lvs", "--noheadings", "--nameprefixes", "--units=b", "--nosuffix", "--unquoted", "-o", "name,attr,cache_total_blocks,chunk_size,segtype", "vg1/lv1" ], "delta": "0:00:00.039112", "end": "2022-07-21 10:09:13.601472", "rc": 0, "start": "2022-07-21 10:09:13.562360" } STDOUT: LVM2_LV_NAME=lv1 LVM2_LV_ATTR=Vwi-aotz-- LVM2_CACHE_TOTAL_BLOCKS= LVM2_CHUNK_SIZE=0 LVM2_SEGTYPE=thin TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-cache.yml:14 Thursday 21 July 2022 10:09:13 +0000 (0:00:00.419) 0:03:10.445 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_lv_segtype": [ "thin" ] }, "changed": false } TASK [check segment type] ****************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-cache.yml:17 Thursday 21 July 2022 10:09:13 +0000 (0:00:00.050) 0:03:10.495 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-cache.yml:22 Thursday 21 July 2022 10:09:13 +0000 (0:00:00.053) 0:03:10.548 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [parse the requested cache size] ****************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-cache.yml:26 Thursday 21 July 2022 10:09:13 +0000 (0:00:00.041) 0:03:10.589 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-cache.yml:32 Thursday 21 July 2022 10:09:13 +0000 (0:00:00.038) 0:03:10.628 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check cache size] ******************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-cache.yml:36 Thursday 21 July 2022 10:09:13 +0000 (0:00:00.038) 0:03:10.666 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clean up facts] ********************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume.yml:16 Thursday 21 July 2022 10:09:13 +0000 (0:00:00.037) 0:03:10.704 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_volume_present": null }, "changed": false } TASK [Verify the volumes with no pool were correctly managed] 
****************** task path: /tmp/tmpa3egnbq5/tests/verify-role-results.yml:43 Thursday 21 July 2022 10:09:14 +0000 (0:00:00.037) 0:03:10.742 ********* TASK [Clean up variable namespace] ********************************************* task path: /tmp/tmpa3egnbq5/tests/verify-role-results.yml:53 Thursday 21 July 2022 10:09:14 +0000 (0:00:00.022) 0:03:10.764 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_blkinfo": null, "storage_test_crypttab": null, "storage_test_fstab": null }, "changed": false } TASK [Repeat previous invocation to verify idempotence] ************************ task path: /tmp/tmpa3egnbq5/tests/tests_create_thinp_then_remove.yml:40 Thursday 21 July 2022 10:09:14 +0000 (0:00:00.034) 0:03:10.799 ********* TASK [linux-system-roles.storage : set platform/version specific variables] **** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main.yml:2 Thursday 21 July 2022 10:09:14 +0000 (0:00:00.050) 0:03:10.850 ********* included: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/set_vars.yml for /cache/centos-8.qcow2 TASK [linux-system-roles.storage : Ensure ansible_facts used by role] ********** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:2 Thursday 21 July 2022 10:09:14 +0000 (0:00:00.035) 0:03:10.885 ********* ok: [/cache/centos-8.qcow2] TASK [linux-system-roles.storage : Set platform/version specific variables] **** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:8 Thursday 21 July 2022 10:09:14 +0000 (0:00:00.553) 0:03:11.438 ********* skipping: [/cache/centos-8.qcow2] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [/cache/centos-8.qcow2] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [/cache/centos-8.qcow2] => (item=CentOS_8.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "vdo", "kmod-kvdo", "xfsprogs" ] }, "ansible_included_var_files": [ "/tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } ok: [/cache/centos-8.qcow2] => (item=CentOS_8.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "vdo", "kmod-kvdo", "xfsprogs" ] }, "ansible_included_var_files": [ "/tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } TASK [linux-system-roles.storage : define an empty list of pools to be used in testing] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main.yml:5 Thursday 21 July 2022 10:09:14 +0000 (0:00:00.122) 0:03:11.561 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [linux-system-roles.storage : define an empty list of volumes to be used in testing] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main.yml:9 Thursday 21 July 2022 10:09:14 +0000 (0:00:00.034) 0:03:11.595 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK 
[linux-system-roles.storage : include the appropriate provider tasks] ***** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main.yml:13 Thursday 21 July 2022 10:09:14 +0000 (0:00:00.035) 0:03:11.630 ********* redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount included: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml for /cache/centos-8.qcow2 TASK [linux-system-roles.storage : get a list of rpm packages installed on host machine] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:2 Thursday 21 July 2022 10:09:14 +0000 (0:00:00.048) 0:03:11.678 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : make sure blivet is available] ************** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:7 Thursday 21 July 2022 10:09:14 +0000 (0:00:00.020) 0:03:11.699 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [linux-system-roles.storage : show storage_pools] ************************* task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:14 Thursday 21 July 2022 10:09:16 +0000 (0:00:01.877) 0:03:13.577 ********* ok: [/cache/centos-8.qcow2] => { "storage_pools": [ { "disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ], "name": "vg1", "type": "lvm", "volumes": [ { "mount_point": "/opt/test1", "name": "lv1", "size": "3g", "thin": true, "thin_pool_name": "tpool1", "thin_pool_size": "10g" } ] } ] } TASK [linux-system-roles.storage : show storage_volumes] *********************** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:19 Thursday 21 July 2022 10:09:16 +0000 (0:00:00.038) 0:03:13.615 ********* ok: [/cache/centos-8.qcow2] => { "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined" } TASK [linux-system-roles.storage : get required packages] ********************** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:24 Thursday 21 July 2022 10:09:16 +0000 (0:00:00.047) 0:03:13.663 ********* ok: [/cache/centos-8.qcow2] => { "actions": [], "changed": false, "crypts": [], "leaves": [], "mounts": [], "packages": [ "lvm2" ], "pools": [], "volumes": [] } TASK [linux-system-roles.storage : enable copr repositories if needed] ********* task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:37 Thursday 21 July 2022 10:09:18 +0000 (0:00:01.949) 0:03:15.613 ********* included: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml for /cache/centos-8.qcow2 TASK [linux-system-roles.storage : check if the COPR support packages should be installed] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml:2 Thursday 21 July 2022 10:09:18 +0000 (0:00:00.049) 0:03:15.662 ********* TASK [linux-system-roles.storage : make sure COPR support packages are present] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml:13 Thursday 21 July 2022 10:09:18 +0000 (0:00:00.038) 0:03:15.701 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : enable COPRs] ******************************* task path: 
/tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml:18 Thursday 21 July 2022 10:09:19 +0000 (0:00:00.042) 0:03:15.744 ********* TASK [linux-system-roles.storage : make sure required packages are installed] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:44 Thursday 21 July 2022 10:09:19 +0000 (0:00:00.036) 0:03:15.780 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [linux-system-roles.storage : get service facts] ************************** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:51 Thursday 21 July 2022 10:09:20 +0000 (0:00:01.865) 0:03:17.646 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "blivet.service": { "name": "blivet.service", "source": "systemd", "state": "inactive", "status": "static" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "chrony-dnssrv@.service": { "name": "chrony-dnssrv@.service", "source": "systemd", "state": "unknown", "status": "static" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cockpit-motd.service": { "name": "cockpit-motd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cockpit-wsinstance-http.service": { "name": "cockpit-wsinstance-http.service", "source": "systemd", "state": "inactive", "status": "static" }, "cockpit-wsinstance-https-factory@.service": { "name": "cockpit-wsinstance-https-factory@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cockpit-wsinstance-https@.service": { "name": "cockpit-wsinstance-https@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cockpit.service": { "name": "cockpit.service", "source": "systemd", "state": "inactive", "status": "static" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": 
"container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cpupower.service": { "name": "cpupower.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "static" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "dbus-org.freedesktop.portable1.service": { "name": "dbus-org.freedesktop.portable1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "running", "status": "static" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "running", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fcoe.service": { "name": "fcoe.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", 
"status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "halt-local.service": { "name": "halt-local.service", "source": "systemd", "state": "inactive", "status": "static" }, "import-state.service": { "name": "import-state.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "iscsi-shutdown.service": { "name": "iscsi-shutdown.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iscsi.service": { "name": "iscsi.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iscsid.service": { "name": "iscsid.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "loadmodules.service": { "name": "loadmodules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "lvm2-activation-early.service": { "name": "lvm2-activation-early.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "lvm2-activation.service": { "name": "lvm2-activation.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "lvm2-pvscan@.service": { "name": "lvm2-pvscan@.service", "source": "systemd", "state": "unknown", "status": "static" }, "lvm2-pvscan@259:4.service": { "name": "lvm2-pvscan@259:4.service", "source": "systemd", "state": "stopped", "status": "active" }, "lvm2-pvscan@259:5.service": { "name": "lvm2-pvscan@259:5.service", "source": "systemd", "state": "stopped", "status": "active" }, "lvm2-pvscan@259:6.service": { "name": "lvm2-pvscan@259:6.service", "source": "systemd", "state": "stopped", "status": "active" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "mdadm-grow-continue@.service": { "name": "mdadm-grow-continue@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdadm-last-resort@.service": { "name": "mdadm-last-resort@.service", "source": "systemd", "state": "unknown", "status": "static" 
}, "mdcheck_continue.service": { "name": "mdcheck_continue.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdcheck_start.service": { "name": "mdcheck_start.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmon@.service": { "name": "mdmon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdmonitor-oneshot.service": { "name": "mdmonitor-oneshot.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmonitor.service": { "name": "mdmonitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "messagebus.service": { "name": "messagebus.service", "source": "systemd", "state": "active", "status": "static" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "multipathd.service": { "name": "multipathd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "ndctl-monitor.service": { "name": "ndctl-monitor.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-convert.service": { "name": "nfs-convert.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "oddjobd.service": { "name": "oddjobd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "packagekit-offline-update.service": { "name": "packagekit-offline-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "packagekit.service": { "name": "packagekit.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "running", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon.service": { "name": "quotaon.service", "source": "systemd", "state": "inactive", "status": "static" }, "rbdmap.service": { "name": "rbdmap.service", "source": 
"systemd", "state": "stopped", "status": "not-found" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rdisc.service": { "name": "rdisc.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "rngd-wake-threshold.service": { "name": "rngd-wake-threshold.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { 
"name": "sssd.service", "source": "systemd", "state": "running", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "active", "status": "enabled" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-resume@.service": { "name": "systemd-hibernate-resume@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, 
"systemd-portabled.service": { "name": "systemd-portabled.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-quotacheck.service": { "name": "systemd-quotacheck.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-resolved.service": { "name": "systemd-resolved.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "masked" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "tcsd.service": { "name": 
"tcsd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "teamd@.service": { "name": "teamd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "timedatex.service": { "name": "timedatex.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "tuned.service": { "name": "tuned.service", "source": "systemd", "state": "running", "status": "enabled" }, "unbound-anchor.service": { "name": "unbound-anchor.service", "source": "systemd", "state": "stopped", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "vdo-start-by-dev@.service": { "name": "vdo-start-by-dev@.service", "source": "systemd", "state": "unknown", "status": "static" }, "vdo.service": { "name": "vdo.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "yppasswdd.service": { "name": "yppasswdd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ypserv.service": { "name": "ypserv.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ypxfrd.service": { "name": "ypxfrd.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [linux-system-roles.storage : Set storage_cryptsetup_services] ************ task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:58 Thursday 21 July 2022 10:09:22 +0000 (0:00:01.658) 0:03:19.305 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_cryptsetup_services": [] }, "changed": false } TASK [linux-system-roles.storage : Mask the systemd cryptsetup services] ******* task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:71 Thursday 21 July 2022 10:09:22 +0000 (0:00:00.089) 0:03:19.394 ********* TASK [linux-system-roles.storage : manage the pools and volumes to match the specified state] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:77 Thursday 21 July 2022 10:09:22 +0000 (0:00:00.021) 0:03:19.415 ********* ok: [/cache/centos-8.qcow2] => { "actions": [], "changed": false, "crypts": [], "leaves": [ "/dev/sr0", "/dev/vda1", "/dev/sda", "/dev/sdb", "/dev/sdc", "/dev/mapper/vg1-lv1", "/dev/vdb", "/dev/vdc", "/dev/vdd" ], "mounts": [ { "dump": 0, "fstype": "xfs", "opts": "defaults", "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/vg1-lv1", "state": "mounted" } ], "packages": [ "lvm2", "xfsprogs" ], "pools": [ { "disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "vg1", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/vg1-lv1", "_kernel_device": "/dev/dm-4", "_mount_id": "/dev/mapper/vg1-lv1", "_raw_device": "/dev/mapper/vg1-lv1", 
"_raw_kernel_device": "/dev/dm-4", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "name": "lv1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "size": "3g", "state": "present", "thin": true, "thin_pool_name": "tpool1", "thin_pool_size": "10g", "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } TASK [linux-system-roles.storage : Workaround for udev issue on some platforms] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:91 Thursday 21 July 2022 10:09:24 +0000 (0:00:02.082) 0:03:21.498 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Unmask the systemd cryptsetup services] ***** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:103 Thursday 21 July 2022 10:09:24 +0000 (0:00:00.038) 0:03:21.536 ********* TASK [linux-system-roles.storage : show blivet_output] ************************* task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:109 Thursday 21 July 2022 10:09:24 +0000 (0:00:00.023) 0:03:21.559 ********* ok: [/cache/centos-8.qcow2] => { "blivet_output": { "actions": [], "changed": false, "crypts": [], "failed": false, "leaves": [ "/dev/sr0", "/dev/vda1", "/dev/sda", "/dev/sdb", "/dev/sdc", "/dev/mapper/vg1-lv1", "/dev/vdb", "/dev/vdc", "/dev/vdd" ], "mounts": [ { "dump": 0, "fstype": "xfs", "opts": "defaults", "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/vg1-lv1", "state": "mounted" } ], "packages": [ "lvm2", "xfsprogs" ], "pools": [ { "disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "vg1", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/vg1-lv1", "_kernel_device": "/dev/dm-4", "_mount_id": "/dev/mapper/vg1-lv1", "_raw_device": "/dev/mapper/vg1-lv1", "_raw_kernel_device": "/dev/dm-4", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "name": "lv1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "size": "3g", "state": "present", "thin": true, 
"thin_pool_name": "tpool1", "thin_pool_size": "10g", "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } } TASK [linux-system-roles.storage : set the list of pools for test verification] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:114 Thursday 21 July 2022 10:09:24 +0000 (0:00:00.039) 0:03:21.599 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_pools_list": [ { "disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "vg1", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/vg1-lv1", "_kernel_device": "/dev/dm-4", "_mount_id": "/dev/mapper/vg1-lv1", "_raw_device": "/dev/mapper/vg1-lv1", "_raw_kernel_device": "/dev/dm-4", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "name": "lv1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "size": "3g", "state": "present", "thin": true, "thin_pool_name": "tpool1", "thin_pool_size": "10g", "type": "lvm", "vdo_pool_size": null } ] } ] }, "changed": false } TASK [linux-system-roles.storage : set the list of volumes for test verification] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:118 Thursday 21 July 2022 10:09:24 +0000 (0:00:00.039) 0:03:21.639 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [linux-system-roles.storage : remove obsolete mounts] ********************* task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:134 Thursday 21 July 2022 10:09:24 +0000 (0:00:00.039) 0:03:21.678 ********* TASK [linux-system-roles.storage : tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:146 Thursday 21 July 2022 10:09:25 +0000 (0:00:00.040) 0:03:21.719 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "name": null, "status": {} } TASK [linux-system-roles.storage : set up new/current mounts] ****************** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:151 Thursday 21 July 2022 10:09:25 +0000 (0:00:00.665) 0:03:22.384 ********* redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount ok: [/cache/centos-8.qcow2] => (item={'src': '/dev/mapper/vg1-lv1', 'path': '/opt/test1', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted'}) => { "ansible_loop_var": "mount_info", "backup_file": "", "boot": "yes", "changed": false, "dump": "0", "fstab": "/etc/fstab", "fstype": "xfs", "mount_info": { "dump": 0, "fstype": "xfs", "opts": "defaults", "passno": 0, 
"path": "/opt/test1", "src": "/dev/mapper/vg1-lv1", "state": "mounted" }, "name": "/opt/test1", "opts": "defaults", "passno": "0", "src": "/dev/mapper/vg1-lv1" } TASK [linux-system-roles.storage : tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:163 Thursday 21 July 2022 10:09:26 +0000 (0:00:00.412) 0:03:22.797 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "name": null, "status": {} } TASK [linux-system-roles.storage : retrieve facts for the /etc/crypttab file] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:171 Thursday 21 July 2022 10:09:26 +0000 (0:00:00.625) 0:03:23.422 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "stat": { "atime": 1658398003.7492578, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1643141385.117, "dev": 64513, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 135, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0600", "mtime": 1643141019.537, "nlink": 1, "path": "/etc/crypttab", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": "3147672035", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [linux-system-roles.storage : manage /etc/crypttab to account for changes we just made] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:176 Thursday 21 July 2022 10:09:27 +0000 (0:00:00.406) 0:03:23.829 ********* TASK [linux-system-roles.storage : Update facts] ******************************* task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:198 Thursday 21 July 2022 10:09:27 +0000 (0:00:00.022) 0:03:23.851 ********* ok: [/cache/centos-8.qcow2] META: role_complete for /cache/centos-8.qcow2 TASK [include_tasks] *********************************************************** task path: /tmp/tmpa3egnbq5/tests/tests_create_thinp_then_remove.yml:56 Thursday 21 July 2022 10:09:28 +0000 (0:00:01.001) 0:03:24.853 ********* included: /tmp/tmpa3egnbq5/tests/verify-role-results.yml for /cache/centos-8.qcow2 TASK [Print out pool information] ********************************************** task path: /tmp/tmpa3egnbq5/tests/verify-role-results.yml:1 Thursday 21 July 2022 10:09:28 +0000 (0:00:00.069) 0:03:24.922 ********* ok: [/cache/centos-8.qcow2] => { "_storage_pools_list": [ { "disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "vg1", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/vg1-lv1", "_kernel_device": "/dev/dm-4", "_mount_id": "/dev/mapper/vg1-lv1", "_raw_device": "/dev/mapper/vg1-lv1", "_raw_kernel_device": "/dev/dm-4", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ], "encryption": false, 
"encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "name": "lv1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "size": "3g", "state": "present", "thin": true, "thin_pool_name": "tpool1", "thin_pool_size": "10g", "type": "lvm", "vdo_pool_size": null } ] } ] } TASK [Print out volume information] ******************************************** task path: /tmp/tmpa3egnbq5/tests/verify-role-results.yml:6 Thursday 21 July 2022 10:09:28 +0000 (0:00:00.050) 0:03:24.972 ********* skipping: [/cache/centos-8.qcow2] => {} TASK [Collect info about the volumes.] ***************************************** task path: /tmp/tmpa3egnbq5/tests/verify-role-results.yml:14 Thursday 21 July 2022 10:09:28 +0000 (0:00:00.036) 0:03:25.008 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "info": { "/dev/mapper/vg1-lv1": { "fstype": "xfs", "label": "", "name": "/dev/mapper/vg1-lv1", "size": "3G", "type": "lvm", "uuid": "c574fe3b-c0cb-4bb1-85fe-deeef0a957b9" }, "/dev/mapper/vg1-tpool1": { "fstype": "", "label": "", "name": "/dev/mapper/vg1-tpool1", "size": "10G", "type": "lvm", "uuid": "" }, "/dev/mapper/vg1-tpool1-tpool": { "fstype": "", "label": "", "name": "/dev/mapper/vg1-tpool1-tpool", "size": "10G", "type": "lvm", "uuid": "" }, "/dev/mapper/vg1-tpool1_tdata": { "fstype": "", "label": "", "name": "/dev/mapper/vg1-tpool1_tdata", "size": "10G", "type": "lvm", "uuid": "" }, "/dev/mapper/vg1-tpool1_tmeta": { "fstype": "", "label": "", "name": "/dev/mapper/vg1-tpool1_tmeta", "size": "12M", "type": "lvm", "uuid": "" }, "/dev/nvme0n1": { "fstype": "", "label": "", "name": "/dev/nvme0n1", "size": "10G", "type": "disk", "uuid": "" }, "/dev/nvme0n1p1": { "fstype": "LVM2_member", "label": "", "name": "/dev/nvme0n1p1", "size": "10G", "type": "partition", "uuid": "xLu0vZ-nH9w-K1e2-A6yJ-EaoZ-FOkB-aLS539" }, "/dev/nvme1n1": { "fstype": "", "label": "", "name": "/dev/nvme1n1", "size": "10G", "type": "disk", "uuid": "" }, "/dev/nvme1n1p1": { "fstype": "LVM2_member", "label": "", "name": "/dev/nvme1n1p1", "size": "10G", "type": "partition", "uuid": "Jjw50w-Whgv-fxb5-adBQ-h5Qy-h8fG-52EFuy" }, "/dev/nvme2n1": { "fstype": "", "label": "", "name": "/dev/nvme2n1", "size": "10G", "type": "disk", "uuid": "" }, "/dev/nvme2n1p1": { "fstype": "LVM2_member", "label": "", "name": "/dev/nvme2n1p1", "size": "10G", "type": "partition", "uuid": "2PzQCY-FE6q-TI2r-KVCh-wUMp-axXy-16asvy" }, "/dev/sda": { "fstype": "", "label": "", "name": "/dev/sda", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdb": { "fstype": "", "label": "", "name": "/dev/sdb", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdc": { "fstype": "", "label": "", "name": "/dev/sdc", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sr0": { "fstype": "iso9660", "label": "cidata", "name": "/dev/sr0", "size": "364K", "type": "rom", "uuid": "2022-07-21-10-05-42-00" }, "/dev/vda": { "fstype": "", "label": "", "name": "/dev/vda", "size": "10G", "type": "disk", "uuid": "" }, "/dev/vda1": { "fstype": "xfs", "label": "", "name": "/dev/vda1", "size": "10G", "type": "partition", "uuid": "395b9844-e404-4857-afbb-c6edccaf72f3" }, "/dev/vdb": { "fstype": "", "label": "", 
"name": "/dev/vdb", "size": "10G", "type": "disk", "uuid": "" }, "/dev/vdc": { "fstype": "", "label": "", "name": "/dev/vdc", "size": "10G", "type": "disk", "uuid": "" }, "/dev/vdd": { "fstype": "", "label": "", "name": "/dev/vdd", "size": "10G", "type": "disk", "uuid": "" } } } TASK [Read the /etc/fstab file for volume existence] *************************** task path: /tmp/tmpa3egnbq5/tests/verify-role-results.yml:19 Thursday 21 July 2022 10:09:28 +0000 (0:00:00.389) 0:03:25.398 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "cmd": [ "cat", "/etc/fstab" ], "delta": "0:00:00.003532", "end": "2022-07-21 10:09:28.939295", "rc": 0, "start": "2022-07-21 10:09:28.935763" } STDOUT: # # /etc/fstab # Created by anaconda on Tue Jan 25 20:03:39 2022 # # Accessible filesystems, by reference, are maintained under '/dev/disk/'. # See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info. # # After editing this file, run 'systemctl daemon-reload' to update systemd # units generated from this file. # UUID=395b9844-e404-4857-afbb-c6edccaf72f3 / xfs defaults 0 0 /dev/mapper/vg1-lv1 /opt/test1 xfs defaults 0 0 TASK [Read the /etc/crypttab file] ********************************************* task path: /tmp/tmpa3egnbq5/tests/verify-role-results.yml:24 Thursday 21 July 2022 10:09:29 +0000 (0:00:00.384) 0:03:25.782 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "cmd": [ "cat", "/etc/crypttab" ], "delta": "0:00:00.002731", "end": "2022-07-21 10:09:29.321409", "failed_when_result": false, "rc": 0, "start": "2022-07-21 10:09:29.318678" } TASK [Verify the volumes listed in storage_pools were correctly managed] ******* task path: /tmp/tmpa3egnbq5/tests/verify-role-results.yml:33 Thursday 21 July 2022 10:09:29 +0000 (0:00:00.380) 0:03:26.163 ********* included: /tmp/tmpa3egnbq5/tests/test-verify-pool.yml for /cache/centos-8.qcow2 => (item={'disks': ['nvme0n1', 'nvme1n1', 'nvme2n1'], 'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'name': 'vg1', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'state': 'present', 'type': 'lvm', 'volumes': [{'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'name': 'lv1', 'raid_level': None, 'size': '3g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'thin_pool_name': 'tpool1', 'thin_pool_size': '10g', 'thin': True, 'vdo_pool_size': None, 'disks': ['nvme0n1', 'nvme1n1', 'nvme2n1'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/vg1-lv1', '_raw_device': '/dev/mapper/vg1-lv1', '_mount_id': '/dev/mapper/vg1-lv1', '_kernel_device': '/dev/dm-4', '_raw_kernel_device': '/dev/dm-4'}], 'raid_chunk_size': None}) TASK [Set _storage_pool_tests] ************************************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-pool.yml:5 Thursday 21 July 2022 10:09:29 +0000 (0:00:00.059) 0:03:26.222 ********* ok: [/cache/centos-8.qcow2] => { 
"ansible_facts": { "_storage_pool_tests": [ "members", "volumes" ] }, "changed": false } TASK [include_tasks] *********************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-pool.yml:18 Thursday 21 July 2022 10:09:29 +0000 (0:00:00.037) 0:03:26.259 ********* included: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml for /cache/centos-8.qcow2 => (item=members) included: /tmp/tmpa3egnbq5/tests/test-verify-pool-volumes.yml for /cache/centos-8.qcow2 => (item=volumes) TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml:1 Thursday 21 July 2022 10:09:29 +0000 (0:00:00.049) 0:03:26.309 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_expected_pv_count": "3", "_storage_test_pool_pvs_lvm": [ "/dev/nvme0n1p1", "/dev/nvme1n1p1", "/dev/nvme2n1p1" ] }, "changed": false } TASK [Get the canonical device path for each member device] ******************** task path: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml:10 Thursday 21 July 2022 10:09:29 +0000 (0:00:00.055) 0:03:26.365 ********* ok: [/cache/centos-8.qcow2] => (item=/dev/nvme0n1p1) => { "ansible_loop_var": "pv", "changed": false, "device": "/dev/nvme0n1p1", "pv": "/dev/nvme0n1p1" } ok: [/cache/centos-8.qcow2] => (item=/dev/nvme1n1p1) => { "ansible_loop_var": "pv", "changed": false, "device": "/dev/nvme1n1p1", "pv": "/dev/nvme1n1p1" } ok: [/cache/centos-8.qcow2] => (item=/dev/nvme2n1p1) => { "ansible_loop_var": "pv", "changed": false, "device": "/dev/nvme2n1p1", "pv": "/dev/nvme2n1p1" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml:19 Thursday 21 July 2022 10:09:30 +0000 (0:00:01.072) 0:03:27.437 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "__pvs_lvm_len": "3" }, "changed": false } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml:23 Thursday 21 July 2022 10:09:30 +0000 (0:00:00.050) 0:03:27.487 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_pool_pvs": [ "/dev/nvme0n1p1", "/dev/nvme1n1p1", "/dev/nvme2n1p1" ] }, "changed": false } TASK [Verify PV count] ********************************************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml:27 Thursday 21 July 2022 10:09:30 +0000 (0:00:00.051) 0:03:27.539 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml:34 Thursday 21 July 2022 10:09:30 +0000 (0:00:00.050) 0:03:27.590 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_expected_pv_type": "disk" }, "changed": false } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml:38 Thursday 21 July 2022 10:09:30 +0000 (0:00:00.039) 0:03:27.629 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_expected_pv_type": "partition" }, "changed": false } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml:42 Thursday 21 July 2022 10:09:30 +0000 (0:00:00.051) 0:03:27.680 ********* skipping: [/cache/centos-8.qcow2] => { "changed": 
false, "skip_reason": "Conditional result was False" } TASK [Check the type of each PV] *********************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml:46 Thursday 21 July 2022 10:09:30 +0000 (0:00:00.025) 0:03:27.706 ********* ok: [/cache/centos-8.qcow2] => (item=/dev/nvme0n1p1) => { "ansible_loop_var": "pv", "changed": false, "pv": "/dev/nvme0n1p1" } MSG: All assertions passed ok: [/cache/centos-8.qcow2] => (item=/dev/nvme1n1p1) => { "ansible_loop_var": "pv", "changed": false, "pv": "/dev/nvme1n1p1" } MSG: All assertions passed ok: [/cache/centos-8.qcow2] => (item=/dev/nvme2n1p1) => { "ansible_loop_var": "pv", "changed": false, "pv": "/dev/nvme2n1p1" } MSG: All assertions passed TASK [Check MD RAID] *********************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml:56 Thursday 21 July 2022 10:09:31 +0000 (0:00:00.104) 0:03:27.811 ********* included: /tmp/tmpa3egnbq5/tests/verify-pool-md.yml for /cache/centos-8.qcow2 TASK [get information about RAID] ********************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-md.yml:6 Thursday 21 July 2022 10:09:31 +0000 (0:00:00.045) 0:03:27.856 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-md.yml:12 Thursday 21 July 2022 10:09:31 +0000 (0:00:00.027) 0:03:27.883 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-md.yml:16 Thursday 21 July 2022 10:09:31 +0000 (0:00:00.026) 0:03:27.910 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-md.yml:20 Thursday 21 July 2022 10:09:31 +0000 (0:00:00.027) 0:03:27.938 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [check RAID active devices count] ***************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-md.yml:24 Thursday 21 July 2022 10:09:31 +0000 (0:00:00.067) 0:03:28.005 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [check RAID spare devices count] ****************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-md.yml:30 Thursday 21 July 2022 10:09:31 +0000 (0:00:00.026) 0:03:28.031 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [check RAID metadata version] ********************************************* task path: /tmp/tmpa3egnbq5/tests/verify-pool-md.yml:36 Thursday 21 July 2022 10:09:31 +0000 (0:00:00.025) 0:03:28.057 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-md.yml:44 Thursday 21 July 2022 10:09:31 +0000 (0:00:00.026) 0:03:28.083 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_md_active_devices_re": null, 
"storage_test_md_metadata_version_re": null, "storage_test_md_spare_devices_re": null }, "changed": false } TASK [Check LVM RAID] ********************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml:59 Thursday 21 July 2022 10:09:31 +0000 (0:00:00.039) 0:03:28.123 ********* included: /tmp/tmpa3egnbq5/tests/verify-pool-members-lvmraid.yml for /cache/centos-8.qcow2 TASK [Validate pool member LVM RAID settings] ********************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-members-lvmraid.yml:1 Thursday 21 July 2022 10:09:31 +0000 (0:00:00.045) 0:03:28.168 ********* included: /tmp/tmpa3egnbq5/tests/verify-pool-member-lvmraid.yml for /cache/centos-8.qcow2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'name': 'lv1', 'raid_level': None, 'size': '3g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'thin_pool_name': 'tpool1', 'thin_pool_size': '10g', 'thin': True, 'vdo_pool_size': None, 'disks': ['nvme0n1', 'nvme1n1', 'nvme2n1'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/vg1-lv1', '_raw_device': '/dev/mapper/vg1-lv1', '_mount_id': '/dev/mapper/vg1-lv1', '_kernel_device': '/dev/dm-4', '_raw_kernel_device': '/dev/dm-4'}) TASK [Get information about LVM RAID] ****************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-lvmraid.yml:3 Thursday 21 July 2022 10:09:31 +0000 (0:00:00.046) 0:03:28.215 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check that volume is LVM RAID] ******************************************* task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-lvmraid.yml:8 Thursday 21 July 2022 10:09:31 +0000 (0:00:00.030) 0:03:28.245 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-lvmraid.yml:12 Thursday 21 July 2022 10:09:31 +0000 (0:00:00.029) 0:03:28.275 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check Thin Pools] ******************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml:62 Thursday 21 July 2022 10:09:31 +0000 (0:00:00.031) 0:03:28.306 ********* included: /tmp/tmpa3egnbq5/tests/verify-pool-members-thin.yml for /cache/centos-8.qcow2 TASK [Validate pool member thinpool settings] ********************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-members-thin.yml:1 Thursday 21 July 2022 10:09:31 +0000 (0:00:00.044) 0:03:28.351 ********* included: /tmp/tmpa3egnbq5/tests/verify-pool-member-thin.yml for /cache/centos-8.qcow2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 
'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'name': 'lv1', 'raid_level': None, 'size': '3g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'thin_pool_name': 'tpool1', 'thin_pool_size': '10g', 'thin': True, 'vdo_pool_size': None, 'disks': ['nvme0n1', 'nvme1n1', 'nvme2n1'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/vg1-lv1', '_raw_device': '/dev/mapper/vg1-lv1', '_mount_id': '/dev/mapper/vg1-lv1', '_kernel_device': '/dev/dm-4', '_raw_kernel_device': '/dev/dm-4'}) TASK [Get information about thinpool] ****************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-thin.yml:3 Thursday 21 July 2022 10:09:31 +0000 (0:00:00.044) 0:03:28.395 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "cmd": [ "lvs", "--noheading", "-o", "pool_lv", "--select", "lv_name=lv1&&segtype=thin", "vg1" ], "delta": "0:00:00.034841", "end": "2022-07-21 10:09:31.975164", "rc": 0, "start": "2022-07-21 10:09:31.940323" } STDOUT: tpool1 TASK [Check that volume is in correct thinpool (when thinp name is provided)] *** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-thin.yml:8 Thursday 21 July 2022 10:09:32 +0000 (0:00:00.421) 0:03:28.817 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Check that volume is in thinpool (when thinp name is not provided)] ****** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-thin.yml:13 Thursday 21 July 2022 10:09:32 +0000 (0:00:00.057) 0:03:28.875 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-thin.yml:17 Thursday 21 July 2022 10:09:32 +0000 (0:00:00.054) 0:03:28.929 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_lvmraid_status": null }, "changed": false } TASK [Check member encryption] ************************************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml:65 Thursday 21 July 2022 10:09:32 +0000 (0:00:00.043) 0:03:28.972 ********* included: /tmp/tmpa3egnbq5/tests/verify-pool-members-encryption.yml for /cache/centos-8.qcow2 TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-members-encryption.yml:4 Thursday 21 July 2022 10:09:32 +0000 (0:00:00.048) 0:03:29.020 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Validate pool member LUKS settings] ************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-members-encryption.yml:8 Thursday 21 July 2022 10:09:32 +0000 (0:00:00.051) 0:03:29.072 ********* skipping: [/cache/centos-8.qcow2] => (item=/dev/nvme0n1p1) => { "_storage_test_pool_member_path": "/dev/nvme0n1p1", "ansible_loop_var": "_storage_test_pool_member_path", "changed": false, "skip_reason": "Conditional result was False" } skipping: [/cache/centos-8.qcow2] => (item=/dev/nvme1n1p1) => { "_storage_test_pool_member_path": "/dev/nvme1n1p1", 
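
Note: the lvs query above ("lvs --noheading -o pool_lv --select lv_name=lv1&&segtype=thin vg1", which prints tpool1) is how the test proves that lv1 really is a thin volume inside the expected pool. Expressed as a standalone check rather than the test file verbatim, that step amounts to:

- name: Get information about thinpool
  ansible.builtin.command:
    argv:
      - lvs
      - --noheading
      - -o
      - pool_lv
      - --select
      - lv_name=lv1&&segtype=thin
      - vg1
  register: storage_test_thin_status   # hypothetical register name
  changed_when: false

- name: Check that volume is in the correct thinpool
  ansible.builtin.assert:
    that:
      - storage_test_thin_status.stdout | trim == 'tpool1'
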
"ansible_loop_var": "_storage_test_pool_member_path", "changed": false, "skip_reason": "Conditional result was False" } skipping: [/cache/centos-8.qcow2] => (item=/dev/nvme2n1p1) => { "_storage_test_pool_member_path": "/dev/nvme2n1p1", "ansible_loop_var": "_storage_test_pool_member_path", "changed": false, "skip_reason": "Conditional result was False" } TASK [Validate pool member crypttab entries] *********************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-members-encryption.yml:15 Thursday 21 July 2022 10:09:32 +0000 (0:00:00.034) 0:03:29.106 ********* included: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml for /cache/centos-8.qcow2 => (item=/dev/nvme0n1p1) included: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml for /cache/centos-8.qcow2 => (item=/dev/nvme1n1p1) included: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml for /cache/centos-8.qcow2 => (item=/dev/nvme2n1p1) TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml:1 Thursday 21 July 2022 10:09:32 +0000 (0:00:00.055) 0:03:29.162 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_crypttab_entries": [] }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml:6 Thursday 21 July 2022 10:09:32 +0000 (0:00:00.058) 0:03:29.221 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml:11 Thursday 21 July 2022 10:09:32 +0000 (0:00:00.054) 0:03:29.276 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml:17 Thursday 21 July 2022 10:09:32 +0000 (0:00:00.037) 0:03:29.313 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml:23 Thursday 21 July 2022 10:09:32 +0000 (0:00:00.039) 0:03:29.353 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml:29 Thursday 21 July 2022 10:09:32 +0000 (0:00:00.036) 0:03:29.390 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_crypttab_entries": null }, "changed": false } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml:1 Thursday 21 July 2022 10:09:32 +0000 (0:00:00.037) 0:03:29.427 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_crypttab_entries": [] }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml:6 Thursday 21 July 2022 10:09:32 +0000 (0:00:00.069) 0:03:29.497 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the 
crypttab entry] ******************************* task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml:11 Thursday 21 July 2022 10:09:32 +0000 (0:00:00.101) 0:03:29.598 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml:17 Thursday 21 July 2022 10:09:32 +0000 (0:00:00.048) 0:03:29.647 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml:23 Thursday 21 July 2022 10:09:33 +0000 (0:00:00.080) 0:03:29.727 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml:29 Thursday 21 July 2022 10:09:33 +0000 (0:00:00.036) 0:03:29.763 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_crypttab_entries": null }, "changed": false } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml:1 Thursday 21 July 2022 10:09:33 +0000 (0:00:00.035) 0:03:29.798 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_crypttab_entries": [] }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml:6 Thursday 21 July 2022 10:09:33 +0000 (0:00:00.050) 0:03:29.848 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml:11 Thursday 21 July 2022 10:09:33 +0000 (0:00:00.048) 0:03:29.897 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml:17 Thursday 21 July 2022 10:09:33 +0000 (0:00:00.036) 0:03:29.933 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml:23 Thursday 21 July 2022 10:09:33 +0000 (0:00:00.036) 0:03:29.969 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml:29 Thursday 21 July 2022 10:09:33 +0000 (0:00:00.039) 0:03:30.009 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_crypttab_entries": null }, "changed": false } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-members-encryption.yml:22 Thursday 21 July 2022 10:09:33 +0000 (0:00:00.035) 0:03:30.044 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_crypttab_entries": 
null, "_storage_test_crypttab_key_file": null }, "changed": false } TASK [Check VDO] *************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml:68 Thursday 21 July 2022 10:09:33 +0000 (0:00:00.035) 0:03:30.080 ********* included: /tmp/tmpa3egnbq5/tests/verify-pool-members-vdo.yml for /cache/centos-8.qcow2 TASK [Validate pool member VDO settings] *************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-members-vdo.yml:1 Thursday 21 July 2022 10:09:33 +0000 (0:00:00.048) 0:03:30.129 ********* included: /tmp/tmpa3egnbq5/tests/verify-pool-member-vdo.yml for /cache/centos-8.qcow2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'name': 'lv1', 'raid_level': None, 'size': '3g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'thin_pool_name': 'tpool1', 'thin_pool_size': '10g', 'thin': True, 'vdo_pool_size': None, 'disks': ['nvme0n1', 'nvme1n1', 'nvme2n1'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/vg1-lv1', '_raw_device': '/dev/mapper/vg1-lv1', '_mount_id': '/dev/mapper/vg1-lv1', '_kernel_device': '/dev/dm-4', '_raw_kernel_device': '/dev/dm-4'}) TASK [get information about VDO deduplication] ********************************* task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-vdo.yml:3 Thursday 21 July 2022 10:09:33 +0000 (0:00:00.047) 0:03:30.177 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-vdo.yml:8 Thursday 21 July 2022 10:09:33 +0000 (0:00:00.027) 0:03:30.204 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [assert] ****************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-vdo.yml:11 Thursday 21 July 2022 10:09:33 +0000 (0:00:00.025) 0:03:30.229 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [assert] ****************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-vdo.yml:16 Thursday 21 July 2022 10:09:33 +0000 (0:00:00.027) 0:03:30.257 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-vdo.yml:21 Thursday 21 July 2022 10:09:33 +0000 (0:00:00.025) 0:03:30.282 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [assert] ****************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-vdo.yml:24 Thursday 21 July 2022 10:09:33 +0000 (0:00:00.026) 0:03:30.309 ********* skipping: 
[/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [assert] ****************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-vdo.yml:29 Thursday 21 July 2022 10:09:33 +0000 (0:00:00.025) 0:03:30.334 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-vdo.yml:39 Thursday 21 July 2022 10:09:33 +0000 (0:00:00.025) 0:03:30.360 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_vdo_status": null }, "changed": false } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml:71 Thursday 21 July 2022 10:09:33 +0000 (0:00:00.037) 0:03:30.397 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "__pvs_lvm_len": null, "_storage_test_expected_pv_count": null, "_storage_test_expected_pv_type": null, "_storage_test_pool_pvs": [], "_storage_test_pool_pvs_lvm": [] }, "changed": false } TASK [verify the volumes] ****************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-pool-volumes.yml:3 Thursday 21 July 2022 10:09:33 +0000 (0:00:00.039) 0:03:30.437 ********* included: /tmp/tmpa3egnbq5/tests/test-verify-volume.yml for /cache/centos-8.qcow2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'name': 'lv1', 'raid_level': None, 'size': '3g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'thin_pool_name': 'tpool1', 'thin_pool_size': '10g', 'thin': True, 'vdo_pool_size': None, 'disks': ['nvme0n1', 'nvme1n1', 'nvme2n1'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/vg1-lv1', '_raw_device': '/dev/mapper/vg1-lv1', '_mount_id': '/dev/mapper/vg1-lv1', '_kernel_device': '/dev/dm-4', '_raw_kernel_device': '/dev/dm-4'}) TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume.yml:2 Thursday 21 July 2022 10:09:33 +0000 (0:00:00.043) 0:03:30.480 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_volume_present": true, "_storage_volume_tests": [ "mount", "fstab", "fs", "device", "encryption", "md", "size", "cache" ] }, "changed": false } TASK [include_tasks] *********************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume.yml:10 Thursday 21 July 2022 10:09:33 +0000 (0:00:00.051) 0:03:30.532 ********* included: /tmp/tmpa3egnbq5/tests/test-verify-volume-mount.yml for /cache/centos-8.qcow2 => (item=mount) included: /tmp/tmpa3egnbq5/tests/test-verify-volume-fstab.yml for /cache/centos-8.qcow2 => (item=fstab) included: /tmp/tmpa3egnbq5/tests/test-verify-volume-fs.yml for /cache/centos-8.qcow2 => (item=fs) included: /tmp/tmpa3egnbq5/tests/test-verify-volume-device.yml for 
/cache/centos-8.qcow2 => (item=device) included: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml for /cache/centos-8.qcow2 => (item=encryption) included: /tmp/tmpa3egnbq5/tests/test-verify-volume-md.yml for /cache/centos-8.qcow2 => (item=md) included: /tmp/tmpa3egnbq5/tests/test-verify-volume-size.yml for /cache/centos-8.qcow2 => (item=size) included: /tmp/tmpa3egnbq5/tests/test-verify-volume-cache.yml for /cache/centos-8.qcow2 => (item=cache) TASK [Get expected mount device based on device type] ************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-mount.yml:6 Thursday 21 July 2022 10:09:33 +0000 (0:00:00.078) 0:03:30.610 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_device_path": "/dev/mapper/vg1-lv1" }, "changed": false } TASK [Set some facts] ********************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-mount.yml:14 Thursday 21 July 2022 10:09:33 +0000 (0:00:00.043) 0:03:30.654 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_mount_device_matches": [ { "block_available": 770083, "block_size": 4096, "block_total": 783872, "block_used": 13789, "device": "/dev/mapper/vg1-lv1", "fstype": "xfs", "inode_available": 1572861, "inode_total": 1572864, "inode_used": 3, "mount": "/opt/test1", "options": "rw,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=64k,sunit=128,swidth=128,noquota", "size_available": 3154259968, "size_total": 3210739712, "uuid": "c574fe3b-c0cb-4bb1-85fe-deeef0a957b9" } ], "storage_test_mount_expected_match_count": "1", "storage_test_mount_point_matches": [ { "block_available": 770083, "block_size": 4096, "block_total": 783872, "block_used": 13789, "device": "/dev/mapper/vg1-lv1", "fstype": "xfs", "inode_available": 1572861, "inode_total": 1572864, "inode_used": 3, "mount": "/opt/test1", "options": "rw,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=64k,sunit=128,swidth=128,noquota", "size_available": 3154259968, "size_total": 3210739712, "uuid": "c574fe3b-c0cb-4bb1-85fe-deeef0a957b9" } ], "storage_test_swap_expected_matches": "0" }, "changed": false } TASK [Verify the current mount state by device] ******************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-mount.yml:28 Thursday 21 July 2022 10:09:34 +0000 (0:00:00.067) 0:03:30.721 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Verify the current mount state by mount point] *************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-mount.yml:37 Thursday 21 July 2022 10:09:34 +0000 (0:00:00.052) 0:03:30.774 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Verify the mount fs type] ************************************************ task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-mount.yml:45 Thursday 21 July 2022 10:09:34 +0000 (0:00:00.052) 0:03:30.826 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [command] ***************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-mount.yml:54 Thursday 21 July 2022 10:09:34 +0000 (0:00:00.051) 0:03:30.878 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Gather swap info] ******************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-mount.yml:58 Thursday 21 July 2022 10:09:34 +0000 
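
Note: the "Set some facts" result above filters the host's mount facts down to entries whose device is /dev/mapper/vg1-lv1 and whose mount point is /opt/test1, and the following assertions require exactly one match for each plus the expected filesystem type. A self-contained equivalent of that filter-and-assert pattern (a sketch, not the test's exact tasks) is:

- name: Verify the current mount state by device and mount point (sketch)
  ansible.builtin.assert:
    that:
      - ansible_mounts | selectattr('device', 'equalto', '/dev/mapper/vg1-lv1') | list | length == 1
      - ansible_mounts | selectattr('mount', 'equalto', '/opt/test1') | list | length == 1
      - (ansible_mounts | selectattr('mount', 'equalto', '/opt/test1') | first).fstype == 'xfs'
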
(0:00:00.025) 0:03:30.904 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify swap status] ****************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-mount.yml:63 Thursday 21 July 2022 10:09:34 +0000 (0:00:00.026) 0:03:30.930 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Unset facts] ************************************************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-mount.yml:75 Thursday 21 July 2022 10:09:34 +0000 (0:00:00.024) 0:03:30.955 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_mount_device_matches": null, "storage_test_mount_expected_match_count": null, "storage_test_mount_point_matches": null, "storage_test_swap_expected_matches": null, "storage_test_swaps": null, "storage_test_sys_node": null }, "changed": false } TASK [Set some variables for fstab checking] *********************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-fstab.yml:2 Thursday 21 July 2022 10:09:34 +0000 (0:00:00.074) 0:03:31.030 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": "1", "storage_test_fstab_expected_mount_options_matches": "1", "storage_test_fstab_expected_mount_point_matches": "1", "storage_test_fstab_id_matches": [ "/dev/mapper/vg1-lv1 " ], "storage_test_fstab_mount_options_matches": [ " /opt/test1 xfs defaults " ], "storage_test_fstab_mount_point_matches": [ " /opt/test1 " ] }, "changed": false } TASK [Verify that the device identifier appears in /etc/fstab] ***************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-fstab.yml:25 Thursday 21 July 2022 10:09:34 +0000 (0:00:00.136) 0:03:31.166 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Verify the fstab mount point] ******************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-fstab.yml:32 Thursday 21 July 2022 10:09:34 +0000 (0:00:00.051) 0:03:31.217 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Verify mount_options] **************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-fstab.yml:39 Thursday 21 July 2022 10:09:34 +0000 (0:00:00.051) 0:03:31.269 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clean up variables] ****************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-fstab.yml:49 Thursday 21 July 2022 10:09:34 +0000 (0:00:00.039) 0:03:31.309 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": null, "storage_test_fstab_expected_mount_options_matches": null, "storage_test_fstab_expected_mount_point_matches": null, "storage_test_fstab_id_matches": null, "storage_test_fstab_mount_options_matches": null, "storage_test_fstab_mount_point_matches": null }, "changed": false } TASK [Verify fs type] ********************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-fs.yml:4 Thursday 21 July 2022 10:09:34 +0000 (0:00:00.035) 0:03:31.344 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Verify fs label] ********************************************************* task path: 
/tmp/tmpa3egnbq5/tests/test-verify-volume-fs.yml:10 Thursday 21 July 2022 10:09:34 +0000 (0:00:00.041) 0:03:31.385 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [See whether the device node is present] ********************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-device.yml:4 Thursday 21 July 2022 10:09:34 +0000 (0:00:00.042) 0:03:31.428 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "stat": { "atime": 1658398136.3502579, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1658398136.3502579, "dev": 6, "device_type": 64772, "executable": false, "exists": true, "gid": 6, "gr_name": "disk", "inode": 103369, "isblk": true, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/symlink", "mode": "0660", "mtime": 1658398136.3502579, "nlink": 1, "path": "/dev/mapper/vg1-lv1", "pw_name": "root", "readable": true, "rgrp": true, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": null, "wgrp": true, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [Verify the presence/absence of the device node] ************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-device.yml:10 Thursday 21 July 2022 10:09:35 +0000 (0:00:00.395) 0:03:31.824 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Make sure we got info about this volume] ********************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-device.yml:18 Thursday 21 July 2022 10:09:35 +0000 (0:00:00.043) 0:03:31.868 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [(1/2) Process volume type (set initial value)] *************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-device.yml:24 Thursday 21 July 2022 10:09:35 +0000 (0:00:00.042) 0:03:31.910 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "st_volume_type": "lvm" }, "changed": false } TASK [(2/2) Process volume type (get RAID value)] ****************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-device.yml:28 Thursday 21 July 2022 10:09:35 +0000 (0:00:00.039) 0:03:31.949 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the volume's device type] ***************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-device.yml:33 Thursday 21 July 2022 10:09:35 +0000 (0:00:00.026) 0:03:31.976 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Stat the LUKS device, if encrypted] ************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:3 Thursday 21 July 2022 10:09:35 +0000 (0:00:00.039) 0:03:32.015 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Ensure cryptsetup is present] ******************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:10 Thursday 21 July 2022 10:09:35 +0000 (0:00:00.024) 0:03:32.040 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [Collect LUKS info for this volume] *************************************** task path: 
/tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:15 Thursday 21 July 2022 10:09:37 +0000 (0:00:01.925) 0:03:33.965 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the presence/absence of the LUKS device node] ********************* task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:21 Thursday 21 July 2022 10:09:37 +0000 (0:00:00.026) 0:03:33.991 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that the raw device is the same as the device if not encrypted] *** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:30 Thursday 21 July 2022 10:09:37 +0000 (0:00:00.026) 0:03:34.018 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Make sure we got info about the LUKS volume if encrypted] **************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:38 Thursday 21 July 2022 10:09:37 +0000 (0:00:00.055) 0:03:34.073 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the LUKS volume's device type if encrypted] *********************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:44 Thursday 21 July 2022 10:09:37 +0000 (0:00:00.027) 0:03:34.100 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS version] ****************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:49 Thursday 21 July 2022 10:09:37 +0000 (0:00:00.025) 0:03:34.126 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS key size] ***************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:55 Thursday 21 July 2022 10:09:37 +0000 (0:00:00.026) 0:03:34.153 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS cipher] ******************************************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:61 Thursday 21 July 2022 10:09:37 +0000 (0:00:00.026) 0:03:34.179 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:67 Thursday 21 July 2022 10:09:37 +0000 (0:00:00.029) 0:03:34.209 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_crypttab_entries": [], "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:74 Thursday 21 July 2022 10:09:37 +0000 (0:00:00.054) 0:03:34.263 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:79 Thursday 21 July 2022 10:09:37 +0000 (0:00:00.052) 0:03:34.316 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, 
"skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:85 Thursday 21 July 2022 10:09:37 +0000 (0:00:00.039) 0:03:34.356 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:91 Thursday 21 July 2022 10:09:37 +0000 (0:00:00.039) 0:03:34.395 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:97 Thursday 21 July 2022 10:09:37 +0000 (0:00:00.038) 0:03:34.433 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_expected_crypttab_entries": null, "_storage_test_expected_crypttab_key_file": null }, "changed": false } TASK [get information about RAID] ********************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-md.yml:7 Thursday 21 July 2022 10:09:37 +0000 (0:00:00.078) 0:03:34.512 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-md.yml:13 Thursday 21 July 2022 10:09:37 +0000 (0:00:00.040) 0:03:34.552 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-md.yml:17 Thursday 21 July 2022 10:09:37 +0000 (0:00:00.039) 0:03:34.591 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-md.yml:21 Thursday 21 July 2022 10:09:37 +0000 (0:00:00.039) 0:03:34.631 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [check RAID active devices count] ***************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-md.yml:25 Thursday 21 July 2022 10:09:37 +0000 (0:00:00.039) 0:03:34.670 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [check RAID spare devices count] ****************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-md.yml:31 Thursday 21 July 2022 10:09:38 +0000 (0:00:00.040) 0:03:34.711 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [check RAID metadata version] ********************************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-md.yml:37 Thursday 21 July 2022 10:09:38 +0000 (0:00:00.037) 0:03:34.748 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [parse the actual size of the volume] ************************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-size.yml:3 Thursday 21 
July 2022 10:09:38 +0000 (0:00:00.045) 0:03:34.794 ********* ok: [/cache/centos-8.qcow2] => { "bytes": 3221225472, "changed": false, "lvm": "3g", "parted": "3GiB", "size": "3 GiB" } TASK [parse the requested size of the volume] ********************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-size.yml:9 Thursday 21 July 2022 10:09:38 +0000 (0:00:00.417) 0:03:35.212 ********* ok: [/cache/centos-8.qcow2] => { "bytes": 3221225472, "changed": false, "lvm": "3g", "parted": "3GiB", "size": "3 GiB" } TASK [Establish base value for expected size] ********************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-size.yml:15 Thursday 21 July 2022 10:09:38 +0000 (0:00:00.433) 0:03:35.645 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_expected_size": "3221225472" }, "changed": false } TASK [debug] ******************************************************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-size.yml:20 Thursday 21 July 2022 10:09:38 +0000 (0:00:00.050) 0:03:35.696 ********* ok: [/cache/centos-8.qcow2] => { "storage_test_expected_size": "3221225472" } TASK [debug] ******************************************************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-size.yml:25 Thursday 21 July 2022 10:09:39 +0000 (0:00:00.037) 0:03:35.733 ********* skipping: [/cache/centos-8.qcow2] => {} TASK [debug] ******************************************************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-size.yml:28 Thursday 21 July 2022 10:09:39 +0000 (0:00:00.037) 0:03:35.770 ********* skipping: [/cache/centos-8.qcow2] => {} TASK [Get the size of parent/pool device] ************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-size.yml:31 Thursday 21 July 2022 10:09:39 +0000 (0:00:00.038) 0:03:35.809 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [debug] ******************************************************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-size.yml:36 Thursday 21 July 2022 10:09:39 +0000 (0:00:00.039) 0:03:35.849 ********* skipping: [/cache/centos-8.qcow2] => {} TASK [Calculate the expected size based on pool size and percentage value] ***** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-size.yml:39 Thursday 21 July 2022 10:09:39 +0000 (0:00:00.041) 0:03:35.891 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [debug] ******************************************************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-size.yml:44 Thursday 21 July 2022 10:09:39 +0000 (0:00:00.039) 0:03:35.930 ********* ok: [/cache/centos-8.qcow2] => { "storage_test_actual_size": { "bytes": 3221225472, "changed": false, "failed": false, "lvm": "3g", "parted": "3GiB", "size": "3 GiB" } } TASK [debug] ******************************************************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-size.yml:47 Thursday 21 July 2022 10:09:39 +0000 (0:00:00.037) 0:03:35.968 ********* ok: [/cache/centos-8.qcow2] => { "storage_test_expected_size": "3221225472" } TASK [assert] ****************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-size.yml:50 Thursday 21 July 2022 10:09:39 +0000 (0:00:00.041) 0:03:36.010 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All 
assertions passed TASK [Get information about the LV] ******************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-cache.yml:6 Thursday 21 July 2022 10:09:39 +0000 (0:00:00.052) 0:03:36.063 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "cmd": [ "lvs", "--noheadings", "--nameprefixes", "--units=b", "--nosuffix", "--unquoted", "-o", "name,attr,cache_total_blocks,chunk_size,segtype", "vg1/lv1" ], "delta": "0:00:00.031886", "end": "2022-07-21 10:09:39.630171", "rc": 0, "start": "2022-07-21 10:09:39.598285" } STDOUT: LVM2_LV_NAME=lv1 LVM2_LV_ATTR=Vwi-aotz-- LVM2_CACHE_TOTAL_BLOCKS= LVM2_CHUNK_SIZE=0 LVM2_SEGTYPE=thin TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-cache.yml:14 Thursday 21 July 2022 10:09:39 +0000 (0:00:00.412) 0:03:36.475 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_lv_segtype": [ "thin" ] }, "changed": false } TASK [check segment type] ****************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-cache.yml:17 Thursday 21 July 2022 10:09:39 +0000 (0:00:00.050) 0:03:36.525 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-cache.yml:22 Thursday 21 July 2022 10:09:39 +0000 (0:00:00.051) 0:03:36.576 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [parse the requested cache size] ****************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-cache.yml:26 Thursday 21 July 2022 10:09:39 +0000 (0:00:00.037) 0:03:36.613 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-cache.yml:32 Thursday 21 July 2022 10:09:39 +0000 (0:00:00.037) 0:03:36.650 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check cache size] ******************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-cache.yml:36 Thursday 21 July 2022 10:09:39 +0000 (0:00:00.040) 0:03:36.690 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clean up facts] ********************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume.yml:16 Thursday 21 July 2022 10:09:40 +0000 (0:00:00.037) 0:03:36.727 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_volume_present": null }, "changed": false } TASK [Verify the volumes with no pool were correctly managed] ****************** task path: /tmp/tmpa3egnbq5/tests/verify-role-results.yml:43 Thursday 21 July 2022 10:09:40 +0000 (0:00:00.040) 0:03:36.768 ********* TASK [Clean up variable namespace] ********************************************* task path: /tmp/tmpa3egnbq5/tests/verify-role-results.yml:53 Thursday 21 July 2022 10:09:40 +0000 (0:00:00.024) 0:03:36.793 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_blkinfo": null, "storage_test_crypttab": null, "storage_test_fstab": null }, "changed": false } TASK [Change thinlv fs type] 
*************************************************** task path: /tmp/tmpa3egnbq5/tests/tests_create_thinp_then_remove.yml:58 Thursday 21 July 2022 10:09:40 +0000 (0:00:00.038) 0:03:36.831 ********* TASK [linux-system-roles.storage : set platform/version specific variables] **** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main.yml:2 Thursday 21 July 2022 10:09:40 +0000 (0:00:00.056) 0:03:36.887 ********* included: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/set_vars.yml for /cache/centos-8.qcow2 TASK [linux-system-roles.storage : Ensure ansible_facts used by role] ********** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:2 Thursday 21 July 2022 10:09:40 +0000 (0:00:00.035) 0:03:36.923 ********* ok: [/cache/centos-8.qcow2] TASK [linux-system-roles.storage : Set platform/version specific variables] **** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:8 Thursday 21 July 2022 10:09:40 +0000 (0:00:00.526) 0:03:37.449 ********* skipping: [/cache/centos-8.qcow2] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [/cache/centos-8.qcow2] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [/cache/centos-8.qcow2] => (item=CentOS_8.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "vdo", "kmod-kvdo", "xfsprogs" ] }, "ansible_included_var_files": [ "/tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } ok: [/cache/centos-8.qcow2] => (item=CentOS_8.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "vdo", "kmod-kvdo", "xfsprogs" ] }, "ansible_included_var_files": [ "/tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } TASK [linux-system-roles.storage : define an empty list of pools to be used in testing] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main.yml:5 Thursday 21 July 2022 10:09:40 +0000 (0:00:00.113) 0:03:37.563 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [linux-system-roles.storage : define an empty list of volumes to be used in testing] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main.yml:9 Thursday 21 July 2022 10:09:40 +0000 (0:00:00.036) 0:03:37.599 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [linux-system-roles.storage : include the appropriate provider tasks] ***** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main.yml:13 Thursday 21 July 2022 10:09:40 +0000 (0:00:00.035) 0:03:37.635 ********* redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount included: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml for /cache/centos-8.qcow2 TASK [linux-system-roles.storage : get a list of rpm packages 
installed on host machine] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:2 Thursday 21 July 2022 10:09:41 +0000 (0:00:00.083) 0:03:37.719 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : make sure blivet is available] ************** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:7 Thursday 21 July 2022 10:09:41 +0000 (0:00:00.021) 0:03:37.740 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [linux-system-roles.storage : show storage_pools] ************************* task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:14 Thursday 21 July 2022 10:09:42 +0000 (0:00:01.827) 0:03:39.567 ********* ok: [/cache/centos-8.qcow2] => { "storage_pools": [ { "disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ], "name": "vg1", "type": "lvm", "volumes": [ { "fs_type": "xfs", "name": "lv1", "thin": true, "thin_pool_name": "tpool1" } ] } ] } TASK [linux-system-roles.storage : show storage_volumes] *********************** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:19 Thursday 21 July 2022 10:09:42 +0000 (0:00:00.053) 0:03:39.620 ********* ok: [/cache/centos-8.qcow2] => { "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined" } TASK [linux-system-roles.storage : get required packages] ********************** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:24 Thursday 21 July 2022 10:09:42 +0000 (0:00:00.039) 0:03:39.660 ********* ok: [/cache/centos-8.qcow2] => { "actions": [], "changed": false, "crypts": [], "leaves": [], "mounts": [], "packages": [ "lvm2", "xfsprogs" ], "pools": [], "volumes": [] } TASK [linux-system-roles.storage : enable copr repositories if needed] ********* task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:37 Thursday 21 July 2022 10:09:44 +0000 (0:00:01.928) 0:03:41.588 ********* included: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml for /cache/centos-8.qcow2 TASK [linux-system-roles.storage : check if the COPR support packages should be installed] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml:2 Thursday 21 July 2022 10:09:44 +0000 (0:00:00.048) 0:03:41.637 ********* TASK [linux-system-roles.storage : make sure COPR support packages are present] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml:13 Thursday 21 July 2022 10:09:44 +0000 (0:00:00.038) 0:03:41.675 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : enable COPRs] ******************************* task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml:18 Thursday 21 July 2022 10:09:45 +0000 (0:00:00.040) 0:03:41.716 ********* TASK [linux-system-roles.storage : make sure required packages are installed] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:44 Thursday 21 July 2022 10:09:45 +0000 (0:00:00.035) 0:03:41.751 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [linux-system-roles.storage : get service facts] ************************** task path: 
/tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:51 Thursday 21 July 2022 10:09:46 +0000 (0:00:01.881) 0:03:43.633 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "blivet.service": { "name": "blivet.service", "source": "systemd", "state": "inactive", "status": "static" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "chrony-dnssrv@.service": { "name": "chrony-dnssrv@.service", "source": "systemd", "state": "unknown", "status": "static" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cockpit-motd.service": { "name": "cockpit-motd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cockpit-wsinstance-http.service": { "name": "cockpit-wsinstance-http.service", "source": "systemd", "state": "inactive", "status": "static" }, "cockpit-wsinstance-https-factory@.service": { "name": "cockpit-wsinstance-https-factory@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cockpit-wsinstance-https@.service": { "name": "cockpit-wsinstance-https@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cockpit.service": { "name": "cockpit.service", "source": "systemd", "state": "inactive", "status": "static" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cpupower.service": { "name": "cpupower.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", 
"status": "static" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "static" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "dbus-org.freedesktop.portable1.service": { "name": "dbus-org.freedesktop.portable1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "running", "status": "static" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "running", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fcoe.service": { "name": "fcoe.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "halt-local.service": { "name": "halt-local.service", "source": "systemd", "state": "inactive", "status": "static" }, "import-state.service": { "name": "import-state.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": 
"stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "iscsi-shutdown.service": { "name": "iscsi-shutdown.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iscsi.service": { "name": "iscsi.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iscsid.service": { "name": "iscsid.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "loadmodules.service": { "name": "loadmodules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "lvm2-activation-early.service": { "name": "lvm2-activation-early.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "lvm2-activation.service": { "name": "lvm2-activation.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "lvm2-pvscan@.service": { "name": "lvm2-pvscan@.service", "source": "systemd", "state": "unknown", "status": "static" }, "lvm2-pvscan@259:4.service": { "name": "lvm2-pvscan@259:4.service", "source": "systemd", "state": "stopped", "status": "active" }, "lvm2-pvscan@259:5.service": { "name": "lvm2-pvscan@259:5.service", "source": "systemd", "state": "stopped", "status": "active" }, "lvm2-pvscan@259:6.service": { "name": "lvm2-pvscan@259:6.service", "source": "systemd", "state": "stopped", "status": "active" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "mdadm-grow-continue@.service": { "name": "mdadm-grow-continue@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdadm-last-resort@.service": { "name": "mdadm-last-resort@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdcheck_continue.service": { "name": "mdcheck_continue.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdcheck_start.service": { "name": "mdcheck_start.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmon@.service": { "name": "mdmon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdmonitor-oneshot.service": { "name": "mdmonitor-oneshot.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmonitor.service": { "name": "mdmonitor.service", "source": "systemd", "state": "stopped", 
"status": "enabled" }, "messagebus.service": { "name": "messagebus.service", "source": "systemd", "state": "active", "status": "static" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "multipathd.service": { "name": "multipathd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "ndctl-monitor.service": { "name": "ndctl-monitor.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-convert.service": { "name": "nfs-convert.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "oddjobd.service": { "name": "oddjobd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "packagekit-offline-update.service": { "name": "packagekit-offline-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "packagekit.service": { "name": "packagekit.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "running", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon.service": { "name": "quotaon.service", "source": "systemd", "state": "inactive", "status": "static" }, "rbdmap.service": { "name": "rbdmap.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rdisc.service": { "name": "rdisc.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "rngd-wake-threshold.service": { "name": "rngd-wake-threshold.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", 
"state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "running", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "active", "status": "enabled" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": 
"systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-resume@.service": { "name": "systemd-hibernate-resume@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-portabled.service": { "name": "systemd-portabled.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-quotacheck.service": { "name": "systemd-quotacheck.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": 
"systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-resolved.service": { "name": "systemd-resolved.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "masked" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "tcsd.service": { "name": "tcsd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "teamd@.service": { "name": "teamd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "timedatex.service": { "name": "timedatex.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "tuned.service": { "name": "tuned.service", "source": "systemd", "state": "running", "status": "enabled" }, "unbound-anchor.service": { "name": "unbound-anchor.service", "source": "systemd", "state": "stopped", "status": "static" }, "user-runtime-dir@.service": { 
"name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "vdo-start-by-dev@.service": { "name": "vdo-start-by-dev@.service", "source": "systemd", "state": "unknown", "status": "static" }, "vdo.service": { "name": "vdo.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "yppasswdd.service": { "name": "yppasswdd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ypserv.service": { "name": "ypserv.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ypxfrd.service": { "name": "ypxfrd.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [linux-system-roles.storage : Set storage_cryptsetup_services] ************ task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:58 Thursday 21 July 2022 10:09:49 +0000 (0:00:02.656) 0:03:46.290 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_cryptsetup_services": [] }, "changed": false } TASK [linux-system-roles.storage : Mask the systemd cryptsetup services] ******* task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:71 Thursday 21 July 2022 10:09:49 +0000 (0:00:00.062) 0:03:46.352 ********* TASK [linux-system-roles.storage : manage the pools and volumes to match the specified state] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:77 Thursday 21 July 2022 10:09:49 +0000 (0:00:00.024) 0:03:46.377 ********* ok: [/cache/centos-8.qcow2] => { "actions": [], "changed": false, "crypts": [], "leaves": [ "/dev/sr0", "/dev/vda1", "/dev/sda", "/dev/sdb", "/dev/sdc", "/dev/mapper/vg1-lv1", "/dev/vdb", "/dev/vdc", "/dev/vdd" ], "mounts": [ { "path": "/opt/test1", "state": "absent" } ], "packages": [ "xfsprogs", "lvm2" ], "pools": [ { "disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "vg1", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/vg1-lv1", "_kernel_device": "/dev/dm-4", "_mount_id": "/dev/mapper/vg1-lv1", "_raw_device": "/dev/mapper/vg1-lv1", "_raw_kernel_device": "/dev/dm-4", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_options": "defaults", "mount_passno": 0, "mount_point": null, "name": "lv1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], 
"raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "size": 3221225472, "state": "present", "thin": true, "thin_pool_name": "tpool1", "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } TASK [linux-system-roles.storage : Workaround for udev issue on some platforms] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:91 Thursday 21 July 2022 10:09:51 +0000 (0:00:02.070) 0:03:48.448 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Unmask the systemd cryptsetup services] ***** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:103 Thursday 21 July 2022 10:09:51 +0000 (0:00:00.038) 0:03:48.486 ********* TASK [linux-system-roles.storage : show blivet_output] ************************* task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:109 Thursday 21 July 2022 10:09:51 +0000 (0:00:00.023) 0:03:48.510 ********* ok: [/cache/centos-8.qcow2] => { "blivet_output": { "actions": [], "changed": false, "crypts": [], "failed": false, "leaves": [ "/dev/sr0", "/dev/vda1", "/dev/sda", "/dev/sdb", "/dev/sdc", "/dev/mapper/vg1-lv1", "/dev/vdb", "/dev/vdc", "/dev/vdd" ], "mounts": [ { "path": "/opt/test1", "state": "absent" } ], "packages": [ "xfsprogs", "lvm2" ], "pools": [ { "disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "vg1", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/vg1-lv1", "_kernel_device": "/dev/dm-4", "_mount_id": "/dev/mapper/vg1-lv1", "_raw_device": "/dev/mapper/vg1-lv1", "_raw_kernel_device": "/dev/dm-4", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_options": "defaults", "mount_passno": 0, "mount_point": null, "name": "lv1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "size": 3221225472, "state": "present", "thin": true, "thin_pool_name": "tpool1", "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } } TASK [linux-system-roles.storage : set the list of pools for test verification] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:114 Thursday 21 July 2022 10:09:51 +0000 (0:00:00.039) 0:03:48.549 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_pools_list": [ { "disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "vg1", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": 
null, "raid_spare_count": null, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/vg1-lv1", "_kernel_device": "/dev/dm-4", "_mount_id": "/dev/mapper/vg1-lv1", "_raw_device": "/dev/mapper/vg1-lv1", "_raw_kernel_device": "/dev/dm-4", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_options": "defaults", "mount_passno": 0, "mount_point": null, "name": "lv1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "size": 3221225472, "state": "present", "thin": true, "thin_pool_name": "tpool1", "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ] }, "changed": false } TASK [linux-system-roles.storage : set the list of volumes for test verification] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:118 Thursday 21 July 2022 10:09:51 +0000 (0:00:00.083) 0:03:48.632 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [linux-system-roles.storage : remove obsolete mounts] ********************* task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:134 Thursday 21 July 2022 10:09:51 +0000 (0:00:00.046) 0:03:48.679 ********* redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount changed: [/cache/centos-8.qcow2] => (item={'path': '/opt/test1', 'state': 'absent'}) => { "ansible_loop_var": "mount_info", "backup_file": "", "boot": "yes", "changed": true, "dump": "0", "fstab": "/etc/fstab", "mount_info": { "path": "/opt/test1", "state": "absent" }, "name": "/opt/test1", "opts": "defaults", "passno": "0" } TASK [linux-system-roles.storage : tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:146 Thursday 21 July 2022 10:09:52 +0000 (0:00:00.457) 0:03:49.136 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "name": null, "status": {} } TASK [linux-system-roles.storage : set up new/current mounts] ****************** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:151 Thursday 21 July 2022 10:09:53 +0000 (0:00:00.665) 0:03:49.802 ********* TASK [linux-system-roles.storage : tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:163 Thursday 21 July 2022 10:09:53 +0000 (0:00:00.073) 0:03:49.875 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "name": null, "status": {} } TASK [linux-system-roles.storage : retrieve facts for the /etc/crypttab file] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:171 Thursday 21 July 2022 10:09:53 +0000 (0:00:00.631) 0:03:50.507 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "stat": { "atime": 1658398003.7492578, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1643141385.117, 
"dev": 64513, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 135, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0600", "mtime": 1643141019.537, "nlink": 1, "path": "/etc/crypttab", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": "3147672035", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [linux-system-roles.storage : manage /etc/crypttab to account for changes we just made] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:176 Thursday 21 July 2022 10:09:54 +0000 (0:00:00.386) 0:03:50.894 ********* TASK [linux-system-roles.storage : Update facts] ******************************* task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:198 Thursday 21 July 2022 10:09:54 +0000 (0:00:00.022) 0:03:50.916 ********* ok: [/cache/centos-8.qcow2] META: role_complete for /cache/centos-8.qcow2 TASK [include_tasks] *********************************************************** task path: /tmp/tmpa3egnbq5/tests/tests_create_thinp_then_remove.yml:72 Thursday 21 July 2022 10:09:55 +0000 (0:00:00.988) 0:03:51.905 ********* included: /tmp/tmpa3egnbq5/tests/verify-role-results.yml for /cache/centos-8.qcow2 TASK [Print out pool information] ********************************************** task path: /tmp/tmpa3egnbq5/tests/verify-role-results.yml:1 Thursday 21 July 2022 10:09:55 +0000 (0:00:00.044) 0:03:51.950 ********* ok: [/cache/centos-8.qcow2] => { "_storage_pools_list": [ { "disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "vg1", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/vg1-lv1", "_kernel_device": "/dev/dm-4", "_mount_id": "/dev/mapper/vg1-lv1", "_raw_device": "/dev/mapper/vg1-lv1", "_raw_kernel_device": "/dev/dm-4", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_options": "defaults", "mount_passno": 0, "mount_point": null, "name": "lv1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "size": 3221225472, "state": "present", "thin": true, "thin_pool_name": "tpool1", "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ] } TASK [Print out volume information] ******************************************** task path: /tmp/tmpa3egnbq5/tests/verify-role-results.yml:6 Thursday 21 July 2022 10:09:55 +0000 (0:00:00.053) 0:03:52.003 ********* skipping: [/cache/centos-8.qcow2] => {} TASK [Collect info about the volumes.] 
***************************************** task path: /tmp/tmpa3egnbq5/tests/verify-role-results.yml:14 Thursday 21 July 2022 10:09:55 +0000 (0:00:00.039) 0:03:52.042 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "info": { "/dev/mapper/vg1-lv1": { "fstype": "xfs", "label": "", "name": "/dev/mapper/vg1-lv1", "size": "3G", "type": "lvm", "uuid": "c574fe3b-c0cb-4bb1-85fe-deeef0a957b9" }, "/dev/mapper/vg1-tpool1": { "fstype": "", "label": "", "name": "/dev/mapper/vg1-tpool1", "size": "10G", "type": "lvm", "uuid": "" }, "/dev/mapper/vg1-tpool1-tpool": { "fstype": "", "label": "", "name": "/dev/mapper/vg1-tpool1-tpool", "size": "10G", "type": "lvm", "uuid": "" }, "/dev/mapper/vg1-tpool1_tdata": { "fstype": "", "label": "", "name": "/dev/mapper/vg1-tpool1_tdata", "size": "10G", "type": "lvm", "uuid": "" }, "/dev/mapper/vg1-tpool1_tmeta": { "fstype": "", "label": "", "name": "/dev/mapper/vg1-tpool1_tmeta", "size": "12M", "type": "lvm", "uuid": "" }, "/dev/nvme0n1": { "fstype": "", "label": "", "name": "/dev/nvme0n1", "size": "10G", "type": "disk", "uuid": "" }, "/dev/nvme0n1p1": { "fstype": "LVM2_member", "label": "", "name": "/dev/nvme0n1p1", "size": "10G", "type": "partition", "uuid": "xLu0vZ-nH9w-K1e2-A6yJ-EaoZ-FOkB-aLS539" }, "/dev/nvme1n1": { "fstype": "", "label": "", "name": "/dev/nvme1n1", "size": "10G", "type": "disk", "uuid": "" }, "/dev/nvme1n1p1": { "fstype": "LVM2_member", "label": "", "name": "/dev/nvme1n1p1", "size": "10G", "type": "partition", "uuid": "Jjw50w-Whgv-fxb5-adBQ-h5Qy-h8fG-52EFuy" }, "/dev/nvme2n1": { "fstype": "", "label": "", "name": "/dev/nvme2n1", "size": "10G", "type": "disk", "uuid": "" }, "/dev/nvme2n1p1": { "fstype": "LVM2_member", "label": "", "name": "/dev/nvme2n1p1", "size": "10G", "type": "partition", "uuid": "2PzQCY-FE6q-TI2r-KVCh-wUMp-axXy-16asvy" }, "/dev/sda": { "fstype": "", "label": "", "name": "/dev/sda", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdb": { "fstype": "", "label": "", "name": "/dev/sdb", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdc": { "fstype": "", "label": "", "name": "/dev/sdc", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sr0": { "fstype": "iso9660", "label": "cidata", "name": "/dev/sr0", "size": "364K", "type": "rom", "uuid": "2022-07-21-10-05-42-00" }, "/dev/vda": { "fstype": "", "label": "", "name": "/dev/vda", "size": "10G", "type": "disk", "uuid": "" }, "/dev/vda1": { "fstype": "xfs", "label": "", "name": "/dev/vda1", "size": "10G", "type": "partition", "uuid": "395b9844-e404-4857-afbb-c6edccaf72f3" }, "/dev/vdb": { "fstype": "", "label": "", "name": "/dev/vdb", "size": "10G", "type": "disk", "uuid": "" }, "/dev/vdc": { "fstype": "", "label": "", "name": "/dev/vdc", "size": "10G", "type": "disk", "uuid": "" }, "/dev/vdd": { "fstype": "", "label": "", "name": "/dev/vdd", "size": "10G", "type": "disk", "uuid": "" } } } TASK [Read the /etc/fstab file for volume existence] *************************** task path: /tmp/tmpa3egnbq5/tests/verify-role-results.yml:19 Thursday 21 July 2022 10:09:55 +0000 (0:00:00.375) 0:03:52.417 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "cmd": [ "cat", "/etc/fstab" ], "delta": "0:00:00.002892", "end": "2022-07-21 10:09:55.975749", "rc": 0, "start": "2022-07-21 10:09:55.972857" } STDOUT: # # /etc/fstab # Created by anaconda on Tue Jan 25 20:03:39 2022 # # Accessible filesystems, by reference, are maintained under '/dev/disk/'. # See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info. 
# # After editing this file, run 'systemctl daemon-reload' to update systemd # units generated from this file. # UUID=395b9844-e404-4857-afbb-c6edccaf72f3 / xfs defaults 0 0 TASK [Read the /etc/crypttab file] ********************************************* task path: /tmp/tmpa3egnbq5/tests/verify-role-results.yml:24 Thursday 21 July 2022 10:09:56 +0000 (0:00:00.400) 0:03:52.818 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "cmd": [ "cat", "/etc/crypttab" ], "delta": "0:00:00.002764", "end": "2022-07-21 10:09:56.346632", "failed_when_result": false, "rc": 0, "start": "2022-07-21 10:09:56.343868" } TASK [Verify the volumes listed in storage_pools were correctly managed] ******* task path: /tmp/tmpa3egnbq5/tests/verify-role-results.yml:33 Thursday 21 July 2022 10:09:56 +0000 (0:00:00.369) 0:03:53.188 ********* included: /tmp/tmpa3egnbq5/tests/test-verify-pool.yml for /cache/centos-8.qcow2 => (item={'disks': ['nvme0n1', 'nvme1n1', 'nvme2n1'], 'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'name': 'vg1', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'state': 'present', 'type': 'lvm', 'volumes': [{'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': None, 'name': 'lv1', 'raid_level': None, 'size': 3221225472, 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'thin_pool_name': 'tpool1', 'thin_pool_size': None, 'thin': True, 'vdo_pool_size': None, 'disks': ['nvme0n1', 'nvme1n1', 'nvme2n1'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/vg1-lv1', '_raw_device': '/dev/mapper/vg1-lv1', '_mount_id': '/dev/mapper/vg1-lv1', '_kernel_device': '/dev/dm-4', '_raw_kernel_device': '/dev/dm-4'}], 'raid_chunk_size': None}) TASK [Set _storage_pool_tests] ************************************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-pool.yml:5 Thursday 21 July 2022 10:09:56 +0000 (0:00:00.060) 0:03:53.248 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_pool_tests": [ "members", "volumes" ] }, "changed": false } TASK [include_tasks] *********************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-pool.yml:18 Thursday 21 July 2022 10:09:56 +0000 (0:00:00.072) 0:03:53.321 ********* included: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml for /cache/centos-8.qcow2 => (item=members) included: /tmp/tmpa3egnbq5/tests/test-verify-pool-volumes.yml for /cache/centos-8.qcow2 => (item=volumes) TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml:1 Thursday 21 July 2022 10:09:56 +0000 (0:00:00.046) 0:03:53.368 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_expected_pv_count": "3", "_storage_test_pool_pvs_lvm": [ "/dev/nvme0n1p1", "/dev/nvme1n1p1", "/dev/nvme2n1p1" ] }, "changed": false } TASK [Get the canonical device path 
for each member device] ******************** task path: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml:10 Thursday 21 July 2022 10:09:56 +0000 (0:00:00.096) 0:03:53.464 ********* ok: [/cache/centos-8.qcow2] => (item=/dev/nvme0n1p1) => { "ansible_loop_var": "pv", "changed": false, "device": "/dev/nvme0n1p1", "pv": "/dev/nvme0n1p1" } ok: [/cache/centos-8.qcow2] => (item=/dev/nvme1n1p1) => { "ansible_loop_var": "pv", "changed": false, "device": "/dev/nvme1n1p1", "pv": "/dev/nvme1n1p1" } ok: [/cache/centos-8.qcow2] => (item=/dev/nvme2n1p1) => { "ansible_loop_var": "pv", "changed": false, "device": "/dev/nvme2n1p1", "pv": "/dev/nvme2n1p1" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml:19 Thursday 21 July 2022 10:09:57 +0000 (0:00:01.058) 0:03:54.523 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "__pvs_lvm_len": "3" }, "changed": false } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml:23 Thursday 21 July 2022 10:09:57 +0000 (0:00:00.051) 0:03:54.575 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_pool_pvs": [ "/dev/nvme0n1p1", "/dev/nvme1n1p1", "/dev/nvme2n1p1" ] }, "changed": false } TASK [Verify PV count] ********************************************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml:27 Thursday 21 July 2022 10:09:57 +0000 (0:00:00.055) 0:03:54.630 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml:34 Thursday 21 July 2022 10:09:57 +0000 (0:00:00.054) 0:03:54.685 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_expected_pv_type": "disk" }, "changed": false } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml:38 Thursday 21 July 2022 10:09:58 +0000 (0:00:00.040) 0:03:54.725 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_expected_pv_type": "partition" }, "changed": false } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml:42 Thursday 21 July 2022 10:09:58 +0000 (0:00:00.050) 0:03:54.776 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check the type of each PV] *********************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml:46 Thursday 21 July 2022 10:09:58 +0000 (0:00:00.025) 0:03:54.801 ********* ok: [/cache/centos-8.qcow2] => (item=/dev/nvme0n1p1) => { "ansible_loop_var": "pv", "changed": false, "pv": "/dev/nvme0n1p1" } MSG: All assertions passed ok: [/cache/centos-8.qcow2] => (item=/dev/nvme1n1p1) => { "ansible_loop_var": "pv", "changed": false, "pv": "/dev/nvme1n1p1" } MSG: All assertions passed ok: [/cache/centos-8.qcow2] => (item=/dev/nvme2n1p1) => { "ansible_loop_var": "pv", "changed": false, "pv": "/dev/nvme2n1p1" } MSG: All assertions passed TASK [Check MD RAID] *********************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml:56 Thursday 21 July 2022 10:09:58 +0000 (0:00:00.072) 0:03:54.873 ********* included: 
/tmp/tmpa3egnbq5/tests/verify-pool-md.yml for /cache/centos-8.qcow2 TASK [get information about RAID] ********************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-md.yml:6 Thursday 21 July 2022 10:09:58 +0000 (0:00:00.042) 0:03:54.916 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-md.yml:12 Thursday 21 July 2022 10:09:58 +0000 (0:00:00.025) 0:03:54.941 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-md.yml:16 Thursday 21 July 2022 10:09:58 +0000 (0:00:00.025) 0:03:54.966 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-md.yml:20 Thursday 21 July 2022 10:09:58 +0000 (0:00:00.023) 0:03:54.990 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [check RAID active devices count] ***************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-md.yml:24 Thursday 21 July 2022 10:09:58 +0000 (0:00:00.024) 0:03:55.015 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [check RAID spare devices count] ****************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-md.yml:30 Thursday 21 July 2022 10:09:58 +0000 (0:00:00.027) 0:03:55.042 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [check RAID metadata version] ********************************************* task path: /tmp/tmpa3egnbq5/tests/verify-pool-md.yml:36 Thursday 21 July 2022 10:09:58 +0000 (0:00:00.028) 0:03:55.071 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-md.yml:44 Thursday 21 July 2022 10:09:58 +0000 (0:00:00.024) 0:03:55.095 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_md_active_devices_re": null, "storage_test_md_metadata_version_re": null, "storage_test_md_spare_devices_re": null }, "changed": false } TASK [Check LVM RAID] ********************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml:59 Thursday 21 July 2022 10:09:58 +0000 (0:00:00.037) 0:03:55.132 ********* included: /tmp/tmpa3egnbq5/tests/verify-pool-members-lvmraid.yml for /cache/centos-8.qcow2 TASK [Validate pool member LVM RAID settings] ********************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-members-lvmraid.yml:1 Thursday 21 July 2022 10:09:58 +0000 (0:00:00.045) 0:03:55.178 ********* included: /tmp/tmpa3egnbq5/tests/verify-pool-member-lvmraid.yml for /cache/centos-8.qcow2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 
'mount_options': 'defaults', 'mount_point': None, 'name': 'lv1', 'raid_level': None, 'size': 3221225472, 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'thin_pool_name': 'tpool1', 'thin_pool_size': None, 'thin': True, 'vdo_pool_size': None, 'disks': ['nvme0n1', 'nvme1n1', 'nvme2n1'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/vg1-lv1', '_raw_device': '/dev/mapper/vg1-lv1', '_mount_id': '/dev/mapper/vg1-lv1', '_kernel_device': '/dev/dm-4', '_raw_kernel_device': '/dev/dm-4'}) TASK [Get information about LVM RAID] ****************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-lvmraid.yml:3 Thursday 21 July 2022 10:09:58 +0000 (0:00:00.043) 0:03:55.221 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check that volume is LVM RAID] ******************************************* task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-lvmraid.yml:8 Thursday 21 July 2022 10:09:58 +0000 (0:00:00.028) 0:03:55.249 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-lvmraid.yml:12 Thursday 21 July 2022 10:09:58 +0000 (0:00:00.030) 0:03:55.280 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check Thin Pools] ******************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml:62 Thursday 21 July 2022 10:09:58 +0000 (0:00:00.027) 0:03:55.308 ********* included: /tmp/tmpa3egnbq5/tests/verify-pool-members-thin.yml for /cache/centos-8.qcow2 TASK [Validate pool member thinpool settings] ********************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-members-thin.yml:1 Thursday 21 July 2022 10:09:58 +0000 (0:00:00.044) 0:03:55.353 ********* included: /tmp/tmpa3egnbq5/tests/verify-pool-member-thin.yml for /cache/centos-8.qcow2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': None, 'name': 'lv1', 'raid_level': None, 'size': 3221225472, 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'thin_pool_name': 'tpool1', 'thin_pool_size': None, 'thin': True, 'vdo_pool_size': None, 'disks': ['nvme0n1', 'nvme1n1', 'nvme2n1'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/vg1-lv1', '_raw_device': '/dev/mapper/vg1-lv1', '_mount_id': '/dev/mapper/vg1-lv1', '_kernel_device': '/dev/dm-4', '_raw_kernel_device': '/dev/dm-4'}) TASK [Get information about thinpool] ****************************************** task path: 
/tmp/tmpa3egnbq5/tests/verify-pool-member-thin.yml:3 Thursday 21 July 2022 10:09:58 +0000 (0:00:00.074) 0:03:55.428 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "cmd": [ "lvs", "--noheading", "-o", "pool_lv", "--select", "lv_name=lv1&&segtype=thin", "vg1" ], "delta": "0:00:00.043849", "end": "2022-07-21 10:09:58.994723", "rc": 0, "start": "2022-07-21 10:09:58.950874" } STDOUT: tpool1 TASK [Check that volume is in correct thinpool (when thinp name is provided)] *** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-thin.yml:8 Thursday 21 July 2022 10:09:59 +0000 (0:00:00.410) 0:03:55.838 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Check that volume is in thinpool (when thinp name is not provided)] ****** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-thin.yml:13 Thursday 21 July 2022 10:09:59 +0000 (0:00:00.055) 0:03:55.893 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-thin.yml:17 Thursday 21 July 2022 10:09:59 +0000 (0:00:00.049) 0:03:55.942 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_lvmraid_status": null }, "changed": false } TASK [Check member encryption] ************************************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml:65 Thursday 21 July 2022 10:09:59 +0000 (0:00:00.038) 0:03:55.981 ********* included: /tmp/tmpa3egnbq5/tests/verify-pool-members-encryption.yml for /cache/centos-8.qcow2 TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-members-encryption.yml:4 Thursday 21 July 2022 10:09:59 +0000 (0:00:00.045) 0:03:56.027 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Validate pool member LUKS settings] ************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-members-encryption.yml:8 Thursday 21 July 2022 10:09:59 +0000 (0:00:00.047) 0:03:56.074 ********* skipping: [/cache/centos-8.qcow2] => (item=/dev/nvme0n1p1) => { "_storage_test_pool_member_path": "/dev/nvme0n1p1", "ansible_loop_var": "_storage_test_pool_member_path", "changed": false, "skip_reason": "Conditional result was False" } skipping: [/cache/centos-8.qcow2] => (item=/dev/nvme1n1p1) => { "_storage_test_pool_member_path": "/dev/nvme1n1p1", "ansible_loop_var": "_storage_test_pool_member_path", "changed": false, "skip_reason": "Conditional result was False" } skipping: [/cache/centos-8.qcow2] => (item=/dev/nvme2n1p1) => { "_storage_test_pool_member_path": "/dev/nvme2n1p1", "ansible_loop_var": "_storage_test_pool_member_path", "changed": false, "skip_reason": "Conditional result was False" } TASK [Validate pool member crypttab entries] *********************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-members-encryption.yml:15 Thursday 21 July 2022 10:09:59 +0000 (0:00:00.032) 0:03:56.107 ********* included: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml for /cache/centos-8.qcow2 => (item=/dev/nvme0n1p1) included: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml for /cache/centos-8.qcow2 => (item=/dev/nvme1n1p1) included: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml for /cache/centos-8.qcow2 => (item=/dev/nvme2n1p1) TASK 
[set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml:1 Thursday 21 July 2022 10:09:59 +0000 (0:00:00.055) 0:03:56.163 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_crypttab_entries": [] }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml:6 Thursday 21 July 2022 10:09:59 +0000 (0:00:00.049) 0:03:56.212 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml:11 Thursday 21 July 2022 10:09:59 +0000 (0:00:00.047) 0:03:56.260 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml:17 Thursday 21 July 2022 10:09:59 +0000 (0:00:00.035) 0:03:56.296 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml:23 Thursday 21 July 2022 10:09:59 +0000 (0:00:00.035) 0:03:56.331 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml:29 Thursday 21 July 2022 10:09:59 +0000 (0:00:00.038) 0:03:56.370 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_crypttab_entries": null }, "changed": false } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml:1 Thursday 21 July 2022 10:09:59 +0000 (0:00:00.033) 0:03:56.404 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_crypttab_entries": [] }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml:6 Thursday 21 July 2022 10:09:59 +0000 (0:00:00.049) 0:03:56.454 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml:11 Thursday 21 July 2022 10:09:59 +0000 (0:00:00.050) 0:03:56.504 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml:17 Thursday 21 July 2022 10:09:59 +0000 (0:00:00.036) 0:03:56.541 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml:23 Thursday 21 July 2022 10:09:59 +0000 (0:00:00.035) 0:03:56.577 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result 
was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml:29 Thursday 21 July 2022 10:09:59 +0000 (0:00:00.035) 0:03:56.612 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_crypttab_entries": null }, "changed": false } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml:1 Thursday 21 July 2022 10:09:59 +0000 (0:00:00.038) 0:03:56.651 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_crypttab_entries": [] }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml:6 Thursday 21 July 2022 10:09:59 +0000 (0:00:00.050) 0:03:56.701 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml:11 Thursday 21 July 2022 10:10:00 +0000 (0:00:00.049) 0:03:56.751 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml:17 Thursday 21 July 2022 10:10:00 +0000 (0:00:00.036) 0:03:56.787 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml:23 Thursday 21 July 2022 10:10:00 +0000 (0:00:00.041) 0:03:56.829 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml:29 Thursday 21 July 2022 10:10:00 +0000 (0:00:00.036) 0:03:56.866 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_crypttab_entries": null }, "changed": false } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-members-encryption.yml:22 Thursday 21 July 2022 10:10:00 +0000 (0:00:00.034) 0:03:56.901 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_crypttab_key_file": null }, "changed": false } TASK [Check VDO] *************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml:68 Thursday 21 July 2022 10:10:00 +0000 (0:00:00.035) 0:03:56.936 ********* included: /tmp/tmpa3egnbq5/tests/verify-pool-members-vdo.yml for /cache/centos-8.qcow2 TASK [Validate pool member VDO settings] *************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-members-vdo.yml:1 Thursday 21 July 2022 10:10:00 +0000 (0:00:00.048) 0:03:56.984 ********* included: /tmp/tmpa3egnbq5/tests/verify-pool-member-vdo.yml for /cache/centos-8.qcow2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 
'mount_point': None, 'name': 'lv1', 'raid_level': None, 'size': 3221225472, 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'thin_pool_name': 'tpool1', 'thin_pool_size': None, 'thin': True, 'vdo_pool_size': None, 'disks': ['nvme0n1', 'nvme1n1', 'nvme2n1'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/vg1-lv1', '_raw_device': '/dev/mapper/vg1-lv1', '_mount_id': '/dev/mapper/vg1-lv1', '_kernel_device': '/dev/dm-4', '_raw_kernel_device': '/dev/dm-4'}) TASK [get information about VDO deduplication] ********************************* task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-vdo.yml:3 Thursday 21 July 2022 10:10:00 +0000 (0:00:00.045) 0:03:57.030 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-vdo.yml:8 Thursday 21 July 2022 10:10:00 +0000 (0:00:00.055) 0:03:57.086 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [assert] ****************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-vdo.yml:11 Thursday 21 July 2022 10:10:00 +0000 (0:00:00.025) 0:03:57.111 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [assert] ****************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-vdo.yml:16 Thursday 21 July 2022 10:10:00 +0000 (0:00:00.024) 0:03:57.135 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-vdo.yml:21 Thursday 21 July 2022 10:10:00 +0000 (0:00:00.023) 0:03:57.159 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [assert] ****************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-vdo.yml:24 Thursday 21 July 2022 10:10:00 +0000 (0:00:00.027) 0:03:57.186 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [assert] ****************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-vdo.yml:29 Thursday 21 July 2022 10:10:00 +0000 (0:00:00.025) 0:03:57.212 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-vdo.yml:39 Thursday 21 July 2022 10:10:00 +0000 (0:00:00.025) 0:03:57.237 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_vdo_status": null }, "changed": false } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml:71 Thursday 21 July 2022 10:10:00 +0000 (0:00:00.036) 
0:03:57.274 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "__pvs_lvm_len": null, "_storage_test_expected_pv_count": null, "_storage_test_expected_pv_type": null, "_storage_test_pool_pvs": [], "_storage_test_pool_pvs_lvm": [] }, "changed": false } TASK [verify the volumes] ****************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-pool-volumes.yml:3 Thursday 21 July 2022 10:10:00 +0000 (0:00:00.038) 0:03:57.312 ********* included: /tmp/tmpa3egnbq5/tests/test-verify-volume.yml for /cache/centos-8.qcow2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': None, 'name': 'lv1', 'raid_level': None, 'size': 3221225472, 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'thin_pool_name': 'tpool1', 'thin_pool_size': None, 'thin': True, 'vdo_pool_size': None, 'disks': ['nvme0n1', 'nvme1n1', 'nvme2n1'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/vg1-lv1', '_raw_device': '/dev/mapper/vg1-lv1', '_mount_id': '/dev/mapper/vg1-lv1', '_kernel_device': '/dev/dm-4', '_raw_kernel_device': '/dev/dm-4'}) TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume.yml:2 Thursday 21 July 2022 10:10:00 +0000 (0:00:00.043) 0:03:57.356 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_volume_present": true, "_storage_volume_tests": [ "mount", "fstab", "fs", "device", "encryption", "md", "size", "cache" ] }, "changed": false } TASK [include_tasks] *********************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume.yml:10 Thursday 21 July 2022 10:10:00 +0000 (0:00:00.056) 0:03:57.412 ********* included: /tmp/tmpa3egnbq5/tests/test-verify-volume-mount.yml for /cache/centos-8.qcow2 => (item=mount) included: /tmp/tmpa3egnbq5/tests/test-verify-volume-fstab.yml for /cache/centos-8.qcow2 => (item=fstab) included: /tmp/tmpa3egnbq5/tests/test-verify-volume-fs.yml for /cache/centos-8.qcow2 => (item=fs) included: /tmp/tmpa3egnbq5/tests/test-verify-volume-device.yml for /cache/centos-8.qcow2 => (item=device) included: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml for /cache/centos-8.qcow2 => (item=encryption) included: /tmp/tmpa3egnbq5/tests/test-verify-volume-md.yml for /cache/centos-8.qcow2 => (item=md) included: /tmp/tmpa3egnbq5/tests/test-verify-volume-size.yml for /cache/centos-8.qcow2 => (item=size) included: /tmp/tmpa3egnbq5/tests/test-verify-volume-cache.yml for /cache/centos-8.qcow2 => (item=cache) TASK [Get expected mount device based on device type] ************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-mount.yml:6 Thursday 21 July 2022 10:10:00 +0000 (0:00:00.077) 0:03:57.489 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_device_path": "/dev/mapper/vg1-lv1" }, "changed": false } TASK [Set some facts] ********************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-mount.yml:14 
Thursday 21 July 2022 10:10:00 +0000 (0:00:00.045) 0:03:57.534 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_mount_device_matches": [], "storage_test_mount_expected_match_count": "0", "storage_test_mount_point_matches": [], "storage_test_swap_expected_matches": "0" }, "changed": false } TASK [Verify the current mount state by device] ******************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-mount.yml:28 Thursday 21 July 2022 10:10:00 +0000 (0:00:00.058) 0:03:57.593 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the current mount state by mount point] *************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-mount.yml:37 Thursday 21 July 2022 10:10:00 +0000 (0:00:00.024) 0:03:57.618 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Verify the mount fs type] ************************************************ task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-mount.yml:45 Thursday 21 July 2022 10:10:00 +0000 (0:00:00.053) 0:03:57.671 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [command] ***************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-mount.yml:54 Thursday 21 July 2022 10:10:01 +0000 (0:00:00.038) 0:03:57.710 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Gather swap info] ******************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-mount.yml:58 Thursday 21 July 2022 10:10:01 +0000 (0:00:00.024) 0:03:57.735 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify swap status] ****************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-mount.yml:63 Thursday 21 July 2022 10:10:01 +0000 (0:00:00.025) 0:03:57.760 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Unset facts] ************************************************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-mount.yml:75 Thursday 21 July 2022 10:10:01 +0000 (0:00:00.025) 0:03:57.785 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_mount_device_matches": null, "storage_test_mount_expected_match_count": null, "storage_test_mount_point_matches": null, "storage_test_swap_expected_matches": null, "storage_test_swaps": null, "storage_test_sys_node": null }, "changed": false } TASK [Set some variables for fstab checking] *********************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-fstab.yml:2 Thursday 21 July 2022 10:10:01 +0000 (0:00:00.035) 0:03:57.820 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": "0", "storage_test_fstab_expected_mount_options_matches": "1", "storage_test_fstab_expected_mount_point_matches": "0", "storage_test_fstab_id_matches": [], "storage_test_fstab_mount_options_matches": [], "storage_test_fstab_mount_point_matches": [] }, "changed": false } TASK [Verify that the device identifier appears in /etc/fstab] ***************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-fstab.yml:25 Thursday 21 July 2022 10:10:01 +0000 (0:00:00.062) 
0:03:57.883 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Verify the fstab mount point] ******************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-fstab.yml:32 Thursday 21 July 2022 10:10:01 +0000 (0:00:00.051) 0:03:57.935 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Verify mount_options] **************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-fstab.yml:39 Thursday 21 July 2022 10:10:01 +0000 (0:00:00.049) 0:03:57.985 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clean up variables] ****************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-fstab.yml:49 Thursday 21 July 2022 10:10:01 +0000 (0:00:00.036) 0:03:58.021 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": null, "storage_test_fstab_expected_mount_options_matches": null, "storage_test_fstab_expected_mount_point_matches": null, "storage_test_fstab_id_matches": null, "storage_test_fstab_mount_options_matches": null, "storage_test_fstab_mount_point_matches": null }, "changed": false } TASK [Verify fs type] ********************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-fs.yml:4 Thursday 21 July 2022 10:10:01 +0000 (0:00:00.037) 0:03:58.058 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Verify fs label] ********************************************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-fs.yml:10 Thursday 21 July 2022 10:10:01 +0000 (0:00:00.038) 0:03:58.097 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [See whether the device node is present] ********************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-device.yml:4 Thursday 21 July 2022 10:10:01 +0000 (0:00:00.040) 0:03:58.137 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "stat": { "atime": 1658398136.3502579, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1658398136.3502579, "dev": 6, "device_type": 64772, "executable": false, "exists": true, "gid": 6, "gr_name": "disk", "inode": 103369, "isblk": true, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/symlink", "mode": "0660", "mtime": 1658398136.3502579, "nlink": 1, "path": "/dev/mapper/vg1-lv1", "pw_name": "root", "readable": true, "rgrp": true, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": null, "wgrp": true, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [Verify the presence/absence of the device node] ************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-device.yml:10 Thursday 21 July 2022 10:10:01 +0000 (0:00:00.398) 0:03:58.535 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Make sure we got info about this volume] ********************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-device.yml:18 Thursday 21 July 2022 10:10:01 +0000 (0:00:00.039) 0:03:58.575 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK 
[(1/2) Process volume type (set initial value)] *************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-device.yml:24 Thursday 21 July 2022 10:10:01 +0000 (0:00:00.038) 0:03:58.613 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "st_volume_type": "lvm" }, "changed": false } TASK [(2/2) Process volume type (get RAID value)] ****************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-device.yml:28 Thursday 21 July 2022 10:10:01 +0000 (0:00:00.076) 0:03:58.689 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the volume's device type] ***************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-device.yml:33 Thursday 21 July 2022 10:10:02 +0000 (0:00:00.056) 0:03:58.746 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Stat the LUKS device, if encrypted] ************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:3 Thursday 21 July 2022 10:10:02 +0000 (0:00:00.039) 0:03:58.785 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Ensure cryptsetup is present] ******************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:10 Thursday 21 July 2022 10:10:02 +0000 (0:00:00.024) 0:03:58.810 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [Collect LUKS info for this volume] *************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:15 Thursday 21 July 2022 10:10:04 +0000 (0:00:01.962) 0:04:00.772 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the presence/absence of the LUKS device node] ********************* task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:21 Thursday 21 July 2022 10:10:04 +0000 (0:00:00.023) 0:04:00.796 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that the raw device is the same as the device if not encrypted] *** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:30 Thursday 21 July 2022 10:10:04 +0000 (0:00:00.023) 0:04:00.820 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Make sure we got info about the LUKS volume if encrypted] **************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:38 Thursday 21 July 2022 10:10:04 +0000 (0:00:00.050) 0:04:00.870 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the LUKS volume's device type if encrypted] *********************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:44 Thursday 21 July 2022 10:10:04 +0000 (0:00:00.023) 0:04:00.894 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS version] ****************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:49 Thursday 21 July 2022 10:10:04 +0000 (0:00:00.023) 0:04:00.918 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check 
LUKS key size] ***************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:55 Thursday 21 July 2022 10:10:04 +0000 (0:00:00.023) 0:04:00.941 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS cipher] ******************************************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:61 Thursday 21 July 2022 10:10:04 +0000 (0:00:00.023) 0:04:00.964 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:67 Thursday 21 July 2022 10:10:04 +0000 (0:00:00.024) 0:04:00.989 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_crypttab_entries": [], "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:74 Thursday 21 July 2022 10:10:04 +0000 (0:00:00.048) 0:04:01.037 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:79 Thursday 21 July 2022 10:10:04 +0000 (0:00:00.048) 0:04:01.086 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:85 Thursday 21 July 2022 10:10:04 +0000 (0:00:00.038) 0:04:01.125 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:91 Thursday 21 July 2022 10:10:04 +0000 (0:00:00.037) 0:04:01.162 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:97 Thursday 21 July 2022 10:10:04 +0000 (0:00:00.036) 0:04:01.198 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_expected_crypttab_entries": null, "_storage_test_expected_crypttab_key_file": null }, "changed": false } TASK [get information about RAID] ********************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-md.yml:7 Thursday 21 July 2022 10:10:04 +0000 (0:00:00.037) 0:04:01.235 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-md.yml:13 Thursday 21 July 2022 10:10:04 +0000 (0:00:00.039) 0:04:01.275 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: 
/tmp/tmpa3egnbq5/tests/test-verify-volume-md.yml:17 Thursday 21 July 2022 10:10:04 +0000 (0:00:00.038) 0:04:01.313 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-md.yml:21 Thursday 21 July 2022 10:10:04 +0000 (0:00:00.037) 0:04:01.350 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [check RAID active devices count] ***************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-md.yml:25 Thursday 21 July 2022 10:10:04 +0000 (0:00:00.036) 0:04:01.386 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [check RAID spare devices count] ****************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-md.yml:31 Thursday 21 July 2022 10:10:04 +0000 (0:00:00.039) 0:04:01.426 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [check RAID metadata version] ********************************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-md.yml:37 Thursday 21 July 2022 10:10:04 +0000 (0:00:00.039) 0:04:01.465 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [parse the actual size of the volume] ************************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-size.yml:3 Thursday 21 July 2022 10:10:04 +0000 (0:00:00.045) 0:04:01.511 ********* ok: [/cache/centos-8.qcow2] => { "bytes": 3221225472, "changed": false, "lvm": "3g", "parted": "3GiB", "size": "3 GiB" } TASK [parse the requested size of the volume] ********************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-size.yml:9 Thursday 21 July 2022 10:10:05 +0000 (0:00:00.419) 0:04:01.931 ********* ok: [/cache/centos-8.qcow2] => { "bytes": 3221225472, "changed": false, "lvm": "3g", "parted": "3GiB", "size": "3 GiB" } TASK [Establish base value for expected size] ********************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-size.yml:15 Thursday 21 July 2022 10:10:05 +0000 (0:00:00.386) 0:04:02.318 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_expected_size": "3221225472" }, "changed": false } TASK [debug] ******************************************************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-size.yml:20 Thursday 21 July 2022 10:10:05 +0000 (0:00:00.083) 0:04:02.401 ********* ok: [/cache/centos-8.qcow2] => { "storage_test_expected_size": "3221225472" } TASK [debug] ******************************************************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-size.yml:25 Thursday 21 July 2022 10:10:05 +0000 (0:00:00.099) 0:04:02.501 ********* skipping: [/cache/centos-8.qcow2] => {} TASK [debug] ******************************************************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-size.yml:28 Thursday 21 July 2022 10:10:05 +0000 (0:00:00.037) 0:04:02.538 ********* skipping: [/cache/centos-8.qcow2] => {} TASK [Get the size of parent/pool device] ************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-size.yml:31 Thursday 21 July 2022 10:10:05 +0000 (0:00:00.037) 
0:04:02.576 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [debug] ******************************************************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-size.yml:36 Thursday 21 July 2022 10:10:05 +0000 (0:00:00.037) 0:04:02.614 ********* skipping: [/cache/centos-8.qcow2] => {} TASK [Calculate the expected size based on pool size and percentage value] ***** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-size.yml:39 Thursday 21 July 2022 10:10:05 +0000 (0:00:00.043) 0:04:02.657 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [debug] ******************************************************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-size.yml:44 Thursday 21 July 2022 10:10:05 +0000 (0:00:00.040) 0:04:02.697 ********* ok: [/cache/centos-8.qcow2] => { "storage_test_actual_size": { "bytes": 3221225472, "changed": false, "failed": false, "lvm": "3g", "parted": "3GiB", "size": "3 GiB" } } TASK [debug] ******************************************************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-size.yml:47 Thursday 21 July 2022 10:10:06 +0000 (0:00:00.040) 0:04:02.737 ********* ok: [/cache/centos-8.qcow2] => { "storage_test_expected_size": "3221225472" } TASK [assert] ****************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-size.yml:50 Thursday 21 July 2022 10:10:06 +0000 (0:00:00.040) 0:04:02.778 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Get information about the LV] ******************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-cache.yml:6 Thursday 21 July 2022 10:10:06 +0000 (0:00:00.057) 0:04:02.835 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "cmd": [ "lvs", "--noheadings", "--nameprefixes", "--units=b", "--nosuffix", "--unquoted", "-o", "name,attr,cache_total_blocks,chunk_size,segtype", "vg1/lv1" ], "delta": "0:00:00.042698", "end": "2022-07-21 10:10:06.439740", "rc": 0, "start": "2022-07-21 10:10:06.397042" } STDOUT: LVM2_LV_NAME=lv1 LVM2_LV_ATTR=Vwi-a-tz-- LVM2_CACHE_TOTAL_BLOCKS= LVM2_CHUNK_SIZE=0 LVM2_SEGTYPE=thin TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-cache.yml:14 Thursday 21 July 2022 10:10:06 +0000 (0:00:00.451) 0:04:03.287 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_lv_segtype": [ "thin" ] }, "changed": false } TASK [check segment type] ****************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-cache.yml:17 Thursday 21 July 2022 10:10:06 +0000 (0:00:00.055) 0:04:03.343 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-cache.yml:22 Thursday 21 July 2022 10:10:06 +0000 (0:00:00.057) 0:04:03.400 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [parse the requested cache size] ****************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-cache.yml:26 Thursday 21 July 2022 10:10:06 +0000 (0:00:00.038) 0:04:03.439 ********* skipping: 
[/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-cache.yml:32 Thursday 21 July 2022 10:10:06 +0000 (0:00:00.040) 0:04:03.479 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check cache size] ******************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-cache.yml:36 Thursday 21 July 2022 10:10:06 +0000 (0:00:00.039) 0:04:03.518 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clean up facts] ********************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume.yml:16 Thursday 21 July 2022 10:10:06 +0000 (0:00:00.039) 0:04:03.557 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_volume_present": null }, "changed": false } TASK [Verify the volumes with no pool were correctly managed] ****************** task path: /tmp/tmpa3egnbq5/tests/verify-role-results.yml:43 Thursday 21 July 2022 10:10:06 +0000 (0:00:00.036) 0:04:03.594 ********* TASK [Clean up variable namespace] ********************************************* task path: /tmp/tmpa3egnbq5/tests/verify-role-results.yml:53 Thursday 21 July 2022 10:10:06 +0000 (0:00:00.024) 0:04:03.619 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_blkinfo": null, "storage_test_crypttab": null, "storage_test_fstab": null }, "changed": false } TASK [Create new LV under existing thinpool] *********************************** task path: /tmp/tmpa3egnbq5/tests/tests_create_thinp_then_remove.yml:74 Thursday 21 July 2022 10:10:06 +0000 (0:00:00.035) 0:04:03.654 ********* TASK [linux-system-roles.storage : set platform/version specific variables] **** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main.yml:2 Thursday 21 July 2022 10:10:07 +0000 (0:00:00.061) 0:04:03.715 ********* included: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/set_vars.yml for /cache/centos-8.qcow2 TASK [linux-system-roles.storage : Ensure ansible_facts used by role] ********** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:2 Thursday 21 July 2022 10:10:07 +0000 (0:00:00.035) 0:04:03.751 ********* ok: [/cache/centos-8.qcow2] TASK [linux-system-roles.storage : Set platform/version specific variables] **** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:8 Thursday 21 July 2022 10:10:07 +0000 (0:00:00.543) 0:04:04.295 ********* skipping: [/cache/centos-8.qcow2] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [/cache/centos-8.qcow2] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [/cache/centos-8.qcow2] => (item=CentOS_8.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "vdo", "kmod-kvdo", "xfsprogs" ] }, "ansible_included_var_files": [ "/tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } ok: 
[/cache/centos-8.qcow2] => (item=CentOS_8.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "vdo", "kmod-kvdo", "xfsprogs" ] }, "ansible_included_var_files": [ "/tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } TASK [linux-system-roles.storage : define an empty list of pools to be used in testing] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main.yml:5 Thursday 21 July 2022 10:10:07 +0000 (0:00:00.126) 0:04:04.422 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [linux-system-roles.storage : define an empty list of volumes to be used in testing] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main.yml:9 Thursday 21 July 2022 10:10:07 +0000 (0:00:00.035) 0:04:04.457 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [linux-system-roles.storage : include the appropriate provider tasks] ***** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main.yml:13 Thursday 21 July 2022 10:10:07 +0000 (0:00:00.080) 0:04:04.537 ********* redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount included: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml for /cache/centos-8.qcow2 TASK [linux-system-roles.storage : get a list of rpm packages installed on host machine] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:2 Thursday 21 July 2022 10:10:07 +0000 (0:00:00.088) 0:04:04.625 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : make sure blivet is available] ************** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:7 Thursday 21 July 2022 10:10:07 +0000 (0:00:00.021) 0:04:04.647 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [linux-system-roles.storage : show storage_pools] ************************* task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:14 Thursday 21 July 2022 10:10:09 +0000 (0:00:01.839) 0:04:06.487 ********* ok: [/cache/centos-8.qcow2] => { "storage_pools": [ { "disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ], "name": "vg1", "type": "lvm", "volumes": [ { "mount_point": "/opt/test2", "name": "lv2", "size": "4g", "thin": true, "thin_pool_name": "tpool1" } ] } ] } TASK [linux-system-roles.storage : show storage_volumes] *********************** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:19 Thursday 21 July 2022 10:10:09 +0000 (0:00:00.038) 0:04:06.526 ********* ok: [/cache/centos-8.qcow2] => { "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined" } TASK [linux-system-roles.storage : get required packages] ********************** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:24 Thursday 21 July 2022 10:10:09 +0000 (0:00:00.035) 0:04:06.561 ********* ok: [/cache/centos-8.qcow2] => { "actions": [], "changed": false, "crypts": [], "leaves": [], "mounts": [], 
"packages": [ "lvm2" ], "pools": [], "volumes": [] } TASK [linux-system-roles.storage : enable copr repositories if needed] ********* task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:37 Thursday 21 July 2022 10:10:11 +0000 (0:00:01.949) 0:04:08.511 ********* included: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml for /cache/centos-8.qcow2 TASK [linux-system-roles.storage : check if the COPR support packages should be installed] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml:2 Thursday 21 July 2022 10:10:11 +0000 (0:00:00.046) 0:04:08.557 ********* TASK [linux-system-roles.storage : make sure COPR support packages are present] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml:13 Thursday 21 July 2022 10:10:11 +0000 (0:00:00.035) 0:04:08.592 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : enable COPRs] ******************************* task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml:18 Thursday 21 July 2022 10:10:11 +0000 (0:00:00.041) 0:04:08.634 ********* TASK [linux-system-roles.storage : make sure required packages are installed] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:44 Thursday 21 July 2022 10:10:11 +0000 (0:00:00.036) 0:04:08.671 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [linux-system-roles.storage : get service facts] ************************** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:51 Thursday 21 July 2022 10:10:13 +0000 (0:00:01.830) 0:04:10.501 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "blivet.service": { "name": "blivet.service", "source": "systemd", "state": "inactive", "status": "static" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "chrony-dnssrv@.service": { "name": "chrony-dnssrv@.service", "source": "systemd", "state": "unknown", "status": "static" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, 
"cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cockpit-motd.service": { "name": "cockpit-motd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cockpit-wsinstance-http.service": { "name": "cockpit-wsinstance-http.service", "source": "systemd", "state": "inactive", "status": "static" }, "cockpit-wsinstance-https-factory@.service": { "name": "cockpit-wsinstance-https-factory@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cockpit-wsinstance-https@.service": { "name": "cockpit-wsinstance-https@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cockpit.service": { "name": "cockpit.service", "source": "systemd", "state": "inactive", "status": "static" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cpupower.service": { "name": "cpupower.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "static" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "dbus-org.freedesktop.portable1.service": { "name": "dbus-org.freedesktop.portable1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "running", "status": "static" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "running", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": 
"stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fcoe.service": { "name": "fcoe.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "halt-local.service": { "name": "halt-local.service", "source": "systemd", "state": "inactive", "status": "static" }, "import-state.service": { "name": "import-state.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "iscsi-shutdown.service": { "name": "iscsi-shutdown.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iscsi.service": { "name": "iscsi.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iscsid.service": { "name": "iscsid.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "loadmodules.service": { "name": "loadmodules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "lvm2-activation-early.service": { "name": "lvm2-activation-early.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "lvm2-activation.service": { "name": "lvm2-activation.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": 
"stopped", "status": "enabled" }, "lvm2-pvscan@.service": { "name": "lvm2-pvscan@.service", "source": "systemd", "state": "unknown", "status": "static" }, "lvm2-pvscan@259:4.service": { "name": "lvm2-pvscan@259:4.service", "source": "systemd", "state": "stopped", "status": "active" }, "lvm2-pvscan@259:5.service": { "name": "lvm2-pvscan@259:5.service", "source": "systemd", "state": "stopped", "status": "active" }, "lvm2-pvscan@259:6.service": { "name": "lvm2-pvscan@259:6.service", "source": "systemd", "state": "stopped", "status": "active" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "mdadm-grow-continue@.service": { "name": "mdadm-grow-continue@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdadm-last-resort@.service": { "name": "mdadm-last-resort@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdcheck_continue.service": { "name": "mdcheck_continue.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdcheck_start.service": { "name": "mdcheck_start.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmon@.service": { "name": "mdmon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdmonitor-oneshot.service": { "name": "mdmonitor-oneshot.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmonitor.service": { "name": "mdmonitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "messagebus.service": { "name": "messagebus.service", "source": "systemd", "state": "active", "status": "static" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "multipathd.service": { "name": "multipathd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "ndctl-monitor.service": { "name": "ndctl-monitor.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-convert.service": { "name": "nfs-convert.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": 
"systemd", "state": "stopped", "status": "not-found" }, "oddjobd.service": { "name": "oddjobd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "packagekit-offline-update.service": { "name": "packagekit-offline-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "packagekit.service": { "name": "packagekit.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "running", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon.service": { "name": "quotaon.service", "source": "systemd", "state": "inactive", "status": "static" }, "rbdmap.service": { "name": "rbdmap.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rdisc.service": { "name": "rdisc.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "rngd-wake-threshold.service": { "name": "rngd-wake-threshold.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", 
"source": "systemd", "state": "stopped", "status": "inactive" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "running", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "active", "status": "enabled" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-resume@.service": { "name": "systemd-hibernate-resume@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": 
"stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-portabled.service": { "name": "systemd-portabled.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-quotacheck.service": { "name": "systemd-quotacheck.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-resolved.service": { "name": "systemd-resolved.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "masked" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, 
"systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "tcsd.service": { "name": "tcsd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "teamd@.service": { "name": "teamd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "timedatex.service": { "name": "timedatex.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "tuned.service": { "name": "tuned.service", "source": "systemd", "state": "running", "status": "enabled" }, "unbound-anchor.service": { "name": "unbound-anchor.service", "source": "systemd", "state": "stopped", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "vdo-start-by-dev@.service": { "name": "vdo-start-by-dev@.service", "source": "systemd", "state": "unknown", "status": "static" }, "vdo.service": { "name": "vdo.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "yppasswdd.service": { "name": "yppasswdd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ypserv.service": { "name": "ypserv.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ypxfrd.service": { "name": "ypxfrd.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [linux-system-roles.storage : Set storage_cryptsetup_services] ************ task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:58 Thursday 21 July 2022 10:10:15 +0000 (0:00:01.614) 0:04:12.116 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_cryptsetup_services": [] }, "changed": false } TASK [linux-system-roles.storage : Mask the systemd cryptsetup services] ******* task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:71 Thursday 21 July 2022 10:10:15 +0000 (0:00:00.056) 0:04:12.172 ********* TASK [linux-system-roles.storage : manage the pools and volumes to match the specified 
state] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:77 Thursday 21 July 2022 10:10:15 +0000 (0:00:00.022) 0:04:12.195 ********* changed: [/cache/centos-8.qcow2] => { "actions": [ { "action": "create device", "device": "/dev/mapper/vg1-lv2", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/vg1-lv2", "fs_type": "xfs" } ], "changed": true, "crypts": [], "leaves": [ "/dev/sr0", "/dev/vda1", "/dev/sda", "/dev/sdb", "/dev/sdc", "/dev/mapper/vg1-lv1", "/dev/vdb", "/dev/vdc", "/dev/vdd", "/dev/mapper/vg1-lv2" ], "mounts": [ { "dump": 0, "fstype": "xfs", "opts": "defaults", "passno": 0, "path": "/opt/test2", "src": "/dev/mapper/vg1-lv2", "state": "mounted" } ], "packages": [ "lvm2", "xfsprogs" ], "pools": [ { "disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "vg1", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/vg1-lv2", "_kernel_device": "/dev/dm-5", "_mount_id": "/dev/mapper/vg1-lv2", "_raw_device": "/dev/mapper/vg1-lv2", "_raw_kernel_device": "/dev/dm-5", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test2", "name": "lv2", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "size": "4g", "state": "present", "thin": true, "thin_pool_name": "tpool1", "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } TASK [linux-system-roles.storage : Workaround for udev issue on some platforms] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:91 Thursday 21 July 2022 10:10:17 +0000 (0:00:02.256) 0:04:14.451 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Unmask the systemd cryptsetup services] ***** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:103 Thursday 21 July 2022 10:10:17 +0000 (0:00:00.092) 0:04:14.544 ********* TASK [linux-system-roles.storage : show blivet_output] ************************* task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:109 Thursday 21 July 2022 10:10:17 +0000 (0:00:00.034) 0:04:14.579 ********* ok: [/cache/centos-8.qcow2] => { "blivet_output": { "actions": [ { "action": "create device", "device": "/dev/mapper/vg1-lv2", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/vg1-lv2", "fs_type": "xfs" } ], "changed": true, "crypts": [], "failed": false, "leaves": [ "/dev/sr0", "/dev/vda1", "/dev/sda", "/dev/sdb", "/dev/sdc", "/dev/mapper/vg1-lv1", "/dev/vdb", "/dev/vdc", "/dev/vdd", "/dev/mapper/vg1-lv2" ], "mounts": [ { "dump": 0, "fstype": "xfs", "opts": "defaults", "passno": 0, 
"path": "/opt/test2", "src": "/dev/mapper/vg1-lv2", "state": "mounted" } ], "packages": [ "lvm2", "xfsprogs" ], "pools": [ { "disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "vg1", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/vg1-lv2", "_kernel_device": "/dev/dm-5", "_mount_id": "/dev/mapper/vg1-lv2", "_raw_device": "/dev/mapper/vg1-lv2", "_raw_kernel_device": "/dev/dm-5", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test2", "name": "lv2", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "size": "4g", "state": "present", "thin": true, "thin_pool_name": "tpool1", "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } } TASK [linux-system-roles.storage : set the list of pools for test verification] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:114 Thursday 21 July 2022 10:10:17 +0000 (0:00:00.041) 0:04:14.621 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_pools_list": [ { "disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "vg1", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/vg1-lv2", "_kernel_device": "/dev/dm-5", "_mount_id": "/dev/mapper/vg1-lv2", "_raw_device": "/dev/mapper/vg1-lv2", "_raw_kernel_device": "/dev/dm-5", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test2", "name": "lv2", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "size": "4g", "state": "present", "thin": true, "thin_pool_name": "tpool1", "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ] }, "changed": false } TASK [linux-system-roles.storage : set the list of volumes for test verification] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:118 Thursday 21 July 2022 10:10:17 +0000 (0:00:00.039) 0:04:14.660 ********* ok: 
[/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [linux-system-roles.storage : remove obsolete mounts] ********************* task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:134 Thursday 21 July 2022 10:10:17 +0000 (0:00:00.035) 0:04:14.696 ********* TASK [linux-system-roles.storage : tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:146 Thursday 21 July 2022 10:10:18 +0000 (0:00:00.039) 0:04:14.735 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "name": null, "status": {} } TASK [linux-system-roles.storage : set up new/current mounts] ****************** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:151 Thursday 21 July 2022 10:10:18 +0000 (0:00:00.631) 0:04:15.367 ********* redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount changed: [/cache/centos-8.qcow2] => (item={'src': '/dev/mapper/vg1-lv2', 'path': '/opt/test2', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted'}) => { "ansible_loop_var": "mount_info", "backup_file": "", "boot": "yes", "changed": true, "dump": "0", "fstab": "/etc/fstab", "fstype": "xfs", "mount_info": { "dump": 0, "fstype": "xfs", "opts": "defaults", "passno": 0, "path": "/opt/test2", "src": "/dev/mapper/vg1-lv2", "state": "mounted" }, "name": "/opt/test2", "opts": "defaults", "passno": "0", "src": "/dev/mapper/vg1-lv2" } TASK [linux-system-roles.storage : tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:163 Thursday 21 July 2022 10:10:19 +0000 (0:00:00.441) 0:04:15.808 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "name": null, "status": {} } TASK [linux-system-roles.storage : retrieve facts for the /etc/crypttab file] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:171 Thursday 21 July 2022 10:10:19 +0000 (0:00:00.616) 0:04:16.425 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "stat": { "atime": 1658398003.7492578, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1643141385.117, "dev": 64513, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 135, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0600", "mtime": 1643141019.537, "nlink": 1, "path": "/etc/crypttab", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": "3147672035", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [linux-system-roles.storage : manage /etc/crypttab to account for changes we just made] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:176 Thursday 21 July 2022 10:10:20 +0000 (0:00:00.389) 0:04:16.814 ********* TASK [linux-system-roles.storage : Update facts] ******************************* task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:198 Thursday 21 July 2022 10:10:20 +0000 (0:00:00.023) 0:04:16.838 ********* ok: [/cache/centos-8.qcow2] META: 
role_complete for /cache/centos-8.qcow2 TASK [include_tasks] *********************************************************** task path: /tmp/tmpa3egnbq5/tests/tests_create_thinp_then_remove.yml:89 Thursday 21 July 2022 10:10:21 +0000 (0:00:00.974) 0:04:17.813 ********* included: /tmp/tmpa3egnbq5/tests/verify-role-results.yml for /cache/centos-8.qcow2 TASK [Print out pool information] ********************************************** task path: /tmp/tmpa3egnbq5/tests/verify-role-results.yml:1 Thursday 21 July 2022 10:10:21 +0000 (0:00:00.048) 0:04:17.862 ********* ok: [/cache/centos-8.qcow2] => { "_storage_pools_list": [ { "disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "vg1", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/vg1-lv2", "_kernel_device": "/dev/dm-5", "_mount_id": "/dev/mapper/vg1-lv2", "_raw_device": "/dev/mapper/vg1-lv2", "_raw_kernel_device": "/dev/dm-5", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test2", "name": "lv2", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "size": "4g", "state": "present", "thin": true, "thin_pool_name": "tpool1", "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ] } TASK [Print out volume information] ******************************************** task path: /tmp/tmpa3egnbq5/tests/verify-role-results.yml:6 Thursday 21 July 2022 10:10:21 +0000 (0:00:00.084) 0:04:17.947 ********* skipping: [/cache/centos-8.qcow2] => {} TASK [Collect info about the volumes.] 
***************************************** task path: /tmp/tmpa3egnbq5/tests/verify-role-results.yml:14 Thursday 21 July 2022 10:10:21 +0000 (0:00:00.073) 0:04:18.020 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "info": { "/dev/mapper/vg1-lv1": { "fstype": "xfs", "label": "", "name": "/dev/mapper/vg1-lv1", "size": "3G", "type": "lvm", "uuid": "c574fe3b-c0cb-4bb1-85fe-deeef0a957b9" }, "/dev/mapper/vg1-lv2": { "fstype": "xfs", "label": "", "name": "/dev/mapper/vg1-lv2", "size": "4G", "type": "lvm", "uuid": "418e71a6-daf8-4ebd-82a7-9dffd1221b29" }, "/dev/mapper/vg1-tpool1": { "fstype": "", "label": "", "name": "/dev/mapper/vg1-tpool1", "size": "10G", "type": "lvm", "uuid": "" }, "/dev/mapper/vg1-tpool1-tpool": { "fstype": "", "label": "", "name": "/dev/mapper/vg1-tpool1-tpool", "size": "10G", "type": "lvm", "uuid": "" }, "/dev/mapper/vg1-tpool1_tdata": { "fstype": "", "label": "", "name": "/dev/mapper/vg1-tpool1_tdata", "size": "10G", "type": "lvm", "uuid": "" }, "/dev/mapper/vg1-tpool1_tmeta": { "fstype": "", "label": "", "name": "/dev/mapper/vg1-tpool1_tmeta", "size": "12M", "type": "lvm", "uuid": "" }, "/dev/nvme0n1": { "fstype": "", "label": "", "name": "/dev/nvme0n1", "size": "10G", "type": "disk", "uuid": "" }, "/dev/nvme0n1p1": { "fstype": "LVM2_member", "label": "", "name": "/dev/nvme0n1p1", "size": "10G", "type": "partition", "uuid": "xLu0vZ-nH9w-K1e2-A6yJ-EaoZ-FOkB-aLS539" }, "/dev/nvme1n1": { "fstype": "", "label": "", "name": "/dev/nvme1n1", "size": "10G", "type": "disk", "uuid": "" }, "/dev/nvme1n1p1": { "fstype": "LVM2_member", "label": "", "name": "/dev/nvme1n1p1", "size": "10G", "type": "partition", "uuid": "Jjw50w-Whgv-fxb5-adBQ-h5Qy-h8fG-52EFuy" }, "/dev/nvme2n1": { "fstype": "", "label": "", "name": "/dev/nvme2n1", "size": "10G", "type": "disk", "uuid": "" }, "/dev/nvme2n1p1": { "fstype": "LVM2_member", "label": "", "name": "/dev/nvme2n1p1", "size": "10G", "type": "partition", "uuid": "2PzQCY-FE6q-TI2r-KVCh-wUMp-axXy-16asvy" }, "/dev/sda": { "fstype": "", "label": "", "name": "/dev/sda", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdb": { "fstype": "", "label": "", "name": "/dev/sdb", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdc": { "fstype": "", "label": "", "name": "/dev/sdc", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sr0": { "fstype": "iso9660", "label": "cidata", "name": "/dev/sr0", "size": "364K", "type": "rom", "uuid": "2022-07-21-10-05-42-00" }, "/dev/vda": { "fstype": "", "label": "", "name": "/dev/vda", "size": "10G", "type": "disk", "uuid": "" }, "/dev/vda1": { "fstype": "xfs", "label": "", "name": "/dev/vda1", "size": "10G", "type": "partition", "uuid": "395b9844-e404-4857-afbb-c6edccaf72f3" }, "/dev/vdb": { "fstype": "", "label": "", "name": "/dev/vdb", "size": "10G", "type": "disk", "uuid": "" }, "/dev/vdc": { "fstype": "", "label": "", "name": "/dev/vdc", "size": "10G", "type": "disk", "uuid": "" }, "/dev/vdd": { "fstype": "", "label": "", "name": "/dev/vdd", "size": "10G", "type": "disk", "uuid": "" } } } TASK [Read the /etc/fstab file for volume existence] *************************** task path: /tmp/tmpa3egnbq5/tests/verify-role-results.yml:19 Thursday 21 July 2022 10:10:21 +0000 (0:00:00.415) 0:04:18.436 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "cmd": [ "cat", "/etc/fstab" ], "delta": "0:00:00.002784", "end": "2022-07-21 10:10:21.966313", "rc": 0, "start": "2022-07-21 10:10:21.963529" } STDOUT: # # /etc/fstab # Created by anaconda on Tue Jan 25 20:03:39 2022 # # Accessible filesystems, by 
reference, are maintained under '/dev/disk/'. # See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info. # # After editing this file, run 'systemctl daemon-reload' to update systemd # units generated from this file. # UUID=395b9844-e404-4857-afbb-c6edccaf72f3 / xfs defaults 0 0 /dev/mapper/vg1-lv2 /opt/test2 xfs defaults 0 0 TASK [Read the /etc/crypttab file] ********************************************* task path: /tmp/tmpa3egnbq5/tests/verify-role-results.yml:24 Thursday 21 July 2022 10:10:22 +0000 (0:00:00.372) 0:04:18.809 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "cmd": [ "cat", "/etc/crypttab" ], "delta": "0:00:00.002881", "end": "2022-07-21 10:10:22.332479", "failed_when_result": false, "rc": 0, "start": "2022-07-21 10:10:22.329598" } TASK [Verify the volumes listed in storage_pools were correctly managed] ******* task path: /tmp/tmpa3egnbq5/tests/verify-role-results.yml:33 Thursday 21 July 2022 10:10:22 +0000 (0:00:00.366) 0:04:19.175 ********* included: /tmp/tmpa3egnbq5/tests/test-verify-pool.yml for /cache/centos-8.qcow2 => (item={'disks': ['nvme0n1', 'nvme1n1', 'nvme2n1'], 'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'name': 'vg1', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'state': 'present', 'type': 'lvm', 'volumes': [{'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test2', 'name': 'lv2', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'thin_pool_name': 'tpool1', 'thin_pool_size': None, 'thin': True, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/vg1-lv2', '_raw_device': '/dev/mapper/vg1-lv2', '_mount_id': '/dev/mapper/vg1-lv2', '_kernel_device': '/dev/dm-5', '_raw_kernel_device': '/dev/dm-5'}], 'raid_chunk_size': None}) TASK [Set _storage_pool_tests] ************************************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-pool.yml:5 Thursday 21 July 2022 10:10:22 +0000 (0:00:00.061) 0:04:19.237 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_pool_tests": [ "members", "volumes" ] }, "changed": false } TASK [include_tasks] *********************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-pool.yml:18 Thursday 21 July 2022 10:10:22 +0000 (0:00:00.035) 0:04:19.272 ********* included: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml for /cache/centos-8.qcow2 => (item=members) included: /tmp/tmpa3egnbq5/tests/test-verify-pool-volumes.yml for /cache/centos-8.qcow2 => (item=volumes) TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml:1 Thursday 21 July 2022 10:10:22 +0000 (0:00:00.046) 0:04:19.319 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_expected_pv_count": "3", 
"_storage_test_pool_pvs_lvm": [ "/dev/nvme0n1p1", "/dev/nvme1n1p1", "/dev/nvme2n1p1" ] }, "changed": false } TASK [Get the canonical device path for each member device] ******************** task path: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml:10 Thursday 21 July 2022 10:10:22 +0000 (0:00:00.070) 0:04:19.389 ********* ok: [/cache/centos-8.qcow2] => (item=/dev/nvme0n1p1) => { "ansible_loop_var": "pv", "changed": false, "device": "/dev/nvme0n1p1", "pv": "/dev/nvme0n1p1" } ok: [/cache/centos-8.qcow2] => (item=/dev/nvme1n1p1) => { "ansible_loop_var": "pv", "changed": false, "device": "/dev/nvme1n1p1", "pv": "/dev/nvme1n1p1" } ok: [/cache/centos-8.qcow2] => (item=/dev/nvme2n1p1) => { "ansible_loop_var": "pv", "changed": false, "device": "/dev/nvme2n1p1", "pv": "/dev/nvme2n1p1" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml:19 Thursday 21 July 2022 10:10:23 +0000 (0:00:01.046) 0:04:20.436 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "__pvs_lvm_len": "3" }, "changed": false } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml:23 Thursday 21 July 2022 10:10:23 +0000 (0:00:00.048) 0:04:20.485 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_pool_pvs": [ "/dev/nvme0n1p1", "/dev/nvme1n1p1", "/dev/nvme2n1p1" ] }, "changed": false } TASK [Verify PV count] ********************************************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml:27 Thursday 21 July 2022 10:10:23 +0000 (0:00:00.049) 0:04:20.535 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml:34 Thursday 21 July 2022 10:10:23 +0000 (0:00:00.050) 0:04:20.586 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_expected_pv_type": "disk" }, "changed": false } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml:38 Thursday 21 July 2022 10:10:23 +0000 (0:00:00.039) 0:04:20.625 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_expected_pv_type": "partition" }, "changed": false } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml:42 Thursday 21 July 2022 10:10:23 +0000 (0:00:00.048) 0:04:20.674 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check the type of each PV] *********************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml:46 Thursday 21 July 2022 10:10:23 +0000 (0:00:00.023) 0:04:20.698 ********* ok: [/cache/centos-8.qcow2] => (item=/dev/nvme0n1p1) => { "ansible_loop_var": "pv", "changed": false, "pv": "/dev/nvme0n1p1" } MSG: All assertions passed ok: [/cache/centos-8.qcow2] => (item=/dev/nvme1n1p1) => { "ansible_loop_var": "pv", "changed": false, "pv": "/dev/nvme1n1p1" } MSG: All assertions passed ok: [/cache/centos-8.qcow2] => (item=/dev/nvme2n1p1) => { "ansible_loop_var": "pv", "changed": false, "pv": "/dev/nvme2n1p1" } MSG: All assertions passed TASK [Check MD RAID] *********************************************************** task 
path: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml:56 Thursday 21 July 2022 10:10:24 +0000 (0:00:00.107) 0:04:20.805 ********* included: /tmp/tmpa3egnbq5/tests/verify-pool-md.yml for /cache/centos-8.qcow2 TASK [get information about RAID] ********************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-md.yml:6 Thursday 21 July 2022 10:10:24 +0000 (0:00:00.042) 0:04:20.847 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-md.yml:12 Thursday 21 July 2022 10:10:24 +0000 (0:00:00.024) 0:04:20.872 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-md.yml:16 Thursday 21 July 2022 10:10:24 +0000 (0:00:00.050) 0:04:20.923 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-md.yml:20 Thursday 21 July 2022 10:10:24 +0000 (0:00:00.024) 0:04:20.947 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [check RAID active devices count] ***************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-md.yml:24 Thursday 21 July 2022 10:10:24 +0000 (0:00:00.024) 0:04:20.972 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [check RAID spare devices count] ****************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-md.yml:30 Thursday 21 July 2022 10:10:24 +0000 (0:00:00.024) 0:04:20.997 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [check RAID metadata version] ********************************************* task path: /tmp/tmpa3egnbq5/tests/verify-pool-md.yml:36 Thursday 21 July 2022 10:10:24 +0000 (0:00:00.025) 0:04:21.022 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-md.yml:44 Thursday 21 July 2022 10:10:24 +0000 (0:00:00.024) 0:04:21.047 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_md_active_devices_re": null, "storage_test_md_metadata_version_re": null, "storage_test_md_spare_devices_re": null }, "changed": false } TASK [Check LVM RAID] ********************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml:59 Thursday 21 July 2022 10:10:24 +0000 (0:00:00.036) 0:04:21.083 ********* included: /tmp/tmpa3egnbq5/tests/verify-pool-members-lvmraid.yml for /cache/centos-8.qcow2 TASK [Validate pool member LVM RAID settings] ********************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-members-lvmraid.yml:1 Thursday 21 July 2022 10:10:24 +0000 (0:00:00.045) 0:04:21.128 ********* included: /tmp/tmpa3egnbq5/tests/verify-pool-member-lvmraid.yml for /cache/centos-8.qcow2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 
'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test2', 'name': 'lv2', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'thin_pool_name': 'tpool1', 'thin_pool_size': None, 'thin': True, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/vg1-lv2', '_raw_device': '/dev/mapper/vg1-lv2', '_mount_id': '/dev/mapper/vg1-lv2', '_kernel_device': '/dev/dm-5', '_raw_kernel_device': '/dev/dm-5'}) TASK [Get information about LVM RAID] ****************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-lvmraid.yml:3 Thursday 21 July 2022 10:10:24 +0000 (0:00:00.043) 0:04:21.172 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check that volume is LVM RAID] ******************************************* task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-lvmraid.yml:8 Thursday 21 July 2022 10:10:24 +0000 (0:00:00.028) 0:04:21.201 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-lvmraid.yml:12 Thursday 21 July 2022 10:10:24 +0000 (0:00:00.026) 0:04:21.227 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check Thin Pools] ******************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml:62 Thursday 21 July 2022 10:10:24 +0000 (0:00:00.026) 0:04:21.254 ********* included: /tmp/tmpa3egnbq5/tests/verify-pool-members-thin.yml for /cache/centos-8.qcow2 TASK [Validate pool member thinpool settings] ********************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-members-thin.yml:1 Thursday 21 July 2022 10:10:24 +0000 (0:00:00.045) 0:04:21.299 ********* included: /tmp/tmpa3egnbq5/tests/verify-pool-member-thin.yml for /cache/centos-8.qcow2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test2', 'name': 'lv2', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'thin_pool_name': 'tpool1', 'thin_pool_size': None, 'thin': True, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/vg1-lv2', '_raw_device': '/dev/mapper/vg1-lv2', '_mount_id': '/dev/mapper/vg1-lv2', '_kernel_device': '/dev/dm-5', '_raw_kernel_device': '/dev/dm-5'}) TASK [Get information about thinpool] 
****************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-thin.yml:3 Thursday 21 July 2022 10:10:24 +0000 (0:00:00.043) 0:04:21.342 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "cmd": [ "lvs", "--noheading", "-o", "pool_lv", "--select", "lv_name=lv2&&segtype=thin", "vg1" ], "delta": "0:00:00.036217", "end": "2022-07-21 10:10:24.909019", "rc": 0, "start": "2022-07-21 10:10:24.872802" } STDOUT: tpool1 TASK [Check that volume is in correct thinpool (when thinp name is provided)] *** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-thin.yml:8 Thursday 21 July 2022 10:10:25 +0000 (0:00:00.409) 0:04:21.752 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Check that volume is in thinpool (when thinp name is not provided)] ****** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-thin.yml:13 Thursday 21 July 2022 10:10:25 +0000 (0:00:00.065) 0:04:21.817 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-thin.yml:17 Thursday 21 July 2022 10:10:25 +0000 (0:00:00.063) 0:04:21.881 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_lvmraid_status": null }, "changed": false } TASK [Check member encryption] ************************************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml:65 Thursday 21 July 2022 10:10:25 +0000 (0:00:00.043) 0:04:21.924 ********* included: /tmp/tmpa3egnbq5/tests/verify-pool-members-encryption.yml for /cache/centos-8.qcow2 TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-members-encryption.yml:4 Thursday 21 July 2022 10:10:25 +0000 (0:00:00.071) 0:04:21.995 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Validate pool member LUKS settings] ************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-members-encryption.yml:8 Thursday 21 July 2022 10:10:25 +0000 (0:00:00.052) 0:04:22.048 ********* skipping: [/cache/centos-8.qcow2] => (item=/dev/nvme0n1p1) => { "_storage_test_pool_member_path": "/dev/nvme0n1p1", "ansible_loop_var": "_storage_test_pool_member_path", "changed": false, "skip_reason": "Conditional result was False" } skipping: [/cache/centos-8.qcow2] => (item=/dev/nvme1n1p1) => { "_storage_test_pool_member_path": "/dev/nvme1n1p1", "ansible_loop_var": "_storage_test_pool_member_path", "changed": false, "skip_reason": "Conditional result was False" } skipping: [/cache/centos-8.qcow2] => (item=/dev/nvme2n1p1) => { "_storage_test_pool_member_path": "/dev/nvme2n1p1", "ansible_loop_var": "_storage_test_pool_member_path", "changed": false, "skip_reason": "Conditional result was False" } TASK [Validate pool member crypttab entries] *********************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-members-encryption.yml:15 Thursday 21 July 2022 10:10:25 +0000 (0:00:00.035) 0:04:22.083 ********* included: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml for /cache/centos-8.qcow2 => (item=/dev/nvme0n1p1) included: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml for /cache/centos-8.qcow2 => (item=/dev/nvme1n1p1) included: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml for 
/cache/centos-8.qcow2 => (item=/dev/nvme2n1p1) TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml:1 Thursday 21 July 2022 10:10:25 +0000 (0:00:00.052) 0:04:22.136 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_crypttab_entries": [] }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml:6 Thursday 21 July 2022 10:10:25 +0000 (0:00:00.050) 0:04:22.187 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml:11 Thursday 21 July 2022 10:10:25 +0000 (0:00:00.051) 0:04:22.239 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml:17 Thursday 21 July 2022 10:10:25 +0000 (0:00:00.035) 0:04:22.274 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml:23 Thursday 21 July 2022 10:10:25 +0000 (0:00:00.037) 0:04:22.312 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml:29 Thursday 21 July 2022 10:10:25 +0000 (0:00:00.036) 0:04:22.348 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_crypttab_entries": null }, "changed": false } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml:1 Thursday 21 July 2022 10:10:25 +0000 (0:00:00.078) 0:04:22.427 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_crypttab_entries": [] }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml:6 Thursday 21 July 2022 10:10:25 +0000 (0:00:00.116) 0:04:22.543 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml:11 Thursday 21 July 2022 10:10:25 +0000 (0:00:00.050) 0:04:22.593 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml:17 Thursday 21 July 2022 10:10:25 +0000 (0:00:00.037) 0:04:22.630 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml:23 Thursday 21 July 2022 10:10:25 +0000 (0:00:00.036) 0:04:22.667 ********* skipping: [/cache/centos-8.qcow2] => { 
"changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml:29 Thursday 21 July 2022 10:10:26 +0000 (0:00:00.041) 0:04:22.709 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_crypttab_entries": null }, "changed": false } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml:1 Thursday 21 July 2022 10:10:26 +0000 (0:00:00.034) 0:04:22.743 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_crypttab_entries": [] }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml:6 Thursday 21 July 2022 10:10:26 +0000 (0:00:00.049) 0:04:22.792 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml:11 Thursday 21 July 2022 10:10:26 +0000 (0:00:00.052) 0:04:22.844 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml:17 Thursday 21 July 2022 10:10:26 +0000 (0:00:00.036) 0:04:22.881 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml:23 Thursday 21 July 2022 10:10:26 +0000 (0:00:00.036) 0:04:22.917 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml:29 Thursday 21 July 2022 10:10:26 +0000 (0:00:00.035) 0:04:22.953 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_crypttab_entries": null }, "changed": false } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-members-encryption.yml:22 Thursday 21 July 2022 10:10:26 +0000 (0:00:00.037) 0:04:22.991 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_crypttab_key_file": null }, "changed": false } TASK [Check VDO] *************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml:68 Thursday 21 July 2022 10:10:26 +0000 (0:00:00.033) 0:04:23.024 ********* included: /tmp/tmpa3egnbq5/tests/verify-pool-members-vdo.yml for /cache/centos-8.qcow2 TASK [Validate pool member VDO settings] *************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-members-vdo.yml:1 Thursday 21 July 2022 10:10:26 +0000 (0:00:00.047) 0:04:23.072 ********* included: /tmp/tmpa3egnbq5/tests/verify-pool-member-vdo.yml for /cache/centos-8.qcow2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 
'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test2', 'name': 'lv2', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'thin_pool_name': 'tpool1', 'thin_pool_size': None, 'thin': True, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/vg1-lv2', '_raw_device': '/dev/mapper/vg1-lv2', '_mount_id': '/dev/mapper/vg1-lv2', '_kernel_device': '/dev/dm-5', '_raw_kernel_device': '/dev/dm-5'}) TASK [get information about VDO deduplication] ********************************* task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-vdo.yml:3 Thursday 21 July 2022 10:10:26 +0000 (0:00:00.045) 0:04:23.118 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-vdo.yml:8 Thursday 21 July 2022 10:10:26 +0000 (0:00:00.024) 0:04:23.142 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [assert] ****************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-vdo.yml:11 Thursday 21 July 2022 10:10:26 +0000 (0:00:00.023) 0:04:23.166 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [assert] ****************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-vdo.yml:16 Thursday 21 July 2022 10:10:26 +0000 (0:00:00.024) 0:04:23.191 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-vdo.yml:21 Thursday 21 July 2022 10:10:26 +0000 (0:00:00.023) 0:04:23.215 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [assert] ****************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-vdo.yml:24 Thursday 21 July 2022 10:10:26 +0000 (0:00:00.024) 0:04:23.239 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [assert] ****************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-vdo.yml:29 Thursday 21 July 2022 10:10:26 +0000 (0:00:00.024) 0:04:23.264 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-vdo.yml:39 Thursday 21 July 2022 10:10:26 +0000 (0:00:00.026) 0:04:23.290 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_vdo_status": null }, "changed": false } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml:71 Thursday 21 July 2022 
10:10:26 +0000 (0:00:00.034) 0:04:23.324 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "__pvs_lvm_len": null, "_storage_test_expected_pv_count": null, "_storage_test_expected_pv_type": null, "_storage_test_pool_pvs": [], "_storage_test_pool_pvs_lvm": [] }, "changed": false } TASK [verify the volumes] ****************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-pool-volumes.yml:3 Thursday 21 July 2022 10:10:26 +0000 (0:00:00.035) 0:04:23.359 ********* included: /tmp/tmpa3egnbq5/tests/test-verify-volume.yml for /cache/centos-8.qcow2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test2', 'name': 'lv2', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'thin_pool_name': 'tpool1', 'thin_pool_size': None, 'thin': True, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/vg1-lv2', '_raw_device': '/dev/mapper/vg1-lv2', '_mount_id': '/dev/mapper/vg1-lv2', '_kernel_device': '/dev/dm-5', '_raw_kernel_device': '/dev/dm-5'}) TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume.yml:2 Thursday 21 July 2022 10:10:26 +0000 (0:00:00.044) 0:04:23.404 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_volume_present": true, "_storage_volume_tests": [ "mount", "fstab", "fs", "device", "encryption", "md", "size", "cache" ] }, "changed": false } TASK [include_tasks] *********************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume.yml:10 Thursday 21 July 2022 10:10:26 +0000 (0:00:00.050) 0:04:23.454 ********* included: /tmp/tmpa3egnbq5/tests/test-verify-volume-mount.yml for /cache/centos-8.qcow2 => (item=mount) included: /tmp/tmpa3egnbq5/tests/test-verify-volume-fstab.yml for /cache/centos-8.qcow2 => (item=fstab) included: /tmp/tmpa3egnbq5/tests/test-verify-volume-fs.yml for /cache/centos-8.qcow2 => (item=fs) included: /tmp/tmpa3egnbq5/tests/test-verify-volume-device.yml for /cache/centos-8.qcow2 => (item=device) included: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml for /cache/centos-8.qcow2 => (item=encryption) included: /tmp/tmpa3egnbq5/tests/test-verify-volume-md.yml for /cache/centos-8.qcow2 => (item=md) included: /tmp/tmpa3egnbq5/tests/test-verify-volume-size.yml for /cache/centos-8.qcow2 => (item=size) included: /tmp/tmpa3egnbq5/tests/test-verify-volume-cache.yml for /cache/centos-8.qcow2 => (item=cache) TASK [Get expected mount device based on device type] ************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-mount.yml:6 Thursday 21 July 2022 10:10:26 +0000 (0:00:00.076) 0:04:23.531 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_device_path": "/dev/mapper/vg1-lv2" }, "changed": false } TASK [Set some facts] ********************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-mount.yml:14 
Thursday 21 July 2022 10:10:26 +0000 (0:00:00.042) 0:04:23.573 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_mount_device_matches": [ { "block_available": 1030395, "block_size": 4096, "block_total": 1046016, "block_used": 15621, "device": "/dev/mapper/vg1-lv2", "fstype": "xfs", "inode_available": 2097149, "inode_total": 2097152, "inode_used": 3, "mount": "/opt/test2", "options": "rw,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=64k,sunit=128,swidth=128,noquota", "size_available": 4220497920, "size_total": 4284481536, "uuid": "418e71a6-daf8-4ebd-82a7-9dffd1221b29" } ], "storage_test_mount_expected_match_count": "1", "storage_test_mount_point_matches": [ { "block_available": 1030395, "block_size": 4096, "block_total": 1046016, "block_used": 15621, "device": "/dev/mapper/vg1-lv2", "fstype": "xfs", "inode_available": 2097149, "inode_total": 2097152, "inode_used": 3, "mount": "/opt/test2", "options": "rw,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=64k,sunit=128,swidth=128,noquota", "size_available": 4220497920, "size_total": 4284481536, "uuid": "418e71a6-daf8-4ebd-82a7-9dffd1221b29" } ], "storage_test_swap_expected_matches": "0" }, "changed": false } TASK [Verify the current mount state by device] ******************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-mount.yml:28 Thursday 21 July 2022 10:10:26 +0000 (0:00:00.057) 0:04:23.630 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Verify the current mount state by mount point] *************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-mount.yml:37 Thursday 21 July 2022 10:10:26 +0000 (0:00:00.050) 0:04:23.681 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Verify the mount fs type] ************************************************ task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-mount.yml:45 Thursday 21 July 2022 10:10:27 +0000 (0:00:00.052) 0:04:23.734 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [command] ***************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-mount.yml:54 Thursday 21 July 2022 10:10:27 +0000 (0:00:00.053) 0:04:23.787 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Gather swap info] ******************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-mount.yml:58 Thursday 21 July 2022 10:10:27 +0000 (0:00:00.024) 0:04:23.811 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify swap status] ****************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-mount.yml:63 Thursday 21 July 2022 10:10:27 +0000 (0:00:00.023) 0:04:23.835 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Unset facts] ************************************************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-mount.yml:75 Thursday 21 July 2022 10:10:27 +0000 (0:00:00.023) 0:04:23.859 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_mount_device_matches": null, "storage_test_mount_expected_match_count": null, "storage_test_mount_point_matches": null, "storage_test_swap_expected_matches": null, "storage_test_swaps": 
null, "storage_test_sys_node": null }, "changed": false } TASK [Set some variables for fstab checking] *********************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-fstab.yml:2 Thursday 21 July 2022 10:10:27 +0000 (0:00:00.107) 0:04:23.966 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": "1", "storage_test_fstab_expected_mount_options_matches": "1", "storage_test_fstab_expected_mount_point_matches": "1", "storage_test_fstab_id_matches": [ "/dev/mapper/vg1-lv2 " ], "storage_test_fstab_mount_options_matches": [ " /opt/test2 xfs defaults " ], "storage_test_fstab_mount_point_matches": [ " /opt/test2 " ] }, "changed": false } TASK [Verify that the device identifier appears in /etc/fstab] ***************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-fstab.yml:25 Thursday 21 July 2022 10:10:27 +0000 (0:00:00.063) 0:04:24.029 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Verify the fstab mount point] ******************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-fstab.yml:32 Thursday 21 July 2022 10:10:27 +0000 (0:00:00.051) 0:04:24.081 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Verify mount_options] **************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-fstab.yml:39 Thursday 21 July 2022 10:10:27 +0000 (0:00:00.054) 0:04:24.135 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clean up variables] ****************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-fstab.yml:49 Thursday 21 July 2022 10:10:27 +0000 (0:00:00.037) 0:04:24.172 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": null, "storage_test_fstab_expected_mount_options_matches": null, "storage_test_fstab_expected_mount_point_matches": null, "storage_test_fstab_id_matches": null, "storage_test_fstab_mount_options_matches": null, "storage_test_fstab_mount_point_matches": null }, "changed": false } TASK [Verify fs type] ********************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-fs.yml:4 Thursday 21 July 2022 10:10:27 +0000 (0:00:00.043) 0:04:24.216 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Verify fs label] ********************************************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-fs.yml:10 Thursday 21 July 2022 10:10:27 +0000 (0:00:00.070) 0:04:24.286 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [See whether the device node is present] ********************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-device.yml:4 Thursday 21 July 2022 10:10:27 +0000 (0:00:00.042) 0:04:24.329 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "stat": { "atime": 1658398217.5412579, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1658398217.5412579, "dev": 6, "device_type": 64773, "executable": false, "exists": true, "gid": 6, "gr_name": "disk", "inode": 148829, "isblk": true, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/symlink", "mode": "0660", 
"mtime": 1658398217.5412579, "nlink": 1, "path": "/dev/mapper/vg1-lv2", "pw_name": "root", "readable": true, "rgrp": true, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": null, "wgrp": true, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [Verify the presence/absence of the device node] ************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-device.yml:10 Thursday 21 July 2022 10:10:28 +0000 (0:00:00.397) 0:04:24.727 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Make sure we got info about this volume] ********************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-device.yml:18 Thursday 21 July 2022 10:10:28 +0000 (0:00:00.040) 0:04:24.768 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [(1/2) Process volume type (set initial value)] *************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-device.yml:24 Thursday 21 July 2022 10:10:28 +0000 (0:00:00.043) 0:04:24.812 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "st_volume_type": "lvm" }, "changed": false } TASK [(2/2) Process volume type (get RAID value)] ****************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-device.yml:28 Thursday 21 July 2022 10:10:28 +0000 (0:00:00.036) 0:04:24.848 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the volume's device type] ***************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-device.yml:33 Thursday 21 July 2022 10:10:28 +0000 (0:00:00.024) 0:04:24.872 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Stat the LUKS device, if encrypted] ************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:3 Thursday 21 July 2022 10:10:28 +0000 (0:00:00.040) 0:04:24.913 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Ensure cryptsetup is present] ******************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:10 Thursday 21 July 2022 10:10:28 +0000 (0:00:00.025) 0:04:24.938 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [Collect LUKS info for this volume] *************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:15 Thursday 21 July 2022 10:10:30 +0000 (0:00:01.895) 0:04:26.833 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the presence/absence of the LUKS device node] ********************* task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:21 Thursday 21 July 2022 10:10:30 +0000 (0:00:00.024) 0:04:26.858 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that the raw device is the same as the device if not encrypted] *** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:30 Thursday 21 July 2022 10:10:30 +0000 (0:00:00.025) 0:04:26.884 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Make sure we got info about the LUKS volume if encrypted] **************** task path: 
/tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:38 Thursday 21 July 2022 10:10:30 +0000 (0:00:00.053) 0:04:26.937 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the LUKS volume's device type if encrypted] *********************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:44 Thursday 21 July 2022 10:10:30 +0000 (0:00:00.024) 0:04:26.962 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS version] ****************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:49 Thursday 21 July 2022 10:10:30 +0000 (0:00:00.024) 0:04:26.986 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS key size] ***************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:55 Thursday 21 July 2022 10:10:30 +0000 (0:00:00.026) 0:04:27.012 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS cipher] ******************************************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:61 Thursday 21 July 2022 10:10:30 +0000 (0:00:00.028) 0:04:27.041 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:67 Thursday 21 July 2022 10:10:30 +0000 (0:00:00.027) 0:04:27.069 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_crypttab_entries": [], "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:74 Thursday 21 July 2022 10:10:30 +0000 (0:00:00.054) 0:04:27.124 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:79 Thursday 21 July 2022 10:10:30 +0000 (0:00:00.056) 0:04:27.180 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:85 Thursday 21 July 2022 10:10:30 +0000 (0:00:00.041) 0:04:27.221 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:91 Thursday 21 July 2022 10:10:30 +0000 (0:00:00.038) 0:04:27.260 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:97 Thursday 21 July 2022 10:10:30 +0000 (0:00:00.039) 0:04:27.300 ********* ok: [/cache/centos-8.qcow2] => { 
"ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_expected_crypttab_entries": null, "_storage_test_expected_crypttab_key_file": null }, "changed": false } TASK [get information about RAID] ********************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-md.yml:7 Thursday 21 July 2022 10:10:30 +0000 (0:00:00.041) 0:04:27.341 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-md.yml:13 Thursday 21 July 2022 10:10:30 +0000 (0:00:00.039) 0:04:27.381 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-md.yml:17 Thursday 21 July 2022 10:10:30 +0000 (0:00:00.039) 0:04:27.421 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-md.yml:21 Thursday 21 July 2022 10:10:30 +0000 (0:00:00.040) 0:04:27.461 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [check RAID active devices count] ***************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-md.yml:25 Thursday 21 July 2022 10:10:30 +0000 (0:00:00.042) 0:04:27.503 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [check RAID spare devices count] ****************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-md.yml:31 Thursday 21 July 2022 10:10:30 +0000 (0:00:00.040) 0:04:27.544 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [check RAID metadata version] ********************************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-md.yml:37 Thursday 21 July 2022 10:10:30 +0000 (0:00:00.038) 0:04:27.582 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [parse the actual size of the volume] ************************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-size.yml:3 Thursday 21 July 2022 10:10:30 +0000 (0:00:00.038) 0:04:27.621 ********* ok: [/cache/centos-8.qcow2] => { "bytes": 4294967296, "changed": false, "lvm": "4g", "parted": "4GiB", "size": "4 GiB" } TASK [parse the requested size of the volume] ********************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-size.yml:9 Thursday 21 July 2022 10:10:31 +0000 (0:00:00.383) 0:04:28.005 ********* ok: [/cache/centos-8.qcow2] => { "bytes": 4294967296, "changed": false, "lvm": "4g", "parted": "4GiB", "size": "4 GiB" } TASK [Establish base value for expected size] ********************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-size.yml:15 Thursday 21 July 2022 10:10:31 +0000 (0:00:00.434) 0:04:28.439 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_expected_size": "4294967296" }, "changed": false } TASK [debug] ******************************************************************* task path: 
/tmp/tmpa3egnbq5/tests/test-verify-volume-size.yml:20 Thursday 21 July 2022 10:10:31 +0000 (0:00:00.050) 0:04:28.490 ********* ok: [/cache/centos-8.qcow2] => { "storage_test_expected_size": "4294967296" } TASK [debug] ******************************************************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-size.yml:25 Thursday 21 July 2022 10:10:31 +0000 (0:00:00.035) 0:04:28.525 ********* skipping: [/cache/centos-8.qcow2] => {} TASK [debug] ******************************************************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-size.yml:28 Thursday 21 July 2022 10:10:31 +0000 (0:00:00.037) 0:04:28.562 ********* skipping: [/cache/centos-8.qcow2] => {} TASK [Get the size of parent/pool device] ************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-size.yml:31 Thursday 21 July 2022 10:10:31 +0000 (0:00:00.037) 0:04:28.600 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [debug] ******************************************************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-size.yml:36 Thursday 21 July 2022 10:10:31 +0000 (0:00:00.040) 0:04:28.640 ********* skipping: [/cache/centos-8.qcow2] => {} TASK [Calculate the expected size based on pool size and percentage value] ***** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-size.yml:39 Thursday 21 July 2022 10:10:31 +0000 (0:00:00.039) 0:04:28.680 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [debug] ******************************************************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-size.yml:44 Thursday 21 July 2022 10:10:32 +0000 (0:00:00.036) 0:04:28.716 ********* ok: [/cache/centos-8.qcow2] => { "storage_test_actual_size": { "bytes": 4294967296, "changed": false, "failed": false, "lvm": "4g", "parted": "4GiB", "size": "4 GiB" } } TASK [debug] ******************************************************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-size.yml:47 Thursday 21 July 2022 10:10:32 +0000 (0:00:00.037) 0:04:28.753 ********* ok: [/cache/centos-8.qcow2] => { "storage_test_expected_size": "4294967296" } TASK [assert] ****************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-size.yml:50 Thursday 21 July 2022 10:10:32 +0000 (0:00:00.038) 0:04:28.792 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Get information about the LV] ******************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-cache.yml:6 Thursday 21 July 2022 10:10:32 +0000 (0:00:00.050) 0:04:28.843 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "cmd": [ "lvs", "--noheadings", "--nameprefixes", "--units=b", "--nosuffix", "--unquoted", "-o", "name,attr,cache_total_blocks,chunk_size,segtype", "vg1/lv2" ], "delta": "0:00:00.035080", "end": "2022-07-21 10:10:32.411043", "rc": 0, "start": "2022-07-21 10:10:32.375963" } STDOUT: LVM2_LV_NAME=lv2 LVM2_LV_ATTR=Vwi-aotz-- LVM2_CACHE_TOTAL_BLOCKS= LVM2_CHUNK_SIZE=0 LVM2_SEGTYPE=thin TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-cache.yml:14 Thursday 21 July 2022 10:10:32 +0000 (0:00:00.411) 0:04:29.254 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { 
"storage_test_lv_segtype": [ "thin" ] }, "changed": false } TASK [check segment type] ****************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-cache.yml:17 Thursday 21 July 2022 10:10:32 +0000 (0:00:00.050) 0:04:29.304 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-cache.yml:22 Thursday 21 July 2022 10:10:32 +0000 (0:00:00.050) 0:04:29.355 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [parse the requested cache size] ****************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-cache.yml:26 Thursday 21 July 2022 10:10:32 +0000 (0:00:00.037) 0:04:29.392 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-cache.yml:32 Thursday 21 July 2022 10:10:32 +0000 (0:00:00.038) 0:04:29.430 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check cache size] ******************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-cache.yml:36 Thursday 21 July 2022 10:10:32 +0000 (0:00:00.036) 0:04:29.467 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clean up facts] ********************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume.yml:16 Thursday 21 July 2022 10:10:32 +0000 (0:00:00.036) 0:04:29.503 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_volume_present": null }, "changed": false } TASK [Verify the volumes with no pool were correctly managed] ****************** task path: /tmp/tmpa3egnbq5/tests/verify-role-results.yml:43 Thursday 21 July 2022 10:10:32 +0000 (0:00:00.039) 0:04:29.542 ********* TASK [Clean up variable namespace] ********************************************* task path: /tmp/tmpa3egnbq5/tests/verify-role-results.yml:53 Thursday 21 July 2022 10:10:32 +0000 (0:00:00.037) 0:04:29.580 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_blkinfo": null, "storage_test_crypttab": null, "storage_test_fstab": null }, "changed": false } TASK [Remove existing LV under existing thinpool] ****************************** task path: /tmp/tmpa3egnbq5/tests/tests_create_thinp_then_remove.yml:91 Thursday 21 July 2022 10:10:32 +0000 (0:00:00.033) 0:04:29.614 ********* TASK [linux-system-roles.storage : set platform/version specific variables] **** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main.yml:2 Thursday 21 July 2022 10:10:32 +0000 (0:00:00.064) 0:04:29.679 ********* included: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/set_vars.yml for /cache/centos-8.qcow2 TASK [linux-system-roles.storage : Ensure ansible_facts used by role] ********** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:2 Thursday 21 July 2022 10:10:33 +0000 (0:00:00.034) 0:04:29.713 ********* ok: [/cache/centos-8.qcow2] TASK [linux-system-roles.storage : Set platform/version specific variables] **** task path: 
/tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:8 Thursday 21 July 2022 10:10:33 +0000 (0:00:00.537) 0:04:30.251 ********* skipping: [/cache/centos-8.qcow2] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [/cache/centos-8.qcow2] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [/cache/centos-8.qcow2] => (item=CentOS_8.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "vdo", "kmod-kvdo", "xfsprogs" ] }, "ansible_included_var_files": [ "/tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } ok: [/cache/centos-8.qcow2] => (item=CentOS_8.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "vdo", "kmod-kvdo", "xfsprogs" ] }, "ansible_included_var_files": [ "/tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } TASK [linux-system-roles.storage : define an empty list of pools to be used in testing] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main.yml:5 Thursday 21 July 2022 10:10:33 +0000 (0:00:00.145) 0:04:30.397 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [linux-system-roles.storage : define an empty list of volumes to be used in testing] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main.yml:9 Thursday 21 July 2022 10:10:33 +0000 (0:00:00.034) 0:04:30.431 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [linux-system-roles.storage : include the appropriate provider tasks] ***** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main.yml:13 Thursday 21 July 2022 10:10:33 +0000 (0:00:00.035) 0:04:30.467 ********* redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount included: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml for /cache/centos-8.qcow2 TASK [linux-system-roles.storage : get a list of rpm packages installed on host machine] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:2 Thursday 21 July 2022 10:10:33 +0000 (0:00:00.049) 0:04:30.516 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : make sure blivet is available] ************** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:7 Thursday 21 July 2022 10:10:33 +0000 (0:00:00.021) 0:04:30.538 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [linux-system-roles.storage : show storage_pools] ************************* task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:14 Thursday 21 July 2022 10:10:35 +0000 (0:00:01.863) 0:04:32.401 ********* ok: [/cache/centos-8.qcow2] 
=> { "storage_pools": [ { "disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ], "name": "vg1", "type": "lvm", "volumes": [ { "mount_point": "/opt/test2", "name": "lv2", "state": "absent", "thin": true, "thin_pool_name": "tpool1" } ] } ] } TASK [linux-system-roles.storage : show storage_volumes] *********************** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:19 Thursday 21 July 2022 10:10:35 +0000 (0:00:00.042) 0:04:32.444 ********* ok: [/cache/centos-8.qcow2] => { "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined" } TASK [linux-system-roles.storage : get required packages] ********************** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:24 Thursday 21 July 2022 10:10:35 +0000 (0:00:00.037) 0:04:32.482 ********* ok: [/cache/centos-8.qcow2] => { "actions": [], "changed": false, "crypts": [], "leaves": [], "mounts": [], "packages": [ "lvm2" ], "pools": [], "volumes": [] } TASK [linux-system-roles.storage : enable copr repositories if needed] ********* task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:37 Thursday 21 July 2022 10:10:37 +0000 (0:00:02.050) 0:04:34.533 ********* included: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml for /cache/centos-8.qcow2 TASK [linux-system-roles.storage : check if the COPR support packages should be installed] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml:2 Thursday 21 July 2022 10:10:37 +0000 (0:00:00.046) 0:04:34.579 ********* TASK [linux-system-roles.storage : make sure COPR support packages are present] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml:13 Thursday 21 July 2022 10:10:37 +0000 (0:00:00.034) 0:04:34.614 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : enable COPRs] ******************************* task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml:18 Thursday 21 July 2022 10:10:37 +0000 (0:00:00.044) 0:04:34.658 ********* TASK [linux-system-roles.storage : make sure required packages are installed] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:44 Thursday 21 July 2022 10:10:37 +0000 (0:00:00.036) 0:04:34.695 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [linux-system-roles.storage : get service facts] ************************** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:51 Thursday 21 July 2022 10:10:39 +0000 (0:00:01.871) 0:04:36.566 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, 
"autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "blivet.service": { "name": "blivet.service", "source": "systemd", "state": "inactive", "status": "static" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "chrony-dnssrv@.service": { "name": "chrony-dnssrv@.service", "source": "systemd", "state": "unknown", "status": "static" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cockpit-motd.service": { "name": "cockpit-motd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cockpit-wsinstance-http.service": { "name": "cockpit-wsinstance-http.service", "source": "systemd", "state": "inactive", "status": "static" }, "cockpit-wsinstance-https-factory@.service": { "name": "cockpit-wsinstance-https-factory@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cockpit-wsinstance-https@.service": { "name": "cockpit-wsinstance-https@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cockpit.service": { "name": "cockpit.service", "source": "systemd", "state": "inactive", "status": "static" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cpupower.service": { "name": "cpupower.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "static" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "dbus-org.freedesktop.portable1.service": { "name": "dbus-org.freedesktop.portable1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "running", "status": "static" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, 
"display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "running", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fcoe.service": { "name": "fcoe.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "halt-local.service": { "name": "halt-local.service", "source": "systemd", "state": "inactive", "status": "static" }, "import-state.service": { "name": "import-state.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "iscsi-shutdown.service": { "name": "iscsi-shutdown.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iscsi.service": { "name": "iscsi.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iscsid.service": { "name": "iscsid.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, 
"kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "loadmodules.service": { "name": "loadmodules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "lvm2-activation-early.service": { "name": "lvm2-activation-early.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "lvm2-activation.service": { "name": "lvm2-activation.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "lvm2-pvscan@.service": { "name": "lvm2-pvscan@.service", "source": "systemd", "state": "unknown", "status": "static" }, "lvm2-pvscan@259:4.service": { "name": "lvm2-pvscan@259:4.service", "source": "systemd", "state": "stopped", "status": "active" }, "lvm2-pvscan@259:5.service": { "name": "lvm2-pvscan@259:5.service", "source": "systemd", "state": "stopped", "status": "active" }, "lvm2-pvscan@259:6.service": { "name": "lvm2-pvscan@259:6.service", "source": "systemd", "state": "stopped", "status": "active" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "mdadm-grow-continue@.service": { "name": "mdadm-grow-continue@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdadm-last-resort@.service": { "name": "mdadm-last-resort@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdcheck_continue.service": { "name": "mdcheck_continue.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdcheck_start.service": { "name": "mdcheck_start.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmon@.service": { "name": "mdmon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdmonitor-oneshot.service": { "name": "mdmonitor-oneshot.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmonitor.service": { "name": "mdmonitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "messagebus.service": { "name": "messagebus.service", "source": "systemd", "state": "active", "status": "static" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "multipathd.service": { "name": "multipathd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "ndctl-monitor.service": { "name": "ndctl-monitor.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-convert.service": { "name": "nfs-convert.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": 
"nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "oddjobd.service": { "name": "oddjobd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "packagekit-offline-update.service": { "name": "packagekit-offline-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "packagekit.service": { "name": "packagekit.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "running", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon.service": { "name": "quotaon.service", "source": "systemd", "state": "inactive", "status": "static" }, "rbdmap.service": { "name": "rbdmap.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rdisc.service": { "name": "rdisc.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "rngd-wake-threshold.service": { "name": "rngd-wake-threshold.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", 
"status": "static" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "running", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "active", "status": "enabled" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", 
"source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-resume@.service": { "name": "systemd-hibernate-resume@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-portabled.service": { "name": "systemd-portabled.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-quotacheck.service": { "name": "systemd-quotacheck.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-resolved.service": { "name": "systemd-resolved.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, 
"systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "masked" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "tcsd.service": { "name": "tcsd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "teamd@.service": { "name": "teamd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "timedatex.service": { "name": "timedatex.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "tuned.service": { "name": "tuned.service", "source": "systemd", "state": "running", "status": "enabled" }, "unbound-anchor.service": { "name": "unbound-anchor.service", "source": "systemd", "state": "stopped", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "vdo-start-by-dev@.service": { "name": "vdo-start-by-dev@.service", "source": "systemd", "state": "unknown", "status": "static" }, "vdo.service": { "name": "vdo.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "yppasswdd.service": { "name": "yppasswdd.service", "source": "systemd", "state": "stopped", 
"status": "not-found" }, "ypserv.service": { "name": "ypserv.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ypxfrd.service": { "name": "ypxfrd.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [linux-system-roles.storage : Set storage_cryptsetup_services] ************ task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:58 Thursday 21 July 2022 10:10:41 +0000 (0:00:01.636) 0:04:38.202 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_cryptsetup_services": [] }, "changed": false } TASK [linux-system-roles.storage : Mask the systemd cryptsetup services] ******* task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:71 Thursday 21 July 2022 10:10:41 +0000 (0:00:00.099) 0:04:38.302 ********* TASK [linux-system-roles.storage : manage the pools and volumes to match the specified state] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:77 Thursday 21 July 2022 10:10:41 +0000 (0:00:00.023) 0:04:38.325 ********* changed: [/cache/centos-8.qcow2] => { "actions": [ { "action": "destroy format", "device": "/dev/mapper/vg1-lv2", "fs_type": "xfs" }, { "action": "destroy device", "device": "/dev/mapper/vg1-lv2", "fs_type": null } ], "changed": true, "crypts": [], "leaves": [ "/dev/sr0", "/dev/vda1", "/dev/sda", "/dev/sdb", "/dev/sdc", "/dev/mapper/vg1-lv1", "/dev/vdb", "/dev/vdc", "/dev/vdd" ], "mounts": [ { "fstype": "xfs", "path": "/opt/test2", "src": "/dev/mapper/vg1-lv2", "state": "absent" } ], "packages": [ "lvm2", "xfsprogs" ], "pools": [ { "disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "vg1", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/vg1-lv2", "_mount_id": "/dev/mapper/vg1-lv2", "_raw_device": "/dev/mapper/vg1-lv2", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test2", "name": "lv2", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "size": 4294967296, "state": "absent", "thin": true, "thin_pool_name": "tpool1", "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } TASK [linux-system-roles.storage : Workaround for udev issue on some platforms] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:91 Thursday 21 July 2022 10:10:44 +0000 (0:00:02.621) 0:04:40.947 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Unmask the systemd cryptsetup services] ***** task path: 
/tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:103 Thursday 21 July 2022 10:10:44 +0000 (0:00:00.037) 0:04:40.985 ********* TASK [linux-system-roles.storage : show blivet_output] ************************* task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:109 Thursday 21 July 2022 10:10:44 +0000 (0:00:00.023) 0:04:41.008 ********* ok: [/cache/centos-8.qcow2] => { "blivet_output": { "actions": [ { "action": "destroy format", "device": "/dev/mapper/vg1-lv2", "fs_type": "xfs" }, { "action": "destroy device", "device": "/dev/mapper/vg1-lv2", "fs_type": null } ], "changed": true, "crypts": [], "failed": false, "leaves": [ "/dev/sr0", "/dev/vda1", "/dev/sda", "/dev/sdb", "/dev/sdc", "/dev/mapper/vg1-lv1", "/dev/vdb", "/dev/vdc", "/dev/vdd" ], "mounts": [ { "fstype": "xfs", "path": "/opt/test2", "src": "/dev/mapper/vg1-lv2", "state": "absent" } ], "packages": [ "lvm2", "xfsprogs" ], "pools": [ { "disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "vg1", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/vg1-lv2", "_mount_id": "/dev/mapper/vg1-lv2", "_raw_device": "/dev/mapper/vg1-lv2", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test2", "name": "lv2", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "size": 4294967296, "state": "absent", "thin": true, "thin_pool_name": "tpool1", "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } } TASK [linux-system-roles.storage : set the list of pools for test verification] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:114 Thursday 21 July 2022 10:10:44 +0000 (0:00:00.040) 0:04:41.048 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_pools_list": [ { "disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "vg1", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/vg1-lv2", "_mount_id": "/dev/mapper/vg1-lv2", "_raw_device": "/dev/mapper/vg1-lv2", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", 
"fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test2", "name": "lv2", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "size": 4294967296, "state": "absent", "thin": true, "thin_pool_name": "tpool1", "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ] }, "changed": false } TASK [linux-system-roles.storage : set the list of volumes for test verification] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:118 Thursday 21 July 2022 10:10:44 +0000 (0:00:00.041) 0:04:41.090 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [linux-system-roles.storage : remove obsolete mounts] ********************* task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:134 Thursday 21 July 2022 10:10:44 +0000 (0:00:00.040) 0:04:41.130 ********* redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount changed: [/cache/centos-8.qcow2] => (item={'src': '/dev/mapper/vg1-lv2', 'path': '/opt/test2', 'state': 'absent', 'fstype': 'xfs'}) => { "ansible_loop_var": "mount_info", "backup_file": "", "boot": "yes", "changed": true, "dump": "0", "fstab": "/etc/fstab", "fstype": "xfs", "mount_info": { "fstype": "xfs", "path": "/opt/test2", "src": "/dev/mapper/vg1-lv2", "state": "absent" }, "name": "/opt/test2", "opts": "defaults", "passno": "0", "src": "/dev/mapper/vg1-lv2" } TASK [linux-system-roles.storage : tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:146 Thursday 21 July 2022 10:10:44 +0000 (0:00:00.403) 0:04:41.534 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "name": null, "status": {} } TASK [linux-system-roles.storage : set up new/current mounts] ****************** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:151 Thursday 21 July 2022 10:10:45 +0000 (0:00:00.622) 0:04:42.156 ********* TASK [linux-system-roles.storage : tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:163 Thursday 21 July 2022 10:10:45 +0000 (0:00:00.038) 0:04:42.195 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "name": null, "status": {} } TASK [linux-system-roles.storage : retrieve facts for the /etc/crypttab file] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:171 Thursday 21 July 2022 10:10:46 +0000 (0:00:00.619) 0:04:42.815 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "stat": { "atime": 1658398003.7492578, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1643141385.117, "dev": 64513, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 135, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0600", "mtime": 1643141019.537, "nlink": 1, "path": "/etc/crypttab", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 0, 
"uid": 0, "version": "3147672035", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [linux-system-roles.storage : manage /etc/crypttab to account for changes we just made] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:176 Thursday 21 July 2022 10:10:46 +0000 (0:00:00.375) 0:04:43.190 ********* TASK [linux-system-roles.storage : Update facts] ******************************* task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:198 Thursday 21 July 2022 10:10:46 +0000 (0:00:00.023) 0:04:43.213 ********* ok: [/cache/centos-8.qcow2] META: role_complete for /cache/centos-8.qcow2 TASK [include_tasks] *********************************************************** task path: /tmp/tmpa3egnbq5/tests/tests_create_thinp_then_remove.yml:106 Thursday 21 July 2022 10:10:47 +0000 (0:00:00.950) 0:04:44.163 ********* included: /tmp/tmpa3egnbq5/tests/verify-role-results.yml for /cache/centos-8.qcow2 TASK [Print out pool information] ********************************************** task path: /tmp/tmpa3egnbq5/tests/verify-role-results.yml:1 Thursday 21 July 2022 10:10:47 +0000 (0:00:00.054) 0:04:44.218 ********* ok: [/cache/centos-8.qcow2] => { "_storage_pools_list": [ { "disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "vg1", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/vg1-lv2", "_mount_id": "/dev/mapper/vg1-lv2", "_raw_device": "/dev/mapper/vg1-lv2", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test2", "name": "lv2", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "size": 4294967296, "state": "absent", "thin": true, "thin_pool_name": "tpool1", "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ] } TASK [Print out volume information] ******************************************** task path: /tmp/tmpa3egnbq5/tests/verify-role-results.yml:6 Thursday 21 July 2022 10:10:47 +0000 (0:00:00.052) 0:04:44.271 ********* skipping: [/cache/centos-8.qcow2] => {} TASK [Collect info about the volumes.] 
***************************************** task path: /tmp/tmpa3egnbq5/tests/verify-role-results.yml:14 Thursday 21 July 2022 10:10:47 +0000 (0:00:00.036) 0:04:44.308 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "info": { "/dev/mapper/vg1-lv1": { "fstype": "xfs", "label": "", "name": "/dev/mapper/vg1-lv1", "size": "3G", "type": "lvm", "uuid": "c574fe3b-c0cb-4bb1-85fe-deeef0a957b9" }, "/dev/mapper/vg1-tpool1": { "fstype": "", "label": "", "name": "/dev/mapper/vg1-tpool1", "size": "10G", "type": "lvm", "uuid": "" }, "/dev/mapper/vg1-tpool1-tpool": { "fstype": "", "label": "", "name": "/dev/mapper/vg1-tpool1-tpool", "size": "10G", "type": "lvm", "uuid": "" }, "/dev/mapper/vg1-tpool1_tdata": { "fstype": "", "label": "", "name": "/dev/mapper/vg1-tpool1_tdata", "size": "10G", "type": "lvm", "uuid": "" }, "/dev/mapper/vg1-tpool1_tmeta": { "fstype": "", "label": "", "name": "/dev/mapper/vg1-tpool1_tmeta", "size": "12M", "type": "lvm", "uuid": "" }, "/dev/nvme0n1": { "fstype": "", "label": "", "name": "/dev/nvme0n1", "size": "10G", "type": "disk", "uuid": "" }, "/dev/nvme0n1p1": { "fstype": "LVM2_member", "label": "", "name": "/dev/nvme0n1p1", "size": "10G", "type": "partition", "uuid": "xLu0vZ-nH9w-K1e2-A6yJ-EaoZ-FOkB-aLS539" }, "/dev/nvme1n1": { "fstype": "", "label": "", "name": "/dev/nvme1n1", "size": "10G", "type": "disk", "uuid": "" }, "/dev/nvme1n1p1": { "fstype": "LVM2_member", "label": "", "name": "/dev/nvme1n1p1", "size": "10G", "type": "partition", "uuid": "Jjw50w-Whgv-fxb5-adBQ-h5Qy-h8fG-52EFuy" }, "/dev/nvme2n1": { "fstype": "", "label": "", "name": "/dev/nvme2n1", "size": "10G", "type": "disk", "uuid": "" }, "/dev/nvme2n1p1": { "fstype": "LVM2_member", "label": "", "name": "/dev/nvme2n1p1", "size": "10G", "type": "partition", "uuid": "2PzQCY-FE6q-TI2r-KVCh-wUMp-axXy-16asvy" }, "/dev/sda": { "fstype": "", "label": "", "name": "/dev/sda", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdb": { "fstype": "", "label": "", "name": "/dev/sdb", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdc": { "fstype": "", "label": "", "name": "/dev/sdc", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sr0": { "fstype": "iso9660", "label": "cidata", "name": "/dev/sr0", "size": "364K", "type": "rom", "uuid": "2022-07-21-10-05-42-00" }, "/dev/vda": { "fstype": "", "label": "", "name": "/dev/vda", "size": "10G", "type": "disk", "uuid": "" }, "/dev/vda1": { "fstype": "xfs", "label": "", "name": "/dev/vda1", "size": "10G", "type": "partition", "uuid": "395b9844-e404-4857-afbb-c6edccaf72f3" }, "/dev/vdb": { "fstype": "", "label": "", "name": "/dev/vdb", "size": "10G", "type": "disk", "uuid": "" }, "/dev/vdc": { "fstype": "", "label": "", "name": "/dev/vdc", "size": "10G", "type": "disk", "uuid": "" }, "/dev/vdd": { "fstype": "", "label": "", "name": "/dev/vdd", "size": "10G", "type": "disk", "uuid": "" } } } TASK [Read the /etc/fstab file for volume existence] *************************** task path: /tmp/tmpa3egnbq5/tests/verify-role-results.yml:19 Thursday 21 July 2022 10:10:48 +0000 (0:00:00.424) 0:04:44.733 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "cmd": [ "cat", "/etc/fstab" ], "delta": "0:00:00.003037", "end": "2022-07-21 10:10:48.264830", "rc": 0, "start": "2022-07-21 10:10:48.261793" } STDOUT: # # /etc/fstab # Created by anaconda on Tue Jan 25 20:03:39 2022 # # Accessible filesystems, by reference, are maintained under '/dev/disk/'. # See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info. 
# # After editing this file, run 'systemctl daemon-reload' to update systemd # units generated from this file. # UUID=395b9844-e404-4857-afbb-c6edccaf72f3 / xfs defaults 0 0 TASK [Read the /etc/crypttab file] ********************************************* task path: /tmp/tmpa3egnbq5/tests/verify-role-results.yml:24 Thursday 21 July 2022 10:10:48 +0000 (0:00:00.376) 0:04:45.109 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "cmd": [ "cat", "/etc/crypttab" ], "delta": "0:00:00.002793", "end": "2022-07-21 10:10:48.639604", "failed_when_result": false, "rc": 0, "start": "2022-07-21 10:10:48.636811" } TASK [Verify the volumes listed in storage_pools were correctly managed] ******* task path: /tmp/tmpa3egnbq5/tests/verify-role-results.yml:33 Thursday 21 July 2022 10:10:48 +0000 (0:00:00.372) 0:04:45.482 ********* included: /tmp/tmpa3egnbq5/tests/test-verify-pool.yml for /cache/centos-8.qcow2 => (item={'disks': ['nvme0n1', 'nvme1n1', 'nvme2n1'], 'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'name': 'vg1', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'state': 'present', 'type': 'lvm', 'volumes': [{'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test2', 'name': 'lv2', 'raid_level': None, 'size': 4294967296, 'state': 'absent', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'thin_pool_name': 'tpool1', 'thin_pool_size': None, 'thin': True, 'vdo_pool_size': None, 'disks': ['nvme0n1', 'nvme1n1', 'nvme2n1'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/vg1-lv2', '_raw_device': '/dev/mapper/vg1-lv2', '_mount_id': '/dev/mapper/vg1-lv2'}], 'raid_chunk_size': None}) TASK [Set _storage_pool_tests] ************************************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-pool.yml:5 Thursday 21 July 2022 10:10:48 +0000 (0:00:00.060) 0:04:45.542 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_pool_tests": [ "members", "volumes" ] }, "changed": false } TASK [include_tasks] *********************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-pool.yml:18 Thursday 21 July 2022 10:10:48 +0000 (0:00:00.037) 0:04:45.579 ********* included: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml for /cache/centos-8.qcow2 => (item=members) included: /tmp/tmpa3egnbq5/tests/test-verify-pool-volumes.yml for /cache/centos-8.qcow2 => (item=volumes) TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml:1 Thursday 21 July 2022 10:10:48 +0000 (0:00:00.045) 0:04:45.625 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_expected_pv_count": "3", "_storage_test_pool_pvs_lvm": [ "/dev/nvme0n1p1", "/dev/nvme1n1p1", "/dev/nvme2n1p1" ] }, "changed": false } TASK [Get the canonical device path for each member device] ******************** task path: 
/tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml:10 Thursday 21 July 2022 10:10:48 +0000 (0:00:00.055) 0:04:45.681 ********* ok: [/cache/centos-8.qcow2] => (item=/dev/nvme0n1p1) => { "ansible_loop_var": "pv", "changed": false, "device": "/dev/nvme0n1p1", "pv": "/dev/nvme0n1p1" } ok: [/cache/centos-8.qcow2] => (item=/dev/nvme1n1p1) => { "ansible_loop_var": "pv", "changed": false, "device": "/dev/nvme1n1p1", "pv": "/dev/nvme1n1p1" } ok: [/cache/centos-8.qcow2] => (item=/dev/nvme2n1p1) => { "ansible_loop_var": "pv", "changed": false, "device": "/dev/nvme2n1p1", "pv": "/dev/nvme2n1p1" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml:19 Thursday 21 July 2022 10:10:50 +0000 (0:00:01.061) 0:04:46.742 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "__pvs_lvm_len": "3" }, "changed": false } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml:23 Thursday 21 July 2022 10:10:50 +0000 (0:00:00.056) 0:04:46.798 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_pool_pvs": [ "/dev/nvme0n1p1", "/dev/nvme1n1p1", "/dev/nvme2n1p1" ] }, "changed": false } TASK [Verify PV count] ********************************************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml:27 Thursday 21 July 2022 10:10:50 +0000 (0:00:00.057) 0:04:46.856 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml:34 Thursday 21 July 2022 10:10:50 +0000 (0:00:00.052) 0:04:46.908 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_expected_pv_type": "disk" }, "changed": false } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml:38 Thursday 21 July 2022 10:10:50 +0000 (0:00:00.037) 0:04:46.946 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_expected_pv_type": "partition" }, "changed": false } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml:42 Thursday 21 July 2022 10:10:50 +0000 (0:00:00.092) 0:04:47.039 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check the type of each PV] *********************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml:46 Thursday 21 July 2022 10:10:50 +0000 (0:00:00.025) 0:04:47.065 ********* ok: [/cache/centos-8.qcow2] => (item=/dev/nvme0n1p1) => { "ansible_loop_var": "pv", "changed": false, "pv": "/dev/nvme0n1p1" } MSG: All assertions passed ok: [/cache/centos-8.qcow2] => (item=/dev/nvme1n1p1) => { "ansible_loop_var": "pv", "changed": false, "pv": "/dev/nvme1n1p1" } MSG: All assertions passed ok: [/cache/centos-8.qcow2] => (item=/dev/nvme2n1p1) => { "ansible_loop_var": "pv", "changed": false, "pv": "/dev/nvme2n1p1" } MSG: All assertions passed TASK [Check MD RAID] *********************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml:56 Thursday 21 July 2022 10:10:50 +0000 (0:00:00.111) 0:04:47.176 ********* included: /tmp/tmpa3egnbq5/tests/verify-pool-md.yml for 
/cache/centos-8.qcow2 TASK [get information about RAID] ********************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-md.yml:6 Thursday 21 July 2022 10:10:50 +0000 (0:00:00.042) 0:04:47.219 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-md.yml:12 Thursday 21 July 2022 10:10:50 +0000 (0:00:00.024) 0:04:47.243 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-md.yml:16 Thursday 21 July 2022 10:10:50 +0000 (0:00:00.024) 0:04:47.267 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-md.yml:20 Thursday 21 July 2022 10:10:50 +0000 (0:00:00.023) 0:04:47.291 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [check RAID active devices count] ***************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-md.yml:24 Thursday 21 July 2022 10:10:50 +0000 (0:00:00.025) 0:04:47.317 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [check RAID spare devices count] ****************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-md.yml:30 Thursday 21 July 2022 10:10:50 +0000 (0:00:00.052) 0:04:47.369 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [check RAID metadata version] ********************************************* task path: /tmp/tmpa3egnbq5/tests/verify-pool-md.yml:36 Thursday 21 July 2022 10:10:50 +0000 (0:00:00.025) 0:04:47.394 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-md.yml:44 Thursday 21 July 2022 10:10:50 +0000 (0:00:00.023) 0:04:47.418 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_md_active_devices_re": null, "storage_test_md_metadata_version_re": null, "storage_test_md_spare_devices_re": null }, "changed": false } TASK [Check LVM RAID] ********************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml:59 Thursday 21 July 2022 10:10:50 +0000 (0:00:00.034) 0:04:47.453 ********* included: /tmp/tmpa3egnbq5/tests/verify-pool-members-lvmraid.yml for /cache/centos-8.qcow2 TASK [Validate pool member LVM RAID settings] ********************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-members-lvmraid.yml:1 Thursday 21 July 2022 10:10:50 +0000 (0:00:00.044) 0:04:47.497 ********* included: /tmp/tmpa3egnbq5/tests/verify-pool-member-lvmraid.yml for /cache/centos-8.qcow2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test2', 
'name': 'lv2', 'raid_level': None, 'size': 4294967296, 'state': 'absent', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'thin_pool_name': 'tpool1', 'thin_pool_size': None, 'thin': True, 'vdo_pool_size': None, 'disks': ['nvme0n1', 'nvme1n1', 'nvme2n1'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/vg1-lv2', '_raw_device': '/dev/mapper/vg1-lv2', '_mount_id': '/dev/mapper/vg1-lv2'}) TASK [Get information about LVM RAID] ****************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-lvmraid.yml:3 Thursday 21 July 2022 10:10:50 +0000 (0:00:00.044) 0:04:47.541 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check that volume is LVM RAID] ******************************************* task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-lvmraid.yml:8 Thursday 21 July 2022 10:10:50 +0000 (0:00:00.028) 0:04:47.570 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-lvmraid.yml:12 Thursday 21 July 2022 10:10:50 +0000 (0:00:00.028) 0:04:47.598 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check Thin Pools] ******************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml:62 Thursday 21 July 2022 10:10:50 +0000 (0:00:00.027) 0:04:47.626 ********* included: /tmp/tmpa3egnbq5/tests/verify-pool-members-thin.yml for /cache/centos-8.qcow2 TASK [Validate pool member thinpool settings] ********************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-members-thin.yml:1 Thursday 21 July 2022 10:10:50 +0000 (0:00:00.046) 0:04:47.672 ********* included: /tmp/tmpa3egnbq5/tests/verify-pool-member-thin.yml for /cache/centos-8.qcow2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test2', 'name': 'lv2', 'raid_level': None, 'size': 4294967296, 'state': 'absent', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'thin_pool_name': 'tpool1', 'thin_pool_size': None, 'thin': True, 'vdo_pool_size': None, 'disks': ['nvme0n1', 'nvme1n1', 'nvme2n1'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/vg1-lv2', '_raw_device': '/dev/mapper/vg1-lv2', '_mount_id': '/dev/mapper/vg1-lv2'}) TASK [Get information about thinpool] ****************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-thin.yml:3 Thursday 21 July 2022 10:10:51 +0000 (0:00:00.043) 0:04:47.715 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result 
was False" } TASK [Check that volume is in correct thinpool (when thinp name is provided)] *** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-thin.yml:8 Thursday 21 July 2022 10:10:51 +0000 (0:00:00.027) 0:04:47.743 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check that volume is in thinpool (when thinp name is not provided)] ****** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-thin.yml:13 Thursday 21 July 2022 10:10:51 +0000 (0:00:00.026) 0:04:47.769 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-thin.yml:17 Thursday 21 July 2022 10:10:51 +0000 (0:00:00.026) 0:04:47.796 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check member encryption] ************************************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml:65 Thursday 21 July 2022 10:10:51 +0000 (0:00:00.025) 0:04:47.822 ********* included: /tmp/tmpa3egnbq5/tests/verify-pool-members-encryption.yml for /cache/centos-8.qcow2 TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-members-encryption.yml:4 Thursday 21 July 2022 10:10:51 +0000 (0:00:00.046) 0:04:47.869 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Validate pool member LUKS settings] ************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-members-encryption.yml:8 Thursday 21 July 2022 10:10:51 +0000 (0:00:00.049) 0:04:47.919 ********* skipping: [/cache/centos-8.qcow2] => (item=/dev/nvme0n1p1) => { "_storage_test_pool_member_path": "/dev/nvme0n1p1", "ansible_loop_var": "_storage_test_pool_member_path", "changed": false, "skip_reason": "Conditional result was False" } skipping: [/cache/centos-8.qcow2] => (item=/dev/nvme1n1p1) => { "_storage_test_pool_member_path": "/dev/nvme1n1p1", "ansible_loop_var": "_storage_test_pool_member_path", "changed": false, "skip_reason": "Conditional result was False" } skipping: [/cache/centos-8.qcow2] => (item=/dev/nvme2n1p1) => { "_storage_test_pool_member_path": "/dev/nvme2n1p1", "ansible_loop_var": "_storage_test_pool_member_path", "changed": false, "skip_reason": "Conditional result was False" } TASK [Validate pool member crypttab entries] *********************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-members-encryption.yml:15 Thursday 21 July 2022 10:10:51 +0000 (0:00:00.033) 0:04:47.952 ********* included: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml for /cache/centos-8.qcow2 => (item=/dev/nvme0n1p1) included: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml for /cache/centos-8.qcow2 => (item=/dev/nvme1n1p1) included: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml for /cache/centos-8.qcow2 => (item=/dev/nvme2n1p1) TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml:1 Thursday 21 July 2022 10:10:51 +0000 (0:00:00.052) 0:04:48.004 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_crypttab_entries": [] }, "changed": false } TASK [Check for 
/etc/crypttab entry] ******************************************* task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml:6 Thursday 21 July 2022 10:10:51 +0000 (0:00:00.050) 0:04:48.055 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml:11 Thursday 21 July 2022 10:10:51 +0000 (0:00:00.048) 0:04:48.104 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml:17 Thursday 21 July 2022 10:10:51 +0000 (0:00:00.036) 0:04:48.140 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml:23 Thursday 21 July 2022 10:10:51 +0000 (0:00:00.035) 0:04:48.176 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml:29 Thursday 21 July 2022 10:10:51 +0000 (0:00:00.040) 0:04:48.217 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_crypttab_entries": null }, "changed": false } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml:1 Thursday 21 July 2022 10:10:51 +0000 (0:00:00.033) 0:04:48.251 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_crypttab_entries": [] }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml:6 Thursday 21 July 2022 10:10:51 +0000 (0:00:00.049) 0:04:48.300 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml:11 Thursday 21 July 2022 10:10:51 +0000 (0:00:00.051) 0:04:48.352 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml:17 Thursday 21 July 2022 10:10:51 +0000 (0:00:00.036) 0:04:48.388 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml:23 Thursday 21 July 2022 10:10:51 +0000 (0:00:00.036) 0:04:48.424 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml:29 Thursday 21 July 2022 10:10:51 +0000 (0:00:00.035) 0:04:48.460 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_crypttab_entries": null }, "changed": 
false } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml:1 Thursday 21 July 2022 10:10:51 +0000 (0:00:00.036) 0:04:48.497 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_crypttab_entries": [] }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml:6 Thursday 21 July 2022 10:10:51 +0000 (0:00:00.048) 0:04:48.545 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml:11 Thursday 21 July 2022 10:10:51 +0000 (0:00:00.093) 0:04:48.638 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml:17 Thursday 21 July 2022 10:10:51 +0000 (0:00:00.039) 0:04:48.678 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml:23 Thursday 21 July 2022 10:10:52 +0000 (0:00:00.073) 0:04:48.751 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-crypttab.yml:29 Thursday 21 July 2022 10:10:52 +0000 (0:00:00.036) 0:04:48.788 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_crypttab_entries": null }, "changed": false } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-members-encryption.yml:22 Thursday 21 July 2022 10:10:52 +0000 (0:00:00.035) 0:04:48.824 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_crypttab_key_file": null }, "changed": false } TASK [Check VDO] *************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml:68 Thursday 21 July 2022 10:10:52 +0000 (0:00:00.036) 0:04:48.860 ********* included: /tmp/tmpa3egnbq5/tests/verify-pool-members-vdo.yml for /cache/centos-8.qcow2 TASK [Validate pool member VDO settings] *************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-members-vdo.yml:1 Thursday 21 July 2022 10:10:52 +0000 (0:00:00.046) 0:04:48.907 ********* included: /tmp/tmpa3egnbq5/tests/verify-pool-member-vdo.yml for /cache/centos-8.qcow2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test2', 'name': 'lv2', 'raid_level': None, 'size': 4294967296, 'state': 'absent', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'thin_pool_name': 'tpool1', 'thin_pool_size': None, 'thin': True, 'vdo_pool_size': 
None, 'disks': ['nvme0n1', 'nvme1n1', 'nvme2n1'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/vg1-lv2', '_raw_device': '/dev/mapper/vg1-lv2', '_mount_id': '/dev/mapper/vg1-lv2'}) TASK [get information about VDO deduplication] ********************************* task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-vdo.yml:3 Thursday 21 July 2022 10:10:52 +0000 (0:00:00.045) 0:04:48.953 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-vdo.yml:8 Thursday 21 July 2022 10:10:52 +0000 (0:00:00.024) 0:04:48.977 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [assert] ****************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-vdo.yml:11 Thursday 21 July 2022 10:10:52 +0000 (0:00:00.024) 0:04:49.002 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [assert] ****************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-vdo.yml:16 Thursday 21 July 2022 10:10:52 +0000 (0:00:00.024) 0:04:49.026 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-vdo.yml:21 Thursday 21 July 2022 10:10:52 +0000 (0:00:00.025) 0:04:49.051 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [assert] ****************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-vdo.yml:24 Thursday 21 July 2022 10:10:52 +0000 (0:00:00.026) 0:04:49.078 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [assert] ****************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-vdo.yml:29 Thursday 21 July 2022 10:10:52 +0000 (0:00:00.025) 0:04:49.104 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-vdo.yml:39 Thursday 21 July 2022 10:10:52 +0000 (0:00:00.024) 0:04:49.128 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_vdo_status": null }, "changed": false } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml:71 Thursday 21 July 2022 10:10:52 +0000 (0:00:00.038) 0:04:49.167 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "__pvs_lvm_len": null, "_storage_test_expected_pv_count": null, "_storage_test_expected_pv_type": null, "_storage_test_pool_pvs": [], "_storage_test_pool_pvs_lvm": [] }, "changed": false } TASK [verify the volumes] ****************************************************** task path: 
/tmp/tmpa3egnbq5/tests/test-verify-pool-volumes.yml:3 Thursday 21 July 2022 10:10:52 +0000 (0:00:00.035) 0:04:49.202 ********* included: /tmp/tmpa3egnbq5/tests/test-verify-volume.yml for /cache/centos-8.qcow2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test2', 'name': 'lv2', 'raid_level': None, 'size': 4294967296, 'state': 'absent', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'thin_pool_name': 'tpool1', 'thin_pool_size': None, 'thin': True, 'vdo_pool_size': None, 'disks': ['nvme0n1', 'nvme1n1', 'nvme2n1'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/vg1-lv2', '_raw_device': '/dev/mapper/vg1-lv2', '_mount_id': '/dev/mapper/vg1-lv2'}) TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume.yml:2 Thursday 21 July 2022 10:10:52 +0000 (0:00:00.044) 0:04:49.247 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_volume_present": false, "_storage_volume_tests": [ "mount", "fstab", "fs", "device", "encryption", "md", "size", "cache" ] }, "changed": false } TASK [include_tasks] *********************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume.yml:10 Thursday 21 July 2022 10:10:52 +0000 (0:00:00.052) 0:04:49.300 ********* included: /tmp/tmpa3egnbq5/tests/test-verify-volume-mount.yml for /cache/centos-8.qcow2 => (item=mount) included: /tmp/tmpa3egnbq5/tests/test-verify-volume-fstab.yml for /cache/centos-8.qcow2 => (item=fstab) included: /tmp/tmpa3egnbq5/tests/test-verify-volume-fs.yml for /cache/centos-8.qcow2 => (item=fs) included: /tmp/tmpa3egnbq5/tests/test-verify-volume-device.yml for /cache/centos-8.qcow2 => (item=device) included: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml for /cache/centos-8.qcow2 => (item=encryption) included: /tmp/tmpa3egnbq5/tests/test-verify-volume-md.yml for /cache/centos-8.qcow2 => (item=md) included: /tmp/tmpa3egnbq5/tests/test-verify-volume-size.yml for /cache/centos-8.qcow2 => (item=size) included: /tmp/tmpa3egnbq5/tests/test-verify-volume-cache.yml for /cache/centos-8.qcow2 => (item=cache) TASK [Get expected mount device based on device type] ************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-mount.yml:6 Thursday 21 July 2022 10:10:52 +0000 (0:00:00.076) 0:04:49.376 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_device_path": "/dev/mapper/vg1-lv2" }, "changed": false } TASK [Set some facts] ********************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-mount.yml:14 Thursday 21 July 2022 10:10:52 +0000 (0:00:00.041) 0:04:49.418 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_mount_device_matches": [], "storage_test_mount_expected_match_count": "0", "storage_test_mount_point_matches": [], "storage_test_swap_expected_matches": "0" }, "changed": false } TASK [Verify the current mount state by device] ******************************** task path: 
/tmp/tmpa3egnbq5/tests/test-verify-volume-mount.yml:28 Thursday 21 July 2022 10:10:52 +0000 (0:00:00.057) 0:04:49.475 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the current mount state by mount point] *************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-mount.yml:37 Thursday 21 July 2022 10:10:52 +0000 (0:00:00.025) 0:04:49.501 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Verify the mount fs type] ************************************************ task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-mount.yml:45 Thursday 21 July 2022 10:10:52 +0000 (0:00:00.049) 0:04:49.550 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [command] ***************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-mount.yml:54 Thursday 21 July 2022 10:10:52 +0000 (0:00:00.036) 0:04:49.587 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Gather swap info] ******************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-mount.yml:58 Thursday 21 July 2022 10:10:52 +0000 (0:00:00.023) 0:04:49.611 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify swap status] ****************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-mount.yml:63 Thursday 21 July 2022 10:10:52 +0000 (0:00:00.024) 0:04:49.636 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Unset facts] ************************************************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-mount.yml:75 Thursday 21 July 2022 10:10:52 +0000 (0:00:00.024) 0:04:49.660 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_mount_device_matches": null, "storage_test_mount_expected_match_count": null, "storage_test_mount_point_matches": null, "storage_test_swap_expected_matches": null, "storage_test_swaps": null, "storage_test_sys_node": null }, "changed": false } TASK [Set some variables for fstab checking] *********************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-fstab.yml:2 Thursday 21 July 2022 10:10:52 +0000 (0:00:00.036) 0:04:49.696 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": "0", "storage_test_fstab_expected_mount_options_matches": "0", "storage_test_fstab_expected_mount_point_matches": "0", "storage_test_fstab_id_matches": [], "storage_test_fstab_mount_options_matches": [], "storage_test_fstab_mount_point_matches": [] }, "changed": false } TASK [Verify that the device identifier appears in /etc/fstab] ***************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-fstab.yml:25 Thursday 21 July 2022 10:10:53 +0000 (0:00:00.063) 0:04:49.760 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the fstab mount point] ******************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-fstab.yml:32 Thursday 21 July 2022 10:10:53 +0000 (0:00:00.024) 0:04:49.784 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All 
assertions passed TASK [Verify mount_options] **************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-fstab.yml:39 Thursday 21 July 2022 10:10:53 +0000 (0:00:00.053) 0:04:49.838 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clean up variables] ****************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-fstab.yml:49 Thursday 21 July 2022 10:10:53 +0000 (0:00:00.035) 0:04:49.874 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": null, "storage_test_fstab_expected_mount_options_matches": null, "storage_test_fstab_expected_mount_point_matches": null, "storage_test_fstab_id_matches": null, "storage_test_fstab_mount_options_matches": null, "storage_test_fstab_mount_point_matches": null }, "changed": false } TASK [Verify fs type] ********************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-fs.yml:4 Thursday 21 July 2022 10:10:53 +0000 (0:00:00.036) 0:04:49.910 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify fs label] ********************************************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-fs.yml:10 Thursday 21 July 2022 10:10:53 +0000 (0:00:00.023) 0:04:49.934 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [See whether the device node is present] ********************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-device.yml:4 Thursday 21 July 2022 10:10:53 +0000 (0:00:00.024) 0:04:49.959 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "stat": { "exists": false } } TASK [Verify the presence/absence of the device node] ************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-device.yml:10 Thursday 21 July 2022 10:10:53 +0000 (0:00:00.423) 0:04:50.383 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Make sure we got info about this volume] ********************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-device.yml:18 Thursday 21 July 2022 10:10:53 +0000 (0:00:00.038) 0:04:50.421 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [(1/2) Process volume type (set initial value)] *************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-device.yml:24 Thursday 21 July 2022 10:10:53 +0000 (0:00:00.024) 0:04:50.445 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "st_volume_type": "lvm" }, "changed": false } TASK [(2/2) Process volume type (get RAID value)] ****************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-device.yml:28 Thursday 21 July 2022 10:10:53 +0000 (0:00:00.037) 0:04:50.483 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the volume's device type] ***************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-device.yml:33 Thursday 21 July 2022 10:10:53 +0000 (0:00:00.024) 0:04:50.507 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Stat the LUKS device, if encrypted] 
************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:3 Thursday 21 July 2022 10:10:53 +0000 (0:00:00.024) 0:04:50.531 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Ensure cryptsetup is present] ******************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:10 Thursday 21 July 2022 10:10:53 +0000 (0:00:00.023) 0:04:50.555 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [Collect LUKS info for this volume] *************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:15 Thursday 21 July 2022 10:10:55 +0000 (0:00:01.864) 0:04:52.420 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the presence/absence of the LUKS device node] ********************* task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:21 Thursday 21 July 2022 10:10:55 +0000 (0:00:00.024) 0:04:52.444 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that the raw device is the same as the device if not encrypted] *** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:30 Thursday 21 July 2022 10:10:55 +0000 (0:00:00.024) 0:04:52.469 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Make sure we got info about the LUKS volume if encrypted] **************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:38 Thursday 21 July 2022 10:10:55 +0000 (0:00:00.024) 0:04:52.493 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the LUKS volume's device type if encrypted] *********************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:44 Thursday 21 July 2022 10:10:55 +0000 (0:00:00.024) 0:04:52.517 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS version] ****************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:49 Thursday 21 July 2022 10:10:55 +0000 (0:00:00.023) 0:04:52.541 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS key size] ***************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:55 Thursday 21 July 2022 10:10:55 +0000 (0:00:00.025) 0:04:52.567 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS cipher] ******************************************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:61 Thursday 21 July 2022 10:10:55 +0000 (0:00:00.026) 0:04:52.593 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:67 Thursday 21 July 2022 10:10:55 +0000 (0:00:00.024) 0:04:52.618 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { 
"_storage_test_crypttab_entries": [], "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:74 Thursday 21 July 2022 10:10:55 +0000 (0:00:00.052) 0:04:52.670 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:79 Thursday 21 July 2022 10:10:56 +0000 (0:00:00.048) 0:04:52.718 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:85 Thursday 21 July 2022 10:10:56 +0000 (0:00:00.036) 0:04:52.755 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:91 Thursday 21 July 2022 10:10:56 +0000 (0:00:00.035) 0:04:52.790 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:97 Thursday 21 July 2022 10:10:56 +0000 (0:00:00.040) 0:04:52.831 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_expected_crypttab_entries": null, "_storage_test_expected_crypttab_key_file": null }, "changed": false } TASK [get information about RAID] ********************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-md.yml:7 Thursday 21 July 2022 10:10:56 +0000 (0:00:00.034) 0:04:52.865 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-md.yml:13 Thursday 21 July 2022 10:10:56 +0000 (0:00:00.036) 0:04:52.902 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-md.yml:17 Thursday 21 July 2022 10:10:56 +0000 (0:00:00.036) 0:04:52.938 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-md.yml:21 Thursday 21 July 2022 10:10:56 +0000 (0:00:00.040) 0:04:52.979 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [check RAID active devices count] ***************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-md.yml:25 Thursday 21 July 2022 10:10:56 +0000 (0:00:00.036) 0:04:53.015 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [check RAID spare devices count] 
****************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-md.yml:31 Thursday 21 July 2022 10:10:56 +0000 (0:00:00.036) 0:04:53.051 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [check RAID metadata version] ********************************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-md.yml:37 Thursday 21 July 2022 10:10:56 +0000 (0:00:00.036) 0:04:53.088 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [parse the actual size of the volume] ************************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-size.yml:3 Thursday 21 July 2022 10:10:56 +0000 (0:00:00.042) 0:04:53.130 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [parse the requested size of the volume] ********************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-size.yml:9 Thursday 21 July 2022 10:10:56 +0000 (0:00:00.026) 0:04:53.157 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Establish base value for expected size] ********************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-size.yml:15 Thursday 21 July 2022 10:10:56 +0000 (0:00:00.037) 0:04:53.194 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [debug] ******************************************************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-size.yml:20 Thursday 21 July 2022 10:10:56 +0000 (0:00:00.037) 0:04:53.232 ********* ok: [/cache/centos-8.qcow2] => { "storage_test_expected_size": "4294967296" } TASK [debug] ******************************************************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-size.yml:25 Thursday 21 July 2022 10:10:56 +0000 (0:00:00.039) 0:04:53.271 ********* skipping: [/cache/centos-8.qcow2] => {} TASK [debug] ******************************************************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-size.yml:28 Thursday 21 July 2022 10:10:56 +0000 (0:00:00.037) 0:04:53.308 ********* skipping: [/cache/centos-8.qcow2] => {} TASK [Get the size of parent/pool device] ************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-size.yml:31 Thursday 21 July 2022 10:10:56 +0000 (0:00:00.037) 0:04:53.345 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [debug] ******************************************************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-size.yml:36 Thursday 21 July 2022 10:10:56 +0000 (0:00:00.037) 0:04:53.382 ********* skipping: [/cache/centos-8.qcow2] => {} TASK [Calculate the expected size based on pool size and percentage value] ***** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-size.yml:39 Thursday 21 July 2022 10:10:56 +0000 (0:00:00.071) 0:04:53.454 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [debug] ******************************************************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-size.yml:44 Thursday 21 July 2022 10:10:56 +0000 (0:00:00.037) 0:04:53.492 ********* ok: [/cache/centos-8.qcow2] => { 
"storage_test_actual_size": { "changed": false, "skip_reason": "Conditional result was False", "skipped": true } } TASK [debug] ******************************************************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-size.yml:47 Thursday 21 July 2022 10:10:56 +0000 (0:00:00.037) 0:04:53.529 ********* ok: [/cache/centos-8.qcow2] => { "storage_test_expected_size": "4294967296" } TASK [assert] ****************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-size.yml:50 Thursday 21 July 2022 10:10:56 +0000 (0:00:00.037) 0:04:53.566 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Get information about the LV] ******************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-cache.yml:6 Thursday 21 July 2022 10:10:56 +0000 (0:00:00.025) 0:04:53.592 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-cache.yml:14 Thursday 21 July 2022 10:10:56 +0000 (0:00:00.024) 0:04:53.616 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [check segment type] ****************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-cache.yml:17 Thursday 21 July 2022 10:10:56 +0000 (0:00:00.024) 0:04:53.640 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-cache.yml:22 Thursday 21 July 2022 10:10:56 +0000 (0:00:00.023) 0:04:53.664 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [parse the requested cache size] ****************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-cache.yml:26 Thursday 21 July 2022 10:10:56 +0000 (0:00:00.025) 0:04:53.690 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-cache.yml:32 Thursday 21 July 2022 10:10:57 +0000 (0:00:00.024) 0:04:53.714 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check cache size] ******************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-cache.yml:36 Thursday 21 July 2022 10:10:57 +0000 (0:00:00.024) 0:04:53.739 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clean up facts] ********************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume.yml:16 Thursday 21 July 2022 10:10:57 +0000 (0:00:00.025) 0:04:53.764 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_volume_present": null }, "changed": false } TASK [Verify the volumes with no pool were correctly managed] ****************** task path: /tmp/tmpa3egnbq5/tests/verify-role-results.yml:43 Thursday 21 July 2022 10:10:57 +0000 (0:00:00.038) 0:04:53.803 ********* TASK 
[Clean up variable namespace] ********************************************* task path: /tmp/tmpa3egnbq5/tests/verify-role-results.yml:53 Thursday 21 July 2022 10:10:57 +0000 (0:00:00.022) 0:04:53.825 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_blkinfo": null, "storage_test_crypttab": null, "storage_test_fstab": null }, "changed": false } TASK [Cleanup] ***************************************************************** task path: /tmp/tmpa3egnbq5/tests/tests_create_thinp_then_remove.yml:108 Thursday 21 July 2022 10:10:57 +0000 (0:00:00.035) 0:04:53.860 ********* TASK [linux-system-roles.storage : set platform/version specific variables] **** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main.yml:2 Thursday 21 July 2022 10:10:57 +0000 (0:00:00.071) 0:04:53.932 ********* included: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/set_vars.yml for /cache/centos-8.qcow2 TASK [linux-system-roles.storage : Ensure ansible_facts used by role] ********** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:2 Thursday 21 July 2022 10:10:57 +0000 (0:00:00.037) 0:04:53.969 ********* ok: [/cache/centos-8.qcow2] TASK [linux-system-roles.storage : Set platform/version specific variables] **** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:8 Thursday 21 July 2022 10:10:57 +0000 (0:00:00.557) 0:04:54.527 ********* skipping: [/cache/centos-8.qcow2] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [/cache/centos-8.qcow2] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [/cache/centos-8.qcow2] => (item=CentOS_8.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "vdo", "kmod-kvdo", "xfsprogs" ] }, "ansible_included_var_files": [ "/tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } ok: [/cache/centos-8.qcow2] => (item=CentOS_8.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "vdo", "kmod-kvdo", "xfsprogs" ] }, "ansible_included_var_files": [ "/tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } TASK [linux-system-roles.storage : define an empty list of pools to be used in testing] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main.yml:5 Thursday 21 July 2022 10:10:57 +0000 (0:00:00.071) 0:04:54.598 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [linux-system-roles.storage : define an empty list of volumes to be used in testing] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main.yml:9 Thursday 21 July 2022 10:10:57 +0000 (0:00:00.035) 0:04:54.634 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [linux-system-roles.storage : include the appropriate provider tasks] ***** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main.yml:13 
Thursday 21 July 2022 10:10:57 +0000 (0:00:00.036) 0:04:54.670 ********* redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount included: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml for /cache/centos-8.qcow2 TASK [linux-system-roles.storage : get a list of rpm packages installed on host machine] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:2 Thursday 21 July 2022 10:10:58 +0000 (0:00:00.047) 0:04:54.718 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : make sure blivet is available] ************** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:7 Thursday 21 July 2022 10:10:58 +0000 (0:00:00.022) 0:04:54.740 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [linux-system-roles.storage : show storage_pools] ************************* task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:14 Thursday 21 July 2022 10:10:59 +0000 (0:00:01.841) 0:04:56.582 ********* ok: [/cache/centos-8.qcow2] => { "storage_pools": [ { "disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ], "name": "vg1", "state": "absent", "type": "lvm", "volumes": [ { "mount_point": "/opt/test1", "name": "lv1", "size": "3g", "thin": true, "thin_pool_name": "tpool1", "thin_pool_size": "10g" } ] } ] } TASK [linux-system-roles.storage : show storage_volumes] *********************** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:19 Thursday 21 July 2022 10:10:59 +0000 (0:00:00.038) 0:04:56.621 ********* ok: [/cache/centos-8.qcow2] => { "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined" } TASK [linux-system-roles.storage : get required packages] ********************** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:24 Thursday 21 July 2022 10:10:59 +0000 (0:00:00.036) 0:04:56.657 ********* ok: [/cache/centos-8.qcow2] => { "actions": [], "changed": false, "crypts": [], "leaves": [], "mounts": [], "packages": [], "pools": [], "volumes": [] } TASK [linux-system-roles.storage : enable copr repositories if needed] ********* task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:37 Thursday 21 July 2022 10:11:01 +0000 (0:00:01.926) 0:04:58.583 ********* included: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml for /cache/centos-8.qcow2 TASK [linux-system-roles.storage : check if the COPR support packages should be installed] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml:2 Thursday 21 July 2022 10:11:01 +0000 (0:00:00.046) 0:04:58.630 ********* TASK [linux-system-roles.storage : make sure COPR support packages are present] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml:13 Thursday 21 July 2022 10:11:01 +0000 (0:00:00.034) 0:04:58.665 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : enable COPRs] ******************************* task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml:18 Thursday 21 July 2022 10:11:01 +0000 (0:00:00.038) 0:04:58.704 
********* TASK [linux-system-roles.storage : make sure required packages are installed] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:44 Thursday 21 July 2022 10:11:02 +0000 (0:00:00.036) 0:04:58.740 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [linux-system-roles.storage : get service facts] ************************** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:51 Thursday 21 July 2022 10:11:03 +0000 (0:00:01.867) 0:05:00.608 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "blivet.service": { "name": "blivet.service", "source": "systemd", "state": "inactive", "status": "static" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "chrony-dnssrv@.service": { "name": "chrony-dnssrv@.service", "source": "systemd", "state": "unknown", "status": "static" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cockpit-motd.service": { "name": "cockpit-motd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cockpit-wsinstance-http.service": { "name": "cockpit-wsinstance-http.service", "source": "systemd", "state": "inactive", "status": "static" }, "cockpit-wsinstance-https-factory@.service": { "name": "cockpit-wsinstance-https-factory@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cockpit-wsinstance-https@.service": { "name": "cockpit-wsinstance-https@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cockpit.service": { "name": "cockpit.service", "source": "systemd", "state": "inactive", "status": "static" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cpupower.service": { "name": "cpupower.service", "source": "systemd", 
"state": "stopped", "status": "disabled" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "static" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "dbus-org.freedesktop.portable1.service": { "name": "dbus-org.freedesktop.portable1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "running", "status": "static" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "running", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fcoe.service": { "name": "fcoe.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "halt-local.service": { 
"name": "halt-local.service", "source": "systemd", "state": "inactive", "status": "static" }, "import-state.service": { "name": "import-state.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "iscsi-shutdown.service": { "name": "iscsi-shutdown.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iscsi.service": { "name": "iscsi.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iscsid.service": { "name": "iscsid.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "loadmodules.service": { "name": "loadmodules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "lvm2-activation-early.service": { "name": "lvm2-activation-early.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "lvm2-activation.service": { "name": "lvm2-activation.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "lvm2-pvscan@.service": { "name": "lvm2-pvscan@.service", "source": "systemd", "state": "unknown", "status": "static" }, "lvm2-pvscan@259:3.service": { "name": "lvm2-pvscan@259:3.service", "source": "systemd", "state": "stopped", "status": "active" }, "lvm2-pvscan@259:5.service": { "name": "lvm2-pvscan@259:5.service", "source": "systemd", "state": "stopped", "status": "active" }, "lvm2-pvscan@259:6.service": { "name": "lvm2-pvscan@259:6.service", "source": "systemd", "state": "stopped", "status": "active" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "mdadm-grow-continue@.service": { "name": "mdadm-grow-continue@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdadm-last-resort@.service": { "name": "mdadm-last-resort@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdcheck_continue.service": { "name": "mdcheck_continue.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdcheck_start.service": { 
"name": "mdcheck_start.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmon@.service": { "name": "mdmon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdmonitor-oneshot.service": { "name": "mdmonitor-oneshot.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmonitor.service": { "name": "mdmonitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "messagebus.service": { "name": "messagebus.service", "source": "systemd", "state": "active", "status": "static" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "multipathd.service": { "name": "multipathd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "ndctl-monitor.service": { "name": "ndctl-monitor.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-convert.service": { "name": "nfs-convert.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "oddjobd.service": { "name": "oddjobd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "packagekit-offline-update.service": { "name": "packagekit-offline-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "packagekit.service": { "name": "packagekit.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "running", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon.service": { "name": "quotaon.service", "source": "systemd", "state": "inactive", "status": "static" }, "rbdmap.service": { "name": "rbdmap.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": 
"static" }, "rdisc.service": { "name": "rdisc.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "rngd-wake-threshold.service": { "name": "rngd-wake-threshold.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "running", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", 
"state": "active", "status": "enabled" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-resume@.service": { "name": "systemd-hibernate-resume@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-portabled.service": { "name": "systemd-portabled.service", "source": "systemd", "state": "inactive", "status": "static" }, 
"systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-quotacheck.service": { "name": "systemd-quotacheck.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-resolved.service": { "name": "systemd-resolved.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "masked" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "tcsd.service": { "name": "tcsd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "teamd@.service": { "name": "teamd@.service", "source": 
"systemd", "state": "unknown", "status": "static" }, "timedatex.service": { "name": "timedatex.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "tuned.service": { "name": "tuned.service", "source": "systemd", "state": "running", "status": "enabled" }, "unbound-anchor.service": { "name": "unbound-anchor.service", "source": "systemd", "state": "stopped", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "vdo-start-by-dev@.service": { "name": "vdo-start-by-dev@.service", "source": "systemd", "state": "unknown", "status": "static" }, "vdo.service": { "name": "vdo.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "yppasswdd.service": { "name": "yppasswdd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ypserv.service": { "name": "ypserv.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ypxfrd.service": { "name": "ypxfrd.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [linux-system-roles.storage : Set storage_cryptsetup_services] ************ task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:58 Thursday 21 July 2022 10:11:06 +0000 (0:00:02.619) 0:05:03.227 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_cryptsetup_services": [] }, "changed": false } TASK [linux-system-roles.storage : Mask the systemd cryptsetup services] ******* task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:71 Thursday 21 July 2022 10:11:06 +0000 (0:00:00.096) 0:05:03.323 ********* TASK [linux-system-roles.storage : manage the pools and volumes to match the specified state] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:77 Thursday 21 July 2022 10:11:06 +0000 (0:00:00.025) 0:05:03.349 ********* changed: [/cache/centos-8.qcow2] => { "actions": [ { "action": "destroy format", "device": "/dev/mapper/vg1-lv1", "fs_type": "xfs" }, { "action": "destroy device", "device": "/dev/mapper/vg1-lv1", "fs_type": null }, { "action": "destroy device", "device": "/dev/mapper/vg1-tpool1", "fs_type": null }, { "action": "destroy device", "device": "/dev/vg1", "fs_type": null }, { "action": "destroy format", "device": "/dev/nvme2n1p1", "fs_type": "lvmpv" }, { "action": "destroy device", "device": "/dev/nvme2n1p1", "fs_type": null }, { "action": "destroy format", "device": "/dev/nvme2n1", "fs_type": "disklabel" }, { "action": "destroy format", "device": "/dev/nvme1n1p1", "fs_type": "lvmpv" }, { "action": "destroy device", "device": "/dev/nvme1n1p1", "fs_type": null }, { "action": "destroy format", "device": "/dev/nvme1n1", "fs_type": "disklabel" }, { "action": "destroy format", "device": "/dev/nvme0n1p1", "fs_type": "lvmpv" }, { "action": "destroy device", "device": "/dev/nvme0n1p1", "fs_type": null }, { "action": "destroy format", "device": "/dev/nvme0n1", "fs_type": "disklabel" } ], 
"changed": true, "crypts": [], "leaves": [ "/dev/sr0", "/dev/vda1", "/dev/sda", "/dev/sdb", "/dev/sdc", "/dev/nvme0n1", "/dev/nvme1n1", "/dev/nvme2n1", "/dev/vdb", "/dev/vdc", "/dev/vdd" ], "mounts": [], "packages": [ "xfsprogs" ], "pools": [ { "disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "vg1", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "state": "absent", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/vg1-lv1", "_mount_id": "/dev/mapper/vg1-lv1", "_raw_device": "/dev/mapper/vg1-lv1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "name": "lv1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "size": "3g", "state": "present", "thin": true, "thin_pool_name": "tpool1", "thin_pool_size": "10g", "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } TASK [linux-system-roles.storage : Workaround for udev issue on some platforms] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:91 Thursday 21 July 2022 10:11:09 +0000 (0:00:03.212) 0:05:06.562 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Unmask the systemd cryptsetup services] ***** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:103 Thursday 21 July 2022 10:11:09 +0000 (0:00:00.036) 0:05:06.599 ********* TASK [linux-system-roles.storage : show blivet_output] ************************* task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:109 Thursday 21 July 2022 10:11:09 +0000 (0:00:00.021) 0:05:06.620 ********* ok: [/cache/centos-8.qcow2] => { "blivet_output": { "actions": [ { "action": "destroy format", "device": "/dev/mapper/vg1-lv1", "fs_type": "xfs" }, { "action": "destroy device", "device": "/dev/mapper/vg1-lv1", "fs_type": null }, { "action": "destroy device", "device": "/dev/mapper/vg1-tpool1", "fs_type": null }, { "action": "destroy device", "device": "/dev/vg1", "fs_type": null }, { "action": "destroy format", "device": "/dev/nvme2n1p1", "fs_type": "lvmpv" }, { "action": "destroy device", "device": "/dev/nvme2n1p1", "fs_type": null }, { "action": "destroy format", "device": "/dev/nvme2n1", "fs_type": "disklabel" }, { "action": "destroy format", "device": "/dev/nvme1n1p1", "fs_type": "lvmpv" }, { "action": "destroy device", "device": "/dev/nvme1n1p1", "fs_type": null }, { "action": "destroy format", "device": "/dev/nvme1n1", "fs_type": "disklabel" }, { "action": "destroy format", "device": "/dev/nvme0n1p1", "fs_type": "lvmpv" }, { "action": "destroy device", "device": "/dev/nvme0n1p1", "fs_type": null }, { "action": "destroy format", "device": "/dev/nvme0n1", 
"fs_type": "disklabel" } ], "changed": true, "crypts": [], "failed": false, "leaves": [ "/dev/sr0", "/dev/vda1", "/dev/sda", "/dev/sdb", "/dev/sdc", "/dev/nvme0n1", "/dev/nvme1n1", "/dev/nvme2n1", "/dev/vdb", "/dev/vdc", "/dev/vdd" ], "mounts": [], "packages": [ "xfsprogs" ], "pools": [ { "disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "vg1", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "state": "absent", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/vg1-lv1", "_mount_id": "/dev/mapper/vg1-lv1", "_raw_device": "/dev/mapper/vg1-lv1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "name": "lv1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "size": "3g", "state": "present", "thin": true, "thin_pool_name": "tpool1", "thin_pool_size": "10g", "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } } TASK [linux-system-roles.storage : set the list of pools for test verification] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:114 Thursday 21 July 2022 10:11:09 +0000 (0:00:00.041) 0:05:06.662 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_pools_list": [ { "disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "vg1", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "state": "absent", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/vg1-lv1", "_mount_id": "/dev/mapper/vg1-lv1", "_raw_device": "/dev/mapper/vg1-lv1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "name": "lv1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "size": "3g", "state": "present", "thin": true, "thin_pool_name": "tpool1", "thin_pool_size": "10g", "type": "lvm", "vdo_pool_size": null } ] } ] }, "changed": false } TASK [linux-system-roles.storage : set the list of volumes for test verification] *** task path: 
/tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:118 Thursday 21 July 2022 10:11:10 +0000 (0:00:00.078) 0:05:06.740 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [linux-system-roles.storage : remove obsolete mounts] ********************* task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:134 Thursday 21 July 2022 10:11:10 +0000 (0:00:00.081) 0:05:06.821 ********* TASK [linux-system-roles.storage : tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:146 Thursday 21 July 2022 10:11:10 +0000 (0:00:00.078) 0:05:06.899 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : set up new/current mounts] ****************** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:151 Thursday 21 July 2022 10:11:10 +0000 (0:00:00.024) 0:05:06.924 ********* TASK [linux-system-roles.storage : tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:163 Thursday 21 July 2022 10:11:10 +0000 (0:00:00.035) 0:05:06.960 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : retrieve facts for the /etc/crypttab file] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:171 Thursday 21 July 2022 10:11:10 +0000 (0:00:00.025) 0:05:06.985 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "stat": { "atime": 1658398003.7492578, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1643141385.117, "dev": 64513, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 135, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0600", "mtime": 1643141019.537, "nlink": 1, "path": "/etc/crypttab", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": "3147672035", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [linux-system-roles.storage : manage /etc/crypttab to account for changes we just made] *** task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:176 Thursday 21 July 2022 10:11:10 +0000 (0:00:00.381) 0:05:07.366 ********* TASK [linux-system-roles.storage : Update facts] ******************************* task path: /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:198 Thursday 21 July 2022 10:11:10 +0000 (0:00:00.023) 0:05:07.390 ********* ok: [/cache/centos-8.qcow2] META: role_complete for /cache/centos-8.qcow2 TASK [include_tasks] *********************************************************** task path: /tmp/tmpa3egnbq5/tests/tests_create_thinp_then_remove.yml:125 Thursday 21 July 2022 10:11:11 +0000 (0:00:00.931) 0:05:08.321 ********* included: /tmp/tmpa3egnbq5/tests/verify-role-results.yml for /cache/centos-8.qcow2 TASK [Print out pool information] ********************************************** 
task path: /tmp/tmpa3egnbq5/tests/verify-role-results.yml:1 Thursday 21 July 2022 10:11:11 +0000 (0:00:00.050) 0:05:08.372 ********* ok: [/cache/centos-8.qcow2] => { "_storage_pools_list": [ { "disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "vg1", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "state": "absent", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/vg1-lv1", "_mount_id": "/dev/mapper/vg1-lv1", "_raw_device": "/dev/mapper/vg1-lv1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "name": "lv1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "size": "3g", "state": "present", "thin": true, "thin_pool_name": "tpool1", "thin_pool_size": "10g", "type": "lvm", "vdo_pool_size": null } ] } ] } TASK [Print out volume information] ******************************************** task path: /tmp/tmpa3egnbq5/tests/verify-role-results.yml:6 Thursday 21 July 2022 10:11:11 +0000 (0:00:00.050) 0:05:08.422 ********* skipping: [/cache/centos-8.qcow2] => {} TASK [Collect info about the volumes.] 
***************************************** task path: /tmp/tmpa3egnbq5/tests/verify-role-results.yml:14 Thursday 21 July 2022 10:11:11 +0000 (0:00:00.035) 0:05:08.458 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "info": { "/dev/nvme0n1": { "fstype": "", "label": "", "name": "/dev/nvme0n1", "size": "10G", "type": "disk", "uuid": "" }, "/dev/nvme1n1": { "fstype": "", "label": "", "name": "/dev/nvme1n1", "size": "10G", "type": "disk", "uuid": "" }, "/dev/nvme2n1": { "fstype": "", "label": "", "name": "/dev/nvme2n1", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sda": { "fstype": "", "label": "", "name": "/dev/sda", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdb": { "fstype": "", "label": "", "name": "/dev/sdb", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdc": { "fstype": "", "label": "", "name": "/dev/sdc", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sr0": { "fstype": "iso9660", "label": "cidata", "name": "/dev/sr0", "size": "364K", "type": "rom", "uuid": "2022-07-21-10-05-42-00" }, "/dev/vda": { "fstype": "", "label": "", "name": "/dev/vda", "size": "10G", "type": "disk", "uuid": "" }, "/dev/vda1": { "fstype": "xfs", "label": "", "name": "/dev/vda1", "size": "10G", "type": "partition", "uuid": "395b9844-e404-4857-afbb-c6edccaf72f3" }, "/dev/vdb": { "fstype": "", "label": "", "name": "/dev/vdb", "size": "10G", "type": "disk", "uuid": "" }, "/dev/vdc": { "fstype": "", "label": "", "name": "/dev/vdc", "size": "10G", "type": "disk", "uuid": "" }, "/dev/vdd": { "fstype": "", "label": "", "name": "/dev/vdd", "size": "10G", "type": "disk", "uuid": "" } } } TASK [Read the /etc/fstab file for volume existence] *************************** task path: /tmp/tmpa3egnbq5/tests/verify-role-results.yml:19 Thursday 21 July 2022 10:11:12 +0000 (0:00:00.371) 0:05:08.830 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "cmd": [ "cat", "/etc/fstab" ], "delta": "0:00:00.003656", "end": "2022-07-21 10:11:12.357319", "rc": 0, "start": "2022-07-21 10:11:12.353663" } STDOUT: # # /etc/fstab # Created by anaconda on Tue Jan 25 20:03:39 2022 # # Accessible filesystems, by reference, are maintained under '/dev/disk/'. # See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info. # # After editing this file, run 'systemctl daemon-reload' to update systemd # units generated from this file. 
# UUID=395b9844-e404-4857-afbb-c6edccaf72f3 / xfs defaults 0 0 TASK [Read the /etc/crypttab file] ********************************************* task path: /tmp/tmpa3egnbq5/tests/verify-role-results.yml:24 Thursday 21 July 2022 10:11:12 +0000 (0:00:00.379) 0:05:09.209 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "cmd": [ "cat", "/etc/crypttab" ], "delta": "0:00:00.003606", "end": "2022-07-21 10:11:12.749905", "failed_when_result": false, "rc": 0, "start": "2022-07-21 10:11:12.746299" } TASK [Verify the volumes listed in storage_pools were correctly managed] ******* task path: /tmp/tmpa3egnbq5/tests/verify-role-results.yml:33 Thursday 21 July 2022 10:11:12 +0000 (0:00:00.385) 0:05:09.594 ********* included: /tmp/tmpa3egnbq5/tests/test-verify-pool.yml for /cache/centos-8.qcow2 => (item={'disks': ['nvme0n1', 'nvme1n1', 'nvme2n1'], 'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'name': 'vg1', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'state': 'absent', 'type': 'lvm', 'volumes': [{'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'name': 'lv1', 'raid_level': None, 'size': '3g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'thin_pool_name': 'tpool1', 'thin_pool_size': '10g', 'thin': True, 'vdo_pool_size': None, 'disks': ['nvme0n1', 'nvme1n1', 'nvme2n1'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/vg1-lv1', '_raw_device': '/dev/mapper/vg1-lv1', '_mount_id': '/dev/mapper/vg1-lv1'}], 'raid_chunk_size': None}) TASK [Set _storage_pool_tests] ************************************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-pool.yml:5 Thursday 21 July 2022 10:11:12 +0000 (0:00:00.056) 0:05:09.651 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_pool_tests": [ "members", "volumes" ] }, "changed": false } TASK [include_tasks] *********************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-pool.yml:18 Thursday 21 July 2022 10:11:13 +0000 (0:00:00.066) 0:05:09.718 ********* included: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml for /cache/centos-8.qcow2 => (item=members) included: /tmp/tmpa3egnbq5/tests/test-verify-pool-volumes.yml for /cache/centos-8.qcow2 => (item=volumes) TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml:1 Thursday 21 July 2022 10:11:13 +0000 (0:00:00.045) 0:05:09.764 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_expected_pv_count": "0", "_storage_test_pool_pvs_lvm": [] }, "changed": false } TASK [Get the canonical device path for each member device] ******************** task path: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml:10 Thursday 21 July 2022 10:11:13 +0000 (0:00:00.089) 0:05:09.853 ********* TASK [set_fact] 
**************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml:19 Thursday 21 July 2022 10:11:13 +0000 (0:00:00.021) 0:05:09.875 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "__pvs_lvm_len": "0" }, "changed": false } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml:23 Thursday 21 July 2022 10:11:13 +0000 (0:00:00.097) 0:05:09.973 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_pool_pvs": [] }, "changed": false } TASK [Verify PV count] ********************************************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml:27 Thursday 21 July 2022 10:11:13 +0000 (0:00:00.051) 0:05:10.025 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml:34 Thursday 21 July 2022 10:11:13 +0000 (0:00:00.059) 0:05:10.084 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_expected_pv_type": "disk" }, "changed": false } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml:38 Thursday 21 July 2022 10:11:13 +0000 (0:00:00.042) 0:05:10.127 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_expected_pv_type": "partition" }, "changed": false } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml:42 Thursday 21 July 2022 10:11:13 +0000 (0:00:00.053) 0:05:10.181 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check the type of each PV] *********************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml:46 Thursday 21 July 2022 10:11:13 +0000 (0:00:00.024) 0:05:10.205 ********* TASK [Check MD RAID] *********************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml:56 Thursday 21 July 2022 10:11:13 +0000 (0:00:00.022) 0:05:10.228 ********* included: /tmp/tmpa3egnbq5/tests/verify-pool-md.yml for /cache/centos-8.qcow2 TASK [get information about RAID] ********************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-md.yml:6 Thursday 21 July 2022 10:11:13 +0000 (0:00:00.039) 0:05:10.267 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-md.yml:12 Thursday 21 July 2022 10:11:13 +0000 (0:00:00.022) 0:05:10.290 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-md.yml:16 Thursday 21 July 2022 10:11:13 +0000 (0:00:00.023) 0:05:10.313 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-md.yml:20 Thursday 21 July 2022 10:11:13 
+0000 (0:00:00.023) 0:05:10.337 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [check RAID active devices count] ***************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-md.yml:24 Thursday 21 July 2022 10:11:13 +0000 (0:00:00.023) 0:05:10.360 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [check RAID spare devices count] ****************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-md.yml:30 Thursday 21 July 2022 10:11:13 +0000 (0:00:00.023) 0:05:10.384 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [check RAID metadata version] ********************************************* task path: /tmp/tmpa3egnbq5/tests/verify-pool-md.yml:36 Thursday 21 July 2022 10:11:13 +0000 (0:00:00.024) 0:05:10.408 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-md.yml:44 Thursday 21 July 2022 10:11:13 +0000 (0:00:00.024) 0:05:10.432 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_md_active_devices_re": null, "storage_test_md_metadata_version_re": null, "storage_test_md_spare_devices_re": null }, "changed": false } TASK [Check LVM RAID] ********************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml:59 Thursday 21 July 2022 10:11:13 +0000 (0:00:00.033) 0:05:10.466 ********* included: /tmp/tmpa3egnbq5/tests/verify-pool-members-lvmraid.yml for /cache/centos-8.qcow2 TASK [Validate pool member LVM RAID settings] ********************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-members-lvmraid.yml:1 Thursday 21 July 2022 10:11:13 +0000 (0:00:00.042) 0:05:10.509 ********* included: /tmp/tmpa3egnbq5/tests/verify-pool-member-lvmraid.yml for /cache/centos-8.qcow2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'name': 'lv1', 'raid_level': None, 'size': '3g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'thin_pool_name': 'tpool1', 'thin_pool_size': '10g', 'thin': True, 'vdo_pool_size': None, 'disks': ['nvme0n1', 'nvme1n1', 'nvme2n1'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/vg1-lv1', '_raw_device': '/dev/mapper/vg1-lv1', '_mount_id': '/dev/mapper/vg1-lv1'}) TASK [Get information about LVM RAID] ****************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-lvmraid.yml:3 Thursday 21 July 2022 10:11:13 +0000 (0:00:00.041) 0:05:10.550 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check that volume is LVM RAID] ******************************************* task path: 
/tmp/tmpa3egnbq5/tests/verify-pool-member-lvmraid.yml:8 Thursday 21 July 2022 10:11:13 +0000 (0:00:00.026) 0:05:10.576 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-lvmraid.yml:12 Thursday 21 July 2022 10:11:13 +0000 (0:00:00.027) 0:05:10.603 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check Thin Pools] ******************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml:62 Thursday 21 July 2022 10:11:13 +0000 (0:00:00.026) 0:05:10.630 ********* included: /tmp/tmpa3egnbq5/tests/verify-pool-members-thin.yml for /cache/centos-8.qcow2 TASK [Validate pool member thinpool settings] ********************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-members-thin.yml:1 Thursday 21 July 2022 10:11:13 +0000 (0:00:00.042) 0:05:10.673 ********* included: /tmp/tmpa3egnbq5/tests/verify-pool-member-thin.yml for /cache/centos-8.qcow2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'name': 'lv1', 'raid_level': None, 'size': '3g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'thin_pool_name': 'tpool1', 'thin_pool_size': '10g', 'thin': True, 'vdo_pool_size': None, 'disks': ['nvme0n1', 'nvme1n1', 'nvme2n1'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/vg1-lv1', '_raw_device': '/dev/mapper/vg1-lv1', '_mount_id': '/dev/mapper/vg1-lv1'}) TASK [Get information about thinpool] ****************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-thin.yml:3 Thursday 21 July 2022 10:11:14 +0000 (0:00:00.042) 0:05:10.716 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check that volume is in correct thinpool (when thinp name is provided)] *** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-thin.yml:8 Thursday 21 July 2022 10:11:14 +0000 (0:00:00.024) 0:05:10.740 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check that volume is in thinpool (when thinp name is not provided)] ****** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-thin.yml:13 Thursday 21 July 2022 10:11:14 +0000 (0:00:00.023) 0:05:10.764 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-thin.yml:17 Thursday 21 July 2022 10:11:14 +0000 (0:00:00.024) 0:05:10.789 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check member encryption] ************************************************* task path: 
/tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml:65 Thursday 21 July 2022 10:11:14 +0000 (0:00:00.023) 0:05:10.813 ********* included: /tmp/tmpa3egnbq5/tests/verify-pool-members-encryption.yml for /cache/centos-8.qcow2 TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-members-encryption.yml:4 Thursday 21 July 2022 10:11:14 +0000 (0:00:00.043) 0:05:10.857 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Validate pool member LUKS settings] ************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-members-encryption.yml:8 Thursday 21 July 2022 10:11:14 +0000 (0:00:00.078) 0:05:10.935 ********* TASK [Validate pool member crypttab entries] *********************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-members-encryption.yml:15 Thursday 21 July 2022 10:11:14 +0000 (0:00:00.021) 0:05:10.956 ********* TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-members-encryption.yml:22 Thursday 21 July 2022 10:11:14 +0000 (0:00:00.020) 0:05:10.976 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_crypttab_key_file": null }, "changed": false } TASK [Check VDO] *************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml:68 Thursday 21 July 2022 10:11:14 +0000 (0:00:00.034) 0:05:11.011 ********* included: /tmp/tmpa3egnbq5/tests/verify-pool-members-vdo.yml for /cache/centos-8.qcow2 TASK [Validate pool member VDO settings] *************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-members-vdo.yml:1 Thursday 21 July 2022 10:11:14 +0000 (0:00:00.045) 0:05:11.056 ********* included: /tmp/tmpa3egnbq5/tests/verify-pool-member-vdo.yml for /cache/centos-8.qcow2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'name': 'lv1', 'raid_level': None, 'size': '3g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'thin_pool_name': 'tpool1', 'thin_pool_size': '10g', 'thin': True, 'vdo_pool_size': None, 'disks': ['nvme0n1', 'nvme1n1', 'nvme2n1'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/vg1-lv1', '_raw_device': '/dev/mapper/vg1-lv1', '_mount_id': '/dev/mapper/vg1-lv1'}) TASK [get information about VDO deduplication] ********************************* task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-vdo.yml:3 Thursday 21 July 2022 10:11:14 +0000 (0:00:00.077) 0:05:11.133 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-vdo.yml:8 Thursday 21 July 2022 10:11:14 +0000 (0:00:00.025) 0:05:11.158 
********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [assert] ****************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-vdo.yml:11 Thursday 21 July 2022 10:11:14 +0000 (0:00:00.023) 0:05:11.182 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [assert] ****************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-vdo.yml:16 Thursday 21 July 2022 10:11:14 +0000 (0:00:00.023) 0:05:11.206 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-vdo.yml:21 Thursday 21 July 2022 10:11:14 +0000 (0:00:00.024) 0:05:11.231 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [assert] ****************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-vdo.yml:24 Thursday 21 July 2022 10:11:14 +0000 (0:00:00.023) 0:05:11.255 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [assert] ****************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-vdo.yml:29 Thursday 21 July 2022 10:11:14 +0000 (0:00:00.024) 0:05:11.279 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/verify-pool-member-vdo.yml:39 Thursday 21 July 2022 10:11:14 +0000 (0:00:00.023) 0:05:11.302 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_vdo_status": null }, "changed": false } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-pool-members.yml:71 Thursday 21 July 2022 10:11:14 +0000 (0:00:00.037) 0:05:11.339 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "__pvs_lvm_len": null, "_storage_test_expected_pv_count": null, "_storage_test_expected_pv_type": null, "_storage_test_pool_pvs": [], "_storage_test_pool_pvs_lvm": [] }, "changed": false } TASK [verify the volumes] ****************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-pool-volumes.yml:3 Thursday 21 July 2022 10:11:14 +0000 (0:00:00.033) 0:05:11.373 ********* included: /tmp/tmpa3egnbq5/tests/test-verify-volume.yml for /cache/centos-8.qcow2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'name': 'lv1', 'raid_level': None, 'size': '3g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'thin_pool_name': 'tpool1', 'thin_pool_size': '10g', 'thin': True, 'vdo_pool_size': None, 'disks': ['nvme0n1', 'nvme1n1', 'nvme2n1'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 
'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/vg1-lv1', '_raw_device': '/dev/mapper/vg1-lv1', '_mount_id': '/dev/mapper/vg1-lv1'}) TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume.yml:2 Thursday 21 July 2022 10:11:14 +0000 (0:00:00.041) 0:05:11.414 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_volume_present": false, "_storage_volume_tests": [ "mount", "fstab", "fs", "device", "encryption", "md", "size", "cache" ] }, "changed": false } TASK [include_tasks] *********************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume.yml:10 Thursday 21 July 2022 10:11:14 +0000 (0:00:00.055) 0:05:11.469 ********* included: /tmp/tmpa3egnbq5/tests/test-verify-volume-mount.yml for /cache/centos-8.qcow2 => (item=mount) included: /tmp/tmpa3egnbq5/tests/test-verify-volume-fstab.yml for /cache/centos-8.qcow2 => (item=fstab) included: /tmp/tmpa3egnbq5/tests/test-verify-volume-fs.yml for /cache/centos-8.qcow2 => (item=fs) included: /tmp/tmpa3egnbq5/tests/test-verify-volume-device.yml for /cache/centos-8.qcow2 => (item=device) included: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml for /cache/centos-8.qcow2 => (item=encryption) included: /tmp/tmpa3egnbq5/tests/test-verify-volume-md.yml for /cache/centos-8.qcow2 => (item=md) included: /tmp/tmpa3egnbq5/tests/test-verify-volume-size.yml for /cache/centos-8.qcow2 => (item=size) included: /tmp/tmpa3egnbq5/tests/test-verify-volume-cache.yml for /cache/centos-8.qcow2 => (item=cache) TASK [Get expected mount device based on device type] ************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-mount.yml:6 Thursday 21 July 2022 10:11:14 +0000 (0:00:00.075) 0:05:11.545 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_device_path": "/dev/mapper/vg1-lv1" }, "changed": false } TASK [Set some facts] ********************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-mount.yml:14 Thursday 21 July 2022 10:11:14 +0000 (0:00:00.040) 0:05:11.585 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_mount_device_matches": [], "storage_test_mount_expected_match_count": "0", "storage_test_mount_point_matches": [], "storage_test_swap_expected_matches": "0" }, "changed": false } TASK [Verify the current mount state by device] ******************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-mount.yml:28 Thursday 21 July 2022 10:11:14 +0000 (0:00:00.056) 0:05:11.642 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the current mount state by mount point] *************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-mount.yml:37 Thursday 21 July 2022 10:11:14 +0000 (0:00:00.023) 0:05:11.665 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Verify the mount fs type] ************************************************ task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-mount.yml:45 Thursday 21 July 2022 10:11:15 +0000 (0:00:00.049) 0:05:11.715 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [command] 
***************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-mount.yml:54 Thursday 21 July 2022 10:11:15 +0000 (0:00:00.036) 0:05:11.752 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Gather swap info] ******************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-mount.yml:58 Thursday 21 July 2022 10:11:15 +0000 (0:00:00.023) 0:05:11.776 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify swap status] ****************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-mount.yml:63 Thursday 21 July 2022 10:11:15 +0000 (0:00:00.025) 0:05:11.802 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Unset facts] ************************************************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-mount.yml:75 Thursday 21 July 2022 10:11:15 +0000 (0:00:00.023) 0:05:11.825 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_mount_device_matches": null, "storage_test_mount_expected_match_count": null, "storage_test_mount_point_matches": null, "storage_test_swap_expected_matches": null, "storage_test_swaps": null, "storage_test_sys_node": null }, "changed": false } TASK [Set some variables for fstab checking] *********************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-fstab.yml:2 Thursday 21 July 2022 10:11:15 +0000 (0:00:00.035) 0:05:11.861 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": "0", "storage_test_fstab_expected_mount_options_matches": "0", "storage_test_fstab_expected_mount_point_matches": "0", "storage_test_fstab_id_matches": [], "storage_test_fstab_mount_options_matches": [], "storage_test_fstab_mount_point_matches": [] }, "changed": false } TASK [Verify that the device identifier appears in /etc/fstab] ***************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-fstab.yml:25 Thursday 21 July 2022 10:11:15 +0000 (0:00:00.062) 0:05:11.924 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the fstab mount point] ******************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-fstab.yml:32 Thursday 21 July 2022 10:11:15 +0000 (0:00:00.024) 0:05:11.948 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Verify mount_options] **************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-fstab.yml:39 Thursday 21 July 2022 10:11:15 +0000 (0:00:00.053) 0:05:12.002 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clean up variables] ****************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-fstab.yml:49 Thursday 21 July 2022 10:11:15 +0000 (0:00:00.037) 0:05:12.039 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": null, "storage_test_fstab_expected_mount_options_matches": null, "storage_test_fstab_expected_mount_point_matches": null, "storage_test_fstab_id_matches": null, "storage_test_fstab_mount_options_matches": null, 
"storage_test_fstab_mount_point_matches": null }, "changed": false } TASK [Verify fs type] ********************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-fs.yml:4 Thursday 21 July 2022 10:11:15 +0000 (0:00:00.035) 0:05:12.075 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify fs label] ********************************************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-fs.yml:10 Thursday 21 July 2022 10:11:15 +0000 (0:00:00.025) 0:05:12.101 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [See whether the device node is present] ********************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-device.yml:4 Thursday 21 July 2022 10:11:15 +0000 (0:00:00.023) 0:05:12.124 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "stat": { "exists": false } } TASK [Verify the presence/absence of the device node] ************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-device.yml:10 Thursday 21 July 2022 10:11:15 +0000 (0:00:00.383) 0:05:12.508 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Make sure we got info about this volume] ********************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-device.yml:18 Thursday 21 July 2022 10:11:15 +0000 (0:00:00.038) 0:05:12.546 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [(1/2) Process volume type (set initial value)] *************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-device.yml:24 Thursday 21 July 2022 10:11:15 +0000 (0:00:00.024) 0:05:12.571 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "st_volume_type": "lvm" }, "changed": false } TASK [(2/2) Process volume type (get RAID value)] ****************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-device.yml:28 Thursday 21 July 2022 10:11:15 +0000 (0:00:00.046) 0:05:12.617 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the volume's device type] ***************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-device.yml:33 Thursday 21 July 2022 10:11:15 +0000 (0:00:00.029) 0:05:12.646 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Stat the LUKS device, if encrypted] ************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:3 Thursday 21 July 2022 10:11:15 +0000 (0:00:00.033) 0:05:12.680 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Ensure cryptsetup is present] ******************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:10 Thursday 21 July 2022 10:11:16 +0000 (0:00:00.059) 0:05:12.740 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [Collect LUKS info for this volume] *************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:15 Thursday 21 July 2022 10:11:17 +0000 (0:00:01.857) 0:05:14.597 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, 
"skip_reason": "Conditional result was False" } TASK [Verify the presence/absence of the LUKS device node] ********************* task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:21 Thursday 21 July 2022 10:11:17 +0000 (0:00:00.023) 0:05:14.621 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that the raw device is the same as the device if not encrypted] *** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:30 Thursday 21 July 2022 10:11:17 +0000 (0:00:00.022) 0:05:14.644 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Make sure we got info about the LUKS volume if encrypted] **************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:38 Thursday 21 July 2022 10:11:17 +0000 (0:00:00.024) 0:05:14.668 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the LUKS volume's device type if encrypted] *********************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:44 Thursday 21 July 2022 10:11:17 +0000 (0:00:00.022) 0:05:14.691 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS version] ****************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:49 Thursday 21 July 2022 10:11:18 +0000 (0:00:00.022) 0:05:14.714 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS key size] ***************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:55 Thursday 21 July 2022 10:11:18 +0000 (0:00:00.023) 0:05:14.737 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS cipher] ******************************************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:61 Thursday 21 July 2022 10:11:18 +0000 (0:00:00.024) 0:05:14.761 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:67 Thursday 21 July 2022 10:11:18 +0000 (0:00:00.022) 0:05:14.783 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_crypttab_entries": [], "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:74 Thursday 21 July 2022 10:11:18 +0000 (0:00:00.049) 0:05:14.833 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:79 Thursday 21 July 2022 10:11:18 +0000 (0:00:00.051) 0:05:14.885 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: 
/tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:85 Thursday 21 July 2022 10:11:18 +0000 (0:00:00.039) 0:05:14.924 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:91 Thursday 21 July 2022 10:11:18 +0000 (0:00:00.036) 0:05:14.960 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:97 Thursday 21 July 2022 10:11:18 +0000 (0:00:00.035) 0:05:14.996 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_expected_crypttab_entries": null, "_storage_test_expected_crypttab_key_file": null }, "changed": false } TASK [get information about RAID] ********************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-md.yml:7 Thursday 21 July 2022 10:11:18 +0000 (0:00:00.036) 0:05:15.032 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-md.yml:13 Thursday 21 July 2022 10:11:18 +0000 (0:00:00.040) 0:05:15.072 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-md.yml:17 Thursday 21 July 2022 10:11:18 +0000 (0:00:00.036) 0:05:15.109 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-md.yml:21 Thursday 21 July 2022 10:11:18 +0000 (0:00:00.036) 0:05:15.146 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [check RAID active devices count] ***************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-md.yml:25 Thursday 21 July 2022 10:11:18 +0000 (0:00:00.035) 0:05:15.182 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [check RAID spare devices count] ****************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-md.yml:31 Thursday 21 July 2022 10:11:18 +0000 (0:00:00.039) 0:05:15.222 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [check RAID metadata version] ********************************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-md.yml:37 Thursday 21 July 2022 10:11:18 +0000 (0:00:00.048) 0:05:15.271 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [parse the actual size of the volume] ************************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-size.yml:3 Thursday 21 July 2022 10:11:18 +0000 (0:00:00.038) 0:05:15.309 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": 
"Conditional result was False" } TASK [parse the requested size of the volume] ********************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-size.yml:9 Thursday 21 July 2022 10:11:18 +0000 (0:00:00.026) 0:05:15.336 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Establish base value for expected size] ********************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-size.yml:15 Thursday 21 July 2022 10:11:18 +0000 (0:00:00.041) 0:05:15.377 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [debug] ******************************************************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-size.yml:20 Thursday 21 July 2022 10:11:18 +0000 (0:00:00.037) 0:05:15.415 ********* ok: [/cache/centos-8.qcow2] => { "storage_test_expected_size": "4294967296" } TASK [debug] ******************************************************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-size.yml:25 Thursday 21 July 2022 10:11:18 +0000 (0:00:00.039) 0:05:15.455 ********* skipping: [/cache/centos-8.qcow2] => {} TASK [debug] ******************************************************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-size.yml:28 Thursday 21 July 2022 10:11:18 +0000 (0:00:00.039) 0:05:15.494 ********* skipping: [/cache/centos-8.qcow2] => {} TASK [Get the size of parent/pool device] ************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-size.yml:31 Thursday 21 July 2022 10:11:18 +0000 (0:00:00.037) 0:05:15.531 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [debug] ******************************************************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-size.yml:36 Thursday 21 July 2022 10:11:18 +0000 (0:00:00.038) 0:05:15.570 ********* skipping: [/cache/centos-8.qcow2] => {} TASK [Calculate the expected size based on pool size and percentage value] ***** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-size.yml:39 Thursday 21 July 2022 10:11:18 +0000 (0:00:00.037) 0:05:15.607 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [debug] ******************************************************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-size.yml:44 Thursday 21 July 2022 10:11:18 +0000 (0:00:00.038) 0:05:15.646 ********* ok: [/cache/centos-8.qcow2] => { "storage_test_actual_size": { "changed": false, "skip_reason": "Conditional result was False", "skipped": true } } TASK [debug] ******************************************************************* task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-size.yml:47 Thursday 21 July 2022 10:11:18 +0000 (0:00:00.036) 0:05:15.683 ********* ok: [/cache/centos-8.qcow2] => { "storage_test_expected_size": "4294967296" } TASK [assert] ****************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-size.yml:50 Thursday 21 July 2022 10:11:19 +0000 (0:00:00.037) 0:05:15.720 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Get information about the LV] ******************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-cache.yml:6 Thursday 21 July 
2022 10:11:19 +0000 (0:00:00.033) 0:05:15.753 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-cache.yml:14 Thursday 21 July 2022 10:11:19 +0000 (0:00:00.026) 0:05:15.780 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [check segment type] ****************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-cache.yml:17 Thursday 21 July 2022 10:11:19 +0000 (0:00:00.023) 0:05:15.803 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-cache.yml:22 Thursday 21 July 2022 10:11:19 +0000 (0:00:00.023) 0:05:15.826 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [parse the requested cache size] ****************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-cache.yml:26 Thursday 21 July 2022 10:11:19 +0000 (0:00:00.024) 0:05:15.850 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-cache.yml:32 Thursday 21 July 2022 10:11:19 +0000 (0:00:00.027) 0:05:15.878 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check cache size] ******************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume-cache.yml:36 Thursday 21 July 2022 10:11:19 +0000 (0:00:00.059) 0:05:15.937 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clean up facts] ********************************************************** task path: /tmp/tmpa3egnbq5/tests/test-verify-volume.yml:16 Thursday 21 July 2022 10:11:19 +0000 (0:00:00.028) 0:05:15.965 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_volume_present": null }, "changed": false } TASK [Verify the volumes with no pool were correctly managed] ****************** task path: /tmp/tmpa3egnbq5/tests/verify-role-results.yml:43 Thursday 21 July 2022 10:11:19 +0000 (0:00:00.037) 0:05:16.003 ********* TASK [Clean up variable namespace] ********************************************* task path: /tmp/tmpa3egnbq5/tests/verify-role-results.yml:53 Thursday 21 July 2022 10:11:19 +0000 (0:00:00.030) 0:05:16.033 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_blkinfo": null, "storage_test_crypttab": null, "storage_test_fstab": null }, "changed": false } META: ran handlers META: ran handlers PLAY RECAP ********************************************************************* /cache/centos-8.qcow2 : ok=649 changed=11 unreachable=0 failed=0 skipped=476 rescued=0 ignored=0 Thursday 21 July 2022 10:11:19 +0000 (0:00:00.061) 0:05:16.095 ********* =============================================================================== linux-system-roles.storage : make sure blivet is available ------------ 150.36s 
/tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:7 linux-system-roles.storage : manage the pools and volumes to match the specified state --- 3.37s /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:77 Ensure cryptsetup is present -------------------------------------------- 3.37s /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:10 ------------------- linux-system-roles.storage : manage the pools and volumes to match the specified state --- 3.21s /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:77 linux-system-roles.storage : get service facts -------------------------- 2.66s /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:51 linux-system-roles.storage : manage the pools and volumes to match the specified state --- 2.62s /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:77 linux-system-roles.storage : get service facts -------------------------- 2.62s /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:51 linux-system-roles.storage : manage the pools and volumes to match the specified state --- 2.26s /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:77 linux-system-roles.storage : manage the pools and volumes to match the specified state --- 2.08s /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:77 linux-system-roles.storage : manage the pools and volumes to match the specified state --- 2.07s /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:77 linux-system-roles.storage : get required packages ---------------------- 2.05s /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:24 Ensure cryptsetup is present -------------------------------------------- 1.96s /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:10 ------------------- linux-system-roles.storage : get required packages ---------------------- 1.95s /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:24 linux-system-roles.storage : get required packages ---------------------- 1.95s /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:24 linux-system-roles.storage : make sure required packages are installed --- 1.93s /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:44 linux-system-roles.storage : get required packages ---------------------- 1.93s /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:24 linux-system-roles.storage : get required packages ---------------------- 1.93s /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:24 Ensure cryptsetup is present -------------------------------------------- 1.93s /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:10 ------------------- linux-system-roles.storage : make sure blivet is available -------------- 1.92s /tmp/tmpa3egnbq5/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:7 Ensure cryptsetup is present -------------------------------------------- 1.90s /tmp/tmpa3egnbq5/tests/test-verify-volume-encryption.yml:10 ------------------- ansible-playbook [core 2.12.6] config file = /etc/ansible/ansible.cfg configured module search path = ['/root/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules'] ansible python module location = /usr/lib/python3.9/site-packages/ansible ansible collection location = /tmp/tmpu1heti3n executable location = 
/usr/bin/ansible-playbook python version = 3.9.13 (main, May 18 2022, 00:00:00) [GCC 11.3.1 20220421 (Red Hat 11.3.1-2)] jinja version = 2.11.3 libyaml = True Using /etc/ansible/ansible.cfg as config file Skipping callback 'debug', as we already have a stdout callback. Skipping callback 'default', as we already have a stdout callback. Skipping callback 'minimal', as we already have a stdout callback. Skipping callback 'oneline', as we already have a stdout callback. PLAYBOOK: centos-8_setup.yml *************************************************** 1 plays in /cache/centos-8_setup.yml PLAY [Enable HA repos] ********************************************************* META: ran handlers TASK [Enable HA repos] ********************************************************* task path: /cache/centos-8_setup.yml:5 Thursday 21 July 2022 19:23:03 +0000 (0:00:00.018) 0:00:00.018 ********* changed: [/cache/centos-8.qcow2] => { "ansible_facts": { "discovered_interpreter_python": "/usr/libexec/platform-python" }, "changed": true, "cmd": [ "dnf", "config-manager", "--set-enabled", "ha" ], "delta": "0:00:00.369381", "end": "2022-07-21 19:23:04.492205", "rc": 0, "start": "2022-07-21 19:23:04.122824" } META: ran handlers META: ran handlers PLAY RECAP ********************************************************************* /cache/centos-8.qcow2 : ok=1 changed=1 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0 Thursday 21 July 2022 19:23:04 +0000 (0:00:01.009) 0:00:01.028 ********* =============================================================================== Enable HA repos --------------------------------------------------------- 1.01s /cache/centos-8_setup.yml:5 --------------------------------------------------- PLAYBOOK: tests_create_thinp_then_remove_nvme_generated.yml ******************** 2 plays in /tmp/tmp_0pjp8ed/tests/storage/tests_create_thinp_then_remove_nvme_generated.yml PLAY [all] ********************************************************************* TASK [Gathering Facts] ********************************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/tests_create_thinp_then_remove_nvme_generated.yml:3 Thursday 21 July 2022 19:23:04 +0000 (0:00:00.016) 0:00:01.044 ********* ok: [/cache/centos-8.qcow2] META: ran handlers TASK [set disk interface for test] ********************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/tests_create_thinp_then_remove_nvme_generated.yml:7 Thursday 21 July 2022 19:23:05 +0000 (0:00:01.134) 0:00:02.179 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_use_interface": "nvme" }, "changed": false } META: ran handlers META: ran handlers PLAY [all] ********************************************************************* TASK [Gathering Facts] ********************************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/tests_create_thinp_then_remove.yml:2 Thursday 21 July 2022 19:23:06 +0000 (0:00:00.047) 0:00:02.227 ********* ok: [/cache/centos-8.qcow2] META: ran handlers TASK [include_role : fedora.linux_system_roles.storage] ************************ task path: /tmp/tmp_0pjp8ed/tests/storage/tests_create_thinp_then_remove.yml:14 Thursday 21 July 2022 19:23:06 +0000 (0:00:00.865) 0:00:03.092 ********* TASK [fedora.linux_system_roles.storage : set platform/version specific variables] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2 Thursday 21 July 2022 19:23:06 +0000 (0:00:00.036) 0:00:03.128 ********* included: 
/tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for /cache/centos-8.qcow2 TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2 Thursday 21 July 2022 19:23:06 +0000 (0:00:00.029) 0:00:03.158 ********* ok: [/cache/centos-8.qcow2] TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7 Thursday 21 July 2022 19:23:07 +0000 (0:00:00.840) 0:00:03.998 ********* skipping: [/cache/centos-8.qcow2] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [/cache/centos-8.qcow2] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [/cache/centos-8.qcow2] => (item=CentOS_8.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "vdo", "kmod-kvdo", "xfsprogs" ] }, "ansible_included_var_files": [ "/tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } ok: [/cache/centos-8.qcow2] => (item=CentOS_8.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "vdo", "kmod-kvdo", "xfsprogs" ] }, "ansible_included_var_files": [ "/tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } TASK [fedora.linux_system_roles.storage : define an empty list of pools to be used in testing] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5 Thursday 21 July 2022 19:23:07 +0000 (0:00:00.063) 0:00:04.061 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : define an empty list of volumes to be used in testing] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9 Thursday 21 July 2022 19:23:07 +0000 (0:00:00.029) 0:00:04.091 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : include the appropriate provider tasks] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13 Thursday 21 July 2022 19:23:07 +0000 (0:00:00.029) 0:00:04.121 ********* redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount included: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for /cache/centos-8.qcow2 TASK [fedora.linux_system_roles.storage : get a list of rpm packages installed on host machine] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2 Thursday 21 July 2022 19:23:07 +0000 (0:00:00.052) 0:00:04.174 ********* skipping: 
[/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : make sure blivet is available] ******* task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:7 Thursday 21 July 2022 19:23:07 +0000 (0:00:00.016) 0:00:04.191 ********* changed: [/cache/centos-8.qcow2] => { "changed": true, "rc": 0, "results": [ "Installed: ndctl-71.1-4.el8.x86_64", "Installed: libblockdev-loop-2.24-8.el8.x86_64", "Installed: ndctl-libs-71.1-4.el8.x86_64", "Installed: libblockdev-lvm-2.24-8.el8.x86_64", "Installed: lvm2-libs-8:2.03.14-2.el8.x86_64", "Installed: device-mapper-multipath-libs-0.8.4-20.el8.x86_64", "Installed: nss-softokn-freebl-3.79.0-5.el8.x86_64", "Installed: kernel-modules-4.18.0-408.el8.x86_64", "Installed: daxctl-libs-71.1-4.el8.x86_64", "Installed: libblockdev-mdraid-2.24-8.el8.x86_64", "Installed: libblockdev-mpath-2.24-8.el8.x86_64", "Installed: device-mapper-persistent-data-0.9.0-7.el8.x86_64", "Installed: nss-sysinit-3.79.0-5.el8.x86_64", "Installed: libblockdev-nvdimm-2.24-8.el8.x86_64", "Installed: userspace-rcu-0.10.1-4.el8.x86_64", "Installed: libblockdev-part-2.24-8.el8.x86_64", "Installed: nss-util-3.79.0-5.el8.x86_64", "Installed: libblockdev-swap-2.24-8.el8.x86_64", "Installed: vdo-6.2.6.14-14.el8.x86_64", "Installed: libblockdev-utils-2.24-8.el8.x86_64", "Installed: linux-firmware-20220713-109.gitdfa29317.el8.noarch", "Installed: nspr-4.34.0-3.el8.x86_64", "Installed: mdadm-4.2-3.el8.x86_64", "Installed: python3-pyparted-1:3.11.7-4.el8.x86_64", "Installed: device-mapper-event-8:1.02.181-2.el8.x86_64", "Installed: libbytesize-1.4-3.el8.x86_64", "Installed: libblockdev-2.24-8.el8.x86_64", "Installed: nss-3.79.0-5.el8.x86_64", "Installed: python3-blivet-1:3.4.0-12.el8.noarch", "Installed: kernel-core-4.18.0-408.el8.x86_64", "Installed: libblockdev-crypto-2.24-8.el8.x86_64", "Installed: device-mapper-event-libs-8:1.02.181-2.el8.x86_64", "Installed: lsof-4.93.2-1.el8.x86_64", "Installed: libblockdev-dm-2.24-8.el8.x86_64", "Installed: python3-blockdev-2.24-8.el8.x86_64", "Installed: volume_key-libs-0.3.11-5.el8.x86_64", "Installed: blivet-data-1:3.4.0-12.el8.noarch", "Installed: python3-bytesize-1.4-3.el8.x86_64", "Installed: lvm2-8:2.03.14-2.el8.x86_64", "Installed: libblockdev-fs-2.24-8.el8.x86_64", "Installed: kmod-kvdo-6.2.6.14-84.el8.x86_64", "Installed: libblockdev-kbd-2.24-8.el8.x86_64", "Installed: nss-softokn-3.79.0-5.el8.x86_64", "Installed: libaio-0.3.112-1.el8.x86_64", "Installed: device-mapper-multipath-0.8.4-20.el8.x86_64" ] } TASK [fedora.linux_system_roles.storage : show storage_pools] ****************** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:13 Thursday 21 July 2022 19:26:01 +0000 (0:02:53.759) 0:02:57.950 ********* ok: [/cache/centos-8.qcow2] => { "storage_pools": "VARIABLE IS NOT DEFINED!: 'storage_pools' is undefined" } TASK [fedora.linux_system_roles.storage : show storage_volumes] **************** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:18 Thursday 21 July 2022 19:26:01 +0000 (0:00:00.036) 0:02:57.987 ********* ok: [/cache/centos-8.qcow2] => { "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined" } TASK [fedora.linux_system_roles.storage : get required packages] *************** task path: 
/tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:23 Thursday 21 July 2022 19:26:01 +0000 (0:00:00.035) 0:02:58.023 ********* ok: [/cache/centos-8.qcow2] => { "actions": [], "changed": false, "crypts": [], "leaves": [], "mounts": [], "packages": [], "pools": [], "volumes": [] } TASK [fedora.linux_system_roles.storage : enable copr repositories if needed] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:35 Thursday 21 July 2022 19:26:02 +0000 (0:00:00.764) 0:02:58.787 ********* included: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml for /cache/centos-8.qcow2 TASK [fedora.linux_system_roles.storage : check if the COPR support packages should be installed] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:2 Thursday 21 July 2022 19:26:02 +0000 (0:00:00.042) 0:02:58.829 ********* TASK [fedora.linux_system_roles.storage : make sure COPR support packages are present] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:13 Thursday 21 July 2022 19:26:02 +0000 (0:00:00.030) 0:02:58.860 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : enable COPRs] ************************ task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:18 Thursday 21 July 2022 19:26:02 +0000 (0:00:00.034) 0:02:58.894 ********* TASK [fedora.linux_system_roles.storage : make sure required packages are installed] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:41 Thursday 21 July 2022 19:26:02 +0000 (0:00:00.032) 0:02:58.927 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [fedora.linux_system_roles.storage : get service facts] ******************* task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:47 Thursday 21 July 2022 19:26:04 +0000 (0:00:01.918) 0:03:00.845 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "blivet.service": { "name": "blivet.service", "source": "systemd", "state": "inactive", "status": "static" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "chrony-dnssrv@.service": { "name": "chrony-dnssrv@.service", "source": "systemd", "state": "unknown", "status": "static" }, "chrony-wait.service": { "name": 
"chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cockpit-motd.service": { "name": "cockpit-motd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cockpit-wsinstance-http.service": { "name": "cockpit-wsinstance-http.service", "source": "systemd", "state": "inactive", "status": "static" }, "cockpit-wsinstance-https-factory@.service": { "name": "cockpit-wsinstance-https-factory@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cockpit-wsinstance-https@.service": { "name": "cockpit-wsinstance-https@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cockpit.service": { "name": "cockpit.service", "source": "systemd", "state": "inactive", "status": "static" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cpupower.service": { "name": "cpupower.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "static" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "dbus-org.freedesktop.portable1.service": { "name": "dbus-org.freedesktop.portable1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "running", "status": "static" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { 
"name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fcoe.service": { "name": "fcoe.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "halt-local.service": { "name": "halt-local.service", "source": "systemd", "state": "inactive", "status": "static" }, "import-state.service": { "name": "import-state.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "iscsi-shutdown.service": { "name": "iscsi-shutdown.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iscsi.service": { "name": "iscsi.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iscsid.service": { "name": "iscsid.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "loadmodules.service": { "name": "loadmodules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "lvm2-activation-early.service": { 
"name": "lvm2-activation-early.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "lvm2-activation.service": { "name": "lvm2-activation.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "lvm2-pvscan@.service": { "name": "lvm2-pvscan@.service", "source": "systemd", "state": "unknown", "status": "static" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "mdadm-grow-continue@.service": { "name": "mdadm-grow-continue@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdadm-last-resort@.service": { "name": "mdadm-last-resort@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdcheck_continue.service": { "name": "mdcheck_continue.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdcheck_start.service": { "name": "mdcheck_start.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmon@.service": { "name": "mdmon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdmonitor-oneshot.service": { "name": "mdmonitor-oneshot.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmonitor.service": { "name": "mdmonitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "messagebus.service": { "name": "messagebus.service", "source": "systemd", "state": "active", "status": "static" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "multipathd.service": { "name": "multipathd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "ndctl-monitor.service": { "name": "ndctl-monitor.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-convert.service": { "name": "nfs-convert.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, 
"ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "oddjobd.service": { "name": "oddjobd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "packagekit-offline-update.service": { "name": "packagekit-offline-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "packagekit.service": { "name": "packagekit.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "running", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon.service": { "name": "quotaon.service", "source": "systemd", "state": "inactive", "status": "static" }, "rbdmap.service": { "name": "rbdmap.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rdisc.service": { "name": "rdisc.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "rngd-wake-threshold.service": { "name": "rngd-wake-threshold.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "run-r9de59112fe304590963dee5e60a1cf6f.service": { "name": "run-r9de59112fe304590963dee5e60a1cf6f.service", "source": "systemd", "state": "running", "status": "transient" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", 
"status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "running", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "active", "status": "enabled" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-resume@.service": { "name": "systemd-hibernate-resume@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, 
"systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-portabled.service": { "name": "systemd-portabled.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-quotacheck.service": { "name": "systemd-quotacheck.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-resolved.service": { "name": "systemd-resolved.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "masked" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": 
"systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "tcsd.service": { "name": "tcsd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "teamd@.service": { "name": "teamd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "timedatex.service": { "name": "timedatex.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "tuned.service": { "name": "tuned.service", "source": "systemd", "state": "running", "status": "enabled" }, "unbound-anchor.service": { "name": "unbound-anchor.service", "source": "systemd", "state": "stopped", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "vdo-start-by-dev@.service": { "name": "vdo-start-by-dev@.service", "source": "systemd", "state": "unknown", "status": "static" }, "vdo.service": { "name": "vdo.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "yppasswdd.service": { "name": "yppasswdd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ypserv.service": { "name": "ypserv.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ypxfrd.service": { "name": "ypxfrd.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] ***** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:53 Thursday 21 July 2022 19:26:06 +0000 (0:00:01.710) 0:03:02.556 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_cryptsetup_services": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] *** 
task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:58 Thursday 21 July 2022 19:26:06 +0000 (0:00:00.057) 0:03:02.614 ********* TASK [fedora.linux_system_roles.storage : manage the pools and volumes to match the specified state] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:64 Thursday 21 July 2022 19:26:06 +0000 (0:00:00.022) 0:03:02.636 ********* ok: [/cache/centos-8.qcow2] => { "actions": [], "changed": false, "crypts": [], "leaves": [], "mounts": [], "packages": [], "pools": [], "volumes": [] } TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:78 Thursday 21 July 2022 19:26:07 +0000 (0:00:00.594) 0:03:03.230 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:90 Thursday 21 July 2022 19:26:07 +0000 (0:00:00.038) 0:03:03.269 ********* TASK [fedora.linux_system_roles.storage : show blivet_output] ****************** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:96 Thursday 21 July 2022 19:26:07 +0000 (0:00:00.020) 0:03:03.289 ********* ok: [/cache/centos-8.qcow2] => { "blivet_output": { "actions": [], "changed": false, "crypts": [], "failed": false, "leaves": [], "mounts": [], "packages": [], "pools": [], "volumes": [] } } TASK [fedora.linux_system_roles.storage : set the list of pools for test verification] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:101 Thursday 21 July 2022 19:26:07 +0000 (0:00:00.082) 0:03:03.372 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : set the list of volumes for test verification] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:105 Thursday 21 July 2022 19:26:07 +0000 (0:00:00.070) 0:03:03.442 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : remove obsolete mounts] ************** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:121 Thursday 21 July 2022 19:26:07 +0000 (0:00:00.085) 0:03:03.528 ********* TASK [fedora.linux_system_roles.storage : tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:132 Thursday 21 July 2022 19:26:07 +0000 (0:00:00.068) 0:03:03.596 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : set up new/current mounts] *********** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:137 Thursday 21 July 2022 19:26:07 +0000 (0:00:00.023) 0:03:03.619 ********* TASK [fedora.linux_system_roles.storage : tell systemd to refresh its view of /etc/fstab] *** task path: 
/tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:148 Thursday 21 July 2022 19:26:07 +0000 (0:00:00.035) 0:03:03.655 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : retrieve facts for the /etc/crypttab file] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:156 Thursday 21 July 2022 19:26:07 +0000 (0:00:00.023) 0:03:03.678 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "stat": { "atime": 1658431449.4320083, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1643141385.117, "dev": 64513, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 135, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0600", "mtime": 1643141019.537, "nlink": 1, "path": "/etc/crypttab", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": "3147672035", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : manage /etc/crypttab to account for changes we just made] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:161 Thursday 21 July 2022 19:26:07 +0000 (0:00:00.463) 0:03:04.141 ********* TASK [fedora.linux_system_roles.storage : Update facts] ************************ task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:183 Thursday 21 July 2022 19:26:07 +0000 (0:00:00.024) 0:03:04.165 ********* ok: [/cache/centos-8.qcow2] META: role_complete for /cache/centos-8.qcow2 TASK [include_tasks] *********************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/tests_create_thinp_then_remove.yml:17 Thursday 21 July 2022 19:26:08 +0000 (0:00:00.955) 0:03:05.121 ********* included: /tmp/tmp_0pjp8ed/tests/storage/get_unused_disk.yml for /cache/centos-8.qcow2 TASK [Find unused disks in the system] ***************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/get_unused_disk.yml:2 Thursday 21 July 2022 19:26:08 +0000 (0:00:00.037) 0:03:05.159 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ] } TASK [Set unused_disks if necessary] ******************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/get_unused_disk.yml:9 Thursday 21 July 2022 19:26:10 +0000 (0:00:01.545) 0:03:06.705 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "unused_disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ] }, "changed": false } TASK [Exit playbook when there's not enough unused disks in the system] ******** task path: /tmp/tmp_0pjp8ed/tests/storage/get_unused_disk.yml:14 Thursday 21 July 2022 19:26:10 +0000 (0:00:00.039) 0:03:06.744 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Print unused disks] ****************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/get_unused_disk.yml:19 Thursday 21 July 2022 19:26:10 +0000 (0:00:00.038) 
0:03:06.783 ********* ok: [/cache/centos-8.qcow2] => { "unused_disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ] } TASK [Create a thinpool device] ************************************************ task path: /tmp/tmp_0pjp8ed/tests/storage/tests_create_thinp_then_remove.yml:21 Thursday 21 July 2022 19:26:10 +0000 (0:00:00.033) 0:03:06.816 ********* TASK [fedora.linux_system_roles.storage : set platform/version specific variables] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2 Thursday 21 July 2022 19:26:10 +0000 (0:00:00.042) 0:03:06.858 ********* included: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for /cache/centos-8.qcow2 TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2 Thursday 21 July 2022 19:26:10 +0000 (0:00:00.034) 0:03:06.893 ********* ok: [/cache/centos-8.qcow2] TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7 Thursday 21 July 2022 19:26:11 +0000 (0:00:00.569) 0:03:07.463 ********* skipping: [/cache/centos-8.qcow2] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [/cache/centos-8.qcow2] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [/cache/centos-8.qcow2] => (item=CentOS_8.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "vdo", "kmod-kvdo", "xfsprogs" ] }, "ansible_included_var_files": [ "/tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } ok: [/cache/centos-8.qcow2] => (item=CentOS_8.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "vdo", "kmod-kvdo", "xfsprogs" ] }, "ansible_included_var_files": [ "/tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } TASK [fedora.linux_system_roles.storage : define an empty list of pools to be used in testing] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5 Thursday 21 July 2022 19:26:11 +0000 (0:00:00.120) 0:03:07.583 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : define an empty list of volumes to be used in testing] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9 Thursday 21 July 2022 19:26:11 +0000 (0:00:00.065) 0:03:07.649 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : include the appropriate provider tasks] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13 Thursday 21 July 2022 19:26:11 +0000 
(0:00:00.063) 0:03:07.712 ********* redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount included: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for /cache/centos-8.qcow2 TASK [fedora.linux_system_roles.storage : get a list of rpm packages installed on host machine] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2 Thursday 21 July 2022 19:26:11 +0000 (0:00:00.055) 0:03:07.767 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : make sure blivet is available] ******* task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:7 Thursday 21 July 2022 19:26:11 +0000 (0:00:00.051) 0:03:07.819 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [fedora.linux_system_roles.storage : show storage_pools] ****************** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:13 Thursday 21 July 2022 19:26:13 +0000 (0:00:01.892) 0:03:09.711 ********* ok: [/cache/centos-8.qcow2] => { "storage_pools": [ { "disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ], "name": "vg1", "state": "present", "type": "lvm", "volumes": [ { "mount_point": "/opt/test1", "name": "lv1", "size": "3g", "thin": true, "thin_pool_name": "tpool1", "thin_pool_size": "10g" } ] } ] } TASK [fedora.linux_system_roles.storage : show storage_volumes] **************** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:18 Thursday 21 July 2022 19:26:13 +0000 (0:00:00.037) 0:03:09.748 ********* ok: [/cache/centos-8.qcow2] => { "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined" } TASK [fedora.linux_system_roles.storage : get required packages] *************** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:23 Thursday 21 July 2022 19:26:13 +0000 (0:00:00.034) 0:03:09.783 ********* ok: [/cache/centos-8.qcow2] => { "actions": [], "changed": false, "crypts": [], "leaves": [], "mounts": [], "packages": [ "lvm2" ], "pools": [], "volumes": [] } TASK [fedora.linux_system_roles.storage : enable copr repositories if needed] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:35 Thursday 21 July 2022 19:26:14 +0000 (0:00:00.973) 0:03:10.757 ********* included: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml for /cache/centos-8.qcow2 TASK [fedora.linux_system_roles.storage : check if the COPR support packages should be installed] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:2 Thursday 21 July 2022 19:26:14 +0000 (0:00:00.047) 0:03:10.804 ********* TASK [fedora.linux_system_roles.storage : make sure COPR support packages are present] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:13 Thursday 21 July 2022 19:26:14 +0000 (0:00:00.036) 0:03:10.840 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK 
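For reference, the storage_pools value printed by the "show storage_pools" task above maps directly onto the role's public interface. A minimal sketch of a play that would produce it is below; the values (vg1 on the three NVMe disks, a 3g thin volume lv1 in a 10g thin pool tpool1, mounted at /opt/test1) are taken from the log, while the play wrapper itself is assumed and may differ from the actual test playbook.

# Sketch only: reconstructed from the logged "show storage_pools" output.
# The hosts/include_role wrapper is an assumption, not taken from the log.
- hosts: all
  tasks:
    - name: Create a thinpool device
      include_role:
        name: fedora.linux_system_roles.storage
      vars:
        storage_pools:
          - name: vg1
            type: lvm
            state: present
            disks:
              - nvme0n1
              - nvme1n1
              - nvme2n1
            volumes:
              - name: lv1
                thin: true
                thin_pool_name: tpool1
                thin_pool_size: 10g
                size: 3g
                mount_point: /opt/test1
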
[fedora.linux_system_roles.storage : enable COPRs] ************************ task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:18 Thursday 21 July 2022 19:26:14 +0000 (0:00:00.035) 0:03:10.876 ********* TASK [fedora.linux_system_roles.storage : make sure required packages are installed] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:41 Thursday 21 July 2022 19:26:14 +0000 (0:00:00.031) 0:03:10.908 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [fedora.linux_system_roles.storage : get service facts] ******************* task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:47 Thursday 21 July 2022 19:26:16 +0000 (0:00:01.799) 0:03:12.707 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "blivet.service": { "name": "blivet.service", "source": "systemd", "state": "inactive", "status": "static" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "chrony-dnssrv@.service": { "name": "chrony-dnssrv@.service", "source": "systemd", "state": "unknown", "status": "static" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cockpit-motd.service": { "name": "cockpit-motd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cockpit-wsinstance-http.service": { "name": "cockpit-wsinstance-http.service", "source": "systemd", "state": "inactive", "status": "static" }, "cockpit-wsinstance-https-factory@.service": { "name": "cockpit-wsinstance-https-factory@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cockpit-wsinstance-https@.service": { "name": "cockpit-wsinstance-https@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cockpit.service": { "name": "cockpit.service", "source": "systemd", "state": "inactive", "status": "static" }, "console-getty.service": { 
"name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cpupower.service": { "name": "cpupower.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "static" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "dbus-org.freedesktop.portable1.service": { "name": "dbus-org.freedesktop.portable1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "running", "status": "static" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fcoe.service": { "name": "fcoe.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", 
"status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "halt-local.service": { "name": "halt-local.service", "source": "systemd", "state": "inactive", "status": "static" }, "import-state.service": { "name": "import-state.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "iscsi-shutdown.service": { "name": "iscsi-shutdown.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iscsi.service": { "name": "iscsi.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iscsid.service": { "name": "iscsid.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "loadmodules.service": { "name": "loadmodules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "lvm2-activation-early.service": { "name": "lvm2-activation-early.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "lvm2-activation.service": { "name": "lvm2-activation.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "lvm2-pvscan@.service": { "name": "lvm2-pvscan@.service", "source": "systemd", "state": "unknown", "status": "static" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "mdadm-grow-continue@.service": { "name": "mdadm-grow-continue@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdadm-last-resort@.service": { "name": "mdadm-last-resort@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdcheck_continue.service": { "name": "mdcheck_continue.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdcheck_start.service": { "name": "mdcheck_start.service", "source": "systemd", "state": "inactive", "status": 
"static" }, "mdmon@.service": { "name": "mdmon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdmonitor-oneshot.service": { "name": "mdmonitor-oneshot.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmonitor.service": { "name": "mdmonitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "messagebus.service": { "name": "messagebus.service", "source": "systemd", "state": "active", "status": "static" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "multipathd.service": { "name": "multipathd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "ndctl-monitor.service": { "name": "ndctl-monitor.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-convert.service": { "name": "nfs-convert.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "oddjobd.service": { "name": "oddjobd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "packagekit-offline-update.service": { "name": "packagekit-offline-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "packagekit.service": { "name": "packagekit.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "running", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon.service": { "name": "quotaon.service", "source": "systemd", "state": "inactive", "status": "static" }, "rbdmap.service": { "name": "rbdmap.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rdisc.service": { "name": "rdisc.service", "source": "systemd", "state": 
"inactive", "status": "disabled" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "rngd-wake-threshold.service": { "name": "rngd-wake-threshold.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "running", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "active", "status": "enabled" }, "system-update-cleanup.service": { "name": 
"system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-resume@.service": { "name": "systemd-hibernate-resume@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-portabled.service": { "name": "systemd-portabled.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", 
"status": "static" }, "systemd-quotacheck.service": { "name": "systemd-quotacheck.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-resolved.service": { "name": "systemd-resolved.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "masked" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "tcsd.service": { "name": "tcsd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "teamd@.service": { "name": "teamd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "timedatex.service": { "name": "timedatex.service", 
"source": "systemd", "state": "inactive", "status": "enabled" }, "tuned.service": { "name": "tuned.service", "source": "systemd", "state": "running", "status": "enabled" }, "unbound-anchor.service": { "name": "unbound-anchor.service", "source": "systemd", "state": "stopped", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "vdo-start-by-dev@.service": { "name": "vdo-start-by-dev@.service", "source": "systemd", "state": "unknown", "status": "static" }, "vdo.service": { "name": "vdo.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "yppasswdd.service": { "name": "yppasswdd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ypserv.service": { "name": "ypserv.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ypxfrd.service": { "name": "ypxfrd.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] ***** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:53 Thursday 21 July 2022 19:26:18 +0000 (0:00:01.543) 0:03:14.251 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_cryptsetup_services": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:58 Thursday 21 July 2022 19:26:18 +0000 (0:00:00.057) 0:03:14.308 ********* TASK [fedora.linux_system_roles.storage : manage the pools and volumes to match the specified state] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:64 Thursday 21 July 2022 19:26:18 +0000 (0:00:00.022) 0:03:14.331 ********* changed: [/cache/centos-8.qcow2] => { "actions": [ { "action": "create format", "device": "/dev/nvme2n1", "fs_type": "disklabel" }, { "action": "create device", "device": "/dev/nvme2n1p1", "fs_type": null }, { "action": "create format", "device": "/dev/nvme2n1p1", "fs_type": "lvmpv" }, { "action": "create format", "device": "/dev/nvme1n1", "fs_type": "disklabel" }, { "action": "create device", "device": "/dev/nvme1n1p1", "fs_type": null }, { "action": "create format", "device": "/dev/nvme1n1p1", "fs_type": "lvmpv" }, { "action": "create format", "device": "/dev/nvme0n1", "fs_type": "disklabel" }, { "action": "create device", "device": "/dev/nvme0n1p1", "fs_type": null }, { "action": "create format", "device": "/dev/nvme0n1p1", "fs_type": "lvmpv" }, { "action": "create device", "device": "/dev/vg1", "fs_type": null }, { "action": "create device", "device": "/dev/mapper/vg1-tpool1", "fs_type": null }, { "action": "create device", "device": "/dev/mapper/vg1-lv1", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/vg1-lv1", "fs_type": "xfs" } ], "changed": true, "crypts": [], "leaves": [ 
"/dev/sr0", "/dev/vda1", "/dev/sda", "/dev/sdb", "/dev/sdc", "/dev/vdb", "/dev/vdc", "/dev/vdd", "/dev/mapper/vg1-lv1" ], "mounts": [ { "dump": 0, "fstype": "xfs", "opts": "defaults", "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/vg1-lv1", "state": "mounted" } ], "packages": [ "lvm2", "xfsprogs" ], "pools": [ { "disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "vg1", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/vg1-lv1", "_kernel_device": "/dev/dm-4", "_mount_id": "/dev/mapper/vg1-lv1", "_raw_device": "/dev/mapper/vg1-lv1", "_raw_kernel_device": "/dev/dm-4", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "name": "lv1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "size": "3g", "state": "present", "thin": true, "thin_pool_name": "tpool1", "thin_pool_size": "10g", "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:78 Thursday 21 July 2022 19:26:21 +0000 (0:00:03.196) 0:03:17.527 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:90 Thursday 21 July 2022 19:26:21 +0000 (0:00:00.039) 0:03:17.566 ********* TASK [fedora.linux_system_roles.storage : show blivet_output] ****************** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:96 Thursday 21 July 2022 19:26:21 +0000 (0:00:00.024) 0:03:17.591 ********* ok: [/cache/centos-8.qcow2] => { "blivet_output": { "actions": [ { "action": "create format", "device": "/dev/nvme2n1", "fs_type": "disklabel" }, { "action": "create device", "device": "/dev/nvme2n1p1", "fs_type": null }, { "action": "create format", "device": "/dev/nvme2n1p1", "fs_type": "lvmpv" }, { "action": "create format", "device": "/dev/nvme1n1", "fs_type": "disklabel" }, { "action": "create device", "device": "/dev/nvme1n1p1", "fs_type": null }, { "action": "create format", "device": "/dev/nvme1n1p1", "fs_type": "lvmpv" }, { "action": "create format", "device": "/dev/nvme0n1", "fs_type": "disklabel" }, { "action": "create device", "device": "/dev/nvme0n1p1", "fs_type": null }, { "action": "create format", "device": "/dev/nvme0n1p1", "fs_type": "lvmpv" }, { "action": "create device", "device": "/dev/vg1", "fs_type": null }, { "action": "create device", "device": 
"/dev/mapper/vg1-tpool1", "fs_type": null }, { "action": "create device", "device": "/dev/mapper/vg1-lv1", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/vg1-lv1", "fs_type": "xfs" } ], "changed": true, "crypts": [], "failed": false, "leaves": [ "/dev/sr0", "/dev/vda1", "/dev/sda", "/dev/sdb", "/dev/sdc", "/dev/vdb", "/dev/vdc", "/dev/vdd", "/dev/mapper/vg1-lv1" ], "mounts": [ { "dump": 0, "fstype": "xfs", "opts": "defaults", "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/vg1-lv1", "state": "mounted" } ], "packages": [ "lvm2", "xfsprogs" ], "pools": [ { "disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "vg1", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/vg1-lv1", "_kernel_device": "/dev/dm-4", "_mount_id": "/dev/mapper/vg1-lv1", "_raw_device": "/dev/mapper/vg1-lv1", "_raw_kernel_device": "/dev/dm-4", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "name": "lv1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "size": "3g", "state": "present", "thin": true, "thin_pool_name": "tpool1", "thin_pool_size": "10g", "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } } TASK [fedora.linux_system_roles.storage : set the list of pools for test verification] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:101 Thursday 21 July 2022 19:26:21 +0000 (0:00:00.084) 0:03:17.675 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_pools_list": [ { "disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "vg1", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/vg1-lv1", "_kernel_device": "/dev/dm-4", "_mount_id": "/dev/mapper/vg1-lv1", "_raw_device": "/dev/mapper/vg1-lv1", "_raw_kernel_device": "/dev/dm-4", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "name": "lv1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], 
"raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "size": "3g", "state": "present", "thin": true, "thin_pool_name": "tpool1", "thin_pool_size": "10g", "type": "lvm", "vdo_pool_size": null } ] } ] }, "changed": false } TASK [fedora.linux_system_roles.storage : set the list of volumes for test verification] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:105 Thursday 21 July 2022 19:26:21 +0000 (0:00:00.042) 0:03:17.718 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : remove obsolete mounts] ************** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:121 Thursday 21 July 2022 19:26:21 +0000 (0:00:00.038) 0:03:17.757 ********* TASK [fedora.linux_system_roles.storage : tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:132 Thursday 21 July 2022 19:26:21 +0000 (0:00:00.039) 0:03:17.796 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.storage : set up new/current mounts] *********** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:137 Thursday 21 July 2022 19:26:22 +0000 (0:00:00.887) 0:03:18.684 ********* redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount changed: [/cache/centos-8.qcow2] => (item={'src': '/dev/mapper/vg1-lv1', 'path': '/opt/test1', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted'}) => { "ansible_loop_var": "mount_info", "backup_file": "", "boot": "yes", "changed": true, "dump": "0", "fstab": "/etc/fstab", "fstype": "xfs", "mount_info": { "dump": 0, "fstype": "xfs", "opts": "defaults", "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/vg1-lv1", "state": "mounted" }, "name": "/opt/test1", "opts": "defaults", "passno": "0", "src": "/dev/mapper/vg1-lv1" } TASK [fedora.linux_system_roles.storage : tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:148 Thursday 21 July 2022 19:26:23 +0000 (0:00:00.588) 0:03:19.272 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.storage : retrieve facts for the /etc/crypttab file] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:156 Thursday 21 July 2022 19:26:23 +0000 (0:00:00.611) 0:03:19.884 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "stat": { "atime": 1658431449.4320083, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1643141385.117, "dev": 64513, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 135, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0600", "mtime": 1643141019.537, "nlink": 1, "path": "/etc/crypttab", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": "3147672035", "wgrp": 
false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : manage /etc/crypttab to account for changes we just made] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:161 Thursday 21 July 2022 19:26:24 +0000 (0:00:00.384) 0:03:20.268 ********* TASK [fedora.linux_system_roles.storage : Update facts] ************************ task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:183 Thursday 21 July 2022 19:26:24 +0000 (0:00:00.023) 0:03:20.292 ********* ok: [/cache/centos-8.qcow2] META: role_complete for /cache/centos-8.qcow2 TASK [include_tasks] *********************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/tests_create_thinp_then_remove.yml:38 Thursday 21 July 2022 19:26:25 +0000 (0:00:00.981) 0:03:21.274 ********* included: /tmp/tmp_0pjp8ed/tests/storage/verify-role-results.yml for /cache/centos-8.qcow2 TASK [Print out pool information] ********************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-role-results.yml:1 Thursday 21 July 2022 19:26:25 +0000 (0:00:00.044) 0:03:21.318 ********* ok: [/cache/centos-8.qcow2] => { "_storage_pools_list": [ { "disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "vg1", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/vg1-lv1", "_kernel_device": "/dev/dm-4", "_mount_id": "/dev/mapper/vg1-lv1", "_raw_device": "/dev/mapper/vg1-lv1", "_raw_kernel_device": "/dev/dm-4", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "name": "lv1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "size": "3g", "state": "present", "thin": true, "thin_pool_name": "tpool1", "thin_pool_size": "10g", "type": "lvm", "vdo_pool_size": null } ] } ] } TASK [Print out volume information] ******************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-role-results.yml:6 Thursday 21 July 2022 19:26:25 +0000 (0:00:00.052) 0:03:21.371 ********* skipping: [/cache/centos-8.qcow2] => {} TASK [Collect info about the volumes.] 
***************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-role-results.yml:14 Thursday 21 July 2022 19:26:25 +0000 (0:00:00.039) 0:03:21.410 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "info": { "/dev/mapper/vg1-lv1": { "fstype": "xfs", "label": "", "name": "/dev/mapper/vg1-lv1", "size": "3G", "type": "lvm", "uuid": "1ea60a5c-f51b-4cc6-9e7d-084fe537536f" }, "/dev/mapper/vg1-tpool1": { "fstype": "", "label": "", "name": "/dev/mapper/vg1-tpool1", "size": "10G", "type": "lvm", "uuid": "" }, "/dev/mapper/vg1-tpool1-tpool": { "fstype": "", "label": "", "name": "/dev/mapper/vg1-tpool1-tpool", "size": "10G", "type": "lvm", "uuid": "" }, "/dev/mapper/vg1-tpool1_tdata": { "fstype": "", "label": "", "name": "/dev/mapper/vg1-tpool1_tdata", "size": "10G", "type": "lvm", "uuid": "" }, "/dev/mapper/vg1-tpool1_tmeta": { "fstype": "", "label": "", "name": "/dev/mapper/vg1-tpool1_tmeta", "size": "12M", "type": "lvm", "uuid": "" }, "/dev/nvme0n1": { "fstype": "", "label": "", "name": "/dev/nvme0n1", "size": "10G", "type": "disk", "uuid": "" }, "/dev/nvme0n1p1": { "fstype": "LVM2_member", "label": "", "name": "/dev/nvme0n1p1", "size": "10G", "type": "partition", "uuid": "ken164-erkD-vf0C-nvLO-nhmh-YBzF-bzv8kZ" }, "/dev/nvme1n1": { "fstype": "", "label": "", "name": "/dev/nvme1n1", "size": "10G", "type": "disk", "uuid": "" }, "/dev/nvme1n1p1": { "fstype": "LVM2_member", "label": "", "name": "/dev/nvme1n1p1", "size": "10G", "type": "partition", "uuid": "eBnMVU-t1UJ-ttot-SKPn-js6i-mJjN-e9h7dx" }, "/dev/nvme2n1": { "fstype": "", "label": "", "name": "/dev/nvme2n1", "size": "10G", "type": "disk", "uuid": "" }, "/dev/nvme2n1p1": { "fstype": "LVM2_member", "label": "", "name": "/dev/nvme2n1p1", "size": "10G", "type": "partition", "uuid": "xZT8wS-OKti-fe7H-wOMe-VpTY-S1Yx-RZZXxW" }, "/dev/sda": { "fstype": "", "label": "", "name": "/dev/sda", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdb": { "fstype": "", "label": "", "name": "/dev/sdb", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdc": { "fstype": "", "label": "", "name": "/dev/sdc", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sr0": { "fstype": "iso9660", "label": "cidata", "name": "/dev/sr0", "size": "364K", "type": "rom", "uuid": "2022-07-21-19-22-43-00" }, "/dev/vda": { "fstype": "", "label": "", "name": "/dev/vda", "size": "10G", "type": "disk", "uuid": "" }, "/dev/vda1": { "fstype": "xfs", "label": "", "name": "/dev/vda1", "size": "10G", "type": "partition", "uuid": "395b9844-e404-4857-afbb-c6edccaf72f3" }, "/dev/vdb": { "fstype": "", "label": "", "name": "/dev/vdb", "size": "10G", "type": "disk", "uuid": "" }, "/dev/vdc": { "fstype": "", "label": "", "name": "/dev/vdc", "size": "10G", "type": "disk", "uuid": "" }, "/dev/vdd": { "fstype": "", "label": "", "name": "/dev/vdd", "size": "10G", "type": "disk", "uuid": "" } } } TASK [Read the /etc/fstab file for volume existence] *************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-role-results.yml:19 Thursday 21 July 2022 19:26:25 +0000 (0:00:00.504) 0:03:21.914 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "cmd": [ "cat", "/etc/fstab" ], "delta": "0:00:00.002681", "end": "2022-07-21 19:26:25.762370", "rc": 0, "start": "2022-07-21 19:26:25.759689" } STDOUT: # # /etc/fstab # Created by anaconda on Tue Jan 25 20:03:39 2022 # # Accessible filesystems, by reference, are maintained under '/dev/disk/'. # See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info. 
# # After editing this file, run 'systemctl daemon-reload' to update systemd # units generated from this file. # UUID=395b9844-e404-4857-afbb-c6edccaf72f3 / xfs defaults 0 0 /dev/mapper/vg1-lv1 /opt/test1 xfs defaults 0 0 TASK [Read the /etc/crypttab file] ********************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/verify-role-results.yml:24 Thursday 21 July 2022 19:26:26 +0000 (0:00:00.385) 0:03:22.299 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "cmd": [ "cat", "/etc/crypttab" ], "delta": "0:00:00.003033", "end": "2022-07-21 19:26:26.168963", "failed_when_result": false, "rc": 0, "start": "2022-07-21 19:26:26.165930" } TASK [Verify the volumes listed in storage_pools were correctly managed] ******* task path: /tmp/tmp_0pjp8ed/tests/storage/verify-role-results.yml:33 Thursday 21 July 2022 19:26:26 +0000 (0:00:00.406) 0:03:22.706 ********* included: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool.yml for /cache/centos-8.qcow2 => (item={'disks': ['nvme0n1', 'nvme1n1', 'nvme2n1'], 'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'name': 'vg1', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'state': 'present', 'type': 'lvm', 'volumes': [{'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'name': 'lv1', 'raid_level': None, 'size': '3g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'thin_pool_name': 'tpool1', 'thin_pool_size': '10g', 'thin': True, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/vg1-lv1', '_raw_device': '/dev/mapper/vg1-lv1', '_mount_id': '/dev/mapper/vg1-lv1', '_kernel_device': '/dev/dm-4', '_raw_kernel_device': '/dev/dm-4'}], 'raid_chunk_size': None}) TASK [Set _storage_pool_tests] ************************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool.yml:5 Thursday 21 July 2022 19:26:26 +0000 (0:00:00.080) 0:03:22.786 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_pool_tests": [ "members", "volumes" ] }, "changed": false } TASK [include_tasks] *********************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool.yml:18 Thursday 21 July 2022 19:26:26 +0000 (0:00:00.031) 0:03:22.817 ********* included: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml for /cache/centos-8.qcow2 => (item=members) included: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-volumes.yml for /cache/centos-8.qcow2 => (item=volumes) TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml:1 Thursday 21 July 2022 19:26:26 +0000 (0:00:00.045) 0:03:22.863 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_expected_pv_count": "3", "_storage_test_pool_pvs_lvm": [ "/dev/nvme0n1p1", 
"/dev/nvme1n1p1", "/dev/nvme2n1p1" ] }, "changed": false } TASK [Get the canonical device path for each member device] ******************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml:6 Thursday 21 July 2022 19:26:26 +0000 (0:00:00.062) 0:03:22.925 ********* ok: [/cache/centos-8.qcow2] => (item=/dev/nvme0n1p1) => { "ansible_loop_var": "pv", "changed": false, "device": "/dev/nvme0n1p1", "pv": "/dev/nvme0n1p1" } ok: [/cache/centos-8.qcow2] => (item=/dev/nvme1n1p1) => { "ansible_loop_var": "pv", "changed": false, "device": "/dev/nvme1n1p1", "pv": "/dev/nvme1n1p1" } ok: [/cache/centos-8.qcow2] => (item=/dev/nvme2n1p1) => { "ansible_loop_var": "pv", "changed": false, "device": "/dev/nvme2n1p1", "pv": "/dev/nvme2n1p1" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml:15 Thursday 21 July 2022 19:26:27 +0000 (0:00:01.182) 0:03:24.108 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "__pvs_lvm_len": "3" }, "changed": false } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml:19 Thursday 21 July 2022 19:26:27 +0000 (0:00:00.050) 0:03:24.159 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_pool_pvs": [ "/dev/nvme0n1p1", "/dev/nvme1n1p1", "/dev/nvme2n1p1" ] }, "changed": false } TASK [Verify PV count] ********************************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml:23 Thursday 21 July 2022 19:26:28 +0000 (0:00:00.052) 0:03:24.211 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml:29 Thursday 21 July 2022 19:26:28 +0000 (0:00:00.049) 0:03:24.261 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_expected_pv_type": "disk" }, "changed": false } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml:33 Thursday 21 July 2022 19:26:28 +0000 (0:00:00.037) 0:03:24.298 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_expected_pv_type": "partition" }, "changed": false } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml:37 Thursday 21 July 2022 19:26:28 +0000 (0:00:00.048) 0:03:24.347 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check the type of each PV] *********************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml:41 Thursday 21 July 2022 19:26:28 +0000 (0:00:00.022) 0:03:24.370 ********* ok: [/cache/centos-8.qcow2] => (item=/dev/nvme0n1p1) => { "ansible_loop_var": "pv", "changed": false, "pv": "/dev/nvme0n1p1" } MSG: All assertions passed ok: [/cache/centos-8.qcow2] => (item=/dev/nvme1n1p1) => { "ansible_loop_var": "pv", "changed": false, "pv": "/dev/nvme1n1p1" } MSG: All assertions passed ok: [/cache/centos-8.qcow2] => (item=/dev/nvme2n1p1) => { "ansible_loop_var": "pv", "changed": false, "pv": "/dev/nvme2n1p1" } MSG: All assertions passed TASK [Check MD RAID] 
*********************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml:50 Thursday 21 July 2022 19:26:28 +0000 (0:00:00.067) 0:03:24.437 ********* included: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-md.yml for /cache/centos-8.qcow2 TASK [get information about RAID] ********************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-md.yml:6 Thursday 21 July 2022 19:26:28 +0000 (0:00:00.041) 0:03:24.479 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-md.yml:12 Thursday 21 July 2022 19:26:28 +0000 (0:00:00.022) 0:03:24.502 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-md.yml:16 Thursday 21 July 2022 19:26:28 +0000 (0:00:00.025) 0:03:24.527 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-md.yml:20 Thursday 21 July 2022 19:26:28 +0000 (0:00:00.027) 0:03:24.554 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [check RAID active devices count] ***************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-md.yml:24 Thursday 21 July 2022 19:26:28 +0000 (0:00:00.033) 0:03:24.588 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [check RAID spare devices count] ****************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-md.yml:30 Thursday 21 July 2022 19:26:28 +0000 (0:00:00.029) 0:03:24.617 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [check RAID metadata version] ********************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-md.yml:36 Thursday 21 July 2022 19:26:28 +0000 (0:00:00.024) 0:03:24.642 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-md.yml:44 Thursday 21 July 2022 19:26:28 +0000 (0:00:00.028) 0:03:24.670 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_md_active_devices_re": null, "storage_test_md_metadata_version_re": null, "storage_test_md_spare_devices_re": null }, "changed": false } TASK [Check LVM RAID] ********************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml:53 Thursday 21 July 2022 19:26:28 +0000 (0:00:00.036) 0:03:24.707 ********* included: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-members-lvmraid.yml for /cache/centos-8.qcow2 TASK [Validate pool member LVM RAID settings] ********************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-members-lvmraid.yml:1 Thursday 21 July 2022 19:26:28 +0000 (0:00:00.048) 0:03:24.755 ********* included: 
/tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-lvmraid.yml for /cache/centos-8.qcow2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'name': 'lv1', 'raid_level': None, 'size': '3g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'thin_pool_name': 'tpool1', 'thin_pool_size': '10g', 'thin': True, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/vg1-lv1', '_raw_device': '/dev/mapper/vg1-lv1', '_mount_id': '/dev/mapper/vg1-lv1', '_kernel_device': '/dev/dm-4', '_raw_kernel_device': '/dev/dm-4'}) TASK [Get information about LVM RAID] ****************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-lvmraid.yml:3 Thursday 21 July 2022 19:26:28 +0000 (0:00:00.043) 0:03:24.798 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check that volume is LVM RAID] ******************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-lvmraid.yml:8 Thursday 21 July 2022 19:26:28 +0000 (0:00:00.027) 0:03:24.826 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-lvmraid.yml:12 Thursday 21 July 2022 19:26:28 +0000 (0:00:00.027) 0:03:24.853 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check Thin Pools] ******************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml:56 Thursday 21 July 2022 19:26:28 +0000 (0:00:00.056) 0:03:24.909 ********* included: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-members-thin.yml for /cache/centos-8.qcow2 TASK [Validate pool member thinpool settings] ********************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-members-thin.yml:1 Thursday 21 July 2022 19:26:28 +0000 (0:00:00.044) 0:03:24.954 ********* included: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-thin.yml for /cache/centos-8.qcow2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'name': 'lv1', 'raid_level': None, 'size': '3g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'thin_pool_name': 'tpool1', 'thin_pool_size': '10g', 'thin': True, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 
'raid_metadata_version': None, '_device': '/dev/mapper/vg1-lv1', '_raw_device': '/dev/mapper/vg1-lv1', '_mount_id': '/dev/mapper/vg1-lv1', '_kernel_device': '/dev/dm-4', '_raw_kernel_device': '/dev/dm-4'}) TASK [Get information about thinpool] ****************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-thin.yml:3 Thursday 21 July 2022 19:26:28 +0000 (0:00:00.042) 0:03:24.996 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "cmd": [ "lvs", "--noheading", "-o", "pool_lv", "--select", "lv_name=lv1&&segtype=thin", "vg1" ], "delta": "0:00:00.041750", "end": "2022-07-21 19:26:28.883655", "rc": 0, "start": "2022-07-21 19:26:28.841905" } STDOUT: tpool1 TASK [Check that volume is in correct thinpool (when thinp name is provided)] *** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-thin.yml:8 Thursday 21 July 2022 19:26:29 +0000 (0:00:00.427) 0:03:25.423 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Check that volume is in thinpool (when thinp name is not provided)] ****** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-thin.yml:13 Thursday 21 July 2022 19:26:29 +0000 (0:00:00.056) 0:03:25.479 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-thin.yml:17 Thursday 21 July 2022 19:26:29 +0000 (0:00:00.059) 0:03:25.539 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_lvmraid_status": null }, "changed": false } TASK [Check member encryption] ************************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml:59 Thursday 21 July 2022 19:26:29 +0000 (0:00:00.041) 0:03:25.580 ********* included: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-members-encryption.yml for /cache/centos-8.qcow2 TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-members-encryption.yml:4 Thursday 21 July 2022 19:26:29 +0000 (0:00:00.046) 0:03:25.627 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Validate pool member LUKS settings] ************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-members-encryption.yml:8 Thursday 21 July 2022 19:26:29 +0000 (0:00:00.053) 0:03:25.681 ********* skipping: [/cache/centos-8.qcow2] => (item=/dev/nvme0n1p1) => { "_storage_test_pool_member_path": "/dev/nvme0n1p1", "ansible_loop_var": "_storage_test_pool_member_path", "changed": false, "skip_reason": "Conditional result was False" } skipping: [/cache/centos-8.qcow2] => (item=/dev/nvme1n1p1) => { "_storage_test_pool_member_path": "/dev/nvme1n1p1", "ansible_loop_var": "_storage_test_pool_member_path", "changed": false, "skip_reason": "Conditional result was False" } skipping: [/cache/centos-8.qcow2] => (item=/dev/nvme2n1p1) => { "_storage_test_pool_member_path": "/dev/nvme2n1p1", "ansible_loop_var": "_storage_test_pool_member_path", "changed": false, "skip_reason": "Conditional result was False" } TASK [Validate pool member crypttab entries] *********************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-members-encryption.yml:15 Thursday 21 July 2022 19:26:29 +0000 (0:00:00.029) 
0:03:25.711 ********* included: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml for /cache/centos-8.qcow2 => (item=/dev/nvme0n1p1) included: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml for /cache/centos-8.qcow2 => (item=/dev/nvme1n1p1) included: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml for /cache/centos-8.qcow2 => (item=/dev/nvme2n1p1) TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml:1 Thursday 21 July 2022 19:26:29 +0000 (0:00:00.057) 0:03:25.768 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_crypttab_entries": [] }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml:4 Thursday 21 July 2022 19:26:29 +0000 (0:00:00.050) 0:03:25.819 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml:9 Thursday 21 July 2022 19:26:29 +0000 (0:00:00.046) 0:03:25.865 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml:15 Thursday 21 July 2022 19:26:29 +0000 (0:00:00.033) 0:03:25.899 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml:21 Thursday 21 July 2022 19:26:29 +0000 (0:00:00.033) 0:03:25.933 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml:27 Thursday 21 July 2022 19:26:29 +0000 (0:00:00.034) 0:03:25.967 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_crypttab_entries": null }, "changed": false } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml:1 Thursday 21 July 2022 19:26:29 +0000 (0:00:00.031) 0:03:25.999 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_crypttab_entries": [] }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml:4 Thursday 21 July 2022 19:26:29 +0000 (0:00:00.046) 0:03:26.045 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml:9 Thursday 21 July 2022 19:26:29 +0000 (0:00:00.046) 0:03:26.092 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml:15 Thursday 21 July 
2022 19:26:29 +0000 (0:00:00.036) 0:03:26.128 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml:21 Thursday 21 July 2022 19:26:29 +0000 (0:00:00.033) 0:03:26.162 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml:27 Thursday 21 July 2022 19:26:30 +0000 (0:00:00.047) 0:03:26.210 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_crypttab_entries": null }, "changed": false } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml:1 Thursday 21 July 2022 19:26:30 +0000 (0:00:00.034) 0:03:26.245 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_crypttab_entries": [] }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml:4 Thursday 21 July 2022 19:26:30 +0000 (0:00:00.045) 0:03:26.290 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml:9 Thursday 21 July 2022 19:26:30 +0000 (0:00:00.045) 0:03:26.336 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml:15 Thursday 21 July 2022 19:26:30 +0000 (0:00:00.033) 0:03:26.369 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml:21 Thursday 21 July 2022 19:26:30 +0000 (0:00:00.037) 0:03:26.407 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml:27 Thursday 21 July 2022 19:26:30 +0000 (0:00:00.045) 0:03:26.452 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_crypttab_entries": null }, "changed": false } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-members-encryption.yml:22 Thursday 21 July 2022 19:26:30 +0000 (0:00:00.069) 0:03:26.521 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_crypttab_key_file": null }, "changed": false } TASK [Check VDO] *************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml:62 Thursday 21 July 2022 19:26:30 +0000 (0:00:00.102) 0:03:26.623 ********* included: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-members-vdo.yml for /cache/centos-8.qcow2 
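(For manual verification, the thin-pool membership and crypttab assertions above can be reproduced on the test VM with the same commands the test tasks invoke, copied from the "cmd" arrays logged above; this is a reference sketch, not an excerpt from the test playbooks:

    # Confirm lv1 is a thin LV and report which pool it belongs to; expected output here: tpool1
    lvs --noheading -o pool_lv --select 'lv_name=lv1&&segtype=thin' vg1

    # Confirm no pool member has an /etc/crypttab entry (encryption is false for vg1); expected output: empty
    cat /etc/crypttab

In the log these appear to run as command tasks whose stdout is then checked by the subsequent assert tasks that report "All assertions passed".)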
TASK [Validate pool member VDO settings] *************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-members-vdo.yml:1 Thursday 21 July 2022 19:26:30 +0000 (0:00:00.050) 0:03:26.674 ********* included: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-vdo.yml for /cache/centos-8.qcow2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'name': 'lv1', 'raid_level': None, 'size': '3g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'thin_pool_name': 'tpool1', 'thin_pool_size': '10g', 'thin': True, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/vg1-lv1', '_raw_device': '/dev/mapper/vg1-lv1', '_mount_id': '/dev/mapper/vg1-lv1', '_kernel_device': '/dev/dm-4', '_raw_kernel_device': '/dev/dm-4'}) TASK [get information about VDO deduplication] ********************************* task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-vdo.yml:3 Thursday 21 July 2022 19:26:30 +0000 (0:00:00.050) 0:03:26.724 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-vdo.yml:8 Thursday 21 July 2022 19:26:30 +0000 (0:00:00.024) 0:03:26.749 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [assert] ****************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-vdo.yml:11 Thursday 21 July 2022 19:26:30 +0000 (0:00:00.023) 0:03:26.772 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [assert] ****************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-vdo.yml:16 Thursday 21 July 2022 19:26:30 +0000 (0:00:00.022) 0:03:26.795 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-vdo.yml:21 Thursday 21 July 2022 19:26:30 +0000 (0:00:00.023) 0:03:26.819 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [assert] ****************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-vdo.yml:24 Thursday 21 July 2022 19:26:30 +0000 (0:00:00.022) 0:03:26.841 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [assert] ****************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-vdo.yml:29 Thursday 21 July 2022 19:26:30 +0000 (0:00:00.022) 0:03:26.863 ********* skipping: 
[/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-vdo.yml:39 Thursday 21 July 2022 19:26:30 +0000 (0:00:00.021) 0:03:26.885 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_vdo_status": null }, "changed": false } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml:65 Thursday 21 July 2022 19:26:30 +0000 (0:00:00.033) 0:03:26.919 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "__pvs_lvm_len": null, "_storage_test_expected_pv_count": null, "_storage_test_expected_pv_type": null, "_storage_test_pool_pvs": [], "_storage_test_pool_pvs_lvm": [] }, "changed": false } TASK [verify the volumes] ****************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-volumes.yml:3 Thursday 21 July 2022 19:26:30 +0000 (0:00:00.031) 0:03:26.950 ********* included: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume.yml for /cache/centos-8.qcow2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'name': 'lv1', 'raid_level': None, 'size': '3g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'thin_pool_name': 'tpool1', 'thin_pool_size': '10g', 'thin': True, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/vg1-lv1', '_raw_device': '/dev/mapper/vg1-lv1', '_mount_id': '/dev/mapper/vg1-lv1', '_kernel_device': '/dev/dm-4', '_raw_kernel_device': '/dev/dm-4'}) TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume.yml:2 Thursday 21 July 2022 19:26:30 +0000 (0:00:00.050) 0:03:27.000 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_volume_present": true, "_storage_volume_tests": [ "mount", "fstab", "fs", "device", "encryption", "md", "size", "cache" ] }, "changed": false } TASK [include_tasks] *********************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume.yml:10 Thursday 21 July 2022 19:26:30 +0000 (0:00:00.047) 0:03:27.048 ********* included: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-mount.yml for /cache/centos-8.qcow2 => (item=mount) included: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-fstab.yml for /cache/centos-8.qcow2 => (item=fstab) included: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-fs.yml for /cache/centos-8.qcow2 => (item=fs) included: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-device.yml for /cache/centos-8.qcow2 => (item=device) included: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml for /cache/centos-8.qcow2 => (item=encryption) included: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-md.yml for /cache/centos-8.qcow2 => (item=md) included: 
/tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-size.yml for /cache/centos-8.qcow2 => (item=size) included: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-cache.yml for /cache/centos-8.qcow2 => (item=cache) TASK [Get expected mount device based on device type] ************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-mount.yml:6 Thursday 21 July 2022 19:26:30 +0000 (0:00:00.085) 0:03:27.133 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_device_path": "/dev/mapper/vg1-lv1" }, "changed": false } TASK [Set some facts] ********************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-mount.yml:10 Thursday 21 July 2022 19:26:30 +0000 (0:00:00.042) 0:03:27.175 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_mount_device_matches": [ { "block_available": 770083, "block_size": 4096, "block_total": 783872, "block_used": 13789, "device": "/dev/mapper/vg1-lv1", "fstype": "xfs", "inode_available": 1572861, "inode_total": 1572864, "inode_used": 3, "mount": "/opt/test1", "options": "rw,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=64k,sunit=128,swidth=128,noquota", "size_available": 3154259968, "size_total": 3210739712, "uuid": "1ea60a5c-f51b-4cc6-9e7d-084fe537536f" } ], "storage_test_mount_expected_match_count": "1", "storage_test_mount_point_matches": [ { "block_available": 770083, "block_size": 4096, "block_total": 783872, "block_used": 13789, "device": "/dev/mapper/vg1-lv1", "fstype": "xfs", "inode_available": 1572861, "inode_total": 1572864, "inode_used": 3, "mount": "/opt/test1", "options": "rw,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=64k,sunit=128,swidth=128,noquota", "size_available": 3154259968, "size_total": 3210739712, "uuid": "1ea60a5c-f51b-4cc6-9e7d-084fe537536f" } ], "storage_test_swap_expected_matches": "0" }, "changed": false } TASK [Verify the current mount state by device] ******************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-mount.yml:20 Thursday 21 July 2022 19:26:31 +0000 (0:00:00.056) 0:03:27.232 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Verify the current mount state by mount point] *************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-mount.yml:29 Thursday 21 July 2022 19:26:31 +0000 (0:00:00.052) 0:03:27.284 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Verify the mount fs type] ************************************************ task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-mount.yml:37 Thursday 21 July 2022 19:26:31 +0000 (0:00:00.047) 0:03:27.332 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [command] ***************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-mount.yml:46 Thursday 21 July 2022 19:26:31 +0000 (0:00:00.048) 0:03:27.381 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Gather swap info] ******************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-mount.yml:50 Thursday 21 July 2022 19:26:31 +0000 (0:00:00.022) 0:03:27.403 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify swap status] 
****************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-mount.yml:55 Thursday 21 July 2022 19:26:31 +0000 (0:00:00.022) 0:03:27.426 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Unset facts] ************************************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-mount.yml:65 Thursday 21 July 2022 19:26:31 +0000 (0:00:00.024) 0:03:27.450 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_mount_device_matches": null, "storage_test_mount_expected_match_count": null, "storage_test_mount_point_matches": null, "storage_test_swap_expected_matches": null, "storage_test_swaps": null, "storage_test_sys_node": null }, "changed": false } TASK [Set some variables for fstab checking] *********************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-fstab.yml:2 Thursday 21 July 2022 19:26:31 +0000 (0:00:00.033) 0:03:27.483 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": "1", "storage_test_fstab_expected_mount_options_matches": "1", "storage_test_fstab_expected_mount_point_matches": "1", "storage_test_fstab_id_matches": [ "/dev/mapper/vg1-lv1 " ], "storage_test_fstab_mount_options_matches": [ " /opt/test1 xfs defaults " ], "storage_test_fstab_mount_point_matches": [ " /opt/test1 " ] }, "changed": false } TASK [Verify that the device identifier appears in /etc/fstab] ***************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-fstab.yml:12 Thursday 21 July 2022 19:26:31 +0000 (0:00:00.059) 0:03:27.542 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Verify the fstab mount point] ******************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-fstab.yml:19 Thursday 21 July 2022 19:26:31 +0000 (0:00:00.047) 0:03:27.589 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Verify mount_options] **************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-fstab.yml:25 Thursday 21 July 2022 19:26:31 +0000 (0:00:00.052) 0:03:27.642 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clean up variables] ****************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-fstab.yml:34 Thursday 21 July 2022 19:26:31 +0000 (0:00:00.040) 0:03:27.682 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": null, "storage_test_fstab_expected_mount_options_matches": null, "storage_test_fstab_expected_mount_point_matches": null, "storage_test_fstab_id_matches": null, "storage_test_fstab_mount_options_matches": null, "storage_test_fstab_mount_point_matches": null }, "changed": false } TASK [Verify fs type] ********************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-fs.yml:4 Thursday 21 July 2022 19:26:31 +0000 (0:00:00.036) 0:03:27.719 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Verify fs label] ********************************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-fs.yml:10 Thursday 21 July 2022 19:26:31 +0000 
(0:00:00.040) 0:03:27.759 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [See whether the device node is present] ********************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-device.yml:4 Thursday 21 July 2022 19:26:31 +0000 (0:00:00.037) 0:03:27.797 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "stat": { "atime": 1658431580.9230084, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1658431580.9230084, "dev": 6, "device_type": 64772, "executable": false, "exists": true, "gid": 6, "gr_name": "disk", "inode": 103033, "isblk": true, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/symlink", "mode": "0660", "mtime": 1658431580.9230084, "nlink": 1, "path": "/dev/mapper/vg1-lv1", "pw_name": "root", "readable": true, "rgrp": true, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": null, "wgrp": true, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [Verify the presence/absence of the device node] ************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-device.yml:10 Thursday 21 July 2022 19:26:32 +0000 (0:00:00.442) 0:03:28.239 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Make sure we got info about this volume] ********************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-device.yml:15 Thursday 21 July 2022 19:26:32 +0000 (0:00:00.040) 0:03:28.279 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [(1/2) Process volume type (set initial value)] *************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-device.yml:21 Thursday 21 July 2022 19:26:32 +0000 (0:00:00.040) 0:03:28.320 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "st_volume_type": "lvm" }, "changed": false } TASK [(2/2) Process volume type (get RAID value)] ****************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-device.yml:25 Thursday 21 July 2022 19:26:32 +0000 (0:00:00.036) 0:03:28.357 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the volume's device type] ***************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-device.yml:30 Thursday 21 July 2022 19:26:32 +0000 (0:00:00.025) 0:03:28.382 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Stat the LUKS device, if encrypted] ************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:3 Thursday 21 July 2022 19:26:32 +0000 (0:00:00.040) 0:03:28.422 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Ensure cryptsetup is present] ******************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:10 Thursday 21 July 2022 19:26:32 +0000 (0:00:00.026) 0:03:28.449 ********* changed: [/cache/centos-8.qcow2] => { "changed": true, "rc": 0, "results": [ "Installed: cryptsetup-libs-2.3.7-2.el8.x86_64", "Installed: cryptsetup-2.3.7-2.el8.x86_64", "Removed: cryptsetup-libs-2.3.3-4.el8.x86_64" ] } TASK [Collect LUKS 
info for this volume] *************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:15 Thursday 21 July 2022 19:26:35 +0000 (0:00:03.574) 0:03:32.023 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the presence/absence of the LUKS device node] ********************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:21 Thursday 21 July 2022 19:26:35 +0000 (0:00:00.024) 0:03:32.047 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that the raw device is the same as the device if not encrypted] *** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:27 Thursday 21 July 2022 19:26:35 +0000 (0:00:00.027) 0:03:32.075 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Make sure we got info about the LUKS volume if encrypted] **************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:33 Thursday 21 July 2022 19:26:35 +0000 (0:00:00.063) 0:03:32.139 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the LUKS volume's device type if encrypted] *********************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:39 Thursday 21 July 2022 19:26:35 +0000 (0:00:00.025) 0:03:32.165 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS version] ****************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:44 Thursday 21 July 2022 19:26:35 +0000 (0:00:00.023) 0:03:32.188 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS key size] ***************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:50 Thursday 21 July 2022 19:26:36 +0000 (0:00:00.023) 0:03:32.211 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS cipher] ******************************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:56 Thursday 21 July 2022 19:26:36 +0000 (0:00:00.024) 0:03:32.235 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:62 Thursday 21 July 2022 19:26:36 +0000 (0:00:00.026) 0:03:32.262 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_crypttab_entries": [], "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:67 Thursday 21 July 2022 19:26:36 +0000 (0:00:00.053) 0:03:32.316 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: 
/tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:72 Thursday 21 July 2022 19:26:36 +0000 (0:00:00.052) 0:03:32.368 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:78 Thursday 21 July 2022 19:26:36 +0000 (0:00:00.039) 0:03:32.408 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:84 Thursday 21 July 2022 19:26:36 +0000 (0:00:00.039) 0:03:32.447 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:90 Thursday 21 July 2022 19:26:36 +0000 (0:00:00.039) 0:03:32.486 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_expected_crypttab_entries": null, "_storage_test_expected_crypttab_key_file": null }, "changed": false } TASK [get information about RAID] ********************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-md.yml:7 Thursday 21 July 2022 19:26:36 +0000 (0:00:00.079) 0:03:32.566 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-md.yml:13 Thursday 21 July 2022 19:26:36 +0000 (0:00:00.041) 0:03:32.607 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-md.yml:17 Thursday 21 July 2022 19:26:36 +0000 (0:00:00.038) 0:03:32.646 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-md.yml:21 Thursday 21 July 2022 19:26:36 +0000 (0:00:00.040) 0:03:32.686 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [check RAID active devices count] ***************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-md.yml:25 Thursday 21 July 2022 19:26:36 +0000 (0:00:00.040) 0:03:32.727 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [check RAID spare devices count] ****************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-md.yml:31 Thursday 21 July 2022 19:26:36 +0000 (0:00:00.043) 0:03:32.770 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [check RAID metadata version] ********************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-md.yml:37 Thursday 21 July 2022 19:26:36 +0000 (0:00:00.040) 
0:03:32.811 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [parse the actual size of the volume] ************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-size.yml:3 Thursday 21 July 2022 19:26:36 +0000 (0:00:00.040) 0:03:32.852 ********* ok: [/cache/centos-8.qcow2] => { "bytes": 3221225472, "changed": false, "lvm": "3g", "parted": "3GiB", "size": "3 GiB" } TASK [parse the requested size of the volume] ********************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-size.yml:9 Thursday 21 July 2022 19:26:37 +0000 (0:00:00.520) 0:03:33.372 ********* ok: [/cache/centos-8.qcow2] => { "bytes": 3221225472, "changed": false, "lvm": "3g", "parted": "3GiB", "size": "3 GiB" } TASK [Establish base value for expected size] ********************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-size.yml:15 Thursday 21 July 2022 19:26:37 +0000 (0:00:00.480) 0:03:33.853 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_expected_size": "3221225472" }, "changed": false } TASK [debug] ******************************************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-size.yml:20 Thursday 21 July 2022 19:26:37 +0000 (0:00:00.052) 0:03:33.905 ********* ok: [/cache/centos-8.qcow2] => { "storage_test_expected_size": "3221225472" } TASK [debug] ******************************************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-size.yml:25 Thursday 21 July 2022 19:26:37 +0000 (0:00:00.035) 0:03:33.940 ********* skipping: [/cache/centos-8.qcow2] => {} TASK [debug] ******************************************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-size.yml:28 Thursday 21 July 2022 19:26:37 +0000 (0:00:00.035) 0:03:33.976 ********* skipping: [/cache/centos-8.qcow2] => {} TASK [Get the size of parent/pool device] ************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-size.yml:31 Thursday 21 July 2022 19:26:37 +0000 (0:00:00.041) 0:03:34.017 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [debug] ******************************************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-size.yml:36 Thursday 21 July 2022 19:26:37 +0000 (0:00:00.036) 0:03:34.054 ********* skipping: [/cache/centos-8.qcow2] => {} TASK [Calculate the expected size based on pool size and percentage value] ***** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-size.yml:39 Thursday 21 July 2022 19:26:37 +0000 (0:00:00.035) 0:03:34.090 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [debug] ******************************************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-size.yml:44 Thursday 21 July 2022 19:26:37 +0000 (0:00:00.035) 0:03:34.125 ********* ok: [/cache/centos-8.qcow2] => { "storage_test_actual_size": { "bytes": 3221225472, "changed": false, "failed": false, "lvm": "3g", "parted": "3GiB", "size": "3 GiB" } } TASK [debug] ******************************************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-size.yml:47 Thursday 21 July 2022 19:26:37 +0000 (0:00:00.039) 0:03:34.164 
********* ok: [/cache/centos-8.qcow2] => { "storage_test_expected_size": "3221225472" } TASK [assert] ****************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-size.yml:50 Thursday 21 July 2022 19:26:38 +0000 (0:00:00.037) 0:03:34.202 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Get information about the LV] ******************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-cache.yml:6 Thursday 21 July 2022 19:26:38 +0000 (0:00:00.052) 0:03:34.255 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "cmd": [ "lvs", "--noheadings", "--nameprefixes", "--units=b", "--nosuffix", "--unquoted", "-o", "name,attr,cache_total_blocks,chunk_size,segtype", "vg1/lv1" ], "delta": "0:00:00.034423", "end": "2022-07-21 19:26:38.148836", "rc": 0, "start": "2022-07-21 19:26:38.114413" } STDOUT: LVM2_LV_NAME=lv1 LVM2_LV_ATTR=Vwi-aotz-- LVM2_CACHE_TOTAL_BLOCKS= LVM2_CHUNK_SIZE=0 LVM2_SEGTYPE=thin TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-cache.yml:14 Thursday 21 July 2022 19:26:38 +0000 (0:00:00.436) 0:03:34.692 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_lv_segtype": [ "thin" ] }, "changed": false } TASK [check segment type] ****************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-cache.yml:17 Thursday 21 July 2022 19:26:38 +0000 (0:00:00.054) 0:03:34.747 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-cache.yml:22 Thursday 21 July 2022 19:26:38 +0000 (0:00:00.053) 0:03:34.801 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [parse the requested cache size] ****************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-cache.yml:26 Thursday 21 July 2022 19:26:38 +0000 (0:00:00.041) 0:03:34.842 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-cache.yml:32 Thursday 21 July 2022 19:26:38 +0000 (0:00:00.040) 0:03:34.883 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check cache size] ******************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-cache.yml:36 Thursday 21 July 2022 19:26:38 +0000 (0:00:00.039) 0:03:34.923 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clean up facts] ********************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume.yml:16 Thursday 21 July 2022 19:26:38 +0000 (0:00:00.039) 0:03:34.963 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_volume_present": null }, "changed": false } TASK [Verify the volumes with no pool were correctly managed] ****************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-role-results.yml:43 Thursday 21 July 2022 19:26:38 +0000 
(0:00:00.039) 0:03:35.003 ********* TASK [Clean up variable namespace] ********************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/verify-role-results.yml:53 Thursday 21 July 2022 19:26:38 +0000 (0:00:00.024) 0:03:35.027 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_blkinfo": null, "storage_test_crypttab": null, "storage_test_fstab": null }, "changed": false } TASK [Repeat previous invocation to verify idempotence] ************************ task path: /tmp/tmp_0pjp8ed/tests/storage/tests_create_thinp_then_remove.yml:40 Thursday 21 July 2022 19:26:38 +0000 (0:00:00.036) 0:03:35.063 ********* TASK [fedora.linux_system_roles.storage : set platform/version specific variables] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2 Thursday 21 July 2022 19:26:38 +0000 (0:00:00.050) 0:03:35.114 ********* included: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for /cache/centos-8.qcow2 TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2 Thursday 21 July 2022 19:26:38 +0000 (0:00:00.035) 0:03:35.149 ********* ok: [/cache/centos-8.qcow2] TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7 Thursday 21 July 2022 19:26:39 +0000 (0:00:00.536) 0:03:35.685 ********* skipping: [/cache/centos-8.qcow2] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [/cache/centos-8.qcow2] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [/cache/centos-8.qcow2] => (item=CentOS_8.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "vdo", "kmod-kvdo", "xfsprogs" ] }, "ansible_included_var_files": [ "/tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } ok: [/cache/centos-8.qcow2] => (item=CentOS_8.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "vdo", "kmod-kvdo", "xfsprogs" ] }, "ansible_included_var_files": [ "/tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } TASK [fedora.linux_system_roles.storage : define an empty list of pools to be used in testing] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5 Thursday 21 July 2022 19:26:39 +0000 (0:00:00.115) 0:03:35.800 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : define an empty list of volumes to be used in testing] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9 Thursday 21 July 2022 19:26:39 +0000 (0:00:00.035) 0:03:35.836 ********* ok: 
[/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : include the appropriate provider tasks] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13 Thursday 21 July 2022 19:26:39 +0000 (0:00:00.072) 0:03:35.909 ********* redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount included: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for /cache/centos-8.qcow2 TASK [fedora.linux_system_roles.storage : get a list of rpm packages installed on host machine] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2 Thursday 21 July 2022 19:26:39 +0000 (0:00:00.059) 0:03:35.968 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : make sure blivet is available] ******* task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:7 Thursday 21 July 2022 19:26:39 +0000 (0:00:00.023) 0:03:35.991 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [fedora.linux_system_roles.storage : show storage_pools] ****************** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:13 Thursday 21 July 2022 19:26:41 +0000 (0:00:01.851) 0:03:37.842 ********* ok: [/cache/centos-8.qcow2] => { "storage_pools": [ { "disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ], "name": "vg1", "type": "lvm", "volumes": [ { "mount_point": "/opt/test1", "name": "lv1", "size": "3g", "thin": true, "thin_pool_name": "tpool1", "thin_pool_size": "10g" } ] } ] } TASK [fedora.linux_system_roles.storage : show storage_volumes] **************** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:18 Thursday 21 July 2022 19:26:41 +0000 (0:00:00.042) 0:03:37.885 ********* ok: [/cache/centos-8.qcow2] => { "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined" } TASK [fedora.linux_system_roles.storage : get required packages] *************** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:23 Thursday 21 July 2022 19:26:41 +0000 (0:00:00.037) 0:03:37.922 ********* ok: [/cache/centos-8.qcow2] => { "actions": [], "changed": false, "crypts": [], "leaves": [], "mounts": [], "packages": [ "lvm2" ], "pools": [], "volumes": [] } TASK [fedora.linux_system_roles.storage : enable copr repositories if needed] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:35 Thursday 21 July 2022 19:26:43 +0000 (0:00:01.999) 0:03:39.921 ********* included: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml for /cache/centos-8.qcow2 TASK [fedora.linux_system_roles.storage : check if the COPR support packages should be installed] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:2 Thursday 21 July 2022 19:26:43 +0000 (0:00:00.046) 0:03:39.968 ********* TASK [fedora.linux_system_roles.storage : make sure COPR support packages are present] *** task path: 
/tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:13 Thursday 21 July 2022 19:26:43 +0000 (0:00:00.033) 0:03:40.001 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : enable COPRs] ************************ task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:18 Thursday 21 July 2022 19:26:43 +0000 (0:00:00.039) 0:03:40.040 ********* TASK [fedora.linux_system_roles.storage : make sure required packages are installed] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:41 Thursday 21 July 2022 19:26:43 +0000 (0:00:00.033) 0:03:40.074 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [fedora.linux_system_roles.storage : get service facts] ******************* task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:47 Thursday 21 July 2022 19:26:45 +0000 (0:00:01.909) 0:03:41.983 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "blivet.service": { "name": "blivet.service", "source": "systemd", "state": "inactive", "status": "static" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "chrony-dnssrv@.service": { "name": "chrony-dnssrv@.service", "source": "systemd", "state": "unknown", "status": "static" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cockpit-motd.service": { "name": "cockpit-motd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cockpit-wsinstance-http.service": { "name": "cockpit-wsinstance-http.service", "source": "systemd", "state": "inactive", "status": "static" }, "cockpit-wsinstance-https-factory@.service": { "name": "cockpit-wsinstance-https-factory@.service", "source": "systemd", "state": "unknown", "status": "static" }, 
"cockpit-wsinstance-https@.service": { "name": "cockpit-wsinstance-https@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cockpit.service": { "name": "cockpit.service", "source": "systemd", "state": "inactive", "status": "static" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cpupower.service": { "name": "cpupower.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "static" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "dbus-org.freedesktop.portable1.service": { "name": "dbus-org.freedesktop.portable1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "running", "status": "static" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "running", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fcoe.service": { "name": "fcoe.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "fstrim.service": { "name": 
"fstrim.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "halt-local.service": { "name": "halt-local.service", "source": "systemd", "state": "inactive", "status": "static" }, "import-state.service": { "name": "import-state.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "iscsi-shutdown.service": { "name": "iscsi-shutdown.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iscsi.service": { "name": "iscsi.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iscsid.service": { "name": "iscsid.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "loadmodules.service": { "name": "loadmodules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "lvm2-activation-early.service": { "name": "lvm2-activation-early.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "lvm2-activation.service": { "name": "lvm2-activation.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "lvm2-pvscan@.service": { "name": "lvm2-pvscan@.service", "source": "systemd", "state": "unknown", "status": "static" }, "lvm2-pvscan@259:4.service": { "name": "lvm2-pvscan@259:4.service", "source": "systemd", "state": "stopped", "status": "active" }, "lvm2-pvscan@259:5.service": { "name": "lvm2-pvscan@259:5.service", "source": "systemd", "state": "stopped", "status": "active" }, "lvm2-pvscan@259:6.service": { "name": "lvm2-pvscan@259:6.service", "source": "systemd", "state": "stopped", "status": "active" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": 
"static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "mdadm-grow-continue@.service": { "name": "mdadm-grow-continue@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdadm-last-resort@.service": { "name": "mdadm-last-resort@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdcheck_continue.service": { "name": "mdcheck_continue.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdcheck_start.service": { "name": "mdcheck_start.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmon@.service": { "name": "mdmon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdmonitor-oneshot.service": { "name": "mdmonitor-oneshot.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmonitor.service": { "name": "mdmonitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "messagebus.service": { "name": "messagebus.service", "source": "systemd", "state": "active", "status": "static" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "multipathd.service": { "name": "multipathd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "ndctl-monitor.service": { "name": "ndctl-monitor.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-convert.service": { "name": "nfs-convert.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "oddjobd.service": { "name": "oddjobd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "packagekit-offline-update.service": { "name": "packagekit-offline-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "packagekit.service": { "name": "packagekit.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", 
"status": "not-found" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "running", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon.service": { "name": "quotaon.service", "source": "systemd", "state": "inactive", "status": "static" }, "rbdmap.service": { "name": "rbdmap.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rdisc.service": { "name": "rdisc.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "rngd-wake-threshold.service": { "name": "rngd-wake-threshold.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", 
"source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "running", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "active", "status": "enabled" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-resume@.service": { "name": "systemd-hibernate-resume@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", 
"status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-portabled.service": { "name": "systemd-portabled.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-quotacheck.service": { "name": "systemd-quotacheck.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-resolved.service": { "name": "systemd-resolved.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "masked" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, 
"systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "tcsd.service": { "name": "tcsd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "teamd@.service": { "name": "teamd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "timedatex.service": { "name": "timedatex.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "tuned.service": { "name": "tuned.service", "source": "systemd", "state": "running", "status": "enabled" }, "unbound-anchor.service": { "name": "unbound-anchor.service", "source": "systemd", "state": "stopped", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "vdo-start-by-dev@.service": { "name": "vdo-start-by-dev@.service", "source": "systemd", "state": "unknown", "status": "static" }, "vdo.service": { "name": "vdo.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "yppasswdd.service": { "name": "yppasswdd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ypserv.service": { "name": "ypserv.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ypxfrd.service": { "name": "ypxfrd.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] ***** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:53 Thursday 21 July 2022 19:26:47 +0000 (0:00:01.632) 0:03:43.616 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_cryptsetup_services": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:58 Thursday 21 July 2022 19:26:47 +0000 (0:00:00.057) 0:03:43.673 ********* TASK [fedora.linux_system_roles.storage : manage the pools and volumes to match the specified state] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:64 Thursday 21 July 2022 19:26:47 +0000 (0:00:00.027) 0:03:43.701 ********* ok: [/cache/centos-8.qcow2] => { "actions": [], "changed": false, "crypts": [], "leaves": [ "/dev/sr0", "/dev/vda1", "/dev/sda", "/dev/sdb", "/dev/sdc", "/dev/mapper/vg1-lv1", "/dev/vdb", "/dev/vdc", "/dev/vdd" ], "mounts": [ { "dump": 0, "fstype": "xfs", "opts": "defaults", "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/vg1-lv1", "state": "mounted" } ], "packages": [ "xfsprogs", "lvm2" ], "pools": [ { "disks": [ "nvme0n1", 
"nvme1n1", "nvme2n1" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "vg1", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/vg1-lv1", "_kernel_device": "/dev/dm-4", "_mount_id": "/dev/mapper/vg1-lv1", "_raw_device": "/dev/mapper/vg1-lv1", "_raw_kernel_device": "/dev/dm-4", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "name": "lv1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "size": "3g", "state": "present", "thin": true, "thin_pool_name": "tpool1", "thin_pool_size": "10g", "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:78 Thursday 21 July 2022 19:26:49 +0000 (0:00:02.053) 0:03:45.754 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:90 Thursday 21 July 2022 19:26:49 +0000 (0:00:00.040) 0:03:45.795 ********* TASK [fedora.linux_system_roles.storage : show blivet_output] ****************** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:96 Thursday 21 July 2022 19:26:49 +0000 (0:00:00.024) 0:03:45.819 ********* ok: [/cache/centos-8.qcow2] => { "blivet_output": { "actions": [], "changed": false, "crypts": [], "failed": false, "leaves": [ "/dev/sr0", "/dev/vda1", "/dev/sda", "/dev/sdb", "/dev/sdc", "/dev/mapper/vg1-lv1", "/dev/vdb", "/dev/vdc", "/dev/vdd" ], "mounts": [ { "dump": 0, "fstype": "xfs", "opts": "defaults", "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/vg1-lv1", "state": "mounted" } ], "packages": [ "xfsprogs", "lvm2" ], "pools": [ { "disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "vg1", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/vg1-lv1", "_kernel_device": "/dev/dm-4", "_mount_id": "/dev/mapper/vg1-lv1", "_raw_device": "/dev/mapper/vg1-lv1", "_raw_kernel_device": "/dev/dm-4", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ], "encryption": 
false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "name": "lv1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "size": "3g", "state": "present", "thin": true, "thin_pool_name": "tpool1", "thin_pool_size": "10g", "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } } TASK [fedora.linux_system_roles.storage : set the list of pools for test verification] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:101 Thursday 21 July 2022 19:26:49 +0000 (0:00:00.089) 0:03:45.909 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_pools_list": [ { "disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "vg1", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/vg1-lv1", "_kernel_device": "/dev/dm-4", "_mount_id": "/dev/mapper/vg1-lv1", "_raw_device": "/dev/mapper/vg1-lv1", "_raw_kernel_device": "/dev/dm-4", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "name": "lv1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "size": "3g", "state": "present", "thin": true, "thin_pool_name": "tpool1", "thin_pool_size": "10g", "type": "lvm", "vdo_pool_size": null } ] } ] }, "changed": false } TASK [fedora.linux_system_roles.storage : set the list of volumes for test verification] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:105 Thursday 21 July 2022 19:26:49 +0000 (0:00:00.045) 0:03:45.954 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : remove obsolete mounts] ************** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:121 Thursday 21 July 2022 19:26:49 +0000 (0:00:00.036) 0:03:45.991 ********* TASK [fedora.linux_system_roles.storage : tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:132 Thursday 21 July 2022 19:26:49 +0000 (0:00:00.040) 0:03:46.031 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "name": null, "status": {} } TASK 
[fedora.linux_system_roles.storage : set up new/current mounts] *********** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:137 Thursday 21 July 2022 19:26:50 +0000 (0:00:00.728) 0:03:46.760 ********* redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount ok: [/cache/centos-8.qcow2] => (item={'src': '/dev/mapper/vg1-lv1', 'path': '/opt/test1', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted'}) => { "ansible_loop_var": "mount_info", "backup_file": "", "boot": "yes", "changed": false, "dump": "0", "fstab": "/etc/fstab", "fstype": "xfs", "mount_info": { "dump": 0, "fstype": "xfs", "opts": "defaults", "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/vg1-lv1", "state": "mounted" }, "name": "/opt/test1", "opts": "defaults", "passno": "0", "src": "/dev/mapper/vg1-lv1" } TASK [fedora.linux_system_roles.storage : tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:148 Thursday 21 July 2022 19:26:50 +0000 (0:00:00.424) 0:03:47.184 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.storage : retrieve facts for the /etc/crypttab file] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:156 Thursday 21 July 2022 19:26:51 +0000 (0:00:00.655) 0:03:47.840 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "stat": { "atime": 1658431449.4320083, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1643141385.117, "dev": 64513, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 135, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0600", "mtime": 1643141019.537, "nlink": 1, "path": "/etc/crypttab", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": "3147672035", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : manage /etc/crypttab to account for changes we just made] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:161 Thursday 21 July 2022 19:26:52 +0000 (0:00:00.392) 0:03:48.232 ********* TASK [fedora.linux_system_roles.storage : Update facts] ************************ task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:183 Thursday 21 July 2022 19:26:52 +0000 (0:00:00.025) 0:03:48.258 ********* ok: [/cache/centos-8.qcow2] META: role_complete for /cache/centos-8.qcow2 TASK [include_tasks] *********************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/tests_create_thinp_then_remove.yml:56 Thursday 21 July 2022 19:26:53 +0000 (0:00:01.019) 0:03:49.278 ********* included: /tmp/tmp_0pjp8ed/tests/storage/verify-role-results.yml for /cache/centos-8.qcow2 TASK [Print out pool information] ********************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-role-results.yml:1 Thursday 21 July 2022 19:26:53 +0000 (0:00:00.049) 
0:03:49.327 ********* ok: [/cache/centos-8.qcow2] => { "_storage_pools_list": [ { "disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "vg1", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/vg1-lv1", "_kernel_device": "/dev/dm-4", "_mount_id": "/dev/mapper/vg1-lv1", "_raw_device": "/dev/mapper/vg1-lv1", "_raw_kernel_device": "/dev/dm-4", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "name": "lv1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "size": "3g", "state": "present", "thin": true, "thin_pool_name": "tpool1", "thin_pool_size": "10g", "type": "lvm", "vdo_pool_size": null } ] } ] } TASK [Print out volume information] ******************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-role-results.yml:6 Thursday 21 July 2022 19:26:53 +0000 (0:00:00.056) 0:03:49.384 ********* skipping: [/cache/centos-8.qcow2] => {} TASK [Collect info about the volumes.] 
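The pool information printed above is the thin-provisioning layout this test drives through the storage role: volume group vg1 on nvme0n1, nvme1n1 and nvme2n1, with a 3g thin volume lv1 inside a 10g thin pool tpool1 mounted at /opt/test1. For reference, the same layout could be requested with a play along these lines (a minimal sketch reconstructed from the values shown in the log; the test itself uses include_role, and the play header is illustrative rather than copied from the test source):

- hosts: all
  vars:
    storage_pools:
      - name: vg1
        type: lvm
        disks:
          - nvme0n1
          - nvme1n1
          - nvme2n1
        volumes:
          - name: lv1
            size: 3g
            thin: true                # provision lv1 inside a thin pool
            thin_pool_name: tpool1
            thin_pool_size: 10g
            mount_point: /opt/test1
  roles:
    - fedora.linux_system_roles.storage

The additional keys reported for the volume here (fs_type, mount_options, encryption flags and so on) are filled in by the role rather than supplied by the test, as can be seen by comparing the earlier "show storage_pools" output with the expanded pool list above.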
***************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-role-results.yml:14 Thursday 21 July 2022 19:26:53 +0000 (0:00:00.039) 0:03:49.423 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "info": { "/dev/mapper/vg1-lv1": { "fstype": "xfs", "label": "", "name": "/dev/mapper/vg1-lv1", "size": "3G", "type": "lvm", "uuid": "1ea60a5c-f51b-4cc6-9e7d-084fe537536f" }, "/dev/mapper/vg1-tpool1": { "fstype": "", "label": "", "name": "/dev/mapper/vg1-tpool1", "size": "10G", "type": "lvm", "uuid": "" }, "/dev/mapper/vg1-tpool1-tpool": { "fstype": "", "label": "", "name": "/dev/mapper/vg1-tpool1-tpool", "size": "10G", "type": "lvm", "uuid": "" }, "/dev/mapper/vg1-tpool1_tdata": { "fstype": "", "label": "", "name": "/dev/mapper/vg1-tpool1_tdata", "size": "10G", "type": "lvm", "uuid": "" }, "/dev/mapper/vg1-tpool1_tmeta": { "fstype": "", "label": "", "name": "/dev/mapper/vg1-tpool1_tmeta", "size": "12M", "type": "lvm", "uuid": "" }, "/dev/nvme0n1": { "fstype": "", "label": "", "name": "/dev/nvme0n1", "size": "10G", "type": "disk", "uuid": "" }, "/dev/nvme0n1p1": { "fstype": "LVM2_member", "label": "", "name": "/dev/nvme0n1p1", "size": "10G", "type": "partition", "uuid": "ken164-erkD-vf0C-nvLO-nhmh-YBzF-bzv8kZ" }, "/dev/nvme1n1": { "fstype": "", "label": "", "name": "/dev/nvme1n1", "size": "10G", "type": "disk", "uuid": "" }, "/dev/nvme1n1p1": { "fstype": "LVM2_member", "label": "", "name": "/dev/nvme1n1p1", "size": "10G", "type": "partition", "uuid": "eBnMVU-t1UJ-ttot-SKPn-js6i-mJjN-e9h7dx" }, "/dev/nvme2n1": { "fstype": "", "label": "", "name": "/dev/nvme2n1", "size": "10G", "type": "disk", "uuid": "" }, "/dev/nvme2n1p1": { "fstype": "LVM2_member", "label": "", "name": "/dev/nvme2n1p1", "size": "10G", "type": "partition", "uuid": "xZT8wS-OKti-fe7H-wOMe-VpTY-S1Yx-RZZXxW" }, "/dev/sda": { "fstype": "", "label": "", "name": "/dev/sda", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdb": { "fstype": "", "label": "", "name": "/dev/sdb", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdc": { "fstype": "", "label": "", "name": "/dev/sdc", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sr0": { "fstype": "iso9660", "label": "cidata", "name": "/dev/sr0", "size": "364K", "type": "rom", "uuid": "2022-07-21-19-22-43-00" }, "/dev/vda": { "fstype": "", "label": "", "name": "/dev/vda", "size": "10G", "type": "disk", "uuid": "" }, "/dev/vda1": { "fstype": "xfs", "label": "", "name": "/dev/vda1", "size": "10G", "type": "partition", "uuid": "395b9844-e404-4857-afbb-c6edccaf72f3" }, "/dev/vdb": { "fstype": "", "label": "", "name": "/dev/vdb", "size": "10G", "type": "disk", "uuid": "" }, "/dev/vdc": { "fstype": "", "label": "", "name": "/dev/vdc", "size": "10G", "type": "disk", "uuid": "" }, "/dev/vdd": { "fstype": "", "label": "", "name": "/dev/vdd", "size": "10G", "type": "disk", "uuid": "" } } } TASK [Read the /etc/fstab file for volume existence] *************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-role-results.yml:19 Thursday 21 July 2022 19:26:53 +0000 (0:00:00.392) 0:03:49.816 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "cmd": [ "cat", "/etc/fstab" ], "delta": "0:00:00.002398", "end": "2022-07-21 19:26:53.637568", "rc": 0, "start": "2022-07-21 19:26:53.635170" } STDOUT: # # /etc/fstab # Created by anaconda on Tue Jan 25 20:03:39 2022 # # Accessible filesystems, by reference, are maintained under '/dev/disk/'. # See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info. 
# # After editing this file, run 'systemctl daemon-reload' to update systemd # units generated from this file. # UUID=395b9844-e404-4857-afbb-c6edccaf72f3 / xfs defaults 0 0 /dev/mapper/vg1-lv1 /opt/test1 xfs defaults 0 0 TASK [Read the /etc/crypttab file] ********************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/verify-role-results.yml:24 Thursday 21 July 2022 19:26:53 +0000 (0:00:00.357) 0:03:50.173 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "cmd": [ "cat", "/etc/crypttab" ], "delta": "0:00:00.003303", "end": "2022-07-21 19:26:54.032001", "failed_when_result": false, "rc": 0, "start": "2022-07-21 19:26:54.028698" } TASK [Verify the volumes listed in storage_pools were correctly managed] ******* task path: /tmp/tmp_0pjp8ed/tests/storage/verify-role-results.yml:33 Thursday 21 July 2022 19:26:54 +0000 (0:00:00.400) 0:03:50.574 ********* included: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool.yml for /cache/centos-8.qcow2 => (item={'disks': ['nvme0n1', 'nvme1n1', 'nvme2n1'], 'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'name': 'vg1', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'state': 'present', 'type': 'lvm', 'volumes': [{'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'name': 'lv1', 'raid_level': None, 'size': '3g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'thin_pool_name': 'tpool1', 'thin_pool_size': '10g', 'thin': True, 'vdo_pool_size': None, 'disks': ['nvme0n1', 'nvme1n1', 'nvme2n1'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/vg1-lv1', '_raw_device': '/dev/mapper/vg1-lv1', '_mount_id': '/dev/mapper/vg1-lv1', '_kernel_device': '/dev/dm-4', '_raw_kernel_device': '/dev/dm-4'}], 'raid_chunk_size': None}) TASK [Set _storage_pool_tests] ************************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool.yml:5 Thursday 21 July 2022 19:26:54 +0000 (0:00:00.061) 0:03:50.636 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_pool_tests": [ "members", "volumes" ] }, "changed": false } TASK [include_tasks] *********************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool.yml:18 Thursday 21 July 2022 19:26:54 +0000 (0:00:00.075) 0:03:50.711 ********* included: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml for /cache/centos-8.qcow2 => (item=members) included: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-volumes.yml for /cache/centos-8.qcow2 => (item=volumes) TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml:1 Thursday 21 July 2022 19:26:54 +0000 (0:00:00.048) 0:03:50.760 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_expected_pv_count": "3", "_storage_test_pool_pvs_lvm": 
[ "/dev/nvme0n1p1", "/dev/nvme1n1p1", "/dev/nvme2n1p1" ] }, "changed": false } TASK [Get the canonical device path for each member device] ******************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml:6 Thursday 21 July 2022 19:26:54 +0000 (0:00:00.095) 0:03:50.856 ********* ok: [/cache/centos-8.qcow2] => (item=/dev/nvme0n1p1) => { "ansible_loop_var": "pv", "changed": false, "device": "/dev/nvme0n1p1", "pv": "/dev/nvme0n1p1" } ok: [/cache/centos-8.qcow2] => (item=/dev/nvme1n1p1) => { "ansible_loop_var": "pv", "changed": false, "device": "/dev/nvme1n1p1", "pv": "/dev/nvme1n1p1" } ok: [/cache/centos-8.qcow2] => (item=/dev/nvme2n1p1) => { "ansible_loop_var": "pv", "changed": false, "device": "/dev/nvme2n1p1", "pv": "/dev/nvme2n1p1" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml:15 Thursday 21 July 2022 19:26:55 +0000 (0:00:01.185) 0:03:52.041 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "__pvs_lvm_len": "3" }, "changed": false } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml:19 Thursday 21 July 2022 19:26:55 +0000 (0:00:00.051) 0:03:52.092 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_pool_pvs": [ "/dev/nvme0n1p1", "/dev/nvme1n1p1", "/dev/nvme2n1p1" ] }, "changed": false } TASK [Verify PV count] ********************************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml:23 Thursday 21 July 2022 19:26:55 +0000 (0:00:00.052) 0:03:52.145 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml:29 Thursday 21 July 2022 19:26:56 +0000 (0:00:00.053) 0:03:52.199 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_expected_pv_type": "disk" }, "changed": false } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml:33 Thursday 21 July 2022 19:26:56 +0000 (0:00:00.040) 0:03:52.239 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_expected_pv_type": "partition" }, "changed": false } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml:37 Thursday 21 July 2022 19:26:56 +0000 (0:00:00.053) 0:03:52.293 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check the type of each PV] *********************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml:41 Thursday 21 July 2022 19:26:56 +0000 (0:00:00.027) 0:03:52.320 ********* ok: [/cache/centos-8.qcow2] => (item=/dev/nvme0n1p1) => { "ansible_loop_var": "pv", "changed": false, "pv": "/dev/nvme0n1p1" } MSG: All assertions passed ok: [/cache/centos-8.qcow2] => (item=/dev/nvme1n1p1) => { "ansible_loop_var": "pv", "changed": false, "pv": "/dev/nvme1n1p1" } MSG: All assertions passed ok: [/cache/centos-8.qcow2] => (item=/dev/nvme2n1p1) => { "ansible_loop_var": "pv", "changed": false, "pv": "/dev/nvme2n1p1" } MSG: All assertions passed TASK [Check MD RAID] 
*********************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml:50 Thursday 21 July 2022 19:26:56 +0000 (0:00:00.077) 0:03:52.398 ********* included: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-md.yml for /cache/centos-8.qcow2 TASK [get information about RAID] ********************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-md.yml:6 Thursday 21 July 2022 19:26:56 +0000 (0:00:00.047) 0:03:52.445 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-md.yml:12 Thursday 21 July 2022 19:26:56 +0000 (0:00:00.024) 0:03:52.470 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-md.yml:16 Thursday 21 July 2022 19:26:56 +0000 (0:00:00.023) 0:03:52.493 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-md.yml:20 Thursday 21 July 2022 19:26:56 +0000 (0:00:00.023) 0:03:52.517 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [check RAID active devices count] ***************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-md.yml:24 Thursday 21 July 2022 19:26:56 +0000 (0:00:00.022) 0:03:52.540 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [check RAID spare devices count] ****************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-md.yml:30 Thursday 21 July 2022 19:26:56 +0000 (0:00:00.022) 0:03:52.563 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [check RAID metadata version] ********************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-md.yml:36 Thursday 21 July 2022 19:26:56 +0000 (0:00:00.022) 0:03:52.586 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-md.yml:44 Thursday 21 July 2022 19:26:56 +0000 (0:00:00.026) 0:03:52.612 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_md_active_devices_re": null, "storage_test_md_metadata_version_re": null, "storage_test_md_spare_devices_re": null }, "changed": false } TASK [Check LVM RAID] ********************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml:53 Thursday 21 July 2022 19:26:56 +0000 (0:00:00.038) 0:03:52.651 ********* included: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-members-lvmraid.yml for /cache/centos-8.qcow2 TASK [Validate pool member LVM RAID settings] ********************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-members-lvmraid.yml:1 Thursday 21 July 2022 19:26:56 +0000 (0:00:00.043) 0:03:52.695 ********* included: 
/tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-lvmraid.yml for /cache/centos-8.qcow2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'name': 'lv1', 'raid_level': None, 'size': '3g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'thin_pool_name': 'tpool1', 'thin_pool_size': '10g', 'thin': True, 'vdo_pool_size': None, 'disks': ['nvme0n1', 'nvme1n1', 'nvme2n1'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/vg1-lv1', '_raw_device': '/dev/mapper/vg1-lv1', '_mount_id': '/dev/mapper/vg1-lv1', '_kernel_device': '/dev/dm-4', '_raw_kernel_device': '/dev/dm-4'}) TASK [Get information about LVM RAID] ****************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-lvmraid.yml:3 Thursday 21 July 2022 19:26:56 +0000 (0:00:00.042) 0:03:52.737 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check that volume is LVM RAID] ******************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-lvmraid.yml:8 Thursday 21 July 2022 19:26:56 +0000 (0:00:00.030) 0:03:52.768 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-lvmraid.yml:12 Thursday 21 July 2022 19:26:56 +0000 (0:00:00.029) 0:03:52.798 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check Thin Pools] ******************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml:56 Thursday 21 July 2022 19:26:56 +0000 (0:00:00.035) 0:03:52.834 ********* included: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-members-thin.yml for /cache/centos-8.qcow2 TASK [Validate pool member thinpool settings] ********************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-members-thin.yml:1 Thursday 21 July 2022 19:26:56 +0000 (0:00:00.047) 0:03:52.881 ********* included: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-thin.yml for /cache/centos-8.qcow2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'name': 'lv1', 'raid_level': None, 'size': '3g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'thin_pool_name': 'tpool1', 'thin_pool_size': '10g', 'thin': True, 'vdo_pool_size': None, 'disks': ['nvme0n1', 'nvme1n1', 'nvme2n1'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 
'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/vg1-lv1', '_raw_device': '/dev/mapper/vg1-lv1', '_mount_id': '/dev/mapper/vg1-lv1', '_kernel_device': '/dev/dm-4', '_raw_kernel_device': '/dev/dm-4'}) TASK [Get information about thinpool] ****************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-thin.yml:3 Thursday 21 July 2022 19:26:56 +0000 (0:00:00.046) 0:03:52.927 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "cmd": [ "lvs", "--noheading", "-o", "pool_lv", "--select", "lv_name=lv1&&segtype=thin", "vg1" ], "delta": "0:00:00.043527", "end": "2022-07-21 19:26:56.888682", "rc": 0, "start": "2022-07-21 19:26:56.845155" } STDOUT: tpool1 TASK [Check that volume is in correct thinpool (when thinp name is provided)] *** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-thin.yml:8 Thursday 21 July 2022 19:26:57 +0000 (0:00:00.501) 0:03:53.429 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Check that volume is in thinpool (when thinp name is not provided)] ****** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-thin.yml:13 Thursday 21 July 2022 19:26:57 +0000 (0:00:00.095) 0:03:53.524 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-thin.yml:17 Thursday 21 July 2022 19:26:57 +0000 (0:00:00.054) 0:03:53.579 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_lvmraid_status": null }, "changed": false } TASK [Check member encryption] ************************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml:59 Thursday 21 July 2022 19:26:57 +0000 (0:00:00.040) 0:03:53.619 ********* included: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-members-encryption.yml for /cache/centos-8.qcow2 TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-members-encryption.yml:4 Thursday 21 July 2022 19:26:57 +0000 (0:00:00.046) 0:03:53.665 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Validate pool member LUKS settings] ************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-members-encryption.yml:8 Thursday 21 July 2022 19:26:57 +0000 (0:00:00.051) 0:03:53.717 ********* skipping: [/cache/centos-8.qcow2] => (item=/dev/nvme0n1p1) => { "_storage_test_pool_member_path": "/dev/nvme0n1p1", "ansible_loop_var": "_storage_test_pool_member_path", "changed": false, "skip_reason": "Conditional result was False" } skipping: [/cache/centos-8.qcow2] => (item=/dev/nvme1n1p1) => { "_storage_test_pool_member_path": "/dev/nvme1n1p1", "ansible_loop_var": "_storage_test_pool_member_path", "changed": false, "skip_reason": "Conditional result was False" } skipping: [/cache/centos-8.qcow2] => (item=/dev/nvme2n1p1) => { "_storage_test_pool_member_path": "/dev/nvme2n1p1", "ansible_loop_var": "_storage_test_pool_member_path", "changed": false, "skip_reason": "Conditional result was False" } TASK [Validate pool member crypttab entries] *********************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-members-encryption.yml:15 Thursday 
21 July 2022 19:26:57 +0000 (0:00:00.035) 0:03:53.752 ********* included: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml for /cache/centos-8.qcow2 => (item=/dev/nvme0n1p1) included: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml for /cache/centos-8.qcow2 => (item=/dev/nvme1n1p1) included: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml for /cache/centos-8.qcow2 => (item=/dev/nvme2n1p1) TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml:1 Thursday 21 July 2022 19:26:57 +0000 (0:00:00.054) 0:03:53.807 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_crypttab_entries": [] }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml:4 Thursday 21 July 2022 19:26:57 +0000 (0:00:00.054) 0:03:53.861 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml:9 Thursday 21 July 2022 19:26:57 +0000 (0:00:00.051) 0:03:53.912 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml:15 Thursday 21 July 2022 19:26:57 +0000 (0:00:00.038) 0:03:53.950 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml:21 Thursday 21 July 2022 19:26:57 +0000 (0:00:00.039) 0:03:53.989 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml:27 Thursday 21 July 2022 19:26:57 +0000 (0:00:00.042) 0:03:54.032 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_crypttab_entries": null }, "changed": false } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml:1 Thursday 21 July 2022 19:26:57 +0000 (0:00:00.036) 0:03:54.069 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_crypttab_entries": [] }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml:4 Thursday 21 July 2022 19:26:57 +0000 (0:00:00.051) 0:03:54.121 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml:9 Thursday 21 July 2022 19:26:57 +0000 (0:00:00.054) 0:03:54.175 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: 
/tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml:15 Thursday 21 July 2022 19:26:58 +0000 (0:00:00.039) 0:03:54.214 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml:21 Thursday 21 July 2022 19:26:58 +0000 (0:00:00.038) 0:03:54.252 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml:27 Thursday 21 July 2022 19:26:58 +0000 (0:00:00.038) 0:03:54.290 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_crypttab_entries": null }, "changed": false } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml:1 Thursday 21 July 2022 19:26:58 +0000 (0:00:00.037) 0:03:54.327 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_crypttab_entries": [] }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml:4 Thursday 21 July 2022 19:26:58 +0000 (0:00:00.050) 0:03:54.378 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml:9 Thursday 21 July 2022 19:26:58 +0000 (0:00:00.053) 0:03:54.431 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml:15 Thursday 21 July 2022 19:26:58 +0000 (0:00:00.036) 0:03:54.468 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml:21 Thursday 21 July 2022 19:26:58 +0000 (0:00:00.041) 0:03:54.509 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml:27 Thursday 21 July 2022 19:26:58 +0000 (0:00:00.038) 0:03:54.548 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_crypttab_entries": null }, "changed": false } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-members-encryption.yml:22 Thursday 21 July 2022 19:26:58 +0000 (0:00:00.037) 0:03:54.586 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_crypttab_key_file": null }, "changed": false } TASK [Check VDO] *************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml:62 Thursday 21 July 2022 19:26:58 +0000 (0:00:00.034) 0:03:54.621 ********* included: 
/tmp/tmp_0pjp8ed/tests/storage/verify-pool-members-vdo.yml for /cache/centos-8.qcow2 TASK [Validate pool member VDO settings] *************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-members-vdo.yml:1 Thursday 21 July 2022 19:26:58 +0000 (0:00:00.065) 0:03:54.686 ********* included: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-vdo.yml for /cache/centos-8.qcow2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'name': 'lv1', 'raid_level': None, 'size': '3g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'thin_pool_name': 'tpool1', 'thin_pool_size': '10g', 'thin': True, 'vdo_pool_size': None, 'disks': ['nvme0n1', 'nvme1n1', 'nvme2n1'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/vg1-lv1', '_raw_device': '/dev/mapper/vg1-lv1', '_mount_id': '/dev/mapper/vg1-lv1', '_kernel_device': '/dev/dm-4', '_raw_kernel_device': '/dev/dm-4'}) TASK [get information about VDO deduplication] ********************************* task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-vdo.yml:3 Thursday 21 July 2022 19:26:58 +0000 (0:00:00.050) 0:03:54.737 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-vdo.yml:8 Thursday 21 July 2022 19:26:58 +0000 (0:00:00.026) 0:03:54.764 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [assert] ****************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-vdo.yml:11 Thursday 21 July 2022 19:26:58 +0000 (0:00:00.027) 0:03:54.791 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [assert] ****************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-vdo.yml:16 Thursday 21 July 2022 19:26:58 +0000 (0:00:00.026) 0:03:54.817 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-vdo.yml:21 Thursday 21 July 2022 19:26:58 +0000 (0:00:00.028) 0:03:54.845 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [assert] ****************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-vdo.yml:24 Thursday 21 July 2022 19:26:58 +0000 (0:00:00.024) 0:03:54.870 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [assert] ****************************************************************** task path: 
/tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-vdo.yml:29 Thursday 21 July 2022 19:26:58 +0000 (0:00:00.023) 0:03:54.893 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-vdo.yml:39 Thursday 21 July 2022 19:26:58 +0000 (0:00:00.024) 0:03:54.918 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_vdo_status": null }, "changed": false } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml:65 Thursday 21 July 2022 19:26:58 +0000 (0:00:00.136) 0:03:55.054 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "__pvs_lvm_len": null, "_storage_test_expected_pv_count": null, "_storage_test_expected_pv_type": null, "_storage_test_pool_pvs": [], "_storage_test_pool_pvs_lvm": [] }, "changed": false } TASK [verify the volumes] ****************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-volumes.yml:3 Thursday 21 July 2022 19:26:58 +0000 (0:00:00.037) 0:03:55.092 ********* included: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume.yml for /cache/centos-8.qcow2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'name': 'lv1', 'raid_level': None, 'size': '3g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'thin_pool_name': 'tpool1', 'thin_pool_size': '10g', 'thin': True, 'vdo_pool_size': None, 'disks': ['nvme0n1', 'nvme1n1', 'nvme2n1'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/vg1-lv1', '_raw_device': '/dev/mapper/vg1-lv1', '_mount_id': '/dev/mapper/vg1-lv1', '_kernel_device': '/dev/dm-4', '_raw_kernel_device': '/dev/dm-4'}) TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume.yml:2 Thursday 21 July 2022 19:26:58 +0000 (0:00:00.046) 0:03:55.139 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_volume_present": true, "_storage_volume_tests": [ "mount", "fstab", "fs", "device", "encryption", "md", "size", "cache" ] }, "changed": false } TASK [include_tasks] *********************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume.yml:10 Thursday 21 July 2022 19:26:59 +0000 (0:00:00.053) 0:03:55.192 ********* included: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-mount.yml for /cache/centos-8.qcow2 => (item=mount) included: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-fstab.yml for /cache/centos-8.qcow2 => (item=fstab) included: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-fs.yml for /cache/centos-8.qcow2 => (item=fs) included: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-device.yml for /cache/centos-8.qcow2 => (item=device) included: 
/tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml for /cache/centos-8.qcow2 => (item=encryption) included: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-md.yml for /cache/centos-8.qcow2 => (item=md) included: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-size.yml for /cache/centos-8.qcow2 => (item=size) included: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-cache.yml for /cache/centos-8.qcow2 => (item=cache) TASK [Get expected mount device based on device type] ************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-mount.yml:6 Thursday 21 July 2022 19:26:59 +0000 (0:00:00.079) 0:03:55.271 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_device_path": "/dev/mapper/vg1-lv1" }, "changed": false } TASK [Set some facts] ********************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-mount.yml:10 Thursday 21 July 2022 19:26:59 +0000 (0:00:00.040) 0:03:55.312 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_mount_device_matches": [ { "block_available": 770083, "block_size": 4096, "block_total": 783872, "block_used": 13789, "device": "/dev/mapper/vg1-lv1", "fstype": "xfs", "inode_available": 1572861, "inode_total": 1572864, "inode_used": 3, "mount": "/opt/test1", "options": "rw,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=64k,sunit=128,swidth=128,noquota", "size_available": 3154259968, "size_total": 3210739712, "uuid": "1ea60a5c-f51b-4cc6-9e7d-084fe537536f" } ], "storage_test_mount_expected_match_count": "1", "storage_test_mount_point_matches": [ { "block_available": 770083, "block_size": 4096, "block_total": 783872, "block_used": 13789, "device": "/dev/mapper/vg1-lv1", "fstype": "xfs", "inode_available": 1572861, "inode_total": 1572864, "inode_used": 3, "mount": "/opt/test1", "options": "rw,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=64k,sunit=128,swidth=128,noquota", "size_available": 3154259968, "size_total": 3210739712, "uuid": "1ea60a5c-f51b-4cc6-9e7d-084fe537536f" } ], "storage_test_swap_expected_matches": "0" }, "changed": false } TASK [Verify the current mount state by device] ******************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-mount.yml:20 Thursday 21 July 2022 19:26:59 +0000 (0:00:00.054) 0:03:55.366 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Verify the current mount state by mount point] *************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-mount.yml:29 Thursday 21 July 2022 19:26:59 +0000 (0:00:00.049) 0:03:55.415 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Verify the mount fs type] ************************************************ task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-mount.yml:37 Thursday 21 July 2022 19:26:59 +0000 (0:00:00.046) 0:03:55.462 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [command] ***************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-mount.yml:46 Thursday 21 July 2022 19:26:59 +0000 (0:00:00.054) 0:03:55.517 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Gather swap info] ******************************************************** task path: 
/tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-mount.yml:50 Thursday 21 July 2022 19:26:59 +0000 (0:00:00.025) 0:03:55.543 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify swap status] ****************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-mount.yml:55 Thursday 21 July 2022 19:26:59 +0000 (0:00:00.025) 0:03:55.568 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Unset facts] ************************************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-mount.yml:65 Thursday 21 July 2022 19:26:59 +0000 (0:00:00.023) 0:03:55.592 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_mount_device_matches": null, "storage_test_mount_expected_match_count": null, "storage_test_mount_point_matches": null, "storage_test_swap_expected_matches": null, "storage_test_swaps": null, "storage_test_sys_node": null }, "changed": false } TASK [Set some variables for fstab checking] *********************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-fstab.yml:2 Thursday 21 July 2022 19:26:59 +0000 (0:00:00.034) 0:03:55.627 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": "1", "storage_test_fstab_expected_mount_options_matches": "1", "storage_test_fstab_expected_mount_point_matches": "1", "storage_test_fstab_id_matches": [ "/dev/mapper/vg1-lv1 " ], "storage_test_fstab_mount_options_matches": [ " /opt/test1 xfs defaults " ], "storage_test_fstab_mount_point_matches": [ " /opt/test1 " ] }, "changed": false } TASK [Verify that the device identifier appears in /etc/fstab] ***************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-fstab.yml:12 Thursday 21 July 2022 19:26:59 +0000 (0:00:00.063) 0:03:55.691 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Verify the fstab mount point] ******************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-fstab.yml:19 Thursday 21 July 2022 19:26:59 +0000 (0:00:00.052) 0:03:55.743 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Verify mount_options] **************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-fstab.yml:25 Thursday 21 July 2022 19:26:59 +0000 (0:00:00.051) 0:03:55.795 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clean up variables] ****************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-fstab.yml:34 Thursday 21 July 2022 19:26:59 +0000 (0:00:00.042) 0:03:55.837 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": null, "storage_test_fstab_expected_mount_options_matches": null, "storage_test_fstab_expected_mount_point_matches": null, "storage_test_fstab_id_matches": null, "storage_test_fstab_mount_options_matches": null, "storage_test_fstab_mount_point_matches": null }, "changed": false } TASK [Verify fs type] ********************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-fs.yml:4 Thursday 21 July 2022 19:26:59 +0000 (0:00:00.035) 0:03:55.873 ********* ok: 
[/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Verify fs label] ********************************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-fs.yml:10 Thursday 21 July 2022 19:26:59 +0000 (0:00:00.040) 0:03:55.913 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [See whether the device node is present] ********************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-device.yml:4 Thursday 21 July 2022 19:26:59 +0000 (0:00:00.041) 0:03:55.955 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "stat": { "atime": 1658431580.9230084, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1658431580.9230084, "dev": 6, "device_type": 64772, "executable": false, "exists": true, "gid": 6, "gr_name": "disk", "inode": 103033, "isblk": true, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/symlink", "mode": "0660", "mtime": 1658431580.9230084, "nlink": 1, "path": "/dev/mapper/vg1-lv1", "pw_name": "root", "readable": true, "rgrp": true, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": null, "wgrp": true, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [Verify the presence/absence of the device node] ************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-device.yml:10 Thursday 21 July 2022 19:27:00 +0000 (0:00:00.382) 0:03:56.338 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Make sure we got info about this volume] ********************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-device.yml:15 Thursday 21 July 2022 19:27:00 +0000 (0:00:00.040) 0:03:56.379 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [(1/2) Process volume type (set initial value)] *************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-device.yml:21 Thursday 21 July 2022 19:27:00 +0000 (0:00:00.039) 0:03:56.418 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "st_volume_type": "lvm" }, "changed": false } TASK [(2/2) Process volume type (get RAID value)] ****************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-device.yml:25 Thursday 21 July 2022 19:27:00 +0000 (0:00:00.039) 0:03:56.458 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the volume's device type] ***************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-device.yml:30 Thursday 21 July 2022 19:27:00 +0000 (0:00:00.025) 0:03:56.483 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Stat the LUKS device, if encrypted] ************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:3 Thursday 21 July 2022 19:27:00 +0000 (0:00:00.040) 0:03:56.524 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Ensure cryptsetup is present] ******************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:10 Thursday 21 July 2022 19:27:00 +0000 (0:00:00.025) 
0:03:56.550 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [Collect LUKS info for this volume] *************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:15 Thursday 21 July 2022 19:27:02 +0000 (0:00:01.909) 0:03:58.460 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the presence/absence of the LUKS device node] ********************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:21 Thursday 21 July 2022 19:27:02 +0000 (0:00:00.026) 0:03:58.487 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that the raw device is the same as the device if not encrypted] *** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:27 Thursday 21 July 2022 19:27:02 +0000 (0:00:00.025) 0:03:58.512 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Make sure we got info about the LUKS volume if encrypted] **************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:33 Thursday 21 July 2022 19:27:02 +0000 (0:00:00.124) 0:03:58.636 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the LUKS volume's device type if encrypted] *********************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:39 Thursday 21 July 2022 19:27:02 +0000 (0:00:00.032) 0:03:58.669 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS version] ****************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:44 Thursday 21 July 2022 19:27:02 +0000 (0:00:00.025) 0:03:58.695 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS key size] ***************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:50 Thursday 21 July 2022 19:27:02 +0000 (0:00:00.025) 0:03:58.720 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS cipher] ******************************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:56 Thursday 21 July 2022 19:27:02 +0000 (0:00:00.028) 0:03:58.748 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:62 Thursday 21 July 2022 19:27:02 +0000 (0:00:00.027) 0:03:58.776 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_crypttab_entries": [], "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:67 Thursday 21 July 2022 19:27:02 +0000 (0:00:00.055) 0:03:58.831 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK 
[Validate the format of the crypttab entry] ******************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:72 Thursday 21 July 2022 19:27:02 +0000 (0:00:00.056) 0:03:58.888 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:78 Thursday 21 July 2022 19:27:02 +0000 (0:00:00.042) 0:03:58.930 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:84 Thursday 21 July 2022 19:27:02 +0000 (0:00:00.038) 0:03:58.968 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:90 Thursday 21 July 2022 19:27:02 +0000 (0:00:00.038) 0:03:59.006 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_expected_crypttab_entries": null, "_storage_test_expected_crypttab_key_file": null }, "changed": false } TASK [get information about RAID] ********************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-md.yml:7 Thursday 21 July 2022 19:27:02 +0000 (0:00:00.037) 0:03:59.044 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-md.yml:13 Thursday 21 July 2022 19:27:02 +0000 (0:00:00.040) 0:03:59.085 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-md.yml:17 Thursday 21 July 2022 19:27:02 +0000 (0:00:00.035) 0:03:59.120 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-md.yml:21 Thursday 21 July 2022 19:27:02 +0000 (0:00:00.035) 0:03:59.156 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [check RAID active devices count] ***************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-md.yml:25 Thursday 21 July 2022 19:27:02 +0000 (0:00:00.035) 0:03:59.191 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [check RAID spare devices count] ****************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-md.yml:31 Thursday 21 July 2022 19:27:03 +0000 (0:00:00.039) 0:03:59.231 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [check RAID metadata version] ********************************************* task path: 
/tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-md.yml:37 Thursday 21 July 2022 19:27:03 +0000 (0:00:00.036) 0:03:59.267 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [parse the actual size of the volume] ************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-size.yml:3 Thursday 21 July 2022 19:27:03 +0000 (0:00:00.039) 0:03:59.307 ********* ok: [/cache/centos-8.qcow2] => { "bytes": 3221225472, "changed": false, "lvm": "3g", "parted": "3GiB", "size": "3 GiB" } TASK [parse the requested size of the volume] ********************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-size.yml:9 Thursday 21 July 2022 19:27:03 +0000 (0:00:00.392) 0:03:59.700 ********* ok: [/cache/centos-8.qcow2] => { "bytes": 3221225472, "changed": false, "lvm": "3g", "parted": "3GiB", "size": "3 GiB" } TASK [Establish base value for expected size] ********************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-size.yml:15 Thursday 21 July 2022 19:27:03 +0000 (0:00:00.382) 0:04:00.082 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_expected_size": "3221225472" }, "changed": false } TASK [debug] ******************************************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-size.yml:20 Thursday 21 July 2022 19:27:03 +0000 (0:00:00.050) 0:04:00.133 ********* ok: [/cache/centos-8.qcow2] => { "storage_test_expected_size": "3221225472" } TASK [debug] ******************************************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-size.yml:25 Thursday 21 July 2022 19:27:03 +0000 (0:00:00.038) 0:04:00.172 ********* skipping: [/cache/centos-8.qcow2] => {} TASK [debug] ******************************************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-size.yml:28 Thursday 21 July 2022 19:27:04 +0000 (0:00:00.039) 0:04:00.211 ********* skipping: [/cache/centos-8.qcow2] => {} TASK [Get the size of parent/pool device] ************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-size.yml:31 Thursday 21 July 2022 19:27:04 +0000 (0:00:00.039) 0:04:00.251 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [debug] ******************************************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-size.yml:36 Thursday 21 July 2022 19:27:04 +0000 (0:00:00.038) 0:04:00.290 ********* skipping: [/cache/centos-8.qcow2] => {} TASK [Calculate the expected size based on pool size and percentage value] ***** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-size.yml:39 Thursday 21 July 2022 19:27:04 +0000 (0:00:00.042) 0:04:00.333 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [debug] ******************************************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-size.yml:44 Thursday 21 July 2022 19:27:04 +0000 (0:00:00.038) 0:04:00.371 ********* ok: [/cache/centos-8.qcow2] => { "storage_test_actual_size": { "bytes": 3221225472, "changed": false, "failed": false, "lvm": "3g", "parted": "3GiB", "size": "3 GiB" } } TASK [debug] ******************************************************************* task path: 
/tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-size.yml:47 Thursday 21 July 2022 19:27:04 +0000 (0:00:00.039) 0:04:00.411 ********* ok: [/cache/centos-8.qcow2] => { "storage_test_expected_size": "3221225472" } TASK [assert] ****************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-size.yml:50 Thursday 21 July 2022 19:27:04 +0000 (0:00:00.038) 0:04:00.450 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Get information about the LV] ******************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-cache.yml:6 Thursday 21 July 2022 19:27:04 +0000 (0:00:00.066) 0:04:00.516 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "cmd": [ "lvs", "--noheadings", "--nameprefixes", "--units=b", "--nosuffix", "--unquoted", "-o", "name,attr,cache_total_blocks,chunk_size,segtype", "vg1/lv1" ], "delta": "0:00:00.036696", "end": "2022-07-21 19:27:04.401616", "rc": 0, "start": "2022-07-21 19:27:04.364920" } STDOUT: LVM2_LV_NAME=lv1 LVM2_LV_ATTR=Vwi-aotz-- LVM2_CACHE_TOTAL_BLOCKS= LVM2_CHUNK_SIZE=0 LVM2_SEGTYPE=thin TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-cache.yml:14 Thursday 21 July 2022 19:27:04 +0000 (0:00:00.427) 0:04:00.944 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_lv_segtype": [ "thin" ] }, "changed": false } TASK [check segment type] ****************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-cache.yml:17 Thursday 21 July 2022 19:27:04 +0000 (0:00:00.094) 0:04:01.039 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-cache.yml:22 Thursday 21 July 2022 19:27:04 +0000 (0:00:00.094) 0:04:01.133 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [parse the requested cache size] ****************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-cache.yml:26 Thursday 21 July 2022 19:27:04 +0000 (0:00:00.038) 0:04:01.172 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-cache.yml:32 Thursday 21 July 2022 19:27:05 +0000 (0:00:00.039) 0:04:01.211 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check cache size] ******************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-cache.yml:36 Thursday 21 July 2022 19:27:05 +0000 (0:00:00.075) 0:04:01.287 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clean up facts] ********************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume.yml:16 Thursday 21 July 2022 19:27:05 +0000 (0:00:00.039) 0:04:01.326 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_volume_present": null }, "changed": false }
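The per-volume checks above end by reading the LV layout with lvs and asserting that vg1/lv1 is a thin volume (the LVM2_SEGTYPE=thin field in the STDOUT above). The tasks below are a minimal sketch of that kind of check, not the verbatim contents of test-verify-volume-cache.yml (the real file also handles cache sizes and is driven by conditionals); the task names, the lvs options, and the storage_test_lv_segtype fact mirror the log, while the register name storage_test_lv_info is illustrative only.

    # Sketch only: reproduces the segment-type check recorded above.
    - name: Get information about the LV
      ansible.builtin.command:
        cmd: >-
          lvs --noheadings --nameprefixes --units=b --nosuffix --unquoted
          -o name,attr,cache_total_blocks,chunk_size,segtype vg1/lv1
      register: storage_test_lv_info   # illustrative name, not taken from the log
      changed_when: false              # a read-only query never changes state

    - name: set_fact
      ansible.builtin.set_fact:
        # Pull the value out of the LVM2_SEGTYPE=... field; yields a one-element list
        storage_test_lv_segtype: "{{ storage_test_lv_info.stdout | regex_search('LVM2_SEGTYPE=(\\S+)', '\\1') }}"

    - name: check segment type
      ansible.builtin.assert:
        that: storage_test_lv_segtype[0] == 'thin'
        msg: Unexpected segment type for vg1/lv1

Registering the raw lvs output and asserting on the parsed fact is what lets the log reduce the whole check to a single "All assertions passed" line once the thin LV is laid out as requested.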
TASK [Verify the volumes with no pool were correctly managed] ****************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-role-results.yml:43 Thursday 21 July 2022 19:27:05 +0000 (0:00:00.038) 0:04:01.365 ********* TASK [Clean up variable namespace] ********************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/verify-role-results.yml:53 Thursday 21 July 2022 19:27:05 +0000 (0:00:00.024) 0:04:01.389 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_blkinfo": null, "storage_test_crypttab": null, "storage_test_fstab": null }, "changed": false } TASK [Change thinlv fs type] *************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/tests_create_thinp_then_remove.yml:58 Thursday 21 July 2022 19:27:05 +0000 (0:00:00.036) 0:04:01.426 ********* TASK [fedora.linux_system_roles.storage : set platform/version specific variables] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2 Thursday 21 July 2022 19:27:05 +0000 (0:00:00.055) 0:04:01.481 ********* included: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for /cache/centos-8.qcow2 TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2 Thursday 21 July 2022 19:27:05 +0000 (0:00:00.035) 0:04:01.516 ********* ok: [/cache/centos-8.qcow2] TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7 Thursday 21 July 2022 19:27:05 +0000 (0:00:00.545) 0:04:02.062 ********* skipping: [/cache/centos-8.qcow2] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [/cache/centos-8.qcow2] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [/cache/centos-8.qcow2] => (item=CentOS_8.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "vdo", "kmod-kvdo", "xfsprogs" ] }, "ansible_included_var_files": [ "/tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } ok: [/cache/centos-8.qcow2] => (item=CentOS_8.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "vdo", "kmod-kvdo", "xfsprogs" ] }, "ansible_included_var_files": [ "/tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } TASK [fedora.linux_system_roles.storage : define an empty list of pools to be used in testing] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5 Thursday 21 July 2022 19:27:05 +0000 (0:00:00.077) 0:04:02.139 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : define an empty list of volumes to be used in testing] *** task path: 
/tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9 Thursday 21 July 2022 19:27:05 +0000 (0:00:00.037) 0:04:02.176 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : include the appropriate provider tasks] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13 Thursday 21 July 2022 19:27:06 +0000 (0:00:00.038) 0:04:02.215 ********* redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount included: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for /cache/centos-8.qcow2 TASK [fedora.linux_system_roles.storage : get a list of rpm packages installed on host machine] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2 Thursday 21 July 2022 19:27:06 +0000 (0:00:00.069) 0:04:02.284 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : make sure blivet is available] ******* task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:7 Thursday 21 July 2022 19:27:06 +0000 (0:00:00.025) 0:04:02.310 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [fedora.linux_system_roles.storage : show storage_pools] ****************** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:13 Thursday 21 July 2022 19:27:07 +0000 (0:00:01.802) 0:04:04.112 ********* ok: [/cache/centos-8.qcow2] => { "storage_pools": [ { "disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ], "name": "vg1", "type": "lvm", "volumes": [ { "fs_type": "xfs", "name": "lv1", "thin": true, "thin_pool_name": "tpool1" } ] } ] } TASK [fedora.linux_system_roles.storage : show storage_volumes] **************** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:18 Thursday 21 July 2022 19:27:08 +0000 (0:00:00.090) 0:04:04.202 ********* ok: [/cache/centos-8.qcow2] => { "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined" } TASK [fedora.linux_system_roles.storage : get required packages] *************** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:23 Thursday 21 July 2022 19:27:08 +0000 (0:00:00.040) 0:04:04.243 ********* ok: [/cache/centos-8.qcow2] => { "actions": [], "changed": false, "crypts": [], "leaves": [], "mounts": [], "packages": [ "lvm2", "xfsprogs" ], "pools": [], "volumes": [] } TASK [fedora.linux_system_roles.storage : enable copr repositories if needed] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:35 Thursday 21 July 2022 19:27:09 +0000 (0:00:01.860) 0:04:06.104 ********* included: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml for /cache/centos-8.qcow2 TASK [fedora.linux_system_roles.storage : check if the COPR support packages should be installed] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:2 Thursday 21 July 2022 19:27:09 +0000 (0:00:00.050) 
0:04:06.154 ********* TASK [fedora.linux_system_roles.storage : make sure COPR support packages are present] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:13 Thursday 21 July 2022 19:27:09 +0000 (0:00:00.035) 0:04:06.190 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : enable COPRs] ************************ task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:18 Thursday 21 July 2022 19:27:10 +0000 (0:00:00.082) 0:04:06.273 ********* TASK [fedora.linux_system_roles.storage : make sure required packages are installed] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:41 Thursday 21 July 2022 19:27:10 +0000 (0:00:00.037) 0:04:06.310 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [fedora.linux_system_roles.storage : get service facts] ******************* task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:47 Thursday 21 July 2022 19:27:11 +0000 (0:00:01.815) 0:04:08.126 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "blivet.service": { "name": "blivet.service", "source": "systemd", "state": "inactive", "status": "static" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "chrony-dnssrv@.service": { "name": "chrony-dnssrv@.service", "source": "systemd", "state": "unknown", "status": "static" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cockpit-motd.service": { "name": "cockpit-motd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cockpit-wsinstance-http.service": { "name": "cockpit-wsinstance-http.service", "source": "systemd", "state": "inactive", "status": "static" }, 
"cockpit-wsinstance-https-factory@.service": { "name": "cockpit-wsinstance-https-factory@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cockpit-wsinstance-https@.service": { "name": "cockpit-wsinstance-https@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cockpit.service": { "name": "cockpit.service", "source": "systemd", "state": "inactive", "status": "static" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cpupower.service": { "name": "cpupower.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "static" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "dbus-org.freedesktop.portable1.service": { "name": "dbus-org.freedesktop.portable1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "running", "status": "static" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "running", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", 
"status": "static" }, "fcoe.service": { "name": "fcoe.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "halt-local.service": { "name": "halt-local.service", "source": "systemd", "state": "inactive", "status": "static" }, "import-state.service": { "name": "import-state.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "iscsi-shutdown.service": { "name": "iscsi-shutdown.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iscsi.service": { "name": "iscsi.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iscsid.service": { "name": "iscsid.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "loadmodules.service": { "name": "loadmodules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "lvm2-activation-early.service": { "name": "lvm2-activation-early.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "lvm2-activation.service": { "name": "lvm2-activation.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "lvm2-pvscan@.service": { "name": "lvm2-pvscan@.service", "source": "systemd", "state": "unknown", "status": "static" }, "lvm2-pvscan@259:4.service": { "name": "lvm2-pvscan@259:4.service", "source": "systemd", "state": "stopped", "status": "active" }, "lvm2-pvscan@259:5.service": { "name": "lvm2-pvscan@259:5.service", "source": "systemd", "state": "stopped", "status": "active" }, "lvm2-pvscan@259:6.service": { "name": "lvm2-pvscan@259:6.service", "source": "systemd", "state": 
"stopped", "status": "active" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "mdadm-grow-continue@.service": { "name": "mdadm-grow-continue@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdadm-last-resort@.service": { "name": "mdadm-last-resort@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdcheck_continue.service": { "name": "mdcheck_continue.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdcheck_start.service": { "name": "mdcheck_start.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmon@.service": { "name": "mdmon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdmonitor-oneshot.service": { "name": "mdmonitor-oneshot.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmonitor.service": { "name": "mdmonitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "messagebus.service": { "name": "messagebus.service", "source": "systemd", "state": "active", "status": "static" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "multipathd.service": { "name": "multipathd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "ndctl-monitor.service": { "name": "ndctl-monitor.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-convert.service": { "name": "nfs-convert.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "oddjobd.service": { "name": "oddjobd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "packagekit-offline-update.service": { "name": "packagekit-offline-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "packagekit.service": { "name": "packagekit.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": 
"systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "running", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon.service": { "name": "quotaon.service", "source": "systemd", "state": "inactive", "status": "static" }, "rbdmap.service": { "name": "rbdmap.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rdisc.service": { "name": "rdisc.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "rngd-wake-threshold.service": { "name": "rngd-wake-threshold.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, 
"sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "running", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "active", "status": "enabled" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-resume@.service": { "name": "systemd-hibernate-resume@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", 
"source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-portabled.service": { "name": "systemd-portabled.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-quotacheck.service": { "name": "systemd-quotacheck.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-resolved.service": { "name": "systemd-resolved.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "masked" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", 
"status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "tcsd.service": { "name": "tcsd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "teamd@.service": { "name": "teamd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "timedatex.service": { "name": "timedatex.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "tuned.service": { "name": "tuned.service", "source": "systemd", "state": "running", "status": "enabled" }, "unbound-anchor.service": { "name": "unbound-anchor.service", "source": "systemd", "state": "stopped", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "vdo-start-by-dev@.service": { "name": "vdo-start-by-dev@.service", "source": "systemd", "state": "unknown", "status": "static" }, "vdo.service": { "name": "vdo.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "yppasswdd.service": { "name": "yppasswdd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ypserv.service": { "name": "ypserv.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ypxfrd.service": { "name": "ypxfrd.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] ***** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:53 Thursday 21 July 2022 19:27:13 +0000 (0:00:01.643) 0:04:09.769 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_cryptsetup_services": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:58 Thursday 21 July 2022 19:27:13 +0000 (0:00:00.061) 0:04:09.831 ********* TASK [fedora.linux_system_roles.storage : manage the pools and volumes to match the specified state] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:64 Thursday 21 July 2022 19:27:13 +0000 (0:00:00.023) 0:04:09.855 ********* ok: [/cache/centos-8.qcow2] => { "actions": [], "changed": false, "crypts": [], "leaves": [ "/dev/sr0", "/dev/vda1", "/dev/sda", "/dev/sdb", "/dev/sdc", "/dev/mapper/vg1-lv1", "/dev/vdb", "/dev/vdc", "/dev/vdd" ], "mounts": [ { "path": "/opt/test1", "state": "absent" } ], 
"packages": [ "xfsprogs", "lvm2" ], "pools": [ { "disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "vg1", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/vg1-lv1", "_kernel_device": "/dev/dm-4", "_mount_id": "/dev/mapper/vg1-lv1", "_raw_device": "/dev/mapper/vg1-lv1", "_raw_kernel_device": "/dev/dm-4", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_options": "defaults", "mount_passno": 0, "mount_point": null, "name": "lv1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "size": 3221225472, "state": "present", "thin": true, "thin_pool_name": "tpool1", "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:78 Thursday 21 July 2022 19:27:15 +0000 (0:00:02.020) 0:04:11.875 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:90 Thursday 21 July 2022 19:27:15 +0000 (0:00:00.040) 0:04:11.916 ********* TASK [fedora.linux_system_roles.storage : show blivet_output] ****************** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:96 Thursday 21 July 2022 19:27:15 +0000 (0:00:00.022) 0:04:11.939 ********* ok: [/cache/centos-8.qcow2] => { "blivet_output": { "actions": [], "changed": false, "crypts": [], "failed": false, "leaves": [ "/dev/sr0", "/dev/vda1", "/dev/sda", "/dev/sdb", "/dev/sdc", "/dev/mapper/vg1-lv1", "/dev/vdb", "/dev/vdc", "/dev/vdd" ], "mounts": [ { "path": "/opt/test1", "state": "absent" } ], "packages": [ "xfsprogs", "lvm2" ], "pools": [ { "disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "vg1", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/vg1-lv1", "_kernel_device": "/dev/dm-4", "_mount_id": "/dev/mapper/vg1-lv1", "_raw_device": "/dev/mapper/vg1-lv1", "_raw_kernel_device": "/dev/dm-4", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ], "encryption": false, 
"encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_options": "defaults", "mount_passno": 0, "mount_point": null, "name": "lv1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "size": 3221225472, "state": "present", "thin": true, "thin_pool_name": "tpool1", "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } } TASK [fedora.linux_system_roles.storage : set the list of pools for test verification] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:101 Thursday 21 July 2022 19:27:15 +0000 (0:00:00.041) 0:04:11.980 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_pools_list": [ { "disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "vg1", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/vg1-lv1", "_kernel_device": "/dev/dm-4", "_mount_id": "/dev/mapper/vg1-lv1", "_raw_device": "/dev/mapper/vg1-lv1", "_raw_kernel_device": "/dev/dm-4", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_options": "defaults", "mount_passno": 0, "mount_point": null, "name": "lv1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "size": 3221225472, "state": "present", "thin": true, "thin_pool_name": "tpool1", "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ] }, "changed": false } TASK [fedora.linux_system_roles.storage : set the list of volumes for test verification] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:105 Thursday 21 July 2022 19:27:15 +0000 (0:00:00.037) 0:04:12.017 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : remove obsolete mounts] ************** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:121 Thursday 21 July 2022 19:27:15 +0000 (0:00:00.036) 0:04:12.054 ********* redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount changed: [/cache/centos-8.qcow2] => (item={'path': '/opt/test1', 'state': 'absent'}) => { "ansible_loop_var": "mount_info", "backup_file": "", "boot": "yes", "changed": true, "dump": "0", "fstab": "/etc/fstab", "mount_info": { "path": "/opt/test1", "state": "absent" }, "name": "/opt/test1", "opts": "defaults", "passno": "0" } TASK 
[fedora.linux_system_roles.storage : tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:132 Thursday 21 July 2022 19:27:16 +0000 (0:00:00.421) 0:04:12.476 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.storage : set up new/current mounts] *********** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:137 Thursday 21 July 2022 19:27:16 +0000 (0:00:00.635) 0:04:13.111 ********* TASK [fedora.linux_system_roles.storage : tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:148 Thursday 21 July 2022 19:27:16 +0000 (0:00:00.039) 0:04:13.150 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.storage : retrieve facts for the /etc/crypttab file] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:156 Thursday 21 July 2022 19:27:17 +0000 (0:00:00.616) 0:04:13.767 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "stat": { "atime": 1658431449.4320083, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1643141385.117, "dev": 64513, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 135, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0600", "mtime": 1643141019.537, "nlink": 1, "path": "/etc/crypttab", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": "3147672035", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : manage /etc/crypttab to account for changes we just made] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:161 Thursday 21 July 2022 19:27:17 +0000 (0:00:00.416) 0:04:14.184 ********* TASK [fedora.linux_system_roles.storage : Update facts] ************************ task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:183 Thursday 21 July 2022 19:27:18 +0000 (0:00:00.024) 0:04:14.208 ********* ok: [/cache/centos-8.qcow2] META: role_complete for /cache/centos-8.qcow2 TASK [include_tasks] *********************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/tests_create_thinp_then_remove.yml:72 Thursday 21 July 2022 19:27:18 +0000 (0:00:00.948) 0:04:15.157 ********* included: /tmp/tmp_0pjp8ed/tests/storage/verify-role-results.yml for /cache/centos-8.qcow2 TASK [Print out pool information] ********************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-role-results.yml:1 Thursday 21 July 2022 19:27:19 +0000 (0:00:00.050) 0:04:15.207 ********* ok: [/cache/centos-8.qcow2] => { "_storage_pools_list": [ { "disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, 
"encryption_password": null, "name": "vg1", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/vg1-lv1", "_kernel_device": "/dev/dm-4", "_mount_id": "/dev/mapper/vg1-lv1", "_raw_device": "/dev/mapper/vg1-lv1", "_raw_kernel_device": "/dev/dm-4", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_options": "defaults", "mount_passno": 0, "mount_point": null, "name": "lv1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "size": 3221225472, "state": "present", "thin": true, "thin_pool_name": "tpool1", "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ] } TASK [Print out volume information] ******************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-role-results.yml:6 Thursday 21 July 2022 19:27:19 +0000 (0:00:00.052) 0:04:15.260 ********* skipping: [/cache/centos-8.qcow2] => {} TASK [Collect info about the volumes.] ***************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-role-results.yml:14 Thursday 21 July 2022 19:27:19 +0000 (0:00:00.037) 0:04:15.297 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "info": { "/dev/mapper/vg1-lv1": { "fstype": "xfs", "label": "", "name": "/dev/mapper/vg1-lv1", "size": "3G", "type": "lvm", "uuid": "1ea60a5c-f51b-4cc6-9e7d-084fe537536f" }, "/dev/mapper/vg1-tpool1": { "fstype": "", "label": "", "name": "/dev/mapper/vg1-tpool1", "size": "10G", "type": "lvm", "uuid": "" }, "/dev/mapper/vg1-tpool1-tpool": { "fstype": "", "label": "", "name": "/dev/mapper/vg1-tpool1-tpool", "size": "10G", "type": "lvm", "uuid": "" }, "/dev/mapper/vg1-tpool1_tdata": { "fstype": "", "label": "", "name": "/dev/mapper/vg1-tpool1_tdata", "size": "10G", "type": "lvm", "uuid": "" }, "/dev/mapper/vg1-tpool1_tmeta": { "fstype": "", "label": "", "name": "/dev/mapper/vg1-tpool1_tmeta", "size": "12M", "type": "lvm", "uuid": "" }, "/dev/nvme0n1": { "fstype": "", "label": "", "name": "/dev/nvme0n1", "size": "10G", "type": "disk", "uuid": "" }, "/dev/nvme0n1p1": { "fstype": "LVM2_member", "label": "", "name": "/dev/nvme0n1p1", "size": "10G", "type": "partition", "uuid": "ken164-erkD-vf0C-nvLO-nhmh-YBzF-bzv8kZ" }, "/dev/nvme1n1": { "fstype": "", "label": "", "name": "/dev/nvme1n1", "size": "10G", "type": "disk", "uuid": "" }, "/dev/nvme1n1p1": { "fstype": "LVM2_member", "label": "", "name": "/dev/nvme1n1p1", "size": "10G", "type": "partition", "uuid": "eBnMVU-t1UJ-ttot-SKPn-js6i-mJjN-e9h7dx" }, "/dev/nvme2n1": { "fstype": "", "label": "", "name": "/dev/nvme2n1", "size": "10G", "type": "disk", "uuid": "" }, "/dev/nvme2n1p1": { "fstype": "LVM2_member", "label": "", "name": "/dev/nvme2n1p1", "size": "10G", "type": "partition", "uuid": "xZT8wS-OKti-fe7H-wOMe-VpTY-S1Yx-RZZXxW" }, "/dev/sda": { "fstype": "", "label": "", "name": "/dev/sda", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdb": { "fstype": "", "label": "", "name": 
"/dev/sdb", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdc": { "fstype": "", "label": "", "name": "/dev/sdc", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sr0": { "fstype": "iso9660", "label": "cidata", "name": "/dev/sr0", "size": "364K", "type": "rom", "uuid": "2022-07-21-19-22-43-00" }, "/dev/vda": { "fstype": "", "label": "", "name": "/dev/vda", "size": "10G", "type": "disk", "uuid": "" }, "/dev/vda1": { "fstype": "xfs", "label": "", "name": "/dev/vda1", "size": "10G", "type": "partition", "uuid": "395b9844-e404-4857-afbb-c6edccaf72f3" }, "/dev/vdb": { "fstype": "", "label": "", "name": "/dev/vdb", "size": "10G", "type": "disk", "uuid": "" }, "/dev/vdc": { "fstype": "", "label": "", "name": "/dev/vdc", "size": "10G", "type": "disk", "uuid": "" }, "/dev/vdd": { "fstype": "", "label": "", "name": "/dev/vdd", "size": "10G", "type": "disk", "uuid": "" } } } TASK [Read the /etc/fstab file for volume existence] *************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-role-results.yml:19 Thursday 21 July 2022 19:27:19 +0000 (0:00:00.378) 0:04:15.676 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "cmd": [ "cat", "/etc/fstab" ], "delta": "0:00:00.003504", "end": "2022-07-21 19:27:19.514067", "rc": 0, "start": "2022-07-21 19:27:19.510563" } STDOUT: # # /etc/fstab # Created by anaconda on Tue Jan 25 20:03:39 2022 # # Accessible filesystems, by reference, are maintained under '/dev/disk/'. # See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info. # # After editing this file, run 'systemctl daemon-reload' to update systemd # units generated from this file. # UUID=395b9844-e404-4857-afbb-c6edccaf72f3 / xfs defaults 0 0 TASK [Read the /etc/crypttab file] ********************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/verify-role-results.yml:24 Thursday 21 July 2022 19:27:19 +0000 (0:00:00.377) 0:04:16.053 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "cmd": [ "cat", "/etc/crypttab" ], "delta": "0:00:00.002968", "end": "2022-07-21 19:27:19.897484", "failed_when_result": false, "rc": 0, "start": "2022-07-21 19:27:19.894516" } TASK [Verify the volumes listed in storage_pools were correctly managed] ******* task path: /tmp/tmp_0pjp8ed/tests/storage/verify-role-results.yml:33 Thursday 21 July 2022 19:27:20 +0000 (0:00:00.385) 0:04:16.439 ********* included: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool.yml for /cache/centos-8.qcow2 => (item={'disks': ['nvme0n1', 'nvme1n1', 'nvme2n1'], 'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'name': 'vg1', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'state': 'present', 'type': 'lvm', 'volumes': [{'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': None, 'name': 'lv1', 'raid_level': None, 'size': 3221225472, 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'thin_pool_name': 'tpool1', 'thin_pool_size': None, 'thin': True, 'vdo_pool_size': None, 'disks': ['nvme0n1', 'nvme1n1', 'nvme2n1'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 
'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/vg1-lv1', '_raw_device': '/dev/mapper/vg1-lv1', '_mount_id': '/dev/mapper/vg1-lv1', '_kernel_device': '/dev/dm-4', '_raw_kernel_device': '/dev/dm-4'}], 'raid_chunk_size': None}) TASK [Set _storage_pool_tests] ************************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool.yml:5 Thursday 21 July 2022 19:27:20 +0000 (0:00:00.060) 0:04:16.499 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_pool_tests": [ "members", "volumes" ] }, "changed": false } TASK [include_tasks] *********************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool.yml:18 Thursday 21 July 2022 19:27:20 +0000 (0:00:00.070) 0:04:16.570 ********* included: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml for /cache/centos-8.qcow2 => (item=members) included: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-volumes.yml for /cache/centos-8.qcow2 => (item=volumes) TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml:1 Thursday 21 July 2022 19:27:20 +0000 (0:00:00.049) 0:04:16.619 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_expected_pv_count": "3", "_storage_test_pool_pvs_lvm": [ "/dev/nvme0n1p1", "/dev/nvme1n1p1", "/dev/nvme2n1p1" ] }, "changed": false } TASK [Get the canonical device path for each member device] ******************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml:6 Thursday 21 July 2022 19:27:20 +0000 (0:00:00.089) 0:04:16.709 ********* ok: [/cache/centos-8.qcow2] => (item=/dev/nvme0n1p1) => { "ansible_loop_var": "pv", "changed": false, "device": "/dev/nvme0n1p1", "pv": "/dev/nvme0n1p1" } ok: [/cache/centos-8.qcow2] => (item=/dev/nvme1n1p1) => { "ansible_loop_var": "pv", "changed": false, "device": "/dev/nvme1n1p1", "pv": "/dev/nvme1n1p1" } ok: [/cache/centos-8.qcow2] => (item=/dev/nvme2n1p1) => { "ansible_loop_var": "pv", "changed": false, "device": "/dev/nvme2n1p1", "pv": "/dev/nvme2n1p1" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml:15 Thursday 21 July 2022 19:27:21 +0000 (0:00:01.190) 0:04:17.899 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "__pvs_lvm_len": "3" }, "changed": false } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml:19 Thursday 21 July 2022 19:27:21 +0000 (0:00:00.139) 0:04:18.039 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_pool_pvs": [ "/dev/nvme0n1p1", "/dev/nvme1n1p1", "/dev/nvme2n1p1" ] }, "changed": false } TASK [Verify PV count] ********************************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml:23 Thursday 21 July 2022 19:27:21 +0000 (0:00:00.052) 0:04:18.092 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml:29 Thursday 21 July 2022 19:27:21 +0000 (0:00:00.053) 0:04:18.145 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { 
"_storage_test_expected_pv_type": "disk" }, "changed": false } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml:33 Thursday 21 July 2022 19:27:21 +0000 (0:00:00.040) 0:04:18.186 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_expected_pv_type": "partition" }, "changed": false } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml:37 Thursday 21 July 2022 19:27:22 +0000 (0:00:00.058) 0:04:18.244 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check the type of each PV] *********************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml:41 Thursday 21 July 2022 19:27:22 +0000 (0:00:00.025) 0:04:18.270 ********* ok: [/cache/centos-8.qcow2] => (item=/dev/nvme0n1p1) => { "ansible_loop_var": "pv", "changed": false, "pv": "/dev/nvme0n1p1" } MSG: All assertions passed ok: [/cache/centos-8.qcow2] => (item=/dev/nvme1n1p1) => { "ansible_loop_var": "pv", "changed": false, "pv": "/dev/nvme1n1p1" } MSG: All assertions passed ok: [/cache/centos-8.qcow2] => (item=/dev/nvme2n1p1) => { "ansible_loop_var": "pv", "changed": false, "pv": "/dev/nvme2n1p1" } MSG: All assertions passed TASK [Check MD RAID] *********************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml:50 Thursday 21 July 2022 19:27:22 +0000 (0:00:00.096) 0:04:18.366 ********* included: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-md.yml for /cache/centos-8.qcow2 TASK [get information about RAID] ********************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-md.yml:6 Thursday 21 July 2022 19:27:22 +0000 (0:00:00.044) 0:04:18.411 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-md.yml:12 Thursday 21 July 2022 19:27:22 +0000 (0:00:00.039) 0:04:18.450 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-md.yml:16 Thursday 21 July 2022 19:27:22 +0000 (0:00:00.027) 0:04:18.478 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-md.yml:20 Thursday 21 July 2022 19:27:22 +0000 (0:00:00.024) 0:04:18.502 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [check RAID active devices count] ***************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-md.yml:24 Thursday 21 July 2022 19:27:22 +0000 (0:00:00.023) 0:04:18.526 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [check RAID spare devices count] ****************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-md.yml:30 Thursday 21 July 2022 19:27:22 +0000 
(0:00:00.024) 0:04:18.550 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [check RAID metadata version] ********************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-md.yml:36 Thursday 21 July 2022 19:27:22 +0000 (0:00:00.023) 0:04:18.574 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-md.yml:44 Thursday 21 July 2022 19:27:22 +0000 (0:00:00.024) 0:04:18.598 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_md_active_devices_re": null, "storage_test_md_metadata_version_re": null, "storage_test_md_spare_devices_re": null }, "changed": false } TASK [Check LVM RAID] ********************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml:53 Thursday 21 July 2022 19:27:22 +0000 (0:00:00.035) 0:04:18.633 ********* included: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-members-lvmraid.yml for /cache/centos-8.qcow2 TASK [Validate pool member LVM RAID settings] ********************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-members-lvmraid.yml:1 Thursday 21 July 2022 19:27:22 +0000 (0:00:00.045) 0:04:18.679 ********* included: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-lvmraid.yml for /cache/centos-8.qcow2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': None, 'name': 'lv1', 'raid_level': None, 'size': 3221225472, 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'thin_pool_name': 'tpool1', 'thin_pool_size': None, 'thin': True, 'vdo_pool_size': None, 'disks': ['nvme0n1', 'nvme1n1', 'nvme2n1'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/vg1-lv1', '_raw_device': '/dev/mapper/vg1-lv1', '_mount_id': '/dev/mapper/vg1-lv1', '_kernel_device': '/dev/dm-4', '_raw_kernel_device': '/dev/dm-4'}) TASK [Get information about LVM RAID] ****************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-lvmraid.yml:3 Thursday 21 July 2022 19:27:22 +0000 (0:00:00.047) 0:04:18.726 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check that volume is LVM RAID] ******************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-lvmraid.yml:8 Thursday 21 July 2022 19:27:22 +0000 (0:00:00.030) 0:04:18.757 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-lvmraid.yml:12 Thursday 21 July 2022 19:27:22 +0000 (0:00:00.027) 0:04:18.785 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional 
result was False" } TASK [Check Thin Pools] ******************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml:56 Thursday 21 July 2022 19:27:22 +0000 (0:00:00.028) 0:04:18.813 ********* included: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-members-thin.yml for /cache/centos-8.qcow2 TASK [Validate pool member thinpool settings] ********************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-members-thin.yml:1 Thursday 21 July 2022 19:27:22 +0000 (0:00:00.046) 0:04:18.860 ********* included: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-thin.yml for /cache/centos-8.qcow2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': None, 'name': 'lv1', 'raid_level': None, 'size': 3221225472, 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'thin_pool_name': 'tpool1', 'thin_pool_size': None, 'thin': True, 'vdo_pool_size': None, 'disks': ['nvme0n1', 'nvme1n1', 'nvme2n1'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/vg1-lv1', '_raw_device': '/dev/mapper/vg1-lv1', '_mount_id': '/dev/mapper/vg1-lv1', '_kernel_device': '/dev/dm-4', '_raw_kernel_device': '/dev/dm-4'}) TASK [Get information about thinpool] ****************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-thin.yml:3 Thursday 21 July 2022 19:27:22 +0000 (0:00:00.044) 0:04:18.905 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "cmd": [ "lvs", "--noheading", "-o", "pool_lv", "--select", "lv_name=lv1&&segtype=thin", "vg1" ], "delta": "0:00:00.033008", "end": "2022-07-21 19:27:22.776828", "rc": 0, "start": "2022-07-21 19:27:22.743820" } STDOUT: tpool1 TASK [Check that volume is in correct thinpool (when thinp name is provided)] *** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-thin.yml:8 Thursday 21 July 2022 19:27:23 +0000 (0:00:00.409) 0:04:19.314 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Check that volume is in thinpool (when thinp name is not provided)] ****** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-thin.yml:13 Thursday 21 July 2022 19:27:23 +0000 (0:00:00.059) 0:04:19.373 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-thin.yml:17 Thursday 21 July 2022 19:27:23 +0000 (0:00:00.058) 0:04:19.432 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_lvmraid_status": null }, "changed": false } TASK [Check member encryption] ************************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml:59 Thursday 21 July 2022 19:27:23 +0000 (0:00:00.049) 0:04:19.482 ********* included: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-members-encryption.yml for /cache/centos-8.qcow2 TASK [set_fact] 
**************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-members-encryption.yml:4 Thursday 21 July 2022 19:27:23 +0000 (0:00:00.052) 0:04:19.534 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Validate pool member LUKS settings] ************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-members-encryption.yml:8 Thursday 21 July 2022 19:27:23 +0000 (0:00:00.143) 0:04:19.678 ********* skipping: [/cache/centos-8.qcow2] => (item=/dev/nvme0n1p1) => { "_storage_test_pool_member_path": "/dev/nvme0n1p1", "ansible_loop_var": "_storage_test_pool_member_path", "changed": false, "skip_reason": "Conditional result was False" } skipping: [/cache/centos-8.qcow2] => (item=/dev/nvme1n1p1) => { "_storage_test_pool_member_path": "/dev/nvme1n1p1", "ansible_loop_var": "_storage_test_pool_member_path", "changed": false, "skip_reason": "Conditional result was False" } skipping: [/cache/centos-8.qcow2] => (item=/dev/nvme2n1p1) => { "_storage_test_pool_member_path": "/dev/nvme2n1p1", "ansible_loop_var": "_storage_test_pool_member_path", "changed": false, "skip_reason": "Conditional result was False" } TASK [Validate pool member crypttab entries] *********************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-members-encryption.yml:15 Thursday 21 July 2022 19:27:23 +0000 (0:00:00.032) 0:04:19.711 ********* included: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml for /cache/centos-8.qcow2 => (item=/dev/nvme0n1p1) included: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml for /cache/centos-8.qcow2 => (item=/dev/nvme1n1p1) included: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml for /cache/centos-8.qcow2 => (item=/dev/nvme2n1p1) TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml:1 Thursday 21 July 2022 19:27:23 +0000 (0:00:00.053) 0:04:19.764 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_crypttab_entries": [] }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml:4 Thursday 21 July 2022 19:27:23 +0000 (0:00:00.048) 0:04:19.812 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml:9 Thursday 21 July 2022 19:27:23 +0000 (0:00:00.049) 0:04:19.861 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml:15 Thursday 21 July 2022 19:27:23 +0000 (0:00:00.036) 0:04:19.898 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml:21 Thursday 21 July 2022 19:27:23 +0000 (0:00:00.043) 0:04:19.942 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, 
"skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml:27 Thursday 21 July 2022 19:27:23 +0000 (0:00:00.039) 0:04:19.981 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_crypttab_entries": null }, "changed": false } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml:1 Thursday 21 July 2022 19:27:23 +0000 (0:00:00.038) 0:04:20.019 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_crypttab_entries": [] }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml:4 Thursday 21 July 2022 19:27:23 +0000 (0:00:00.052) 0:04:20.072 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml:9 Thursday 21 July 2022 19:27:23 +0000 (0:00:00.050) 0:04:20.122 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml:15 Thursday 21 July 2022 19:27:23 +0000 (0:00:00.039) 0:04:20.162 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml:21 Thursday 21 July 2022 19:27:24 +0000 (0:00:00.036) 0:04:20.199 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml:27 Thursday 21 July 2022 19:27:24 +0000 (0:00:00.040) 0:04:20.239 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_crypttab_entries": null }, "changed": false } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml:1 Thursday 21 July 2022 19:27:24 +0000 (0:00:00.037) 0:04:20.277 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_crypttab_entries": [] }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml:4 Thursday 21 July 2022 19:27:24 +0000 (0:00:00.050) 0:04:20.327 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml:9 Thursday 21 July 2022 19:27:24 +0000 (0:00:00.056) 0:04:20.384 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml:15 Thursday 21 July 
2022 19:27:24 +0000 (0:00:00.039) 0:04:20.424 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml:21 Thursday 21 July 2022 19:27:24 +0000 (0:00:00.037) 0:04:20.461 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml:27 Thursday 21 July 2022 19:27:24 +0000 (0:00:00.036) 0:04:20.498 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_crypttab_entries": null }, "changed": false } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-members-encryption.yml:22 Thursday 21 July 2022 19:27:24 +0000 (0:00:00.039) 0:04:20.537 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_crypttab_key_file": null }, "changed": false } TASK [Check VDO] *************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml:62 Thursday 21 July 2022 19:27:24 +0000 (0:00:00.034) 0:04:20.572 ********* included: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-members-vdo.yml for /cache/centos-8.qcow2 TASK [Validate pool member VDO settings] *************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-members-vdo.yml:1 Thursday 21 July 2022 19:27:24 +0000 (0:00:00.048) 0:04:20.621 ********* included: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-vdo.yml for /cache/centos-8.qcow2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': None, 'name': 'lv1', 'raid_level': None, 'size': 3221225472, 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'thin_pool_name': 'tpool1', 'thin_pool_size': None, 'thin': True, 'vdo_pool_size': None, 'disks': ['nvme0n1', 'nvme1n1', 'nvme2n1'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/vg1-lv1', '_raw_device': '/dev/mapper/vg1-lv1', '_mount_id': '/dev/mapper/vg1-lv1', '_kernel_device': '/dev/dm-4', '_raw_kernel_device': '/dev/dm-4'}) TASK [get information about VDO deduplication] ********************************* task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-vdo.yml:3 Thursday 21 July 2022 19:27:24 +0000 (0:00:00.046) 0:04:20.667 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-vdo.yml:8 Thursday 21 July 2022 19:27:24 +0000 (0:00:00.024) 0:04:20.691 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was 
False" } TASK [assert] ****************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-vdo.yml:11 Thursday 21 July 2022 19:27:24 +0000 (0:00:00.023) 0:04:20.715 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [assert] ****************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-vdo.yml:16 Thursday 21 July 2022 19:27:24 +0000 (0:00:00.024) 0:04:20.739 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-vdo.yml:21 Thursday 21 July 2022 19:27:24 +0000 (0:00:00.023) 0:04:20.762 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [assert] ****************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-vdo.yml:24 Thursday 21 July 2022 19:27:24 +0000 (0:00:00.023) 0:04:20.786 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [assert] ****************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-vdo.yml:29 Thursday 21 July 2022 19:27:24 +0000 (0:00:00.023) 0:04:20.809 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-vdo.yml:39 Thursday 21 July 2022 19:27:24 +0000 (0:00:00.024) 0:04:20.834 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_vdo_status": null }, "changed": false } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml:65 Thursday 21 July 2022 19:27:24 +0000 (0:00:00.037) 0:04:20.871 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "__pvs_lvm_len": null, "_storage_test_expected_pv_count": null, "_storage_test_expected_pv_type": null, "_storage_test_pool_pvs": [], "_storage_test_pool_pvs_lvm": [] }, "changed": false } TASK [verify the volumes] ****************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-volumes.yml:3 Thursday 21 July 2022 19:27:24 +0000 (0:00:00.038) 0:04:20.910 ********* included: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume.yml for /cache/centos-8.qcow2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': None, 'name': 'lv1', 'raid_level': None, 'size': 3221225472, 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'thin_pool_name': 'tpool1', 'thin_pool_size': None, 'thin': True, 'vdo_pool_size': None, 'disks': ['nvme0n1', 'nvme1n1', 'nvme2n1'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 
'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/vg1-lv1', '_raw_device': '/dev/mapper/vg1-lv1', '_mount_id': '/dev/mapper/vg1-lv1', '_kernel_device': '/dev/dm-4', '_raw_kernel_device': '/dev/dm-4'}) TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume.yml:2 Thursday 21 July 2022 19:27:24 +0000 (0:00:00.044) 0:04:20.954 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_volume_present": true, "_storage_volume_tests": [ "mount", "fstab", "fs", "device", "encryption", "md", "size", "cache" ] }, "changed": false } TASK [include_tasks] *********************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume.yml:10 Thursday 21 July 2022 19:27:24 +0000 (0:00:00.087) 0:04:21.042 ********* included: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-mount.yml for /cache/centos-8.qcow2 => (item=mount) included: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-fstab.yml for /cache/centos-8.qcow2 => (item=fstab) included: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-fs.yml for /cache/centos-8.qcow2 => (item=fs) included: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-device.yml for /cache/centos-8.qcow2 => (item=device) included: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml for /cache/centos-8.qcow2 => (item=encryption) included: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-md.yml for /cache/centos-8.qcow2 => (item=md) included: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-size.yml for /cache/centos-8.qcow2 => (item=size) included: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-cache.yml for /cache/centos-8.qcow2 => (item=cache) TASK [Get expected mount device based on device type] ************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-mount.yml:6 Thursday 21 July 2022 19:27:24 +0000 (0:00:00.117) 0:04:21.159 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_device_path": "/dev/mapper/vg1-lv1" }, "changed": false } TASK [Set some facts] ********************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-mount.yml:10 Thursday 21 July 2022 19:27:25 +0000 (0:00:00.044) 0:04:21.204 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_mount_device_matches": [], "storage_test_mount_expected_match_count": "0", "storage_test_mount_point_matches": [], "storage_test_swap_expected_matches": "0" }, "changed": false } TASK [Verify the current mount state by device] ******************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-mount.yml:20 Thursday 21 July 2022 19:27:25 +0000 (0:00:00.059) 0:04:21.264 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the current mount state by mount point] *************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-mount.yml:29 Thursday 21 July 2022 19:27:25 +0000 (0:00:00.026) 0:04:21.290 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Verify the mount fs type] ************************************************ task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-mount.yml:37 Thursday 21 July 2022 19:27:25 +0000 (0:00:00.054) 0:04:21.344 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, 
"skip_reason": "Conditional result was False" } TASK [command] ***************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-mount.yml:46 Thursday 21 July 2022 19:27:25 +0000 (0:00:00.041) 0:04:21.385 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Gather swap info] ******************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-mount.yml:50 Thursday 21 July 2022 19:27:25 +0000 (0:00:00.026) 0:04:21.412 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify swap status] ****************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-mount.yml:55 Thursday 21 July 2022 19:27:25 +0000 (0:00:00.026) 0:04:21.438 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Unset facts] ************************************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-mount.yml:65 Thursday 21 July 2022 19:27:25 +0000 (0:00:00.025) 0:04:21.464 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_mount_device_matches": null, "storage_test_mount_expected_match_count": null, "storage_test_mount_point_matches": null, "storage_test_swap_expected_matches": null, "storage_test_swaps": null, "storage_test_sys_node": null }, "changed": false } TASK [Set some variables for fstab checking] *********************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-fstab.yml:2 Thursday 21 July 2022 19:27:25 +0000 (0:00:00.039) 0:04:21.503 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": "0", "storage_test_fstab_expected_mount_options_matches": "1", "storage_test_fstab_expected_mount_point_matches": "0", "storage_test_fstab_id_matches": [], "storage_test_fstab_mount_options_matches": [], "storage_test_fstab_mount_point_matches": [] }, "changed": false } TASK [Verify that the device identifier appears in /etc/fstab] ***************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-fstab.yml:12 Thursday 21 July 2022 19:27:25 +0000 (0:00:00.060) 0:04:21.564 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Verify the fstab mount point] ******************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-fstab.yml:19 Thursday 21 July 2022 19:27:25 +0000 (0:00:00.050) 0:04:21.615 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Verify mount_options] **************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-fstab.yml:25 Thursday 21 July 2022 19:27:25 +0000 (0:00:00.052) 0:04:21.667 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clean up variables] ****************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-fstab.yml:34 Thursday 21 July 2022 19:27:25 +0000 (0:00:00.040) 0:04:21.707 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": null, "storage_test_fstab_expected_mount_options_matches": null, 
"storage_test_fstab_expected_mount_point_matches": null, "storage_test_fstab_id_matches": null, "storage_test_fstab_mount_options_matches": null, "storage_test_fstab_mount_point_matches": null }, "changed": false } TASK [Verify fs type] ********************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-fs.yml:4 Thursday 21 July 2022 19:27:25 +0000 (0:00:00.037) 0:04:21.744 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Verify fs label] ********************************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-fs.yml:10 Thursday 21 July 2022 19:27:25 +0000 (0:00:00.041) 0:04:21.786 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [See whether the device node is present] ********************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-device.yml:4 Thursday 21 July 2022 19:27:25 +0000 (0:00:00.044) 0:04:21.830 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "stat": { "atime": 1658431580.9230084, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1658431580.9230084, "dev": 6, "device_type": 64772, "executable": false, "exists": true, "gid": 6, "gr_name": "disk", "inode": 103033, "isblk": true, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/symlink", "mode": "0660", "mtime": 1658431580.9230084, "nlink": 1, "path": "/dev/mapper/vg1-lv1", "pw_name": "root", "readable": true, "rgrp": true, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": null, "wgrp": true, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [Verify the presence/absence of the device node] ************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-device.yml:10 Thursday 21 July 2022 19:27:26 +0000 (0:00:00.414) 0:04:22.245 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Make sure we got info about this volume] ********************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-device.yml:15 Thursday 21 July 2022 19:27:26 +0000 (0:00:00.041) 0:04:22.286 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [(1/2) Process volume type (set initial value)] *************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-device.yml:21 Thursday 21 July 2022 19:27:26 +0000 (0:00:00.040) 0:04:22.327 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "st_volume_type": "lvm" }, "changed": false } TASK [(2/2) Process volume type (get RAID value)] ****************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-device.yml:25 Thursday 21 July 2022 19:27:26 +0000 (0:00:00.038) 0:04:22.365 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the volume's device type] ***************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-device.yml:30 Thursday 21 July 2022 19:27:26 +0000 (0:00:00.023) 0:04:22.389 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Stat the LUKS device, if encrypted] ************************************** task path: 
/tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:3 Thursday 21 July 2022 19:27:26 +0000 (0:00:00.037) 0:04:22.426 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Ensure cryptsetup is present] ******************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:10 Thursday 21 July 2022 19:27:26 +0000 (0:00:00.023) 0:04:22.450 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [Collect LUKS info for this volume] *************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:15 Thursday 21 July 2022 19:27:28 +0000 (0:00:01.933) 0:04:24.383 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the presence/absence of the LUKS device node] ********************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:21 Thursday 21 July 2022 19:27:28 +0000 (0:00:00.028) 0:04:24.412 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that the raw device is the same as the device if not encrypted] *** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:27 Thursday 21 July 2022 19:27:28 +0000 (0:00:00.033) 0:04:24.446 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Make sure we got info about the LUKS volume if encrypted] **************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:33 Thursday 21 July 2022 19:27:28 +0000 (0:00:00.060) 0:04:24.506 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the LUKS volume's device type if encrypted] *********************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:39 Thursday 21 July 2022 19:27:28 +0000 (0:00:00.027) 0:04:24.533 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS version] ****************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:44 Thursday 21 July 2022 19:27:28 +0000 (0:00:00.023) 0:04:24.557 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS key size] ***************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:50 Thursday 21 July 2022 19:27:28 +0000 (0:00:00.023) 0:04:24.580 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS cipher] ******************************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:56 Thursday 21 July 2022 19:27:28 +0000 (0:00:00.024) 0:04:24.605 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:62 Thursday 21 July 2022 19:27:28 +0000 (0:00:00.022) 0:04:24.627 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { 
"_storage_test_crypttab_entries": [], "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:67 Thursday 21 July 2022 19:27:28 +0000 (0:00:00.051) 0:04:24.679 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:72 Thursday 21 July 2022 19:27:28 +0000 (0:00:00.053) 0:04:24.732 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:78 Thursday 21 July 2022 19:27:28 +0000 (0:00:00.040) 0:04:24.772 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:84 Thursday 21 July 2022 19:27:28 +0000 (0:00:00.038) 0:04:24.810 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:90 Thursday 21 July 2022 19:27:28 +0000 (0:00:00.037) 0:04:24.848 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_expected_crypttab_entries": null, "_storage_test_expected_crypttab_key_file": null }, "changed": false } TASK [get information about RAID] ********************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-md.yml:7 Thursday 21 July 2022 19:27:28 +0000 (0:00:00.035) 0:04:24.884 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-md.yml:13 Thursday 21 July 2022 19:27:28 +0000 (0:00:00.033) 0:04:24.917 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-md.yml:17 Thursday 21 July 2022 19:27:28 +0000 (0:00:00.033) 0:04:24.951 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-md.yml:21 Thursday 21 July 2022 19:27:28 +0000 (0:00:00.033) 0:04:24.985 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [check RAID active devices count] ***************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-md.yml:25 Thursday 21 July 2022 19:27:28 +0000 (0:00:00.035) 0:04:25.021 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": 
"Conditional result was False" } TASK [check RAID spare devices count] ****************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-md.yml:31 Thursday 21 July 2022 19:27:28 +0000 (0:00:00.038) 0:04:25.059 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [check RAID metadata version] ********************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-md.yml:37 Thursday 21 July 2022 19:27:28 +0000 (0:00:00.035) 0:04:25.094 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [parse the actual size of the volume] ************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-size.yml:3 Thursday 21 July 2022 19:27:28 +0000 (0:00:00.034) 0:04:25.128 ********* ok: [/cache/centos-8.qcow2] => { "bytes": 3221225472, "changed": false, "lvm": "3g", "parted": "3GiB", "size": "3 GiB" } TASK [parse the requested size of the volume] ********************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-size.yml:9 Thursday 21 July 2022 19:27:29 +0000 (0:00:00.358) 0:04:25.487 ********* ok: [/cache/centos-8.qcow2] => { "bytes": 3221225472, "changed": false, "lvm": "3g", "parted": "3GiB", "size": "3 GiB" } TASK [Establish base value for expected size] ********************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-size.yml:15 Thursday 21 July 2022 19:27:29 +0000 (0:00:00.399) 0:04:25.886 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_expected_size": "3221225472" }, "changed": false } TASK [debug] ******************************************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-size.yml:20 Thursday 21 July 2022 19:27:29 +0000 (0:00:00.102) 0:04:25.989 ********* ok: [/cache/centos-8.qcow2] => { "storage_test_expected_size": "3221225472" } TASK [debug] ******************************************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-size.yml:25 Thursday 21 July 2022 19:27:29 +0000 (0:00:00.038) 0:04:26.028 ********* skipping: [/cache/centos-8.qcow2] => {} TASK [debug] ******************************************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-size.yml:28 Thursday 21 July 2022 19:27:29 +0000 (0:00:00.040) 0:04:26.068 ********* skipping: [/cache/centos-8.qcow2] => {} TASK [Get the size of parent/pool device] ************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-size.yml:31 Thursday 21 July 2022 19:27:29 +0000 (0:00:00.041) 0:04:26.110 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [debug] ******************************************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-size.yml:36 Thursday 21 July 2022 19:27:29 +0000 (0:00:00.041) 0:04:26.151 ********* skipping: [/cache/centos-8.qcow2] => {} TASK [Calculate the expected size based on pool size and percentage value] ***** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-size.yml:39 Thursday 21 July 2022 19:27:30 +0000 (0:00:00.088) 0:04:26.240 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [debug] 
******************************************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-size.yml:44 Thursday 21 July 2022 19:27:30 +0000 (0:00:00.048) 0:04:26.288 ********* ok: [/cache/centos-8.qcow2] => { "storage_test_actual_size": { "bytes": 3221225472, "changed": false, "failed": false, "lvm": "3g", "parted": "3GiB", "size": "3 GiB" } } TASK [debug] ******************************************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-size.yml:47 Thursday 21 July 2022 19:27:30 +0000 (0:00:00.043) 0:04:26.332 ********* ok: [/cache/centos-8.qcow2] => { "storage_test_expected_size": "3221225472" } TASK [assert] ****************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-size.yml:50 Thursday 21 July 2022 19:27:30 +0000 (0:00:00.046) 0:04:26.378 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Get information about the LV] ******************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-cache.yml:6 Thursday 21 July 2022 19:27:30 +0000 (0:00:00.054) 0:04:26.433 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "cmd": [ "lvs", "--noheadings", "--nameprefixes", "--units=b", "--nosuffix", "--unquoted", "-o", "name,attr,cache_total_blocks,chunk_size,segtype", "vg1/lv1" ], "delta": "0:00:00.039023", "end": "2022-07-21 19:27:30.329809", "rc": 0, "start": "2022-07-21 19:27:30.290786" } STDOUT: LVM2_LV_NAME=lv1 LVM2_LV_ATTR=Vwi-a-tz-- LVM2_CACHE_TOTAL_BLOCKS= LVM2_CHUNK_SIZE=0 LVM2_SEGTYPE=thin TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-cache.yml:14 Thursday 21 July 2022 19:27:30 +0000 (0:00:00.441) 0:04:26.875 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_lv_segtype": [ "thin" ] }, "changed": false } TASK [check segment type] ****************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-cache.yml:17 Thursday 21 July 2022 19:27:30 +0000 (0:00:00.052) 0:04:26.927 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-cache.yml:22 Thursday 21 July 2022 19:27:30 +0000 (0:00:00.056) 0:04:26.984 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [parse the requested cache size] ****************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-cache.yml:26 Thursday 21 July 2022 19:27:30 +0000 (0:00:00.041) 0:04:27.025 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-cache.yml:32 Thursday 21 July 2022 19:27:30 +0000 (0:00:00.043) 0:04:27.069 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check cache size] ******************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-cache.yml:36 Thursday 21 July 2022 19:27:30 +0000 (0:00:00.041) 0:04:27.110 ********* skipping: 
[/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clean up facts] ********************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume.yml:16 Thursday 21 July 2022 19:27:30 +0000 (0:00:00.041) 0:04:27.151 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_volume_present": null }, "changed": false } TASK [Verify the volumes with no pool were correctly managed] ****************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-role-results.yml:43 Thursday 21 July 2022 19:27:30 +0000 (0:00:00.039) 0:04:27.191 ********* TASK [Clean up variable namespace] ********************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/verify-role-results.yml:53 Thursday 21 July 2022 19:27:31 +0000 (0:00:00.024) 0:04:27.216 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_blkinfo": null, "storage_test_crypttab": null, "storage_test_fstab": null }, "changed": false } TASK [Create new LV under existing thinpool] *********************************** task path: /tmp/tmp_0pjp8ed/tests/storage/tests_create_thinp_then_remove.yml:74 Thursday 21 July 2022 19:27:31 +0000 (0:00:00.045) 0:04:27.261 ********* TASK [fedora.linux_system_roles.storage : set platform/version specific variables] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2 Thursday 21 July 2022 19:27:31 +0000 (0:00:00.087) 0:04:27.349 ********* included: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for /cache/centos-8.qcow2 TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2 Thursday 21 July 2022 19:27:31 +0000 (0:00:00.063) 0:04:27.412 ********* ok: [/cache/centos-8.qcow2] TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7 Thursday 21 July 2022 19:27:31 +0000 (0:00:00.542) 0:04:27.955 ********* skipping: [/cache/centos-8.qcow2] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [/cache/centos-8.qcow2] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [/cache/centos-8.qcow2] => (item=CentOS_8.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "vdo", "kmod-kvdo", "xfsprogs" ] }, "ansible_included_var_files": [ "/tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } ok: [/cache/centos-8.qcow2] => (item=CentOS_8.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "vdo", "kmod-kvdo", "xfsprogs" ] }, "ansible_included_var_files": [ "/tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } TASK [fedora.linux_system_roles.storage : define an empty 
list of pools to be used in testing] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5 Thursday 21 July 2022 19:27:31 +0000 (0:00:00.082) 0:04:28.037 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : define an empty list of volumes to be used in testing] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9 Thursday 21 July 2022 19:27:31 +0000 (0:00:00.040) 0:04:28.077 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : include the appropriate provider tasks] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13 Thursday 21 July 2022 19:27:31 +0000 (0:00:00.040) 0:04:28.118 ********* redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount included: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for /cache/centos-8.qcow2 TASK [fedora.linux_system_roles.storage : get a list of rpm packages installed on host machine] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2 Thursday 21 July 2022 19:27:31 +0000 (0:00:00.064) 0:04:28.183 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : make sure blivet is available] ******* task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:7 Thursday 21 July 2022 19:27:32 +0000 (0:00:00.023) 0:04:28.206 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [fedora.linux_system_roles.storage : show storage_pools] ****************** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:13 Thursday 21 July 2022 19:27:33 +0000 (0:00:01.913) 0:04:30.119 ********* ok: [/cache/centos-8.qcow2] => { "storage_pools": [ { "disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ], "name": "vg1", "type": "lvm", "volumes": [ { "mount_point": "/opt/test2", "name": "lv2", "size": "4g", "thin": true, "thin_pool_name": "tpool1" } ] } ] } TASK [fedora.linux_system_roles.storage : show storage_volumes] **************** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:18 Thursday 21 July 2022 19:27:34 +0000 (0:00:00.101) 0:04:30.220 ********* ok: [/cache/centos-8.qcow2] => { "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined" } TASK [fedora.linux_system_roles.storage : get required packages] *************** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:23 Thursday 21 July 2022 19:27:34 +0000 (0:00:00.048) 0:04:30.269 ********* ok: [/cache/centos-8.qcow2] => { "actions": [], "changed": false, "crypts": [], "leaves": [], "mounts": [], "packages": [ "lvm2" ], "pools": [], "volumes": [] } TASK [fedora.linux_system_roles.storage : enable copr repositories if needed] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:35 Thursday 21 July 2022 19:27:36 
+0000 (0:00:01.924) 0:04:32.194 ********* included: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml for /cache/centos-8.qcow2 TASK [fedora.linux_system_roles.storage : check if the COPR support packages should be installed] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:2 Thursday 21 July 2022 19:27:36 +0000 (0:00:00.052) 0:04:32.246 ********* TASK [fedora.linux_system_roles.storage : make sure COPR support packages are present] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:13 Thursday 21 July 2022 19:27:36 +0000 (0:00:00.038) 0:04:32.285 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : enable COPRs] ************************ task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:18 Thursday 21 July 2022 19:27:36 +0000 (0:00:00.038) 0:04:32.323 ********* TASK [fedora.linux_system_roles.storage : make sure required packages are installed] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:41 Thursday 21 July 2022 19:27:36 +0000 (0:00:00.036) 0:04:32.360 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [fedora.linux_system_roles.storage : get service facts] ******************* task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:47 Thursday 21 July 2022 19:27:38 +0000 (0:00:01.866) 0:04:34.227 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "blivet.service": { "name": "blivet.service", "source": "systemd", "state": "inactive", "status": "static" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "chrony-dnssrv@.service": { "name": "chrony-dnssrv@.service", "source": "systemd", "state": "unknown", "status": "static" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", 
"status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cockpit-motd.service": { "name": "cockpit-motd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cockpit-wsinstance-http.service": { "name": "cockpit-wsinstance-http.service", "source": "systemd", "state": "inactive", "status": "static" }, "cockpit-wsinstance-https-factory@.service": { "name": "cockpit-wsinstance-https-factory@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cockpit-wsinstance-https@.service": { "name": "cockpit-wsinstance-https@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cockpit.service": { "name": "cockpit.service", "source": "systemd", "state": "inactive", "status": "static" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cpupower.service": { "name": "cpupower.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "static" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "dbus-org.freedesktop.portable1.service": { "name": "dbus-org.freedesktop.portable1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "running", "status": "static" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "running", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", 
"source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fcoe.service": { "name": "fcoe.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "halt-local.service": { "name": "halt-local.service", "source": "systemd", "state": "inactive", "status": "static" }, "import-state.service": { "name": "import-state.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "iscsi-shutdown.service": { "name": "iscsi-shutdown.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iscsi.service": { "name": "iscsi.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iscsid.service": { "name": "iscsid.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "loadmodules.service": { "name": "loadmodules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "lvm2-activation-early.service": { "name": "lvm2-activation-early.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "lvm2-activation.service": { "name": "lvm2-activation.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "lvm2-pvscan@.service": { "name": "lvm2-pvscan@.service", "source": 
"systemd", "state": "unknown", "status": "static" }, "lvm2-pvscan@259:4.service": { "name": "lvm2-pvscan@259:4.service", "source": "systemd", "state": "stopped", "status": "active" }, "lvm2-pvscan@259:5.service": { "name": "lvm2-pvscan@259:5.service", "source": "systemd", "state": "stopped", "status": "active" }, "lvm2-pvscan@259:6.service": { "name": "lvm2-pvscan@259:6.service", "source": "systemd", "state": "stopped", "status": "active" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "mdadm-grow-continue@.service": { "name": "mdadm-grow-continue@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdadm-last-resort@.service": { "name": "mdadm-last-resort@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdcheck_continue.service": { "name": "mdcheck_continue.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdcheck_start.service": { "name": "mdcheck_start.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmon@.service": { "name": "mdmon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdmonitor-oneshot.service": { "name": "mdmonitor-oneshot.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmonitor.service": { "name": "mdmonitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "messagebus.service": { "name": "messagebus.service", "source": "systemd", "state": "active", "status": "static" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "multipathd.service": { "name": "multipathd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "ndctl-monitor.service": { "name": "ndctl-monitor.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-convert.service": { "name": "nfs-convert.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "oddjobd.service": { "name": "oddjobd.service", 
"source": "systemd", "state": "inactive", "status": "disabled" }, "packagekit-offline-update.service": { "name": "packagekit-offline-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "packagekit.service": { "name": "packagekit.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "running", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon.service": { "name": "quotaon.service", "source": "systemd", "state": "inactive", "status": "static" }, "rbdmap.service": { "name": "rbdmap.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rdisc.service": { "name": "rdisc.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "rngd-wake-threshold.service": { "name": "rngd-wake-threshold.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd.service": { "name": "sshd.service", 
"source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "running", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "active", "status": "enabled" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-resume@.service": { "name": "systemd-hibernate-resume@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": 
"systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-portabled.service": { "name": "systemd-portabled.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-quotacheck.service": { "name": "systemd-quotacheck.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-resolved.service": { "name": "systemd-resolved.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "masked" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", 
"state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "tcsd.service": { "name": "tcsd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "teamd@.service": { "name": "teamd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "timedatex.service": { "name": "timedatex.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "tuned.service": { "name": "tuned.service", "source": "systemd", "state": "running", "status": "enabled" }, "unbound-anchor.service": { "name": "unbound-anchor.service", "source": "systemd", "state": "stopped", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "vdo-start-by-dev@.service": { "name": "vdo-start-by-dev@.service", "source": "systemd", "state": "unknown", "status": "static" }, "vdo.service": { "name": "vdo.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "yppasswdd.service": { "name": "yppasswdd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ypserv.service": { "name": "ypserv.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ypxfrd.service": { "name": "ypxfrd.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] ***** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:53 Thursday 21 July 2022 19:27:39 +0000 (0:00:01.598) 0:04:35.825 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_cryptsetup_services": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:58 Thursday 21 July 2022 19:27:39 +0000 (0:00:00.058) 0:04:35.884 ********* TASK [fedora.linux_system_roles.storage : manage the pools and volumes to match the specified state] *** task path: 
/tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:64 Thursday 21 July 2022 19:27:39 +0000 (0:00:00.023) 0:04:35.908 ********* changed: [/cache/centos-8.qcow2] => { "actions": [ { "action": "create device", "device": "/dev/mapper/vg1-lv2", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/vg1-lv2", "fs_type": "xfs" } ], "changed": true, "crypts": [], "leaves": [ "/dev/sr0", "/dev/vda1", "/dev/sda", "/dev/sdb", "/dev/sdc", "/dev/mapper/vg1-lv1", "/dev/vdb", "/dev/vdc", "/dev/vdd", "/dev/mapper/vg1-lv2" ], "mounts": [ { "dump": 0, "fstype": "xfs", "opts": "defaults", "passno": 0, "path": "/opt/test2", "src": "/dev/mapper/vg1-lv2", "state": "mounted" } ], "packages": [ "xfsprogs", "lvm2" ], "pools": [ { "disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "vg1", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/vg1-lv2", "_kernel_device": "/dev/dm-5", "_mount_id": "/dev/mapper/vg1-lv2", "_raw_device": "/dev/mapper/vg1-lv2", "_raw_kernel_device": "/dev/dm-5", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test2", "name": "lv2", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "size": "4g", "state": "present", "thin": true, "thin_pool_name": "tpool1", "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:78 Thursday 21 July 2022 19:27:41 +0000 (0:00:02.215) 0:04:38.123 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:90 Thursday 21 July 2022 19:27:41 +0000 (0:00:00.039) 0:04:38.163 ********* TASK [fedora.linux_system_roles.storage : show blivet_output] ****************** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:96 Thursday 21 July 2022 19:27:41 +0000 (0:00:00.023) 0:04:38.186 ********* ok: [/cache/centos-8.qcow2] => { "blivet_output": { "actions": [ { "action": "create device", "device": "/dev/mapper/vg1-lv2", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/vg1-lv2", "fs_type": "xfs" } ], "changed": true, "crypts": [], "failed": false, "leaves": [ "/dev/sr0", "/dev/vda1", "/dev/sda", "/dev/sdb", "/dev/sdc", "/dev/mapper/vg1-lv1", "/dev/vdb", "/dev/vdc", "/dev/vdd", "/dev/mapper/vg1-lv2" ], 
"mounts": [ { "dump": 0, "fstype": "xfs", "opts": "defaults", "passno": 0, "path": "/opt/test2", "src": "/dev/mapper/vg1-lv2", "state": "mounted" } ], "packages": [ "xfsprogs", "lvm2" ], "pools": [ { "disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "vg1", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/vg1-lv2", "_kernel_device": "/dev/dm-5", "_mount_id": "/dev/mapper/vg1-lv2", "_raw_device": "/dev/mapper/vg1-lv2", "_raw_kernel_device": "/dev/dm-5", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test2", "name": "lv2", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "size": "4g", "state": "present", "thin": true, "thin_pool_name": "tpool1", "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } } TASK [fedora.linux_system_roles.storage : set the list of pools for test verification] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:101 Thursday 21 July 2022 19:27:42 +0000 (0:00:00.043) 0:04:38.229 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_pools_list": [ { "disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "vg1", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/vg1-lv2", "_kernel_device": "/dev/dm-5", "_mount_id": "/dev/mapper/vg1-lv2", "_raw_device": "/dev/mapper/vg1-lv2", "_raw_kernel_device": "/dev/dm-5", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test2", "name": "lv2", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "size": "4g", "state": "present", "thin": true, "thin_pool_name": "tpool1", "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ] }, "changed": false } TASK [fedora.linux_system_roles.storage : set the list of volumes for test verification] *** task path: 
/tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:105 Thursday 21 July 2022 19:27:42 +0000 (0:00:00.041) 0:04:38.271 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : remove obsolete mounts] ************** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:121 Thursday 21 July 2022 19:27:42 +0000 (0:00:00.040) 0:04:38.312 ********* TASK [fedora.linux_system_roles.storage : tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:132 Thursday 21 July 2022 19:27:42 +0000 (0:00:00.040) 0:04:38.352 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.storage : set up new/current mounts] *********** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:137 Thursday 21 July 2022 19:27:42 +0000 (0:00:00.618) 0:04:38.971 ********* redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount changed: [/cache/centos-8.qcow2] => (item={'src': '/dev/mapper/vg1-lv2', 'path': '/opt/test2', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted'}) => { "ansible_loop_var": "mount_info", "backup_file": "", "boot": "yes", "changed": true, "dump": "0", "fstab": "/etc/fstab", "fstype": "xfs", "mount_info": { "dump": 0, "fstype": "xfs", "opts": "defaults", "passno": 0, "path": "/opt/test2", "src": "/dev/mapper/vg1-lv2", "state": "mounted" }, "name": "/opt/test2", "opts": "defaults", "passno": "0", "src": "/dev/mapper/vg1-lv2" } TASK [fedora.linux_system_roles.storage : tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:148 Thursday 21 July 2022 19:27:43 +0000 (0:00:00.441) 0:04:39.412 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.storage : retrieve facts for the /etc/crypttab file] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:156 Thursday 21 July 2022 19:27:43 +0000 (0:00:00.680) 0:04:40.093 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "stat": { "atime": 1658431449.4320083, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1643141385.117, "dev": 64513, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 135, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0600", "mtime": 1643141019.537, "nlink": 1, "path": "/etc/crypttab", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": "3147672035", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : manage /etc/crypttab to account for changes we just made] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:161 Thursday 21 July 2022 
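The "set up new/current mounts" change above was applied by ansible.posix.mount (note the module redirect in the log). Stated as a standalone task for the single loop item it processed, it would look roughly like this sketch:

    - name: Mount the new thin volume (sketch of the logged loop item)
      ansible.posix.mount:
        src: /dev/mapper/vg1-lv2
        path: /opt/test2
        fstype: xfs
        opts: defaults
        state: mounted
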
19:27:44 +0000 (0:00:00.420) 0:04:40.513 ********* TASK [fedora.linux_system_roles.storage : Update facts] ************************ task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:183 Thursday 21 July 2022 19:27:44 +0000 (0:00:00.024) 0:04:40.538 ********* ok: [/cache/centos-8.qcow2] META: role_complete for /cache/centos-8.qcow2 TASK [include_tasks] *********************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/tests_create_thinp_then_remove.yml:89 Thursday 21 July 2022 19:27:45 +0000 (0:00:00.961) 0:04:41.500 ********* included: /tmp/tmp_0pjp8ed/tests/storage/verify-role-results.yml for /cache/centos-8.qcow2 TASK [Print out pool information] ********************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-role-results.yml:1 Thursday 21 July 2022 19:27:45 +0000 (0:00:00.051) 0:04:41.552 ********* ok: [/cache/centos-8.qcow2] => { "_storage_pools_list": [ { "disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "vg1", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/vg1-lv2", "_kernel_device": "/dev/dm-5", "_mount_id": "/dev/mapper/vg1-lv2", "_raw_device": "/dev/mapper/vg1-lv2", "_raw_kernel_device": "/dev/dm-5", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test2", "name": "lv2", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "size": "4g", "state": "present", "thin": true, "thin_pool_name": "tpool1", "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ] } TASK [Print out volume information] ******************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-role-results.yml:6 Thursday 21 July 2022 19:27:45 +0000 (0:00:00.055) 0:04:41.607 ********* skipping: [/cache/centos-8.qcow2] => {} TASK [Collect info about the volumes.] 
***************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-role-results.yml:14 Thursday 21 July 2022 19:27:45 +0000 (0:00:00.038) 0:04:41.646 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "info": { "/dev/mapper/vg1-lv1": { "fstype": "xfs", "label": "", "name": "/dev/mapper/vg1-lv1", "size": "3G", "type": "lvm", "uuid": "1ea60a5c-f51b-4cc6-9e7d-084fe537536f" }, "/dev/mapper/vg1-lv2": { "fstype": "xfs", "label": "", "name": "/dev/mapper/vg1-lv2", "size": "4G", "type": "lvm", "uuid": "fe659719-27f3-485c-81fa-5bf5eab40f73" }, "/dev/mapper/vg1-tpool1": { "fstype": "", "label": "", "name": "/dev/mapper/vg1-tpool1", "size": "10G", "type": "lvm", "uuid": "" }, "/dev/mapper/vg1-tpool1-tpool": { "fstype": "", "label": "", "name": "/dev/mapper/vg1-tpool1-tpool", "size": "10G", "type": "lvm", "uuid": "" }, "/dev/mapper/vg1-tpool1_tdata": { "fstype": "", "label": "", "name": "/dev/mapper/vg1-tpool1_tdata", "size": "10G", "type": "lvm", "uuid": "" }, "/dev/mapper/vg1-tpool1_tmeta": { "fstype": "", "label": "", "name": "/dev/mapper/vg1-tpool1_tmeta", "size": "12M", "type": "lvm", "uuid": "" }, "/dev/nvme0n1": { "fstype": "", "label": "", "name": "/dev/nvme0n1", "size": "10G", "type": "disk", "uuid": "" }, "/dev/nvme0n1p1": { "fstype": "LVM2_member", "label": "", "name": "/dev/nvme0n1p1", "size": "10G", "type": "partition", "uuid": "ken164-erkD-vf0C-nvLO-nhmh-YBzF-bzv8kZ" }, "/dev/nvme1n1": { "fstype": "", "label": "", "name": "/dev/nvme1n1", "size": "10G", "type": "disk", "uuid": "" }, "/dev/nvme1n1p1": { "fstype": "LVM2_member", "label": "", "name": "/dev/nvme1n1p1", "size": "10G", "type": "partition", "uuid": "eBnMVU-t1UJ-ttot-SKPn-js6i-mJjN-e9h7dx" }, "/dev/nvme2n1": { "fstype": "", "label": "", "name": "/dev/nvme2n1", "size": "10G", "type": "disk", "uuid": "" }, "/dev/nvme2n1p1": { "fstype": "LVM2_member", "label": "", "name": "/dev/nvme2n1p1", "size": "10G", "type": "partition", "uuid": "xZT8wS-OKti-fe7H-wOMe-VpTY-S1Yx-RZZXxW" }, "/dev/sda": { "fstype": "", "label": "", "name": "/dev/sda", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdb": { "fstype": "", "label": "", "name": "/dev/sdb", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdc": { "fstype": "", "label": "", "name": "/dev/sdc", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sr0": { "fstype": "iso9660", "label": "cidata", "name": "/dev/sr0", "size": "364K", "type": "rom", "uuid": "2022-07-21-19-22-43-00" }, "/dev/vda": { "fstype": "", "label": "", "name": "/dev/vda", "size": "10G", "type": "disk", "uuid": "" }, "/dev/vda1": { "fstype": "xfs", "label": "", "name": "/dev/vda1", "size": "10G", "type": "partition", "uuid": "395b9844-e404-4857-afbb-c6edccaf72f3" }, "/dev/vdb": { "fstype": "", "label": "", "name": "/dev/vdb", "size": "10G", "type": "disk", "uuid": "" }, "/dev/vdc": { "fstype": "", "label": "", "name": "/dev/vdc", "size": "10G", "type": "disk", "uuid": "" }, "/dev/vdd": { "fstype": "", "label": "", "name": "/dev/vdd", "size": "10G", "type": "disk", "uuid": "" } } } TASK [Read the /etc/fstab file for volume existence] *************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-role-results.yml:19 Thursday 21 July 2022 19:27:45 +0000 (0:00:00.399) 0:04:42.045 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "cmd": [ "cat", "/etc/fstab" ], "delta": "0:00:00.002561", "end": "2022-07-21 19:27:45.875655", "rc": 0, "start": "2022-07-21 19:27:45.873094" } STDOUT: # # /etc/fstab # Created by anaconda on Tue Jan 25 20:03:39 2022 # # Accessible 
filesystems, by reference, are maintained under '/dev/disk/'. # See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info. # # After editing this file, run 'systemctl daemon-reload' to update systemd # units generated from this file. # UUID=395b9844-e404-4857-afbb-c6edccaf72f3 / xfs defaults 0 0 /dev/mapper/vg1-lv2 /opt/test2 xfs defaults 0 0 TASK [Read the /etc/crypttab file] ********************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/verify-role-results.yml:24 Thursday 21 July 2022 19:27:46 +0000 (0:00:00.370) 0:04:42.416 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "cmd": [ "cat", "/etc/crypttab" ], "delta": "0:00:00.003814", "end": "2022-07-21 19:27:46.272106", "failed_when_result": false, "rc": 0, "start": "2022-07-21 19:27:46.268292" } TASK [Verify the volumes listed in storage_pools were correctly managed] ******* task path: /tmp/tmp_0pjp8ed/tests/storage/verify-role-results.yml:33 Thursday 21 July 2022 19:27:46 +0000 (0:00:00.401) 0:04:42.818 ********* included: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool.yml for /cache/centos-8.qcow2 => (item={'disks': ['nvme0n1', 'nvme1n1', 'nvme2n1'], 'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'name': 'vg1', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'state': 'present', 'type': 'lvm', 'volumes': [{'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test2', 'name': 'lv2', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'thin_pool_name': 'tpool1', 'thin_pool_size': None, 'thin': True, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/vg1-lv2', '_raw_device': '/dev/mapper/vg1-lv2', '_mount_id': '/dev/mapper/vg1-lv2', '_kernel_device': '/dev/dm-5', '_raw_kernel_device': '/dev/dm-5'}], 'raid_chunk_size': None}) TASK [Set _storage_pool_tests] ************************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool.yml:5 Thursday 21 July 2022 19:27:46 +0000 (0:00:00.067) 0:04:42.886 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_pool_tests": [ "members", "volumes" ] }, "changed": false } TASK [include_tasks] *********************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool.yml:18 Thursday 21 July 2022 19:27:46 +0000 (0:00:00.071) 0:04:42.957 ********* included: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml for /cache/centos-8.qcow2 => (item=members) included: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-volumes.yml for /cache/centos-8.qcow2 => (item=volumes) TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml:1 Thursday 21 July 2022 19:27:46 +0000 (0:00:00.083) 0:04:43.040 ********* ok: 
[/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_expected_pv_count": "3", "_storage_test_pool_pvs_lvm": [ "/dev/nvme0n1p1", "/dev/nvme1n1p1", "/dev/nvme2n1p1" ] }, "changed": false } TASK [Get the canonical device path for each member device] ******************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml:6 Thursday 21 July 2022 19:27:46 +0000 (0:00:00.061) 0:04:43.102 ********* ok: [/cache/centos-8.qcow2] => (item=/dev/nvme0n1p1) => { "ansible_loop_var": "pv", "changed": false, "device": "/dev/nvme0n1p1", "pv": "/dev/nvme0n1p1" } ok: [/cache/centos-8.qcow2] => (item=/dev/nvme1n1p1) => { "ansible_loop_var": "pv", "changed": false, "device": "/dev/nvme1n1p1", "pv": "/dev/nvme1n1p1" } ok: [/cache/centos-8.qcow2] => (item=/dev/nvme2n1p1) => { "ansible_loop_var": "pv", "changed": false, "device": "/dev/nvme2n1p1", "pv": "/dev/nvme2n1p1" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml:15 Thursday 21 July 2022 19:27:47 +0000 (0:00:01.073) 0:04:44.176 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "__pvs_lvm_len": "3" }, "changed": false } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml:19 Thursday 21 July 2022 19:27:48 +0000 (0:00:00.048) 0:04:44.224 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_pool_pvs": [ "/dev/nvme0n1p1", "/dev/nvme1n1p1", "/dev/nvme2n1p1" ] }, "changed": false } TASK [Verify PV count] ********************************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml:23 Thursday 21 July 2022 19:27:48 +0000 (0:00:00.048) 0:04:44.273 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml:29 Thursday 21 July 2022 19:27:48 +0000 (0:00:00.049) 0:04:44.322 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_expected_pv_type": "disk" }, "changed": false } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml:33 Thursday 21 July 2022 19:27:48 +0000 (0:00:00.035) 0:04:44.358 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_expected_pv_type": "partition" }, "changed": false } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml:37 Thursday 21 July 2022 19:27:48 +0000 (0:00:00.047) 0:04:44.406 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check the type of each PV] *********************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml:41 Thursday 21 July 2022 19:27:48 +0000 (0:00:00.027) 0:04:44.433 ********* ok: [/cache/centos-8.qcow2] => (item=/dev/nvme0n1p1) => { "ansible_loop_var": "pv", "changed": false, "pv": "/dev/nvme0n1p1" } MSG: All assertions passed ok: [/cache/centos-8.qcow2] => (item=/dev/nvme1n1p1) => { "ansible_loop_var": "pv", "changed": false, "pv": "/dev/nvme1n1p1" } MSG: All assertions passed ok: [/cache/centos-8.qcow2] => (item=/dev/nvme2n1p1) => { "ansible_loop_var": "pv", 
"changed": false, "pv": "/dev/nvme2n1p1" } MSG: All assertions passed TASK [Check MD RAID] *********************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml:50 Thursday 21 July 2022 19:27:48 +0000 (0:00:00.074) 0:04:44.507 ********* included: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-md.yml for /cache/centos-8.qcow2 TASK [get information about RAID] ********************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-md.yml:6 Thursday 21 July 2022 19:27:48 +0000 (0:00:00.045) 0:04:44.553 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-md.yml:12 Thursday 21 July 2022 19:27:48 +0000 (0:00:00.025) 0:04:44.578 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-md.yml:16 Thursday 21 July 2022 19:27:48 +0000 (0:00:00.024) 0:04:44.603 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-md.yml:20 Thursday 21 July 2022 19:27:48 +0000 (0:00:00.023) 0:04:44.626 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [check RAID active devices count] ***************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-md.yml:24 Thursday 21 July 2022 19:27:48 +0000 (0:00:00.024) 0:04:44.651 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [check RAID spare devices count] ****************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-md.yml:30 Thursday 21 July 2022 19:27:48 +0000 (0:00:00.023) 0:04:44.674 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [check RAID metadata version] ********************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-md.yml:36 Thursday 21 July 2022 19:27:48 +0000 (0:00:00.023) 0:04:44.697 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-md.yml:44 Thursday 21 July 2022 19:27:48 +0000 (0:00:00.023) 0:04:44.721 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_md_active_devices_re": null, "storage_test_md_metadata_version_re": null, "storage_test_md_spare_devices_re": null }, "changed": false } TASK [Check LVM RAID] ********************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml:53 Thursday 21 July 2022 19:27:48 +0000 (0:00:00.035) 0:04:44.757 ********* included: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-members-lvmraid.yml for /cache/centos-8.qcow2 TASK [Validate pool member LVM RAID settings] ********************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-members-lvmraid.yml:1 
Thursday 21 July 2022 19:27:48 +0000 (0:00:00.043) 0:04:44.801 ********* included: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-lvmraid.yml for /cache/centos-8.qcow2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test2', 'name': 'lv2', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'thin_pool_name': 'tpool1', 'thin_pool_size': None, 'thin': True, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/vg1-lv2', '_raw_device': '/dev/mapper/vg1-lv2', '_mount_id': '/dev/mapper/vg1-lv2', '_kernel_device': '/dev/dm-5', '_raw_kernel_device': '/dev/dm-5'}) TASK [Get information about LVM RAID] ****************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-lvmraid.yml:3 Thursday 21 July 2022 19:27:48 +0000 (0:00:00.042) 0:04:44.843 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check that volume is LVM RAID] ******************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-lvmraid.yml:8 Thursday 21 July 2022 19:27:48 +0000 (0:00:00.026) 0:04:44.870 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-lvmraid.yml:12 Thursday 21 July 2022 19:27:48 +0000 (0:00:00.026) 0:04:44.897 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check Thin Pools] ******************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml:56 Thursday 21 July 2022 19:27:48 +0000 (0:00:00.031) 0:04:44.928 ********* included: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-members-thin.yml for /cache/centos-8.qcow2 TASK [Validate pool member thinpool settings] ********************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-members-thin.yml:1 Thursday 21 July 2022 19:27:48 +0000 (0:00:00.050) 0:04:44.979 ********* included: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-thin.yml for /cache/centos-8.qcow2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test2', 'name': 'lv2', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'thin_pool_name': 'tpool1', 'thin_pool_size': None, 'thin': True, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 
'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/vg1-lv2', '_raw_device': '/dev/mapper/vg1-lv2', '_mount_id': '/dev/mapper/vg1-lv2', '_kernel_device': '/dev/dm-5', '_raw_kernel_device': '/dev/dm-5'}) TASK [Get information about thinpool] ****************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-thin.yml:3 Thursday 21 July 2022 19:27:48 +0000 (0:00:00.047) 0:04:45.027 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "cmd": [ "lvs", "--noheading", "-o", "pool_lv", "--select", "lv_name=lv2&&segtype=thin", "vg1" ], "delta": "0:00:00.038518", "end": "2022-07-21 19:27:48.971632", "rc": 0, "start": "2022-07-21 19:27:48.933114" } STDOUT: tpool1 TASK [Check that volume is in correct thinpool (when thinp name is provided)] *** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-thin.yml:8 Thursday 21 July 2022 19:27:49 +0000 (0:00:00.495) 0:04:45.522 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Check that volume is in thinpool (when thinp name is not provided)] ****** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-thin.yml:13 Thursday 21 July 2022 19:27:49 +0000 (0:00:00.060) 0:04:45.583 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-thin.yml:17 Thursday 21 July 2022 19:27:49 +0000 (0:00:00.055) 0:04:45.639 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_lvmraid_status": null }, "changed": false } TASK [Check member encryption] ************************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml:59 Thursday 21 July 2022 19:27:49 +0000 (0:00:00.040) 0:04:45.679 ********* included: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-members-encryption.yml for /cache/centos-8.qcow2 TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-members-encryption.yml:4 Thursday 21 July 2022 19:27:49 +0000 (0:00:00.049) 0:04:45.728 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Validate pool member LUKS settings] ************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-members-encryption.yml:8 Thursday 21 July 2022 19:27:49 +0000 (0:00:00.048) 0:04:45.777 ********* skipping: [/cache/centos-8.qcow2] => (item=/dev/nvme0n1p1) => { "_storage_test_pool_member_path": "/dev/nvme0n1p1", "ansible_loop_var": "_storage_test_pool_member_path", "changed": false, "skip_reason": "Conditional result was False" } skipping: [/cache/centos-8.qcow2] => (item=/dev/nvme1n1p1) => { "_storage_test_pool_member_path": "/dev/nvme1n1p1", "ansible_loop_var": "_storage_test_pool_member_path", "changed": false, "skip_reason": "Conditional result was False" } skipping: [/cache/centos-8.qcow2] => (item=/dev/nvme2n1p1) => { "_storage_test_pool_member_path": "/dev/nvme2n1p1", "ansible_loop_var": "_storage_test_pool_member_path", "changed": false, "skip_reason": "Conditional result was False" } TASK [Validate pool member crypttab entries] *********************************** task path: 
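The thin-pool membership check above shells out to lvs and then asserts on its output. Reproduced as a self-contained pair of tasks; the lvs invocation is exactly the one logged, while the assertion wording is a sketch:

    - name: Find the thin pool that lv2 belongs to
      ansible.builtin.command:
        argv:
          - lvs
          - "--noheading"
          - "-o"
          - pool_lv
          - "--select"
          - lv_name=lv2&&segtype=thin
          - vg1
      register: lv2_thinpool
      changed_when: false

    - name: Check that lv2 sits in tpool1 (sketch of the assertion)
      ansible.builtin.assert:
        that:
          - lv2_thinpool.stdout | trim == 'tpool1'
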
/tmp/tmp_0pjp8ed/tests/storage/verify-pool-members-encryption.yml:15 Thursday 21 July 2022 19:27:49 +0000 (0:00:00.033) 0:04:45.811 ********* included: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml for /cache/centos-8.qcow2 => (item=/dev/nvme0n1p1) included: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml for /cache/centos-8.qcow2 => (item=/dev/nvme1n1p1) included: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml for /cache/centos-8.qcow2 => (item=/dev/nvme2n1p1) TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml:1 Thursday 21 July 2022 19:27:49 +0000 (0:00:00.053) 0:04:45.864 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_crypttab_entries": [] }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml:4 Thursday 21 July 2022 19:27:49 +0000 (0:00:00.053) 0:04:45.918 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml:9 Thursday 21 July 2022 19:27:49 +0000 (0:00:00.054) 0:04:45.973 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml:15 Thursday 21 July 2022 19:27:49 +0000 (0:00:00.038) 0:04:46.012 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml:21 Thursday 21 July 2022 19:27:49 +0000 (0:00:00.039) 0:04:46.051 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml:27 Thursday 21 July 2022 19:27:49 +0000 (0:00:00.036) 0:04:46.088 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_crypttab_entries": null }, "changed": false } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml:1 Thursday 21 July 2022 19:27:49 +0000 (0:00:00.037) 0:04:46.125 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_crypttab_entries": [] }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml:4 Thursday 21 July 2022 19:27:49 +0000 (0:00:00.053) 0:04:46.179 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml:9 Thursday 21 July 2022 19:27:50 +0000 (0:00:00.051) 0:04:46.230 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] 
********************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml:15 Thursday 21 July 2022 19:27:50 +0000 (0:00:00.035) 0:04:46.266 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml:21 Thursday 21 July 2022 19:27:50 +0000 (0:00:00.041) 0:04:46.308 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml:27 Thursday 21 July 2022 19:27:50 +0000 (0:00:00.036) 0:04:46.345 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_crypttab_entries": null }, "changed": false } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml:1 Thursday 21 July 2022 19:27:50 +0000 (0:00:00.039) 0:04:46.384 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_crypttab_entries": [] }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml:4 Thursday 21 July 2022 19:27:50 +0000 (0:00:00.055) 0:04:46.440 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml:9 Thursday 21 July 2022 19:27:50 +0000 (0:00:00.052) 0:04:46.493 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml:15 Thursday 21 July 2022 19:27:50 +0000 (0:00:00.036) 0:04:46.529 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml:21 Thursday 21 July 2022 19:27:50 +0000 (0:00:00.035) 0:04:46.565 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml:27 Thursday 21 July 2022 19:27:50 +0000 (0:00:00.039) 0:04:46.605 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_crypttab_entries": null }, "changed": false } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-members-encryption.yml:22 Thursday 21 July 2022 19:27:50 +0000 (0:00:00.036) 0:04:46.642 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_crypttab_key_file": null }, "changed": false } TASK [Check VDO] *************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml:62 Thursday 21 July 2022 19:27:50 +0000 
(0:00:00.035) 0:04:46.678 ********* included: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-members-vdo.yml for /cache/centos-8.qcow2 TASK [Validate pool member VDO settings] *************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-members-vdo.yml:1 Thursday 21 July 2022 19:27:50 +0000 (0:00:00.051) 0:04:46.729 ********* included: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-vdo.yml for /cache/centos-8.qcow2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test2', 'name': 'lv2', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'thin_pool_name': 'tpool1', 'thin_pool_size': None, 'thin': True, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/vg1-lv2', '_raw_device': '/dev/mapper/vg1-lv2', '_mount_id': '/dev/mapper/vg1-lv2', '_kernel_device': '/dev/dm-5', '_raw_kernel_device': '/dev/dm-5'}) TASK [get information about VDO deduplication] ********************************* task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-vdo.yml:3 Thursday 21 July 2022 19:27:50 +0000 (0:00:00.047) 0:04:46.776 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-vdo.yml:8 Thursday 21 July 2022 19:27:50 +0000 (0:00:00.026) 0:04:46.802 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [assert] ****************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-vdo.yml:11 Thursday 21 July 2022 19:27:50 +0000 (0:00:00.069) 0:04:46.872 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [assert] ****************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-vdo.yml:16 Thursday 21 July 2022 19:27:50 +0000 (0:00:00.027) 0:04:46.899 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-vdo.yml:21 Thursday 21 July 2022 19:27:50 +0000 (0:00:00.025) 0:04:46.924 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [assert] ****************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-vdo.yml:24 Thursday 21 July 2022 19:27:50 +0000 (0:00:00.024) 0:04:46.949 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [assert] ****************************************************************** task path: 
/tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-vdo.yml:29 Thursday 21 July 2022 19:27:50 +0000 (0:00:00.025) 0:04:46.974 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-vdo.yml:39 Thursday 21 July 2022 19:27:50 +0000 (0:00:00.024) 0:04:46.998 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_vdo_status": null }, "changed": false } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml:65 Thursday 21 July 2022 19:27:50 +0000 (0:00:00.036) 0:04:47.035 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "__pvs_lvm_len": null, "_storage_test_expected_pv_count": null, "_storage_test_expected_pv_type": null, "_storage_test_pool_pvs": [], "_storage_test_pool_pvs_lvm": [] }, "changed": false } TASK [verify the volumes] ****************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-volumes.yml:3 Thursday 21 July 2022 19:27:50 +0000 (0:00:00.036) 0:04:47.072 ********* included: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume.yml for /cache/centos-8.qcow2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test2', 'name': 'lv2', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'thin_pool_name': 'tpool1', 'thin_pool_size': None, 'thin': True, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/vg1-lv2', '_raw_device': '/dev/mapper/vg1-lv2', '_mount_id': '/dev/mapper/vg1-lv2', '_kernel_device': '/dev/dm-5', '_raw_kernel_device': '/dev/dm-5'}) TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume.yml:2 Thursday 21 July 2022 19:27:50 +0000 (0:00:00.047) 0:04:47.119 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_volume_present": true, "_storage_volume_tests": [ "mount", "fstab", "fs", "device", "encryption", "md", "size", "cache" ] }, "changed": false } TASK [include_tasks] *********************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume.yml:10 Thursday 21 July 2022 19:27:50 +0000 (0:00:00.053) 0:04:47.172 ********* included: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-mount.yml for /cache/centos-8.qcow2 => (item=mount) included: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-fstab.yml for /cache/centos-8.qcow2 => (item=fstab) included: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-fs.yml for /cache/centos-8.qcow2 => (item=fs) included: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-device.yml for /cache/centos-8.qcow2 => (item=device) included: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml for 
/cache/centos-8.qcow2 => (item=encryption) included: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-md.yml for /cache/centos-8.qcow2 => (item=md) included: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-size.yml for /cache/centos-8.qcow2 => (item=size) included: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-cache.yml for /cache/centos-8.qcow2 => (item=cache) TASK [Get expected mount device based on device type] ************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-mount.yml:6 Thursday 21 July 2022 19:27:51 +0000 (0:00:00.088) 0:04:47.261 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_device_path": "/dev/mapper/vg1-lv2" }, "changed": false } TASK [Set some facts] ********************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-mount.yml:10 Thursday 21 July 2022 19:27:51 +0000 (0:00:00.055) 0:04:47.316 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_mount_device_matches": [ { "block_available": 1030395, "block_size": 4096, "block_total": 1046016, "block_used": 15621, "device": "/dev/mapper/vg1-lv2", "fstype": "xfs", "inode_available": 2097149, "inode_total": 2097152, "inode_used": 3, "mount": "/opt/test2", "options": "rw,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=64k,sunit=128,swidth=128,noquota", "size_available": 4220497920, "size_total": 4284481536, "uuid": "fe659719-27f3-485c-81fa-5bf5eab40f73" } ], "storage_test_mount_expected_match_count": "1", "storage_test_mount_point_matches": [ { "block_available": 1030395, "block_size": 4096, "block_total": 1046016, "block_used": 15621, "device": "/dev/mapper/vg1-lv2", "fstype": "xfs", "inode_available": 2097149, "inode_total": 2097152, "inode_used": 3, "mount": "/opt/test2", "options": "rw,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=64k,sunit=128,swidth=128,noquota", "size_available": 4220497920, "size_total": 4284481536, "uuid": "fe659719-27f3-485c-81fa-5bf5eab40f73" } ], "storage_test_swap_expected_matches": "0" }, "changed": false } TASK [Verify the current mount state by device] ******************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-mount.yml:20 Thursday 21 July 2022 19:27:51 +0000 (0:00:00.060) 0:04:47.376 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Verify the current mount state by mount point] *************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-mount.yml:29 Thursday 21 July 2022 19:27:51 +0000 (0:00:00.056) 0:04:47.433 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Verify the mount fs type] ************************************************ task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-mount.yml:37 Thursday 21 July 2022 19:27:51 +0000 (0:00:00.056) 0:04:47.489 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [command] ***************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-mount.yml:46 Thursday 21 July 2022 19:27:51 +0000 (0:00:00.053) 0:04:47.543 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Gather swap info] ******************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-mount.yml:50 Thursday 21 July 2022 19:27:51 +0000 (0:00:00.025) 0:04:47.569 
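The two mount-state assertions above are driven by the matches computed from the mount facts (device and mount point keys as shown in storage_test_mount_device_matches). An equivalent stand-alone check could be phrased as follows; this is a sketch, not the test's exact expression:

    - name: Verify /dev/mapper/vg1-lv2 is mounted on /opt/test2 (sketch)
      ansible.builtin.assert:
        that:
          - >-
            ansible_facts.mounts
            | selectattr('device', 'equalto', '/dev/mapper/vg1-lv2')
            | selectattr('mount', 'equalto', '/opt/test2')
            | list | length == 1
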
********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify swap status] ****************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-mount.yml:55 Thursday 21 July 2022 19:27:51 +0000 (0:00:00.025) 0:04:47.594 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Unset facts] ************************************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-mount.yml:65 Thursday 21 July 2022 19:27:51 +0000 (0:00:00.024) 0:04:47.619 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_mount_device_matches": null, "storage_test_mount_expected_match_count": null, "storage_test_mount_point_matches": null, "storage_test_swap_expected_matches": null, "storage_test_swaps": null, "storage_test_sys_node": null }, "changed": false } TASK [Set some variables for fstab checking] *********************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-fstab.yml:2 Thursday 21 July 2022 19:27:51 +0000 (0:00:00.036) 0:04:47.656 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": "1", "storage_test_fstab_expected_mount_options_matches": "1", "storage_test_fstab_expected_mount_point_matches": "1", "storage_test_fstab_id_matches": [ "/dev/mapper/vg1-lv2 " ], "storage_test_fstab_mount_options_matches": [ " /opt/test2 xfs defaults " ], "storage_test_fstab_mount_point_matches": [ " /opt/test2 " ] }, "changed": false } TASK [Verify that the device identifier appears in /etc/fstab] ***************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-fstab.yml:12 Thursday 21 July 2022 19:27:51 +0000 (0:00:00.095) 0:04:47.751 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Verify the fstab mount point] ******************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-fstab.yml:19 Thursday 21 July 2022 19:27:51 +0000 (0:00:00.056) 0:04:47.807 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Verify mount_options] **************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-fstab.yml:25 Thursday 21 July 2022 19:27:51 +0000 (0:00:00.056) 0:04:47.864 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clean up variables] ****************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-fstab.yml:34 Thursday 21 July 2022 19:27:51 +0000 (0:00:00.040) 0:04:47.904 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": null, "storage_test_fstab_expected_mount_options_matches": null, "storage_test_fstab_expected_mount_point_matches": null, "storage_test_fstab_id_matches": null, "storage_test_fstab_mount_options_matches": null, "storage_test_fstab_mount_point_matches": null }, "changed": false } TASK [Verify fs type] ********************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-fs.yml:4 Thursday 21 July 2022 19:27:51 +0000 (0:00:00.041) 0:04:47.945 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Verify fs label] 
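The fstab verification above boils down to "the device identifier and the mount point appear together on one line of /etc/fstab with the requested options". A compact sketch of such a check, using a simple regex rather than the role's exact match-counting logic:

    - name: Re-read /etc/fstab
      ansible.builtin.command:
        cmd: cat /etc/fstab
      register: fstab_contents
      changed_when: false

    - name: Assert the lv2 entry is present (sketch)
      ansible.builtin.assert:
        that:
          - fstab_contents.stdout is search('/dev/mapper/vg1-lv2\s+/opt/test2\s+xfs\s+defaults')
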
********************************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-fs.yml:10 Thursday 21 July 2022 19:27:51 +0000 (0:00:00.042) 0:04:47.988 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [See whether the device node is present] ********************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-device.yml:4 Thursday 21 July 2022 19:27:51 +0000 (0:00:00.043) 0:04:48.032 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "stat": { "atime": 1658431661.5230083, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1658431661.5230083, "dev": 6, "device_type": 64773, "executable": false, "exists": true, "gid": 6, "gr_name": "disk", "inode": 148702, "isblk": true, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/symlink", "mode": "0660", "mtime": 1658431661.5230083, "nlink": 1, "path": "/dev/mapper/vg1-lv2", "pw_name": "root", "readable": true, "rgrp": true, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": null, "wgrp": true, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [Verify the presence/absence of the device node] ************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-device.yml:10 Thursday 21 July 2022 19:27:52 +0000 (0:00:00.391) 0:04:48.424 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Make sure we got info about this volume] ********************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-device.yml:15 Thursday 21 July 2022 19:27:52 +0000 (0:00:00.040) 0:04:48.464 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [(1/2) Process volume type (set initial value)] *************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-device.yml:21 Thursday 21 July 2022 19:27:52 +0000 (0:00:00.040) 0:04:48.505 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "st_volume_type": "lvm" }, "changed": false } TASK [(2/2) Process volume type (get RAID value)] ****************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-device.yml:25 Thursday 21 July 2022 19:27:52 +0000 (0:00:00.082) 0:04:48.588 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the volume's device type] ***************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-device.yml:30 Thursday 21 July 2022 19:27:52 +0000 (0:00:00.063) 0:04:48.652 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Stat the LUKS device, if encrypted] ************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:3 Thursday 21 July 2022 19:27:52 +0000 (0:00:00.038) 0:04:48.690 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Ensure cryptsetup is present] ******************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:10 Thursday 21 July 2022 19:27:52 +0000 (0:00:00.023) 0:04:48.713 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "rc": 0, "results": [] } 
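The device-node check above stats /dev/mapper/vg1-lv2 and confirms it exists as a block device (isblk is true in the stat output). As a stand-alone sketch of that verification:

    - name: Stat the volume's device node
      ansible.builtin.stat:
        path: /dev/mapper/vg1-lv2
      register: lv2_node

    - name: Assert the node exists and is a block device (sketch)
      ansible.builtin.assert:
        that:
          - lv2_node.stat.exists
          - lv2_node.stat.isblk
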
MSG: Nothing to do TASK [Collect LUKS info for this volume] *************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:15 Thursday 21 July 2022 19:27:54 +0000 (0:00:01.916) 0:04:50.630 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the presence/absence of the LUKS device node] ********************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:21 Thursday 21 July 2022 19:27:54 +0000 (0:00:00.026) 0:04:50.656 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that the raw device is the same as the device if not encrypted] *** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:27 Thursday 21 July 2022 19:27:54 +0000 (0:00:00.026) 0:04:50.683 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Make sure we got info about the LUKS volume if encrypted] **************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:33 Thursday 21 July 2022 19:27:54 +0000 (0:00:00.055) 0:04:50.739 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the LUKS volume's device type if encrypted] *********************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:39 Thursday 21 July 2022 19:27:54 +0000 (0:00:00.028) 0:04:50.768 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS version] ****************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:44 Thursday 21 July 2022 19:27:54 +0000 (0:00:00.028) 0:04:50.796 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS key size] ***************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:50 Thursday 21 July 2022 19:27:54 +0000 (0:00:00.027) 0:04:50.824 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS cipher] ******************************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:56 Thursday 21 July 2022 19:27:54 +0000 (0:00:00.024) 0:04:50.849 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:62 Thursday 21 July 2022 19:27:54 +0000 (0:00:00.025) 0:04:50.874 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_crypttab_entries": [], "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:67 Thursday 21 July 2022 19:27:54 +0000 (0:00:00.054) 0:04:50.929 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: 
/tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:72 Thursday 21 July 2022 19:27:54 +0000 (0:00:00.053) 0:04:50.982 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:78 Thursday 21 July 2022 19:27:54 +0000 (0:00:00.041) 0:04:51.023 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:84 Thursday 21 July 2022 19:27:54 +0000 (0:00:00.043) 0:04:51.067 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:90 Thursday 21 July 2022 19:27:54 +0000 (0:00:00.041) 0:04:51.108 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_expected_crypttab_entries": null, "_storage_test_expected_crypttab_key_file": null }, "changed": false } TASK [get information about RAID] ********************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-md.yml:7 Thursday 21 July 2022 19:27:54 +0000 (0:00:00.040) 0:04:51.149 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-md.yml:13 Thursday 21 July 2022 19:27:54 +0000 (0:00:00.040) 0:04:51.190 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-md.yml:17 Thursday 21 July 2022 19:27:55 +0000 (0:00:00.042) 0:04:51.232 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-md.yml:21 Thursday 21 July 2022 19:27:55 +0000 (0:00:00.042) 0:04:51.275 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [check RAID active devices count] ***************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-md.yml:25 Thursday 21 July 2022 19:27:55 +0000 (0:00:00.040) 0:04:51.315 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [check RAID spare devices count] ****************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-md.yml:31 Thursday 21 July 2022 19:27:55 +0000 (0:00:00.037) 0:04:51.353 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [check RAID metadata version] ********************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-md.yml:37 Thursday 21 July 2022 19:27:55 +0000 (0:00:00.042) 
0:04:51.395 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [parse the actual size of the volume] ************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-size.yml:3 Thursday 21 July 2022 19:27:55 +0000 (0:00:00.040) 0:04:51.436 ********* ok: [/cache/centos-8.qcow2] => { "bytes": 4294967296, "changed": false, "lvm": "4g", "parted": "4GiB", "size": "4 GiB" } TASK [parse the requested size of the volume] ********************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-size.yml:9 Thursday 21 July 2022 19:27:55 +0000 (0:00:00.392) 0:04:51.828 ********* ok: [/cache/centos-8.qcow2] => { "bytes": 4294967296, "changed": false, "lvm": "4g", "parted": "4GiB", "size": "4 GiB" } TASK [Establish base value for expected size] ********************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-size.yml:15 Thursday 21 July 2022 19:27:56 +0000 (0:00:00.392) 0:04:52.220 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_expected_size": "4294967296" }, "changed": false } TASK [debug] ******************************************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-size.yml:20 Thursday 21 July 2022 19:27:56 +0000 (0:00:00.095) 0:04:52.316 ********* ok: [/cache/centos-8.qcow2] => { "storage_test_expected_size": "4294967296" } TASK [debug] ******************************************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-size.yml:25 Thursday 21 July 2022 19:27:56 +0000 (0:00:00.036) 0:04:52.352 ********* skipping: [/cache/centos-8.qcow2] => {} TASK [debug] ******************************************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-size.yml:28 Thursday 21 July 2022 19:27:56 +0000 (0:00:00.039) 0:04:52.392 ********* skipping: [/cache/centos-8.qcow2] => {} TASK [Get the size of parent/pool device] ************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-size.yml:31 Thursday 21 July 2022 19:27:56 +0000 (0:00:00.035) 0:04:52.428 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [debug] ******************************************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-size.yml:36 Thursday 21 July 2022 19:27:56 +0000 (0:00:00.036) 0:04:52.465 ********* skipping: [/cache/centos-8.qcow2] => {} TASK [Calculate the expected size based on pool size and percentage value] ***** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-size.yml:39 Thursday 21 July 2022 19:27:56 +0000 (0:00:00.036) 0:04:52.502 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [debug] ******************************************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-size.yml:44 Thursday 21 July 2022 19:27:56 +0000 (0:00:00.040) 0:04:52.542 ********* ok: [/cache/centos-8.qcow2] => { "storage_test_actual_size": { "bytes": 4294967296, "changed": false, "failed": false, "lvm": "4g", "parted": "4GiB", "size": "4 GiB" } } TASK [debug] ******************************************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-size.yml:47 Thursday 21 July 2022 19:27:56 +0000 (0:00:00.036) 0:04:52.579 
********* ok: [/cache/centos-8.qcow2] => { "storage_test_expected_size": "4294967296" } TASK [assert] ****************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-size.yml:50 Thursday 21 July 2022 19:27:56 +0000 (0:00:00.081) 0:04:52.660 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Get information about the LV] ******************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-cache.yml:6 Thursday 21 July 2022 19:27:56 +0000 (0:00:00.057) 0:04:52.717 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "cmd": [ "lvs", "--noheadings", "--nameprefixes", "--units=b", "--nosuffix", "--unquoted", "-o", "name,attr,cache_total_blocks,chunk_size,segtype", "vg1/lv2" ], "delta": "0:00:00.039297", "end": "2022-07-21 19:27:56.601317", "rc": 0, "start": "2022-07-21 19:27:56.562020" } STDOUT: LVM2_LV_NAME=lv2 LVM2_LV_ATTR=Vwi-aotz-- LVM2_CACHE_TOTAL_BLOCKS= LVM2_CHUNK_SIZE=0 LVM2_SEGTYPE=thin TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-cache.yml:14 Thursday 21 July 2022 19:27:56 +0000 (0:00:00.425) 0:04:53.143 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_lv_segtype": [ "thin" ] }, "changed": false } TASK [check segment type] ****************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-cache.yml:17 Thursday 21 July 2022 19:27:57 +0000 (0:00:00.127) 0:04:53.270 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-cache.yml:22 Thursday 21 July 2022 19:27:57 +0000 (0:00:00.050) 0:04:53.320 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [parse the requested cache size] ****************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-cache.yml:26 Thursday 21 July 2022 19:27:57 +0000 (0:00:00.037) 0:04:53.358 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-cache.yml:32 Thursday 21 July 2022 19:27:57 +0000 (0:00:00.040) 0:04:53.399 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check cache size] ******************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-cache.yml:36 Thursday 21 July 2022 19:27:57 +0000 (0:00:00.045) 0:04:53.444 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clean up facts] ********************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume.yml:16 Thursday 21 July 2022 19:27:57 +0000 (0:00:00.040) 0:04:53.485 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_volume_present": null }, "changed": false } TASK [Verify the volumes with no pool were correctly managed] ****************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-role-results.yml:43 Thursday 21 July 2022 19:27:57 +0000 
(0:00:00.038) 0:04:53.523 ********* TASK [Clean up variable namespace] ********************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/verify-role-results.yml:53 Thursday 21 July 2022 19:27:57 +0000 (0:00:00.023) 0:04:53.547 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_blkinfo": null, "storage_test_crypttab": null, "storage_test_fstab": null }, "changed": false } TASK [Remove existing LV under existing thinpool] ****************************** task path: /tmp/tmp_0pjp8ed/tests/storage/tests_create_thinp_then_remove.yml:91 Thursday 21 July 2022 19:27:57 +0000 (0:00:00.038) 0:04:53.586 ********* TASK [fedora.linux_system_roles.storage : set platform/version specific variables] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2 Thursday 21 July 2022 19:27:57 +0000 (0:00:00.065) 0:04:53.652 ********* included: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for /cache/centos-8.qcow2 TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2 Thursday 21 July 2022 19:27:57 +0000 (0:00:00.036) 0:04:53.688 ********* ok: [/cache/centos-8.qcow2] TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7 Thursday 21 July 2022 19:27:58 +0000 (0:00:00.514) 0:04:54.203 ********* skipping: [/cache/centos-8.qcow2] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [/cache/centos-8.qcow2] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [/cache/centos-8.qcow2] => (item=CentOS_8.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "vdo", "kmod-kvdo", "xfsprogs" ] }, "ansible_included_var_files": [ "/tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } ok: [/cache/centos-8.qcow2] => (item=CentOS_8.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "vdo", "kmod-kvdo", "xfsprogs" ] }, "ansible_included_var_files": [ "/tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } TASK [fedora.linux_system_roles.storage : define an empty list of pools to be used in testing] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5 Thursday 21 July 2022 19:27:58 +0000 (0:00:00.079) 0:04:54.282 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : define an empty list of volumes to be used in testing] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9 Thursday 21 July 2022 19:27:58 +0000 (0:00:00.037) 0:04:54.320 ********* ok: 
[/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : include the appropriate provider tasks] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13 Thursday 21 July 2022 19:27:58 +0000 (0:00:00.038) 0:04:54.359 ********* redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount included: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for /cache/centos-8.qcow2 TASK [fedora.linux_system_roles.storage : get a list of rpm packages installed on host machine] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2 Thursday 21 July 2022 19:27:58 +0000 (0:00:00.059) 0:04:54.418 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : make sure blivet is available] ******* task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:7 Thursday 21 July 2022 19:27:58 +0000 (0:00:00.024) 0:04:54.443 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [fedora.linux_system_roles.storage : show storage_pools] ****************** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:13 Thursday 21 July 2022 19:28:00 +0000 (0:00:01.844) 0:04:56.287 ********* ok: [/cache/centos-8.qcow2] => { "storage_pools": [ { "disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ], "name": "vg1", "type": "lvm", "volumes": [ { "mount_point": "/opt/test2", "name": "lv2", "state": "absent", "thin": true, "thin_pool_name": "tpool1" } ] } ] } TASK [fedora.linux_system_roles.storage : show storage_volumes] **************** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:18 Thursday 21 July 2022 19:28:00 +0000 (0:00:00.083) 0:04:56.371 ********* ok: [/cache/centos-8.qcow2] => { "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined" } TASK [fedora.linux_system_roles.storage : get required packages] *************** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:23 Thursday 21 July 2022 19:28:00 +0000 (0:00:00.037) 0:04:56.409 ********* ok: [/cache/centos-8.qcow2] => { "actions": [], "changed": false, "crypts": [], "leaves": [], "mounts": [], "packages": [ "lvm2" ], "pools": [], "volumes": [] } TASK [fedora.linux_system_roles.storage : enable copr repositories if needed] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:35 Thursday 21 July 2022 19:28:02 +0000 (0:00:02.192) 0:04:58.601 ********* included: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml for /cache/centos-8.qcow2 TASK [fedora.linux_system_roles.storage : check if the COPR support packages should be installed] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:2 Thursday 21 July 2022 19:28:02 +0000 (0:00:00.051) 0:04:58.652 ********* TASK [fedora.linux_system_roles.storage : make sure COPR support packages are present] *** task path: 
/tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:13 Thursday 21 July 2022 19:28:02 +0000 (0:00:00.039) 0:04:58.692 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : enable COPRs] ************************ task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:18 Thursday 21 July 2022 19:28:02 +0000 (0:00:00.041) 0:04:58.734 ********* TASK [fedora.linux_system_roles.storage : make sure required packages are installed] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:41 Thursday 21 July 2022 19:28:02 +0000 (0:00:00.038) 0:04:58.772 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [fedora.linux_system_roles.storage : get service facts] ******************* task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:47 Thursday 21 July 2022 19:28:04 +0000 (0:00:01.815) 0:05:00.588 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "blivet.service": { "name": "blivet.service", "source": "systemd", "state": "inactive", "status": "static" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "chrony-dnssrv@.service": { "name": "chrony-dnssrv@.service", "source": "systemd", "state": "unknown", "status": "static" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cockpit-motd.service": { "name": "cockpit-motd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cockpit-wsinstance-http.service": { "name": "cockpit-wsinstance-http.service", "source": "systemd", "state": "inactive", "status": "static" }, "cockpit-wsinstance-https-factory@.service": { "name": "cockpit-wsinstance-https-factory@.service", "source": "systemd", "state": "unknown", "status": "static" }, 
"cockpit-wsinstance-https@.service": { "name": "cockpit-wsinstance-https@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cockpit.service": { "name": "cockpit.service", "source": "systemd", "state": "inactive", "status": "static" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cpupower.service": { "name": "cpupower.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "static" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "dbus-org.freedesktop.portable1.service": { "name": "dbus-org.freedesktop.portable1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "running", "status": "static" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "running", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fcoe.service": { "name": "fcoe.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "fstrim.service": { "name": 
"fstrim.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "halt-local.service": { "name": "halt-local.service", "source": "systemd", "state": "inactive", "status": "static" }, "import-state.service": { "name": "import-state.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "iscsi-shutdown.service": { "name": "iscsi-shutdown.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iscsi.service": { "name": "iscsi.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iscsid.service": { "name": "iscsid.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "loadmodules.service": { "name": "loadmodules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "lvm2-activation-early.service": { "name": "lvm2-activation-early.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "lvm2-activation.service": { "name": "lvm2-activation.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "lvm2-pvscan@.service": { "name": "lvm2-pvscan@.service", "source": "systemd", "state": "unknown", "status": "static" }, "lvm2-pvscan@259:4.service": { "name": "lvm2-pvscan@259:4.service", "source": "systemd", "state": "stopped", "status": "active" }, "lvm2-pvscan@259:5.service": { "name": "lvm2-pvscan@259:5.service", "source": "systemd", "state": "stopped", "status": "active" }, "lvm2-pvscan@259:6.service": { "name": "lvm2-pvscan@259:6.service", "source": "systemd", "state": "stopped", "status": "active" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": 
"static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "mdadm-grow-continue@.service": { "name": "mdadm-grow-continue@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdadm-last-resort@.service": { "name": "mdadm-last-resort@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdcheck_continue.service": { "name": "mdcheck_continue.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdcheck_start.service": { "name": "mdcheck_start.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmon@.service": { "name": "mdmon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdmonitor-oneshot.service": { "name": "mdmonitor-oneshot.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmonitor.service": { "name": "mdmonitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "messagebus.service": { "name": "messagebus.service", "source": "systemd", "state": "active", "status": "static" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "multipathd.service": { "name": "multipathd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "ndctl-monitor.service": { "name": "ndctl-monitor.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-convert.service": { "name": "nfs-convert.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "oddjobd.service": { "name": "oddjobd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "packagekit-offline-update.service": { "name": "packagekit-offline-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "packagekit.service": { "name": "packagekit.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", 
"status": "not-found" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "running", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon.service": { "name": "quotaon.service", "source": "systemd", "state": "inactive", "status": "static" }, "rbdmap.service": { "name": "rbdmap.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rdisc.service": { "name": "rdisc.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "rngd-wake-threshold.service": { "name": "rngd-wake-threshold.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", 
"source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "running", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "active", "status": "enabled" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-resume@.service": { "name": "systemd-hibernate-resume@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", 
"status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-portabled.service": { "name": "systemd-portabled.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-quotacheck.service": { "name": "systemd-quotacheck.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-resolved.service": { "name": "systemd-resolved.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "masked" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, 
"systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "tcsd.service": { "name": "tcsd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "teamd@.service": { "name": "teamd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "timedatex.service": { "name": "timedatex.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "tuned.service": { "name": "tuned.service", "source": "systemd", "state": "running", "status": "enabled" }, "unbound-anchor.service": { "name": "unbound-anchor.service", "source": "systemd", "state": "stopped", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "vdo-start-by-dev@.service": { "name": "vdo-start-by-dev@.service", "source": "systemd", "state": "unknown", "status": "static" }, "vdo.service": { "name": "vdo.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "yppasswdd.service": { "name": "yppasswdd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ypserv.service": { "name": "ypserv.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ypxfrd.service": { "name": "ypxfrd.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] ***** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:53 Thursday 21 July 2022 19:28:05 +0000 (0:00:01.553) 0:05:02.142 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_cryptsetup_services": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:58 Thursday 21 July 2022 19:28:06 +0000 (0:00:00.061) 0:05:02.203 ********* TASK [fedora.linux_system_roles.storage : manage the pools and volumes to match the specified state] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:64 Thursday 21 July 2022 19:28:06 +0000 (0:00:00.023) 0:05:02.227 ********* changed: [/cache/centos-8.qcow2] => { "actions": [ { "action": "destroy format", "device": "/dev/mapper/vg1-lv2", "fs_type": "xfs" }, { "action": "destroy device", "device": "/dev/mapper/vg1-lv2", "fs_type": null } ], "changed": true, "crypts": [], "leaves": [ "/dev/sr0", "/dev/vda1", "/dev/sda", "/dev/sdb", "/dev/sdc", "/dev/mapper/vg1-lv1", "/dev/vdb", "/dev/vdc", "/dev/vdd" ], "mounts": [ { "fstype": "xfs", "path": "/opt/test2", 
"src": "/dev/mapper/vg1-lv2", "state": "absent" } ], "packages": [ "lvm2", "xfsprogs" ], "pools": [ { "disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "vg1", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/vg1-lv2", "_mount_id": "/dev/mapper/vg1-lv2", "_raw_device": "/dev/mapper/vg1-lv2", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test2", "name": "lv2", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "size": 4294967296, "state": "absent", "thin": true, "thin_pool_name": "tpool1", "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:78 Thursday 21 July 2022 19:28:08 +0000 (0:00:02.530) 0:05:04.758 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:90 Thursday 21 July 2022 19:28:08 +0000 (0:00:00.039) 0:05:04.798 ********* TASK [fedora.linux_system_roles.storage : show blivet_output] ****************** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:96 Thursday 21 July 2022 19:28:08 +0000 (0:00:00.024) 0:05:04.822 ********* ok: [/cache/centos-8.qcow2] => { "blivet_output": { "actions": [ { "action": "destroy format", "device": "/dev/mapper/vg1-lv2", "fs_type": "xfs" }, { "action": "destroy device", "device": "/dev/mapper/vg1-lv2", "fs_type": null } ], "changed": true, "crypts": [], "failed": false, "leaves": [ "/dev/sr0", "/dev/vda1", "/dev/sda", "/dev/sdb", "/dev/sdc", "/dev/mapper/vg1-lv1", "/dev/vdb", "/dev/vdc", "/dev/vdd" ], "mounts": [ { "fstype": "xfs", "path": "/opt/test2", "src": "/dev/mapper/vg1-lv2", "state": "absent" } ], "packages": [ "lvm2", "xfsprogs" ], "pools": [ { "disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "vg1", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/vg1-lv2", "_mount_id": "/dev/mapper/vg1-lv2", "_raw_device": "/dev/mapper/vg1-lv2", "cache_devices": [], "cache_mode": null, "cache_size": 0, 
"cached": false, "compression": null, "deduplication": null, "disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test2", "name": "lv2", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "size": 4294967296, "state": "absent", "thin": true, "thin_pool_name": "tpool1", "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } } TASK [fedora.linux_system_roles.storage : set the list of pools for test verification] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:101 Thursday 21 July 2022 19:28:08 +0000 (0:00:00.045) 0:05:04.867 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_pools_list": [ { "disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "vg1", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/vg1-lv2", "_mount_id": "/dev/mapper/vg1-lv2", "_raw_device": "/dev/mapper/vg1-lv2", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test2", "name": "lv2", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "size": 4294967296, "state": "absent", "thin": true, "thin_pool_name": "tpool1", "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ] }, "changed": false } TASK [fedora.linux_system_roles.storage : set the list of volumes for test verification] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:105 Thursday 21 July 2022 19:28:08 +0000 (0:00:00.040) 0:05:04.908 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : remove obsolete mounts] ************** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:121 Thursday 21 July 2022 19:28:08 +0000 (0:00:00.040) 0:05:04.949 ********* redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount changed: [/cache/centos-8.qcow2] => (item={'src': '/dev/mapper/vg1-lv2', 'path': '/opt/test2', 'state': 'absent', 'fstype': 'xfs'}) => { "ansible_loop_var": "mount_info", "backup_file": "", "boot": "yes", "changed": true, "dump": "0", "fstab": "/etc/fstab", 
"fstype": "xfs", "mount_info": { "fstype": "xfs", "path": "/opt/test2", "src": "/dev/mapper/vg1-lv2", "state": "absent" }, "name": "/opt/test2", "opts": "defaults", "passno": "0", "src": "/dev/mapper/vg1-lv2" } TASK [fedora.linux_system_roles.storage : tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:132 Thursday 21 July 2022 19:28:09 +0000 (0:00:00.412) 0:05:05.361 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.storage : set up new/current mounts] *********** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:137 Thursday 21 July 2022 19:28:09 +0000 (0:00:00.621) 0:05:05.982 ********* TASK [fedora.linux_system_roles.storage : tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:148 Thursday 21 July 2022 19:28:09 +0000 (0:00:00.044) 0:05:06.027 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.storage : retrieve facts for the /etc/crypttab file] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:156 Thursday 21 July 2022 19:28:10 +0000 (0:00:00.645) 0:05:06.672 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "stat": { "atime": 1658431449.4320083, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1643141385.117, "dev": 64513, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 135, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0600", "mtime": 1643141019.537, "nlink": 1, "path": "/etc/crypttab", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": "3147672035", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : manage /etc/crypttab to account for changes we just made] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:161 Thursday 21 July 2022 19:28:10 +0000 (0:00:00.428) 0:05:07.101 ********* TASK [fedora.linux_system_roles.storage : Update facts] ************************ task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:183 Thursday 21 July 2022 19:28:10 +0000 (0:00:00.026) 0:05:07.127 ********* ok: [/cache/centos-8.qcow2] META: role_complete for /cache/centos-8.qcow2 TASK [include_tasks] *********************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/tests_create_thinp_then_remove.yml:106 Thursday 21 July 2022 19:28:11 +0000 (0:00:01.041) 0:05:08.168 ********* included: /tmp/tmp_0pjp8ed/tests/storage/verify-role-results.yml for /cache/centos-8.qcow2 TASK [Print out pool information] ********************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-role-results.yml:1 Thursday 21 July 2022 19:28:12 +0000 (0:00:00.054) 0:05:08.222 ********* ok: [/cache/centos-8.qcow2] => { 
"_storage_pools_list": [ { "disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "vg1", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/vg1-lv2", "_mount_id": "/dev/mapper/vg1-lv2", "_raw_device": "/dev/mapper/vg1-lv2", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test2", "name": "lv2", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "size": 4294967296, "state": "absent", "thin": true, "thin_pool_name": "tpool1", "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ] } TASK [Print out volume information] ******************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-role-results.yml:6 Thursday 21 July 2022 19:28:12 +0000 (0:00:00.054) 0:05:08.277 ********* skipping: [/cache/centos-8.qcow2] => {} TASK [Collect info about the volumes.] ***************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-role-results.yml:14 Thursday 21 July 2022 19:28:12 +0000 (0:00:00.039) 0:05:08.316 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "info": { "/dev/mapper/vg1-lv1": { "fstype": "xfs", "label": "", "name": "/dev/mapper/vg1-lv1", "size": "3G", "type": "lvm", "uuid": "1ea60a5c-f51b-4cc6-9e7d-084fe537536f" }, "/dev/mapper/vg1-tpool1": { "fstype": "", "label": "", "name": "/dev/mapper/vg1-tpool1", "size": "10G", "type": "lvm", "uuid": "" }, "/dev/mapper/vg1-tpool1-tpool": { "fstype": "", "label": "", "name": "/dev/mapper/vg1-tpool1-tpool", "size": "10G", "type": "lvm", "uuid": "" }, "/dev/mapper/vg1-tpool1_tdata": { "fstype": "", "label": "", "name": "/dev/mapper/vg1-tpool1_tdata", "size": "10G", "type": "lvm", "uuid": "" }, "/dev/mapper/vg1-tpool1_tmeta": { "fstype": "", "label": "", "name": "/dev/mapper/vg1-tpool1_tmeta", "size": "12M", "type": "lvm", "uuid": "" }, "/dev/nvme0n1": { "fstype": "", "label": "", "name": "/dev/nvme0n1", "size": "10G", "type": "disk", "uuid": "" }, "/dev/nvme0n1p1": { "fstype": "LVM2_member", "label": "", "name": "/dev/nvme0n1p1", "size": "10G", "type": "partition", "uuid": "ken164-erkD-vf0C-nvLO-nhmh-YBzF-bzv8kZ" }, "/dev/nvme1n1": { "fstype": "", "label": "", "name": "/dev/nvme1n1", "size": "10G", "type": "disk", "uuid": "" }, "/dev/nvme1n1p1": { "fstype": "LVM2_member", "label": "", "name": "/dev/nvme1n1p1", "size": "10G", "type": "partition", "uuid": "eBnMVU-t1UJ-ttot-SKPn-js6i-mJjN-e9h7dx" }, "/dev/nvme2n1": { "fstype": "", "label": "", "name": "/dev/nvme2n1", "size": "10G", "type": "disk", "uuid": "" }, "/dev/nvme2n1p1": { "fstype": "LVM2_member", "label": "", "name": "/dev/nvme2n1p1", "size": "10G", "type": "partition", "uuid": "xZT8wS-OKti-fe7H-wOMe-VpTY-S1Yx-RZZXxW" }, "/dev/sda": { 
"fstype": "", "label": "", "name": "/dev/sda", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdb": { "fstype": "", "label": "", "name": "/dev/sdb", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdc": { "fstype": "", "label": "", "name": "/dev/sdc", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sr0": { "fstype": "iso9660", "label": "cidata", "name": "/dev/sr0", "size": "364K", "type": "rom", "uuid": "2022-07-21-19-22-43-00" }, "/dev/vda": { "fstype": "", "label": "", "name": "/dev/vda", "size": "10G", "type": "disk", "uuid": "" }, "/dev/vda1": { "fstype": "xfs", "label": "", "name": "/dev/vda1", "size": "10G", "type": "partition", "uuid": "395b9844-e404-4857-afbb-c6edccaf72f3" }, "/dev/vdb": { "fstype": "", "label": "", "name": "/dev/vdb", "size": "10G", "type": "disk", "uuid": "" }, "/dev/vdc": { "fstype": "", "label": "", "name": "/dev/vdc", "size": "10G", "type": "disk", "uuid": "" }, "/dev/vdd": { "fstype": "", "label": "", "name": "/dev/vdd", "size": "10G", "type": "disk", "uuid": "" } } } TASK [Read the /etc/fstab file for volume existence] *************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-role-results.yml:19 Thursday 21 July 2022 19:28:12 +0000 (0:00:00.382) 0:05:08.699 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "cmd": [ "cat", "/etc/fstab" ], "delta": "0:00:00.002671", "end": "2022-07-21 19:28:12.524946", "rc": 0, "start": "2022-07-21 19:28:12.522275" } STDOUT: # # /etc/fstab # Created by anaconda on Tue Jan 25 20:03:39 2022 # # Accessible filesystems, by reference, are maintained under '/dev/disk/'. # See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info. # # After editing this file, run 'systemctl daemon-reload' to update systemd # units generated from this file. 
# UUID=395b9844-e404-4857-afbb-c6edccaf72f3 / xfs defaults 0 0 TASK [Read the /etc/crypttab file] ********************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/verify-role-results.yml:24 Thursday 21 July 2022 19:28:12 +0000 (0:00:00.364) 0:05:09.064 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "cmd": [ "cat", "/etc/crypttab" ], "delta": "0:00:00.002601", "end": "2022-07-21 19:28:12.892883", "failed_when_result": false, "rc": 0, "start": "2022-07-21 19:28:12.890282" } TASK [Verify the volumes listed in storage_pools were correctly managed] ******* task path: /tmp/tmp_0pjp8ed/tests/storage/verify-role-results.yml:33 Thursday 21 July 2022 19:28:13 +0000 (0:00:00.372) 0:05:09.436 ********* included: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool.yml for /cache/centos-8.qcow2 => (item={'disks': ['nvme0n1', 'nvme1n1', 'nvme2n1'], 'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'name': 'vg1', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'state': 'present', 'type': 'lvm', 'volumes': [{'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test2', 'name': 'lv2', 'raid_level': None, 'size': 4294967296, 'state': 'absent', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'thin_pool_name': 'tpool1', 'thin_pool_size': None, 'thin': True, 'vdo_pool_size': None, 'disks': ['nvme0n1', 'nvme1n1', 'nvme2n1'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/vg1-lv2', '_raw_device': '/dev/mapper/vg1-lv2', '_mount_id': '/dev/mapper/vg1-lv2'}], 'raid_chunk_size': None}) TASK [Set _storage_pool_tests] ************************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool.yml:5 Thursday 21 July 2022 19:28:13 +0000 (0:00:00.057) 0:05:09.494 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_pool_tests": [ "members", "volumes" ] }, "changed": false } TASK [include_tasks] *********************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool.yml:18 Thursday 21 July 2022 19:28:13 +0000 (0:00:00.033) 0:05:09.528 ********* included: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml for /cache/centos-8.qcow2 => (item=members) included: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-volumes.yml for /cache/centos-8.qcow2 => (item=volumes) TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml:1 Thursday 21 July 2022 19:28:13 +0000 (0:00:00.046) 0:05:09.574 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_expected_pv_count": "3", "_storage_test_pool_pvs_lvm": [ "/dev/nvme0n1p1", "/dev/nvme1n1p1", "/dev/nvme2n1p1" ] }, "changed": false } TASK [Get the canonical device path for each member device] ******************** task path: 
/tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml:6 Thursday 21 July 2022 19:28:13 +0000 (0:00:00.053) 0:05:09.628 ********* ok: [/cache/centos-8.qcow2] => (item=/dev/nvme0n1p1) => { "ansible_loop_var": "pv", "changed": false, "device": "/dev/nvme0n1p1", "pv": "/dev/nvme0n1p1" } ok: [/cache/centos-8.qcow2] => (item=/dev/nvme1n1p1) => { "ansible_loop_var": "pv", "changed": false, "device": "/dev/nvme1n1p1", "pv": "/dev/nvme1n1p1" } ok: [/cache/centos-8.qcow2] => (item=/dev/nvme2n1p1) => { "ansible_loop_var": "pv", "changed": false, "device": "/dev/nvme2n1p1", "pv": "/dev/nvme2n1p1" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml:15 Thursday 21 July 2022 19:28:14 +0000 (0:00:01.094) 0:05:10.723 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "__pvs_lvm_len": "3" }, "changed": false } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml:19 Thursday 21 July 2022 19:28:14 +0000 (0:00:00.079) 0:05:10.802 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_pool_pvs": [ "/dev/nvme0n1p1", "/dev/nvme1n1p1", "/dev/nvme2n1p1" ] }, "changed": false } TASK [Verify PV count] ********************************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml:23 Thursday 21 July 2022 19:28:14 +0000 (0:00:00.082) 0:05:10.884 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml:29 Thursday 21 July 2022 19:28:14 +0000 (0:00:00.083) 0:05:10.967 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_expected_pv_type": "disk" }, "changed": false } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml:33 Thursday 21 July 2022 19:28:14 +0000 (0:00:00.036) 0:05:11.004 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_expected_pv_type": "partition" }, "changed": false } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml:37 Thursday 21 July 2022 19:28:14 +0000 (0:00:00.054) 0:05:11.059 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check the type of each PV] *********************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml:41 Thursday 21 July 2022 19:28:14 +0000 (0:00:00.027) 0:05:11.086 ********* ok: [/cache/centos-8.qcow2] => (item=/dev/nvme0n1p1) => { "ansible_loop_var": "pv", "changed": false, "pv": "/dev/nvme0n1p1" } MSG: All assertions passed ok: [/cache/centos-8.qcow2] => (item=/dev/nvme1n1p1) => { "ansible_loop_var": "pv", "changed": false, "pv": "/dev/nvme1n1p1" } MSG: All assertions passed ok: [/cache/centos-8.qcow2] => (item=/dev/nvme2n1p1) => { "ansible_loop_var": "pv", "changed": false, "pv": "/dev/nvme2n1p1" } MSG: All assertions passed TASK [Check MD RAID] *********************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml:50 Thursday 21 July 2022 19:28:14 +0000 (0:00:00.075) 0:05:11.161 
********* included: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-md.yml for /cache/centos-8.qcow2 TASK [get information about RAID] ********************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-md.yml:6 Thursday 21 July 2022 19:28:15 +0000 (0:00:00.042) 0:05:11.204 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-md.yml:12 Thursday 21 July 2022 19:28:15 +0000 (0:00:00.023) 0:05:11.228 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-md.yml:16 Thursday 21 July 2022 19:28:15 +0000 (0:00:00.023) 0:05:11.251 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-md.yml:20 Thursday 21 July 2022 19:28:15 +0000 (0:00:00.026) 0:05:11.278 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [check RAID active devices count] ***************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-md.yml:24 Thursday 21 July 2022 19:28:15 +0000 (0:00:00.027) 0:05:11.305 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [check RAID spare devices count] ****************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-md.yml:30 Thursday 21 July 2022 19:28:15 +0000 (0:00:00.026) 0:05:11.331 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [check RAID metadata version] ********************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-md.yml:36 Thursday 21 July 2022 19:28:15 +0000 (0:00:00.026) 0:05:11.358 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-md.yml:44 Thursday 21 July 2022 19:28:15 +0000 (0:00:00.023) 0:05:11.382 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_md_active_devices_re": null, "storage_test_md_metadata_version_re": null, "storage_test_md_spare_devices_re": null }, "changed": false } TASK [Check LVM RAID] ********************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml:53 Thursday 21 July 2022 19:28:15 +0000 (0:00:00.036) 0:05:11.419 ********* included: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-members-lvmraid.yml for /cache/centos-8.qcow2 TASK [Validate pool member LVM RAID settings] ********************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-members-lvmraid.yml:1 Thursday 21 July 2022 19:28:15 +0000 (0:00:00.043) 0:05:11.462 ********* included: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-lvmraid.yml for /cache/centos-8.qcow2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 
'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test2', 'name': 'lv2', 'raid_level': None, 'size': 4294967296, 'state': 'absent', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'thin_pool_name': 'tpool1', 'thin_pool_size': None, 'thin': True, 'vdo_pool_size': None, 'disks': ['nvme0n1', 'nvme1n1', 'nvme2n1'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/vg1-lv2', '_raw_device': '/dev/mapper/vg1-lv2', '_mount_id': '/dev/mapper/vg1-lv2'}) TASK [Get information about LVM RAID] ****************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-lvmraid.yml:3 Thursday 21 July 2022 19:28:15 +0000 (0:00:00.046) 0:05:11.508 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check that volume is LVM RAID] ******************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-lvmraid.yml:8 Thursday 21 July 2022 19:28:15 +0000 (0:00:00.028) 0:05:11.537 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-lvmraid.yml:12 Thursday 21 July 2022 19:28:15 +0000 (0:00:00.028) 0:05:11.566 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check Thin Pools] ******************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml:56 Thursday 21 July 2022 19:28:15 +0000 (0:00:00.031) 0:05:11.597 ********* included: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-members-thin.yml for /cache/centos-8.qcow2 TASK [Validate pool member thinpool settings] ********************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-members-thin.yml:1 Thursday 21 July 2022 19:28:15 +0000 (0:00:00.047) 0:05:11.645 ********* included: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-thin.yml for /cache/centos-8.qcow2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test2', 'name': 'lv2', 'raid_level': None, 'size': 4294967296, 'state': 'absent', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'thin_pool_name': 'tpool1', 'thin_pool_size': None, 'thin': True, 'vdo_pool_size': None, 'disks': ['nvme0n1', 'nvme1n1', 'nvme2n1'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/vg1-lv2', '_raw_device': '/dev/mapper/vg1-lv2', '_mount_id': '/dev/mapper/vg1-lv2'}) TASK [Get information about thinpool] 
****************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-thin.yml:3 Thursday 21 July 2022 19:28:15 +0000 (0:00:00.044) 0:05:11.689 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check that volume is in correct thinpool (when thinp name is provided)] *** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-thin.yml:8 Thursday 21 July 2022 19:28:15 +0000 (0:00:00.032) 0:05:11.722 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check that volume is in thinpool (when thinp name is not provided)] ****** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-thin.yml:13 Thursday 21 July 2022 19:28:15 +0000 (0:00:00.029) 0:05:11.752 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-thin.yml:17 Thursday 21 July 2022 19:28:15 +0000 (0:00:00.027) 0:05:11.780 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check member encryption] ************************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml:59 Thursday 21 July 2022 19:28:15 +0000 (0:00:00.027) 0:05:11.807 ********* included: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-members-encryption.yml for /cache/centos-8.qcow2 TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-members-encryption.yml:4 Thursday 21 July 2022 19:28:15 +0000 (0:00:00.048) 0:05:11.856 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Validate pool member LUKS settings] ************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-members-encryption.yml:8 Thursday 21 July 2022 19:28:15 +0000 (0:00:00.049) 0:05:11.906 ********* skipping: [/cache/centos-8.qcow2] => (item=/dev/nvme0n1p1) => { "_storage_test_pool_member_path": "/dev/nvme0n1p1", "ansible_loop_var": "_storage_test_pool_member_path", "changed": false, "skip_reason": "Conditional result was False" } skipping: [/cache/centos-8.qcow2] => (item=/dev/nvme1n1p1) => { "_storage_test_pool_member_path": "/dev/nvme1n1p1", "ansible_loop_var": "_storage_test_pool_member_path", "changed": false, "skip_reason": "Conditional result was False" } skipping: [/cache/centos-8.qcow2] => (item=/dev/nvme2n1p1) => { "_storage_test_pool_member_path": "/dev/nvme2n1p1", "ansible_loop_var": "_storage_test_pool_member_path", "changed": false, "skip_reason": "Conditional result was False" } TASK [Validate pool member crypttab entries] *********************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-members-encryption.yml:15 Thursday 21 July 2022 19:28:15 +0000 (0:00:00.031) 0:05:11.937 ********* included: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml for /cache/centos-8.qcow2 => (item=/dev/nvme0n1p1) included: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml for /cache/centos-8.qcow2 => (item=/dev/nvme1n1p1) included: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml for /cache/centos-8.qcow2 => 
(item=/dev/nvme2n1p1) TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml:1 Thursday 21 July 2022 19:28:15 +0000 (0:00:00.051) 0:05:11.988 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_crypttab_entries": [] }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml:4 Thursday 21 July 2022 19:28:15 +0000 (0:00:00.050) 0:05:12.039 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml:9 Thursday 21 July 2022 19:28:15 +0000 (0:00:00.085) 0:05:12.124 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml:15 Thursday 21 July 2022 19:28:15 +0000 (0:00:00.036) 0:05:12.161 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml:21 Thursday 21 July 2022 19:28:16 +0000 (0:00:00.036) 0:05:12.197 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml:27 Thursday 21 July 2022 19:28:16 +0000 (0:00:00.072) 0:05:12.269 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_crypttab_entries": null }, "changed": false } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml:1 Thursday 21 July 2022 19:28:16 +0000 (0:00:00.037) 0:05:12.307 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_crypttab_entries": [] }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml:4 Thursday 21 July 2022 19:28:16 +0000 (0:00:00.051) 0:05:12.359 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml:9 Thursday 21 July 2022 19:28:16 +0000 (0:00:00.053) 0:05:12.412 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml:15 Thursday 21 July 2022 19:28:16 +0000 (0:00:00.036) 0:05:12.449 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml:21 Thursday 21 July 2022 19:28:16 +0000 (0:00:00.038) 
0:05:12.487 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml:27 Thursday 21 July 2022 19:28:16 +0000 (0:00:00.039) 0:05:12.527 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_crypttab_entries": null }, "changed": false } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml:1 Thursday 21 July 2022 19:28:16 +0000 (0:00:00.040) 0:05:12.567 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_crypttab_entries": [] }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml:4 Thursday 21 July 2022 19:28:16 +0000 (0:00:00.051) 0:05:12.619 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml:9 Thursday 21 July 2022 19:28:16 +0000 (0:00:00.051) 0:05:12.671 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml:15 Thursday 21 July 2022 19:28:16 +0000 (0:00:00.041) 0:05:12.712 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml:21 Thursday 21 July 2022 19:28:16 +0000 (0:00:00.038) 0:05:12.750 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-crypttab.yml:27 Thursday 21 July 2022 19:28:16 +0000 (0:00:00.036) 0:05:12.787 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_crypttab_entries": null }, "changed": false } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-members-encryption.yml:22 Thursday 21 July 2022 19:28:16 +0000 (0:00:00.038) 0:05:12.825 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_crypttab_key_file": null }, "changed": false } TASK [Check VDO] *************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml:62 Thursday 21 July 2022 19:28:16 +0000 (0:00:00.038) 0:05:12.864 ********* included: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-members-vdo.yml for /cache/centos-8.qcow2 TASK [Validate pool member VDO settings] *************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-members-vdo.yml:1 Thursday 21 July 2022 19:28:16 +0000 (0:00:00.049) 0:05:12.913 ********* included: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-vdo.yml for /cache/centos-8.qcow2 => (item={'encryption': False, 'encryption_cipher': 
None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test2', 'name': 'lv2', 'raid_level': None, 'size': 4294967296, 'state': 'absent', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'thin_pool_name': 'tpool1', 'thin_pool_size': None, 'thin': True, 'vdo_pool_size': None, 'disks': ['nvme0n1', 'nvme1n1', 'nvme2n1'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/vg1-lv2', '_raw_device': '/dev/mapper/vg1-lv2', '_mount_id': '/dev/mapper/vg1-lv2'}) TASK [get information about VDO deduplication] ********************************* task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-vdo.yml:3 Thursday 21 July 2022 19:28:16 +0000 (0:00:00.048) 0:05:12.961 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-vdo.yml:8 Thursday 21 July 2022 19:28:16 +0000 (0:00:00.025) 0:05:12.987 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [assert] ****************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-vdo.yml:11 Thursday 21 July 2022 19:28:16 +0000 (0:00:00.026) 0:05:13.014 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [assert] ****************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-vdo.yml:16 Thursday 21 July 2022 19:28:16 +0000 (0:00:00.025) 0:05:13.039 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-vdo.yml:21 Thursday 21 July 2022 19:28:16 +0000 (0:00:00.025) 0:05:13.065 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [assert] ****************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-vdo.yml:24 Thursday 21 July 2022 19:28:16 +0000 (0:00:00.023) 0:05:13.088 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [assert] ****************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-vdo.yml:29 Thursday 21 July 2022 19:28:16 +0000 (0:00:00.024) 0:05:13.113 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-vdo.yml:39 Thursday 21 July 2022 19:28:16 +0000 (0:00:00.026) 0:05:13.139 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_vdo_status": null }, 
"changed": false } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml:65 Thursday 21 July 2022 19:28:16 +0000 (0:00:00.036) 0:05:13.176 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "__pvs_lvm_len": null, "_storage_test_expected_pv_count": null, "_storage_test_expected_pv_type": null, "_storage_test_pool_pvs": [], "_storage_test_pool_pvs_lvm": [] }, "changed": false } TASK [verify the volumes] ****************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-volumes.yml:3 Thursday 21 July 2022 19:28:17 +0000 (0:00:00.036) 0:05:13.212 ********* included: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume.yml for /cache/centos-8.qcow2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test2', 'name': 'lv2', 'raid_level': None, 'size': 4294967296, 'state': 'absent', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'thin_pool_name': 'tpool1', 'thin_pool_size': None, 'thin': True, 'vdo_pool_size': None, 'disks': ['nvme0n1', 'nvme1n1', 'nvme2n1'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/vg1-lv2', '_raw_device': '/dev/mapper/vg1-lv2', '_mount_id': '/dev/mapper/vg1-lv2'}) TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume.yml:2 Thursday 21 July 2022 19:28:17 +0000 (0:00:00.043) 0:05:13.256 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_volume_present": false, "_storage_volume_tests": [ "mount", "fstab", "fs", "device", "encryption", "md", "size", "cache" ] }, "changed": false } TASK [include_tasks] *********************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume.yml:10 Thursday 21 July 2022 19:28:17 +0000 (0:00:00.050) 0:05:13.307 ********* included: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-mount.yml for /cache/centos-8.qcow2 => (item=mount) included: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-fstab.yml for /cache/centos-8.qcow2 => (item=fstab) included: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-fs.yml for /cache/centos-8.qcow2 => (item=fs) included: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-device.yml for /cache/centos-8.qcow2 => (item=device) included: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml for /cache/centos-8.qcow2 => (item=encryption) included: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-md.yml for /cache/centos-8.qcow2 => (item=md) included: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-size.yml for /cache/centos-8.qcow2 => (item=size) included: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-cache.yml for /cache/centos-8.qcow2 => (item=cache) TASK [Get expected mount device based on device type] ************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-mount.yml:6 Thursday 21 July 2022 19:28:17 +0000 (0:00:00.078) 0:05:13.386 ********* ok: 
[/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_device_path": "/dev/mapper/vg1-lv2" }, "changed": false } TASK [Set some facts] ********************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-mount.yml:10 Thursday 21 July 2022 19:28:17 +0000 (0:00:00.042) 0:05:13.428 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_mount_device_matches": [], "storage_test_mount_expected_match_count": "0", "storage_test_mount_point_matches": [], "storage_test_swap_expected_matches": "0" }, "changed": false } TASK [Verify the current mount state by device] ******************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-mount.yml:20 Thursday 21 July 2022 19:28:17 +0000 (0:00:00.057) 0:05:13.485 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the current mount state by mount point] *************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-mount.yml:29 Thursday 21 July 2022 19:28:17 +0000 (0:00:00.026) 0:05:13.512 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Verify the mount fs type] ************************************************ task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-mount.yml:37 Thursday 21 July 2022 19:28:17 +0000 (0:00:00.094) 0:05:13.607 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [command] ***************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-mount.yml:46 Thursday 21 July 2022 19:28:17 +0000 (0:00:00.037) 0:05:13.644 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Gather swap info] ******************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-mount.yml:50 Thursday 21 July 2022 19:28:17 +0000 (0:00:00.058) 0:05:13.703 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify swap status] ****************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-mount.yml:55 Thursday 21 July 2022 19:28:17 +0000 (0:00:00.024) 0:05:13.727 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Unset facts] ************************************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-mount.yml:65 Thursday 21 July 2022 19:28:17 +0000 (0:00:00.022) 0:05:13.750 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_mount_device_matches": null, "storage_test_mount_expected_match_count": null, "storage_test_mount_point_matches": null, "storage_test_swap_expected_matches": null, "storage_test_swaps": null, "storage_test_sys_node": null }, "changed": false } TASK [Set some variables for fstab checking] *********************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-fstab.yml:2 Thursday 21 July 2022 19:28:17 +0000 (0:00:00.038) 0:05:13.789 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": "0", "storage_test_fstab_expected_mount_options_matches": "0", "storage_test_fstab_expected_mount_point_matches": "0", 
"storage_test_fstab_id_matches": [], "storage_test_fstab_mount_options_matches": [], "storage_test_fstab_mount_point_matches": [] }, "changed": false } TASK [Verify that the device identifier appears in /etc/fstab] ***************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-fstab.yml:12 Thursday 21 July 2022 19:28:17 +0000 (0:00:00.063) 0:05:13.852 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the fstab mount point] ******************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-fstab.yml:19 Thursday 21 July 2022 19:28:17 +0000 (0:00:00.023) 0:05:13.876 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Verify mount_options] **************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-fstab.yml:25 Thursday 21 July 2022 19:28:17 +0000 (0:00:00.061) 0:05:13.937 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clean up variables] ****************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-fstab.yml:34 Thursday 21 July 2022 19:28:17 +0000 (0:00:00.043) 0:05:13.981 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": null, "storage_test_fstab_expected_mount_options_matches": null, "storage_test_fstab_expected_mount_point_matches": null, "storage_test_fstab_id_matches": null, "storage_test_fstab_mount_options_matches": null, "storage_test_fstab_mount_point_matches": null }, "changed": false } TASK [Verify fs type] ********************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-fs.yml:4 Thursday 21 July 2022 19:28:17 +0000 (0:00:00.041) 0:05:14.022 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify fs label] ********************************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-fs.yml:10 Thursday 21 July 2022 19:28:17 +0000 (0:00:00.026) 0:05:14.048 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [See whether the device node is present] ********************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-device.yml:4 Thursday 21 July 2022 19:28:17 +0000 (0:00:00.026) 0:05:14.075 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "stat": { "exists": false } } TASK [Verify the presence/absence of the device node] ************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-device.yml:10 Thursday 21 July 2022 19:28:18 +0000 (0:00:00.386) 0:05:14.461 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Make sure we got info about this volume] ********************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-device.yml:15 Thursday 21 July 2022 19:28:18 +0000 (0:00:00.041) 0:05:14.502 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [(1/2) Process volume type (set initial value)] *************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-device.yml:21 Thursday 21 July 2022 19:28:18 +0000 (0:00:00.027) 
0:05:14.529 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "st_volume_type": "lvm" }, "changed": false } TASK [(2/2) Process volume type (get RAID value)] ****************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-device.yml:25 Thursday 21 July 2022 19:28:18 +0000 (0:00:00.041) 0:05:14.571 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the volume's device type] ***************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-device.yml:30 Thursday 21 July 2022 19:28:18 +0000 (0:00:00.028) 0:05:14.599 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Stat the LUKS device, if encrypted] ************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:3 Thursday 21 July 2022 19:28:18 +0000 (0:00:00.025) 0:05:14.624 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Ensure cryptsetup is present] ******************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:10 Thursday 21 July 2022 19:28:18 +0000 (0:00:00.025) 0:05:14.650 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [Collect LUKS info for this volume] *************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:15 Thursday 21 July 2022 19:28:20 +0000 (0:00:01.883) 0:05:16.533 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the presence/absence of the LUKS device node] ********************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:21 Thursday 21 July 2022 19:28:20 +0000 (0:00:00.027) 0:05:16.560 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that the raw device is the same as the device if not encrypted] *** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:27 Thursday 21 July 2022 19:28:20 +0000 (0:00:00.026) 0:05:16.586 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Make sure we got info about the LUKS volume if encrypted] **************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:33 Thursday 21 July 2022 19:28:20 +0000 (0:00:00.024) 0:05:16.611 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the LUKS volume's device type if encrypted] *********************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:39 Thursday 21 July 2022 19:28:20 +0000 (0:00:00.025) 0:05:16.637 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS version] ****************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:44 Thursday 21 July 2022 19:28:20 +0000 (0:00:00.027) 0:05:16.664 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS key size] 
***************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:50 Thursday 21 July 2022 19:28:20 +0000 (0:00:00.027) 0:05:16.691 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS cipher] ******************************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:56 Thursday 21 July 2022 19:28:20 +0000 (0:00:00.026) 0:05:16.718 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:62 Thursday 21 July 2022 19:28:20 +0000 (0:00:00.027) 0:05:16.745 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_crypttab_entries": [], "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:67 Thursday 21 July 2022 19:28:20 +0000 (0:00:00.051) 0:05:16.797 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:72 Thursday 21 July 2022 19:28:20 +0000 (0:00:00.098) 0:05:16.895 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:78 Thursday 21 July 2022 19:28:20 +0000 (0:00:00.037) 0:05:16.933 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:84 Thursday 21 July 2022 19:28:20 +0000 (0:00:00.035) 0:05:16.969 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:90 Thursday 21 July 2022 19:28:20 +0000 (0:00:00.035) 0:05:17.005 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_expected_crypttab_entries": null, "_storage_test_expected_crypttab_key_file": null }, "changed": false } TASK [get information about RAID] ********************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-md.yml:7 Thursday 21 July 2022 19:28:20 +0000 (0:00:00.078) 0:05:17.083 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-md.yml:13 Thursday 21 July 2022 19:28:20 +0000 (0:00:00.042) 0:05:17.126 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] 
**************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-md.yml:17 Thursday 21 July 2022 19:28:20 +0000 (0:00:00.042) 0:05:17.169 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-md.yml:21 Thursday 21 July 2022 19:28:21 +0000 (0:00:00.039) 0:05:17.209 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [check RAID active devices count] ***************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-md.yml:25 Thursday 21 July 2022 19:28:21 +0000 (0:00:00.038) 0:05:17.247 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [check RAID spare devices count] ****************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-md.yml:31 Thursday 21 July 2022 19:28:21 +0000 (0:00:00.040) 0:05:17.288 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [check RAID metadata version] ********************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-md.yml:37 Thursday 21 July 2022 19:28:21 +0000 (0:00:00.036) 0:05:17.324 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [parse the actual size of the volume] ************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-size.yml:3 Thursday 21 July 2022 19:28:21 +0000 (0:00:00.039) 0:05:17.364 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [parse the requested size of the volume] ********************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-size.yml:9 Thursday 21 July 2022 19:28:21 +0000 (0:00:00.024) 0:05:17.388 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Establish base value for expected size] ********************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-size.yml:15 Thursday 21 July 2022 19:28:21 +0000 (0:00:00.038) 0:05:17.427 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [debug] ******************************************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-size.yml:20 Thursday 21 July 2022 19:28:21 +0000 (0:00:00.040) 0:05:17.467 ********* ok: [/cache/centos-8.qcow2] => { "storage_test_expected_size": "4294967296" } TASK [debug] ******************************************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-size.yml:25 Thursday 21 July 2022 19:28:21 +0000 (0:00:00.096) 0:05:17.564 ********* skipping: [/cache/centos-8.qcow2] => {} TASK [debug] ******************************************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-size.yml:28 Thursday 21 July 2022 19:28:21 +0000 (0:00:00.077) 0:05:17.641 ********* skipping: [/cache/centos-8.qcow2] => {} TASK [Get the size of parent/pool device] ************************************** task 
path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-size.yml:31 Thursday 21 July 2022 19:28:21 +0000 (0:00:00.037) 0:05:17.679 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [debug] ******************************************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-size.yml:36 Thursday 21 July 2022 19:28:21 +0000 (0:00:00.035) 0:05:17.715 ********* skipping: [/cache/centos-8.qcow2] => {} TASK [Calculate the expected size based on pool size and percentage value] ***** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-size.yml:39 Thursday 21 July 2022 19:28:21 +0000 (0:00:00.034) 0:05:17.749 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [debug] ******************************************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-size.yml:44 Thursday 21 July 2022 19:28:21 +0000 (0:00:00.037) 0:05:17.787 ********* ok: [/cache/centos-8.qcow2] => { "storage_test_actual_size": { "changed": false, "skip_reason": "Conditional result was False", "skipped": true } } TASK [debug] ******************************************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-size.yml:47 Thursday 21 July 2022 19:28:21 +0000 (0:00:00.036) 0:05:17.823 ********* ok: [/cache/centos-8.qcow2] => { "storage_test_expected_size": "4294967296" } TASK [assert] ****************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-size.yml:50 Thursday 21 July 2022 19:28:21 +0000 (0:00:00.038) 0:05:17.861 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Get information about the LV] ******************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-cache.yml:6 Thursday 21 July 2022 19:28:21 +0000 (0:00:00.025) 0:05:17.887 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-cache.yml:14 Thursday 21 July 2022 19:28:21 +0000 (0:00:00.028) 0:05:17.915 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [check segment type] ****************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-cache.yml:17 Thursday 21 July 2022 19:28:21 +0000 (0:00:00.026) 0:05:17.941 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-cache.yml:22 Thursday 21 July 2022 19:28:21 +0000 (0:00:00.026) 0:05:17.967 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [parse the requested cache size] ****************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-cache.yml:26 Thursday 21 July 2022 19:28:21 +0000 (0:00:00.025) 0:05:17.993 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] 
**************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-cache.yml:32 Thursday 21 July 2022 19:28:21 +0000 (0:00:00.025) 0:05:18.018 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check cache size] ******************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-cache.yml:36 Thursday 21 July 2022 19:28:21 +0000 (0:00:00.025) 0:05:18.044 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clean up facts] ********************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume.yml:16 Thursday 21 July 2022 19:28:21 +0000 (0:00:00.026) 0:05:18.070 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_volume_present": null }, "changed": false } TASK [Verify the volumes with no pool were correctly managed] ****************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-role-results.yml:43 Thursday 21 July 2022 19:28:21 +0000 (0:00:00.038) 0:05:18.109 ********* TASK [Clean up variable namespace] ********************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/verify-role-results.yml:53 Thursday 21 July 2022 19:28:21 +0000 (0:00:00.026) 0:05:18.135 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_blkinfo": null, "storage_test_crypttab": null, "storage_test_fstab": null }, "changed": false } TASK [Cleanup] ***************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/tests_create_thinp_then_remove.yml:108 Thursday 21 July 2022 19:28:21 +0000 (0:00:00.040) 0:05:18.175 ********* TASK [fedora.linux_system_roles.storage : set platform/version specific variables] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2 Thursday 21 July 2022 19:28:22 +0000 (0:00:00.083) 0:05:18.258 ********* included: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for /cache/centos-8.qcow2 TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2 Thursday 21 July 2022 19:28:22 +0000 (0:00:00.037) 0:05:18.296 ********* ok: [/cache/centos-8.qcow2] TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7 Thursday 21 July 2022 19:28:22 +0000 (0:00:00.558) 0:05:18.855 ********* skipping: [/cache/centos-8.qcow2] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [/cache/centos-8.qcow2] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [/cache/centos-8.qcow2] => (item=CentOS_8.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "vdo", "kmod-kvdo", "xfsprogs" ] }, "ansible_included_var_files": [ "/tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_8.yml" ], "ansible_loop_var": "item", 
"changed": false, "item": "CentOS_8.yml" } ok: [/cache/centos-8.qcow2] => (item=CentOS_8.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "vdo", "kmod-kvdo", "xfsprogs" ] }, "ansible_included_var_files": [ "/tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } TASK [fedora.linux_system_roles.storage : define an empty list of pools to be used in testing] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5 Thursday 21 July 2022 19:28:22 +0000 (0:00:00.077) 0:05:18.932 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : define an empty list of volumes to be used in testing] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9 Thursday 21 July 2022 19:28:22 +0000 (0:00:00.035) 0:05:18.968 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : include the appropriate provider tasks] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13 Thursday 21 July 2022 19:28:22 +0000 (0:00:00.041) 0:05:19.009 ********* redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount included: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for /cache/centos-8.qcow2 TASK [fedora.linux_system_roles.storage : get a list of rpm packages installed on host machine] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2 Thursday 21 July 2022 19:28:22 +0000 (0:00:00.060) 0:05:19.070 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : make sure blivet is available] ******* task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:7 Thursday 21 July 2022 19:28:22 +0000 (0:00:00.022) 0:05:19.093 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [fedora.linux_system_roles.storage : show storage_pools] ****************** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:13 Thursday 21 July 2022 19:28:24 +0000 (0:00:01.903) 0:05:20.996 ********* ok: [/cache/centos-8.qcow2] => { "storage_pools": [ { "disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ], "name": "vg1", "state": "absent", "type": "lvm", "volumes": [ { "mount_point": "/opt/test1", "name": "lv1", "size": "3g", "thin": true, "thin_pool_name": "tpool1", "thin_pool_size": "10g" } ] } ] } TASK [fedora.linux_system_roles.storage : show storage_volumes] **************** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:18 Thursday 21 July 2022 19:28:24 +0000 (0:00:00.039) 0:05:21.036 ********* ok: [/cache/centos-8.qcow2] => { "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined" } TASK [fedora.linux_system_roles.storage : get 
required packages] *************** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:23 Thursday 21 July 2022 19:28:24 +0000 (0:00:00.035) 0:05:21.071 ********* ok: [/cache/centos-8.qcow2] => { "actions": [], "changed": false, "crypts": [], "leaves": [], "mounts": [], "packages": [], "pools": [], "volumes": [] } TASK [fedora.linux_system_roles.storage : enable copr repositories if needed] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:35 Thursday 21 July 2022 19:28:26 +0000 (0:00:01.875) 0:05:22.946 ********* included: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml for /cache/centos-8.qcow2 TASK [fedora.linux_system_roles.storage : check if the COPR support packages should be installed] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:2 Thursday 21 July 2022 19:28:26 +0000 (0:00:00.051) 0:05:22.998 ********* TASK [fedora.linux_system_roles.storage : make sure COPR support packages are present] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:13 Thursday 21 July 2022 19:28:26 +0000 (0:00:00.036) 0:05:23.035 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : enable COPRs] ************************ task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:18 Thursday 21 July 2022 19:28:26 +0000 (0:00:00.038) 0:05:23.074 ********* TASK [fedora.linux_system_roles.storage : make sure required packages are installed] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:41 Thursday 21 July 2022 19:28:26 +0000 (0:00:00.035) 0:05:23.109 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [fedora.linux_system_roles.storage : get service facts] ******************* task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:47 Thursday 21 July 2022 19:28:28 +0000 (0:00:01.891) 0:05:25.001 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "blivet.service": { "name": "blivet.service", "source": "systemd", "state": "inactive", "status": "static" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "chrony-dnssrv@.service": { "name": "chrony-dnssrv@.service", "source": "systemd", "state": "unknown", "status": 
"static" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cockpit-motd.service": { "name": "cockpit-motd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cockpit-wsinstance-http.service": { "name": "cockpit-wsinstance-http.service", "source": "systemd", "state": "inactive", "status": "static" }, "cockpit-wsinstance-https-factory@.service": { "name": "cockpit-wsinstance-https-factory@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cockpit-wsinstance-https@.service": { "name": "cockpit-wsinstance-https@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cockpit.service": { "name": "cockpit.service", "source": "systemd", "state": "inactive", "status": "static" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cpupower.service": { "name": "cpupower.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "static" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "dbus-org.freedesktop.portable1.service": { "name": "dbus-org.freedesktop.portable1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "running", "status": "static" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "running", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": 
"static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fcoe.service": { "name": "fcoe.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "halt-local.service": { "name": "halt-local.service", "source": "systemd", "state": "inactive", "status": "static" }, "import-state.service": { "name": "import-state.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "iscsi-shutdown.service": { "name": "iscsi-shutdown.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iscsi.service": { "name": "iscsi.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iscsid.service": { "name": "iscsid.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "loadmodules.service": { "name": "loadmodules.service", "source": "systemd", "state": "stopped", "status": "enabled" 
}, "lvm2-activation-early.service": { "name": "lvm2-activation-early.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "lvm2-activation.service": { "name": "lvm2-activation.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "lvm2-pvscan@.service": { "name": "lvm2-pvscan@.service", "source": "systemd", "state": "unknown", "status": "static" }, "lvm2-pvscan@259:4.service": { "name": "lvm2-pvscan@259:4.service", "source": "systemd", "state": "stopped", "status": "active" }, "lvm2-pvscan@259:5.service": { "name": "lvm2-pvscan@259:5.service", "source": "systemd", "state": "stopped", "status": "active" }, "lvm2-pvscan@259:6.service": { "name": "lvm2-pvscan@259:6.service", "source": "systemd", "state": "stopped", "status": "active" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "mdadm-grow-continue@.service": { "name": "mdadm-grow-continue@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdadm-last-resort@.service": { "name": "mdadm-last-resort@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdcheck_continue.service": { "name": "mdcheck_continue.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdcheck_start.service": { "name": "mdcheck_start.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmon@.service": { "name": "mdmon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdmonitor-oneshot.service": { "name": "mdmonitor-oneshot.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmonitor.service": { "name": "mdmonitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "messagebus.service": { "name": "messagebus.service", "source": "systemd", "state": "active", "status": "static" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "multipathd.service": { "name": "multipathd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "ndctl-monitor.service": { "name": "ndctl-monitor.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-convert.service": { "name": "nfs-convert.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", 
"source": "systemd", "state": "stopped", "status": "static" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "oddjobd.service": { "name": "oddjobd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "packagekit-offline-update.service": { "name": "packagekit-offline-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "packagekit.service": { "name": "packagekit.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "running", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon.service": { "name": "quotaon.service", "source": "systemd", "state": "inactive", "status": "static" }, "rbdmap.service": { "name": "rbdmap.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rdisc.service": { "name": "rdisc.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "rngd-wake-threshold.service": { "name": "rngd-wake-threshold.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", 
"status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "running", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "active", "status": "enabled" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-resume@.service": { "name": "systemd-hibernate-resume@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, 
"systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-portabled.service": { "name": "systemd-portabled.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-quotacheck.service": { "name": "systemd-quotacheck.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-resolved.service": { "name": "systemd-resolved.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "masked" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", 
"source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "tcsd.service": { "name": "tcsd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "teamd@.service": { "name": "teamd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "timedatex.service": { "name": "timedatex.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "tuned.service": { "name": "tuned.service", "source": "systemd", "state": "running", "status": "enabled" }, "unbound-anchor.service": { "name": "unbound-anchor.service", "source": "systemd", "state": "stopped", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "vdo-start-by-dev@.service": { "name": "vdo-start-by-dev@.service", "source": "systemd", "state": "unknown", "status": "static" }, "vdo.service": { "name": "vdo.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "yppasswdd.service": { "name": "yppasswdd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ypserv.service": { "name": "ypserv.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ypxfrd.service": { "name": "ypxfrd.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] ***** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:53 Thursday 
21 July 2022 19:28:30 +0000 (0:00:01.560) 0:05:26.561 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_cryptsetup_services": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:58 Thursday 21 July 2022 19:28:30 +0000 (0:00:00.060) 0:05:26.621 ********* TASK [fedora.linux_system_roles.storage : manage the pools and volumes to match the specified state] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:64 Thursday 21 July 2022 19:28:30 +0000 (0:00:00.021) 0:05:26.643 ********* changed: [/cache/centos-8.qcow2] => { "actions": [ { "action": "destroy format", "device": "/dev/mapper/vg1-lv1", "fs_type": "xfs" }, { "action": "destroy device", "device": "/dev/mapper/vg1-lv1", "fs_type": null }, { "action": "destroy device", "device": "/dev/mapper/vg1-tpool1", "fs_type": null }, { "action": "destroy device", "device": "/dev/vg1", "fs_type": null }, { "action": "destroy format", "device": "/dev/nvme0n1p1", "fs_type": "lvmpv" }, { "action": "destroy device", "device": "/dev/nvme0n1p1", "fs_type": null }, { "action": "destroy format", "device": "/dev/nvme0n1", "fs_type": "disklabel" }, { "action": "destroy format", "device": "/dev/nvme1n1p1", "fs_type": "lvmpv" }, { "action": "destroy device", "device": "/dev/nvme1n1p1", "fs_type": null }, { "action": "destroy format", "device": "/dev/nvme1n1", "fs_type": "disklabel" }, { "action": "destroy format", "device": "/dev/nvme2n1p1", "fs_type": "lvmpv" }, { "action": "destroy device", "device": "/dev/nvme2n1p1", "fs_type": null }, { "action": "destroy format", "device": "/dev/nvme2n1", "fs_type": "disklabel" } ], "changed": true, "crypts": [], "leaves": [ "/dev/sr0", "/dev/vda1", "/dev/sda", "/dev/sdb", "/dev/sdc", "/dev/nvme0n1", "/dev/nvme1n1", "/dev/nvme2n1", "/dev/vdb", "/dev/vdc", "/dev/vdd" ], "mounts": [], "packages": [ "xfsprogs" ], "pools": [ { "disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "vg1", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "state": "absent", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/vg1-lv1", "_mount_id": "/dev/mapper/vg1-lv1", "_raw_device": "/dev/mapper/vg1-lv1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "name": "lv1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "size": "3g", "state": "present", "thin": true, "thin_pool_name": "tpool1", "thin_pool_size": "10g", "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] *** task path: 
/tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:78 Thursday 21 July 2022 19:28:33 +0000 (0:00:03.220) 0:05:29.864 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:90 Thursday 21 July 2022 19:28:33 +0000 (0:00:00.044) 0:05:29.909 ********* TASK [fedora.linux_system_roles.storage : show blivet_output] ****************** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:96 Thursday 21 July 2022 19:28:33 +0000 (0:00:00.022) 0:05:29.932 ********* ok: [/cache/centos-8.qcow2] => { "blivet_output": { "actions": [ { "action": "destroy format", "device": "/dev/mapper/vg1-lv1", "fs_type": "xfs" }, { "action": "destroy device", "device": "/dev/mapper/vg1-lv1", "fs_type": null }, { "action": "destroy device", "device": "/dev/mapper/vg1-tpool1", "fs_type": null }, { "action": "destroy device", "device": "/dev/vg1", "fs_type": null }, { "action": "destroy format", "device": "/dev/nvme0n1p1", "fs_type": "lvmpv" }, { "action": "destroy device", "device": "/dev/nvme0n1p1", "fs_type": null }, { "action": "destroy format", "device": "/dev/nvme0n1", "fs_type": "disklabel" }, { "action": "destroy format", "device": "/dev/nvme1n1p1", "fs_type": "lvmpv" }, { "action": "destroy device", "device": "/dev/nvme1n1p1", "fs_type": null }, { "action": "destroy format", "device": "/dev/nvme1n1", "fs_type": "disklabel" }, { "action": "destroy format", "device": "/dev/nvme2n1p1", "fs_type": "lvmpv" }, { "action": "destroy device", "device": "/dev/nvme2n1p1", "fs_type": null }, { "action": "destroy format", "device": "/dev/nvme2n1", "fs_type": "disklabel" } ], "changed": true, "crypts": [], "failed": false, "leaves": [ "/dev/sr0", "/dev/vda1", "/dev/sda", "/dev/sdb", "/dev/sdc", "/dev/nvme0n1", "/dev/nvme1n1", "/dev/nvme2n1", "/dev/vdb", "/dev/vdc", "/dev/vdd" ], "mounts": [], "packages": [ "xfsprogs" ], "pools": [ { "disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "vg1", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "state": "absent", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/vg1-lv1", "_mount_id": "/dev/mapper/vg1-lv1", "_raw_device": "/dev/mapper/vg1-lv1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "name": "lv1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "size": "3g", "state": "present", "thin": true, "thin_pool_name": "tpool1", "thin_pool_size": "10g", "type": "lvm", "vdo_pool_size": null } ] } ], 
"volumes": [] } } TASK [fedora.linux_system_roles.storage : set the list of pools for test verification] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:101 Thursday 21 July 2022 19:28:33 +0000 (0:00:00.095) 0:05:30.028 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_pools_list": [ { "disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "vg1", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "state": "absent", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/vg1-lv1", "_mount_id": "/dev/mapper/vg1-lv1", "_raw_device": "/dev/mapper/vg1-lv1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "name": "lv1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "size": "3g", "state": "present", "thin": true, "thin_pool_name": "tpool1", "thin_pool_size": "10g", "type": "lvm", "vdo_pool_size": null } ] } ] }, "changed": false } TASK [fedora.linux_system_roles.storage : set the list of volumes for test verification] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:105 Thursday 21 July 2022 19:28:33 +0000 (0:00:00.042) 0:05:30.070 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : remove obsolete mounts] ************** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:121 Thursday 21 July 2022 19:28:33 +0000 (0:00:00.077) 0:05:30.147 ********* TASK [fedora.linux_system_roles.storage : tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:132 Thursday 21 July 2022 19:28:33 +0000 (0:00:00.038) 0:05:30.186 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : set up new/current mounts] *********** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:137 Thursday 21 July 2022 19:28:34 +0000 (0:00:00.058) 0:05:30.245 ********* TASK [fedora.linux_system_roles.storage : tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:148 Thursday 21 July 2022 19:28:34 +0000 (0:00:00.039) 0:05:30.284 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : retrieve facts for the /etc/crypttab file] *** task path: 
/tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:156 Thursday 21 July 2022 19:28:34 +0000 (0:00:00.027) 0:05:30.311 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "stat": { "atime": 1658431449.4320083, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1643141385.117, "dev": 64513, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 135, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0600", "mtime": 1643141019.537, "nlink": 1, "path": "/etc/crypttab", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": "3147672035", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : manage /etc/crypttab to account for changes we just made] *** task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:161 Thursday 21 July 2022 19:28:34 +0000 (0:00:00.390) 0:05:30.702 ********* TASK [fedora.linux_system_roles.storage : Update facts] ************************ task path: /tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:183 Thursday 21 July 2022 19:28:34 +0000 (0:00:00.023) 0:05:30.725 ********* ok: [/cache/centos-8.qcow2] META: role_complete for /cache/centos-8.qcow2 TASK [include_tasks] *********************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/tests_create_thinp_then_remove.yml:125 Thursday 21 July 2022 19:28:35 +0000 (0:00:00.990) 0:05:31.716 ********* included: /tmp/tmp_0pjp8ed/tests/storage/verify-role-results.yml for /cache/centos-8.qcow2 TASK [Print out pool information] ********************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-role-results.yml:1 Thursday 21 July 2022 19:28:35 +0000 (0:00:00.054) 0:05:31.770 ********* ok: [/cache/centos-8.qcow2] => { "_storage_pools_list": [ { "disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "vg1", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "state": "absent", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/vg1-lv1", "_mount_id": "/dev/mapper/vg1-lv1", "_raw_device": "/dev/mapper/vg1-lv1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "nvme0n1", "nvme1n1", "nvme2n1" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "name": "lv1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "size": "3g", "state": "present", "thin": true, 
"thin_pool_name": "tpool1", "thin_pool_size": "10g", "type": "lvm", "vdo_pool_size": null } ] } ] } TASK [Print out volume information] ******************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-role-results.yml:6 Thursday 21 July 2022 19:28:35 +0000 (0:00:00.051) 0:05:31.822 ********* skipping: [/cache/centos-8.qcow2] => {} TASK [Collect info about the volumes.] ***************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-role-results.yml:14 Thursday 21 July 2022 19:28:35 +0000 (0:00:00.035) 0:05:31.858 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "info": { "/dev/nvme0n1": { "fstype": "", "label": "", "name": "/dev/nvme0n1", "size": "10G", "type": "disk", "uuid": "" }, "/dev/nvme1n1": { "fstype": "", "label": "", "name": "/dev/nvme1n1", "size": "10G", "type": "disk", "uuid": "" }, "/dev/nvme2n1": { "fstype": "", "label": "", "name": "/dev/nvme2n1", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sda": { "fstype": "", "label": "", "name": "/dev/sda", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdb": { "fstype": "", "label": "", "name": "/dev/sdb", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdc": { "fstype": "", "label": "", "name": "/dev/sdc", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sr0": { "fstype": "iso9660", "label": "cidata", "name": "/dev/sr0", "size": "364K", "type": "rom", "uuid": "2022-07-21-19-22-43-00" }, "/dev/vda": { "fstype": "", "label": "", "name": "/dev/vda", "size": "10G", "type": "disk", "uuid": "" }, "/dev/vda1": { "fstype": "xfs", "label": "", "name": "/dev/vda1", "size": "10G", "type": "partition", "uuid": "395b9844-e404-4857-afbb-c6edccaf72f3" }, "/dev/vdb": { "fstype": "", "label": "", "name": "/dev/vdb", "size": "10G", "type": "disk", "uuid": "" }, "/dev/vdc": { "fstype": "", "label": "", "name": "/dev/vdc", "size": "10G", "type": "disk", "uuid": "" }, "/dev/vdd": { "fstype": "", "label": "", "name": "/dev/vdd", "size": "10G", "type": "disk", "uuid": "" } } } TASK [Read the /etc/fstab file for volume existence] *************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-role-results.yml:19 Thursday 21 July 2022 19:28:36 +0000 (0:00:00.396) 0:05:32.255 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "cmd": [ "cat", "/etc/fstab" ], "delta": "0:00:00.002546", "end": "2022-07-21 19:28:36.091962", "rc": 0, "start": "2022-07-21 19:28:36.089416" } STDOUT: # # /etc/fstab # Created by anaconda on Tue Jan 25 20:03:39 2022 # # Accessible filesystems, by reference, are maintained under '/dev/disk/'. # See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info. # # After editing this file, run 'systemctl daemon-reload' to update systemd # units generated from this file. 
# UUID=395b9844-e404-4857-afbb-c6edccaf72f3 / xfs defaults 0 0 TASK [Read the /etc/crypttab file] ********************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/verify-role-results.yml:24 Thursday 21 July 2022 19:28:36 +0000 (0:00:00.379) 0:05:32.634 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "cmd": [ "cat", "/etc/crypttab" ], "delta": "0:00:00.003027", "end": "2022-07-21 19:28:36.477425", "failed_when_result": false, "rc": 0, "start": "2022-07-21 19:28:36.474398" } TASK [Verify the volumes listed in storage_pools were correctly managed] ******* task path: /tmp/tmp_0pjp8ed/tests/storage/verify-role-results.yml:33 Thursday 21 July 2022 19:28:36 +0000 (0:00:00.387) 0:05:33.022 ********* included: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool.yml for /cache/centos-8.qcow2 => (item={'disks': ['nvme0n1', 'nvme1n1', 'nvme2n1'], 'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'name': 'vg1', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'state': 'absent', 'type': 'lvm', 'volumes': [{'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'name': 'lv1', 'raid_level': None, 'size': '3g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'thin_pool_name': 'tpool1', 'thin_pool_size': '10g', 'thin': True, 'vdo_pool_size': None, 'disks': ['nvme0n1', 'nvme1n1', 'nvme2n1'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/vg1-lv1', '_raw_device': '/dev/mapper/vg1-lv1', '_mount_id': '/dev/mapper/vg1-lv1'}], 'raid_chunk_size': None}) TASK [Set _storage_pool_tests] ************************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool.yml:5 Thursday 21 July 2022 19:28:36 +0000 (0:00:00.059) 0:05:33.081 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_pool_tests": [ "members", "volumes" ] }, "changed": false } TASK [include_tasks] *********************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool.yml:18 Thursday 21 July 2022 19:28:36 +0000 (0:00:00.077) 0:05:33.159 ********* included: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml for /cache/centos-8.qcow2 => (item=members) included: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-volumes.yml for /cache/centos-8.qcow2 => (item=volumes) TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml:1 Thursday 21 July 2022 19:28:37 +0000 (0:00:00.047) 0:05:33.207 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_expected_pv_count": "0", "_storage_test_pool_pvs_lvm": [] }, "changed": false } TASK [Get the canonical device path for each member device] ******************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml:6 Thursday 21 July 2022 19:28:37 +0000 
(0:00:00.050) 0:05:33.257 ********* TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml:15 Thursday 21 July 2022 19:28:37 +0000 (0:00:00.020) 0:05:33.278 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "__pvs_lvm_len": "0" }, "changed": false } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml:19 Thursday 21 July 2022 19:28:37 +0000 (0:00:00.047) 0:05:33.325 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_pool_pvs": [] }, "changed": false } TASK [Verify PV count] ********************************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml:23 Thursday 21 July 2022 19:28:37 +0000 (0:00:00.053) 0:05:33.379 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml:29 Thursday 21 July 2022 19:28:37 +0000 (0:00:00.052) 0:05:33.431 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_expected_pv_type": "disk" }, "changed": false } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml:33 Thursday 21 July 2022 19:28:37 +0000 (0:00:00.038) 0:05:33.469 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_expected_pv_type": "partition" }, "changed": false } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml:37 Thursday 21 July 2022 19:28:37 +0000 (0:00:00.053) 0:05:33.523 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check the type of each PV] *********************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml:41 Thursday 21 July 2022 19:28:37 +0000 (0:00:00.025) 0:05:33.548 ********* TASK [Check MD RAID] *********************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml:50 Thursday 21 July 2022 19:28:37 +0000 (0:00:00.021) 0:05:33.570 ********* included: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-md.yml for /cache/centos-8.qcow2 TASK [get information about RAID] ********************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-md.yml:6 Thursday 21 July 2022 19:28:37 +0000 (0:00:00.043) 0:05:33.613 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-md.yml:12 Thursday 21 July 2022 19:28:37 +0000 (0:00:00.023) 0:05:33.636 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-md.yml:16 Thursday 21 July 2022 19:28:37 +0000 (0:00:00.022) 0:05:33.658 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] 
**************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-md.yml:20 Thursday 21 July 2022 19:28:37 +0000 (0:00:00.022) 0:05:33.681 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [check RAID active devices count] ***************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-md.yml:24 Thursday 21 July 2022 19:28:37 +0000 (0:00:00.023) 0:05:33.704 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [check RAID spare devices count] ****************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-md.yml:30 Thursday 21 July 2022 19:28:37 +0000 (0:00:00.022) 0:05:33.727 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [check RAID metadata version] ********************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-md.yml:36 Thursday 21 July 2022 19:28:37 +0000 (0:00:00.023) 0:05:33.750 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-md.yml:44 Thursday 21 July 2022 19:28:37 +0000 (0:00:00.033) 0:05:33.783 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_md_active_devices_re": null, "storage_test_md_metadata_version_re": null, "storage_test_md_spare_devices_re": null }, "changed": false } TASK [Check LVM RAID] ********************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml:53 Thursday 21 July 2022 19:28:37 +0000 (0:00:00.040) 0:05:33.824 ********* included: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-members-lvmraid.yml for /cache/centos-8.qcow2 TASK [Validate pool member LVM RAID settings] ********************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-members-lvmraid.yml:1 Thursday 21 July 2022 19:28:37 +0000 (0:00:00.046) 0:05:33.871 ********* included: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-lvmraid.yml for /cache/centos-8.qcow2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'name': 'lv1', 'raid_level': None, 'size': '3g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'thin_pool_name': 'tpool1', 'thin_pool_size': '10g', 'thin': True, 'vdo_pool_size': None, 'disks': ['nvme0n1', 'nvme1n1', 'nvme2n1'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/vg1-lv1', '_raw_device': '/dev/mapper/vg1-lv1', '_mount_id': '/dev/mapper/vg1-lv1'}) TASK [Get information about LVM RAID] ****************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-lvmraid.yml:3 Thursday 21 July 2022 19:28:37 +0000 (0:00:00.045) 0:05:33.916 ********* 
skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check that volume is LVM RAID] ******************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-lvmraid.yml:8 Thursday 21 July 2022 19:28:37 +0000 (0:00:00.028) 0:05:33.945 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-lvmraid.yml:12 Thursday 21 July 2022 19:28:37 +0000 (0:00:00.026) 0:05:33.971 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check Thin Pools] ******************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml:56 Thursday 21 July 2022 19:28:37 +0000 (0:00:00.028) 0:05:34.000 ********* included: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-members-thin.yml for /cache/centos-8.qcow2 TASK [Validate pool member thinpool settings] ********************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-members-thin.yml:1 Thursday 21 July 2022 19:28:37 +0000 (0:00:00.043) 0:05:34.043 ********* included: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-thin.yml for /cache/centos-8.qcow2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'name': 'lv1', 'raid_level': None, 'size': '3g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'thin_pool_name': 'tpool1', 'thin_pool_size': '10g', 'thin': True, 'vdo_pool_size': None, 'disks': ['nvme0n1', 'nvme1n1', 'nvme2n1'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/vg1-lv1', '_raw_device': '/dev/mapper/vg1-lv1', '_mount_id': '/dev/mapper/vg1-lv1'}) TASK [Get information about thinpool] ****************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-thin.yml:3 Thursday 21 July 2022 19:28:37 +0000 (0:00:00.041) 0:05:34.085 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check that volume is in correct thinpool (when thinp name is provided)] *** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-thin.yml:8 Thursday 21 July 2022 19:28:37 +0000 (0:00:00.027) 0:05:34.113 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check that volume is in thinpool (when thinp name is not provided)] ****** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-thin.yml:13 Thursday 21 July 2022 19:28:37 +0000 (0:00:00.026) 0:05:34.139 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-thin.yml:17 Thursday 21 July 
2022 19:28:37 +0000 (0:00:00.025) 0:05:34.165 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check member encryption] ************************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml:59 Thursday 21 July 2022 19:28:37 +0000 (0:00:00.024) 0:05:34.190 ********* included: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-members-encryption.yml for /cache/centos-8.qcow2 TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-members-encryption.yml:4 Thursday 21 July 2022 19:28:38 +0000 (0:00:00.047) 0:05:34.238 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Validate pool member LUKS settings] ************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-members-encryption.yml:8 Thursday 21 July 2022 19:28:38 +0000 (0:00:00.100) 0:05:34.338 ********* TASK [Validate pool member crypttab entries] *********************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-members-encryption.yml:15 Thursday 21 July 2022 19:28:38 +0000 (0:00:00.022) 0:05:34.361 ********* TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-members-encryption.yml:22 Thursday 21 July 2022 19:28:38 +0000 (0:00:00.060) 0:05:34.422 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_crypttab_key_file": null }, "changed": false } TASK [Check VDO] *************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml:62 Thursday 21 July 2022 19:28:38 +0000 (0:00:00.035) 0:05:34.458 ********* included: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-members-vdo.yml for /cache/centos-8.qcow2 TASK [Validate pool member VDO settings] *************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-members-vdo.yml:1 Thursday 21 July 2022 19:28:38 +0000 (0:00:00.047) 0:05:34.506 ********* included: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-vdo.yml for /cache/centos-8.qcow2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'name': 'lv1', 'raid_level': None, 'size': '3g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'thin_pool_name': 'tpool1', 'thin_pool_size': '10g', 'thin': True, 'vdo_pool_size': None, 'disks': ['nvme0n1', 'nvme1n1', 'nvme2n1'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/vg1-lv1', '_raw_device': '/dev/mapper/vg1-lv1', '_mount_id': '/dev/mapper/vg1-lv1'}) TASK [get information about VDO deduplication] ********************************* task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-vdo.yml:3 Thursday 21 July 2022 19:28:38 +0000 
(0:00:00.048) 0:05:34.554 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-vdo.yml:8 Thursday 21 July 2022 19:28:38 +0000 (0:00:00.024) 0:05:34.579 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [assert] ****************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-vdo.yml:11 Thursday 21 July 2022 19:28:38 +0000 (0:00:00.024) 0:05:34.604 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [assert] ****************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-vdo.yml:16 Thursday 21 July 2022 19:28:38 +0000 (0:00:00.024) 0:05:34.628 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-vdo.yml:21 Thursday 21 July 2022 19:28:38 +0000 (0:00:00.025) 0:05:34.653 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [assert] ****************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-vdo.yml:24 Thursday 21 July 2022 19:28:38 +0000 (0:00:00.025) 0:05:34.679 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [assert] ****************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-vdo.yml:29 Thursday 21 July 2022 19:28:38 +0000 (0:00:00.026) 0:05:34.706 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-pool-member-vdo.yml:39 Thursday 21 July 2022 19:28:38 +0000 (0:00:00.025) 0:05:34.731 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_vdo_status": null }, "changed": false } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-members.yml:65 Thursday 21 July 2022 19:28:38 +0000 (0:00:00.036) 0:05:34.768 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "__pvs_lvm_len": null, "_storage_test_expected_pv_count": null, "_storage_test_expected_pv_type": null, "_storage_test_pool_pvs": [], "_storage_test_pool_pvs_lvm": [] }, "changed": false } TASK [verify the volumes] ****************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-pool-volumes.yml:3 Thursday 21 July 2022 19:28:38 +0000 (0:00:00.037) 0:05:34.806 ********* included: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume.yml for /cache/centos-8.qcow2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'name': 'lv1', 
'raid_level': None, 'size': '3g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'thin_pool_name': 'tpool1', 'thin_pool_size': '10g', 'thin': True, 'vdo_pool_size': None, 'disks': ['nvme0n1', 'nvme1n1', 'nvme2n1'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/vg1-lv1', '_raw_device': '/dev/mapper/vg1-lv1', '_mount_id': '/dev/mapper/vg1-lv1'}) TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume.yml:2 Thursday 21 July 2022 19:28:38 +0000 (0:00:00.044) 0:05:34.850 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_volume_present": false, "_storage_volume_tests": [ "mount", "fstab", "fs", "device", "encryption", "md", "size", "cache" ] }, "changed": false } TASK [include_tasks] *********************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume.yml:10 Thursday 21 July 2022 19:28:38 +0000 (0:00:00.052) 0:05:34.902 ********* included: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-mount.yml for /cache/centos-8.qcow2 => (item=mount) included: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-fstab.yml for /cache/centos-8.qcow2 => (item=fstab) included: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-fs.yml for /cache/centos-8.qcow2 => (item=fs) included: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-device.yml for /cache/centos-8.qcow2 => (item=device) included: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml for /cache/centos-8.qcow2 => (item=encryption) included: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-md.yml for /cache/centos-8.qcow2 => (item=md) included: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-size.yml for /cache/centos-8.qcow2 => (item=size) included: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-cache.yml for /cache/centos-8.qcow2 => (item=cache) TASK [Get expected mount device based on device type] ************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-mount.yml:6 Thursday 21 July 2022 19:28:38 +0000 (0:00:00.081) 0:05:34.984 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_device_path": "/dev/mapper/vg1-lv1" }, "changed": false } TASK [Set some facts] ********************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-mount.yml:10 Thursday 21 July 2022 19:28:38 +0000 (0:00:00.043) 0:05:35.027 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_mount_device_matches": [], "storage_test_mount_expected_match_count": "0", "storage_test_mount_point_matches": [], "storage_test_swap_expected_matches": "0" }, "changed": false } TASK [Verify the current mount state by device] ******************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-mount.yml:20 Thursday 21 July 2022 19:28:38 +0000 (0:00:00.056) 0:05:35.083 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the current mount state by mount point] *************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-mount.yml:29 Thursday 21 July 2022 19:28:38 
+0000 (0:00:00.025) 0:05:35.108 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Verify the mount fs type] ************************************************ task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-mount.yml:37 Thursday 21 July 2022 19:28:38 +0000 (0:00:00.050) 0:05:35.159 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [command] ***************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-mount.yml:46 Thursday 21 July 2022 19:28:39 +0000 (0:00:00.040) 0:05:35.199 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Gather swap info] ******************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-mount.yml:50 Thursday 21 July 2022 19:28:39 +0000 (0:00:00.023) 0:05:35.222 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify swap status] ****************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-mount.yml:55 Thursday 21 July 2022 19:28:39 +0000 (0:00:00.034) 0:05:35.257 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Unset facts] ************************************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-mount.yml:65 Thursday 21 July 2022 19:28:39 +0000 (0:00:00.023) 0:05:35.281 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_mount_device_matches": null, "storage_test_mount_expected_match_count": null, "storage_test_mount_point_matches": null, "storage_test_swap_expected_matches": null, "storage_test_swaps": null, "storage_test_sys_node": null }, "changed": false } TASK [Set some variables for fstab checking] *********************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-fstab.yml:2 Thursday 21 July 2022 19:28:39 +0000 (0:00:00.036) 0:05:35.318 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": "0", "storage_test_fstab_expected_mount_options_matches": "0", "storage_test_fstab_expected_mount_point_matches": "0", "storage_test_fstab_id_matches": [], "storage_test_fstab_mount_options_matches": [], "storage_test_fstab_mount_point_matches": [] }, "changed": false } TASK [Verify that the device identifier appears in /etc/fstab] ***************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-fstab.yml:12 Thursday 21 July 2022 19:28:39 +0000 (0:00:00.062) 0:05:35.381 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the fstab mount point] ******************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-fstab.yml:19 Thursday 21 July 2022 19:28:39 +0000 (0:00:00.023) 0:05:35.405 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Verify mount_options] **************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-fstab.yml:25 Thursday 21 July 2022 19:28:39 +0000 (0:00:00.051) 0:05:35.456 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" 
} TASK [Clean up variables] ****************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-fstab.yml:34 Thursday 21 July 2022 19:28:39 +0000 (0:00:00.039) 0:05:35.496 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": null, "storage_test_fstab_expected_mount_options_matches": null, "storage_test_fstab_expected_mount_point_matches": null, "storage_test_fstab_id_matches": null, "storage_test_fstab_mount_options_matches": null, "storage_test_fstab_mount_point_matches": null }, "changed": false } TASK [Verify fs type] ********************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-fs.yml:4 Thursday 21 July 2022 19:28:39 +0000 (0:00:00.032) 0:05:35.529 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify fs label] ********************************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-fs.yml:10 Thursday 21 July 2022 19:28:39 +0000 (0:00:00.022) 0:05:35.552 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [See whether the device node is present] ********************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-device.yml:4 Thursday 21 July 2022 19:28:39 +0000 (0:00:00.021) 0:05:35.574 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "stat": { "exists": false } } TASK [Verify the presence/absence of the device node] ************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-device.yml:10 Thursday 21 July 2022 19:28:39 +0000 (0:00:00.409) 0:05:35.984 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Make sure we got info about this volume] ********************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-device.yml:15 Thursday 21 July 2022 19:28:39 +0000 (0:00:00.038) 0:05:36.022 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [(1/2) Process volume type (set initial value)] *************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-device.yml:21 Thursday 21 July 2022 19:28:39 +0000 (0:00:00.024) 0:05:36.046 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "st_volume_type": "lvm" }, "changed": false } TASK [(2/2) Process volume type (get RAID value)] ****************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-device.yml:25 Thursday 21 July 2022 19:28:39 +0000 (0:00:00.040) 0:05:36.087 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the volume's device type] ***************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-device.yml:30 Thursday 21 July 2022 19:28:39 +0000 (0:00:00.025) 0:05:36.112 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Stat the LUKS device, if encrypted] ************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:3 Thursday 21 July 2022 19:28:39 +0000 (0:00:00.028) 0:05:36.141 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK 
[Ensure cryptsetup is present] ******************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:10 Thursday 21 July 2022 19:28:39 +0000 (0:00:00.023) 0:05:36.164 ********* ok: [/cache/centos-8.qcow2] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [Collect LUKS info for this volume] *************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:15 Thursday 21 July 2022 19:28:41 +0000 (0:00:01.895) 0:05:38.059 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the presence/absence of the LUKS device node] ********************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:21 Thursday 21 July 2022 19:28:41 +0000 (0:00:00.026) 0:05:38.085 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that the raw device is the same as the device if not encrypted] *** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:27 Thursday 21 July 2022 19:28:41 +0000 (0:00:00.023) 0:05:38.109 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Make sure we got info about the LUKS volume if encrypted] **************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:33 Thursday 21 July 2022 19:28:41 +0000 (0:00:00.022) 0:05:38.131 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the LUKS volume's device type if encrypted] *********************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:39 Thursday 21 July 2022 19:28:41 +0000 (0:00:00.025) 0:05:38.157 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS version] ****************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:44 Thursday 21 July 2022 19:28:41 +0000 (0:00:00.028) 0:05:38.186 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS key size] ***************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:50 Thursday 21 July 2022 19:28:42 +0000 (0:00:00.027) 0:05:38.213 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS cipher] ******************************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:56 Thursday 21 July 2022 19:28:42 +0000 (0:00:00.026) 0:05:38.239 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:62 Thursday 21 July 2022 19:28:42 +0000 (0:00:00.029) 0:05:38.269 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_crypttab_entries": [], "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task 
path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:67 Thursday 21 July 2022 19:28:42 +0000 (0:00:00.053) 0:05:38.322 ********* ok: [/cache/centos-8.qcow2] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:72 Thursday 21 July 2022 19:28:42 +0000 (0:00:00.053) 0:05:38.375 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:78 Thursday 21 July 2022 19:28:42 +0000 (0:00:00.035) 0:05:38.411 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:84 Thursday 21 July 2022 19:28:42 +0000 (0:00:00.039) 0:05:38.450 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:90 Thursday 21 July 2022 19:28:42 +0000 (0:00:00.042) 0:05:38.493 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_expected_crypttab_entries": null, "_storage_test_expected_crypttab_key_file": null }, "changed": false } TASK [get information about RAID] ********************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-md.yml:7 Thursday 21 July 2022 19:28:42 +0000 (0:00:00.035) 0:05:38.528 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-md.yml:13 Thursday 21 July 2022 19:28:42 +0000 (0:00:00.036) 0:05:38.564 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-md.yml:17 Thursday 21 July 2022 19:28:42 +0000 (0:00:00.039) 0:05:38.604 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-md.yml:21 Thursday 21 July 2022 19:28:42 +0000 (0:00:00.039) 0:05:38.644 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [check RAID active devices count] ***************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-md.yml:25 Thursday 21 July 2022 19:28:42 +0000 (0:00:00.040) 0:05:38.684 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [check RAID spare devices count] ****************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-md.yml:31 Thursday 21 July 2022 19:28:42 +0000 (0:00:00.039) 0:05:38.723 
********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [check RAID metadata version] ********************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-md.yml:37 Thursday 21 July 2022 19:28:42 +0000 (0:00:00.039) 0:05:38.763 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [parse the actual size of the volume] ************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-size.yml:3 Thursday 21 July 2022 19:28:42 +0000 (0:00:00.037) 0:05:38.800 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [parse the requested size of the volume] ********************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-size.yml:9 Thursday 21 July 2022 19:28:42 +0000 (0:00:00.026) 0:05:38.827 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Establish base value for expected size] ********************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-size.yml:15 Thursday 21 July 2022 19:28:42 +0000 (0:00:00.039) 0:05:38.867 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [debug] ******************************************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-size.yml:20 Thursday 21 July 2022 19:28:42 +0000 (0:00:00.041) 0:05:38.908 ********* ok: [/cache/centos-8.qcow2] => { "storage_test_expected_size": "4294967296" } TASK [debug] ******************************************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-size.yml:25 Thursday 21 July 2022 19:28:42 +0000 (0:00:00.037) 0:05:38.945 ********* skipping: [/cache/centos-8.qcow2] => {} TASK [debug] ******************************************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-size.yml:28 Thursday 21 July 2022 19:28:42 +0000 (0:00:00.038) 0:05:38.983 ********* skipping: [/cache/centos-8.qcow2] => {} TASK [Get the size of parent/pool device] ************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-size.yml:31 Thursday 21 July 2022 19:28:42 +0000 (0:00:00.043) 0:05:39.026 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [debug] ******************************************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-size.yml:36 Thursday 21 July 2022 19:28:42 +0000 (0:00:00.041) 0:05:39.068 ********* skipping: [/cache/centos-8.qcow2] => {} TASK [Calculate the expected size based on pool size and percentage value] ***** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-size.yml:39 Thursday 21 July 2022 19:28:42 +0000 (0:00:00.037) 0:05:39.105 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [debug] ******************************************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-size.yml:44 Thursday 21 July 2022 19:28:42 +0000 (0:00:00.038) 0:05:39.144 ********* ok: [/cache/centos-8.qcow2] => { "storage_test_actual_size": { "changed": false, "skip_reason": "Conditional result 
was False", "skipped": true } } TASK [debug] ******************************************************************* task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-size.yml:47 Thursday 21 July 2022 19:28:42 +0000 (0:00:00.037) 0:05:39.181 ********* ok: [/cache/centos-8.qcow2] => { "storage_test_expected_size": "4294967296" } TASK [assert] ****************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-size.yml:50 Thursday 21 July 2022 19:28:43 +0000 (0:00:00.039) 0:05:39.221 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Get information about the LV] ******************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-cache.yml:6 Thursday 21 July 2022 19:28:43 +0000 (0:00:00.023) 0:05:39.245 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-cache.yml:14 Thursday 21 July 2022 19:28:43 +0000 (0:00:00.026) 0:05:39.271 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [check segment type] ****************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-cache.yml:17 Thursday 21 July 2022 19:28:43 +0000 (0:00:00.024) 0:05:39.296 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-cache.yml:22 Thursday 21 July 2022 19:28:43 +0000 (0:00:00.025) 0:05:39.321 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [parse the requested cache size] ****************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-cache.yml:26 Thursday 21 July 2022 19:28:43 +0000 (0:00:00.023) 0:05:39.345 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [set_fact] **************************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-cache.yml:32 Thursday 21 July 2022 19:28:43 +0000 (0:00:00.022) 0:05:39.368 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check cache size] ******************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-cache.yml:36 Thursday 21 July 2022 19:28:43 +0000 (0:00:00.024) 0:05:39.392 ********* skipping: [/cache/centos-8.qcow2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clean up facts] ********************************************************** task path: /tmp/tmp_0pjp8ed/tests/storage/test-verify-volume.yml:16 Thursday 21 July 2022 19:28:43 +0000 (0:00:00.024) 0:05:39.417 ********* ok: [/cache/centos-8.qcow2] => { "ansible_facts": { "_storage_test_volume_present": null }, "changed": false } TASK [Verify the volumes with no pool were correctly managed] ****************** task path: /tmp/tmp_0pjp8ed/tests/storage/verify-role-results.yml:43 Thursday 21 July 2022 19:28:43 +0000 (0:00:00.035) 0:05:39.452 ********* 
TASK [Clean up variable namespace] *********************************************
task path: /tmp/tmp_0pjp8ed/tests/storage/verify-role-results.yml:53
Thursday 21 July 2022 19:28:43 +0000 (0:00:00.020) 0:05:39.473 *********
ok: [/cache/centos-8.qcow2] => {
    "ansible_facts": {
        "storage_test_blkinfo": null,
        "storage_test_crypttab": null,
        "storage_test_fstab": null
    },
    "changed": false
}
META: ran handlers
META: ran handlers

PLAY RECAP *********************************************************************
/cache/centos-8.qcow2 : ok=649 changed=11 unreachable=0 failed=0 skipped=476 rescued=0 ignored=0

Thursday 21 July 2022 19:28:43 +0000 (0:00:00.136) 0:05:39.609 *********
===============================================================================
fedora.linux_system_roles.storage : make sure blivet is available ----- 173.76s
/tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:7
Ensure cryptsetup is present -------------------------------------------- 3.57s
/tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:10 -----------
fedora.linux_system_roles.storage : manage the pools and volumes to match the specified state --- 3.22s
/tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:64
fedora.linux_system_roles.storage : manage the pools and volumes to match the specified state --- 3.20s
/tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:64
fedora.linux_system_roles.storage : manage the pools and volumes to match the specified state --- 2.53s
/tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:64
fedora.linux_system_roles.storage : manage the pools and volumes to match the specified state --- 2.22s
/tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:64
fedora.linux_system_roles.storage : get required packages --------------- 2.19s
/tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:23
fedora.linux_system_roles.storage : manage the pools and volumes to match the specified state --- 2.05s
/tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:64
fedora.linux_system_roles.storage : manage the pools and volumes to match the specified state --- 2.02s
/tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:64
fedora.linux_system_roles.storage : get required packages --------------- 2.00s
/tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:23
Ensure cryptsetup is present -------------------------------------------- 1.93s
/tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:10 -----------
fedora.linux_system_roles.storage : get required packages --------------- 1.92s
/tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:23
fedora.linux_system_roles.storage : make sure required packages are installed --- 1.92s
/tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:41
Ensure cryptsetup is present -------------------------------------------- 1.92s
/tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:10 -----------
fedora.linux_system_roles.storage : make sure blivet is available ------- 1.91s
/tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:7
Ensure cryptsetup is present -------------------------------------------- 1.91s
/tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:10 -----------
fedora.linux_system_roles.storage : make sure required packages are installed --- 1.91s
/tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:41
fedora.linux_system_roles.storage : make sure blivet is available ------- 1.90s
/tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:7
Ensure cryptsetup is present -------------------------------------------- 1.90s
/tmp/tmp_0pjp8ed/tests/storage/test-verify-volume-encryption.yml:10 -----------
fedora.linux_system_roles.storage : make sure blivet is available ------- 1.89s
/tmp/tmpu1heti3n/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:7
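
For context, the pool and volume facts echoed by the verification tasks above correspond to a storage-role invocation roughly like the sketch below. It is reconstructed from the values printed in this log (VG vg1 on nvme0n1/nvme1n1/nvme2n1, thin pool tpool1, thin LV lv1 mounted at /opt/test1, with the pool set to state: absent for the removal pass); the play and variable layout are an assumption, not a copy of the actual test file.

    - hosts: all
      tasks:
        - name: Remove the thin-provisioned pool created earlier in the test
          include_role:
            name: linux-system-roles.storage
          vars:
            storage_pools:
              - name: vg1
                type: lvm
                disks: ['nvme0n1', 'nvme1n1', 'nvme2n1']
                state: absent              # tear the whole pool down
                volumes:
                  - name: lv1
                    state: present
                    thin: true
                    thin_pool_name: tpool1
                    thin_pool_size: '10g'
                    size: '3g'
                    fs_type: xfs
                    mount_point: /opt/test1
                    mount_options: defaults

With state: absent on the pool, the role removes the thin LV, the thin pool tpool1 and the volume group vg1, which is why the checks above expect zero PVs, find no device node for /dev/mapper/vg1-lv1, and match no /etc/fstab or /etc/crypttab entries. The per-task timestamps and the trailing duration summary are produced by a task-profiling callback plugin (profile_tasks or similar) enabled for this run.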