ansible-playbook 2.9.27
  config file = /etc/ansible/ansible.cfg
  configured module search path = [u'/root/.ansible/plugins/modules', u'/usr/share/ansible/plugins/modules']
  ansible python module location = /usr/lib/python2.7/site-packages/ansible
  executable location = /usr/bin/ansible-playbook
  python version = 2.7.5 (default, Nov 14 2023, 16:14:06) [GCC 4.8.5 20150623 (Red Hat 4.8.5-44)]
Using /etc/ansible/ansible.cfg as config file
[WARNING]: running playbook inside collection fedora.linux_system_roles
Skipping callback 'actionable', as we already have a stdout callback.
Skipping callback 'counter_enabled', as we already have a stdout callback.
Skipping callback 'debug', as we already have a stdout callback.
Skipping callback 'dense', as we already have a stdout callback.
Skipping callback 'dense', as we already have a stdout callback.
Skipping callback 'full_skip', as we already have a stdout callback.
Skipping callback 'json', as we already have a stdout callback.
Skipping callback 'jsonl', as we already have a stdout callback.
Skipping callback 'minimal', as we already have a stdout callback.
Skipping callback 'null', as we already have a stdout callback.
Skipping callback 'oneline', as we already have a stdout callback.
Skipping callback 'selective', as we already have a stdout callback.
Skipping callback 'skippy', as we already have a stdout callback.
Skipping callback 'stderr', as we already have a stdout callback.
Skipping callback 'unixy', as we already have a stdout callback.
Skipping callback 'yaml', as we already have a stdout callback.

PLAYBOOK: tests_mount.yml ******************************************************
1 plays in /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/tests/snapshot/tests_mount.yml

PLAY [Basic mount snapshot test] ***********************************************

TASK [Gathering Facts] *********************************************************
task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/tests/snapshot/tests_mount.yml:2
Wednesday 12 November 2025 14:43:57 -0500 (0:00:00.051) 0:00:00.051 ****
ok: [managed-node2]
META: ran handlers

TASK [Setup] *******************************************************************
task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/tests/snapshot/tests_mount.yml:46
Wednesday 12 November 2025 14:43:59 -0500 (0:00:01.436) 0:00:01.487 ****
included: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/tests/snapshot/tasks/setup.yml for managed-node2

TASK [Check if system is ostree] ***********************************************
task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/tests/snapshot/tasks/setup.yml:10
Wednesday 12 November 2025 14:43:59 -0500 (0:00:00.197) 0:00:01.685 ****
ok: [managed-node2] => { "changed": false, "stat": { "exists": false } }

TASK [Set mount parent] ********************************************************
task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/tests/snapshot/tasks/setup.yml:15
Wednesday 12 November 2025 14:44:00 -0500 (0:00:00.594) 0:00:02.279 ****
ok: [managed-node2] => { "ansible_facts": { "test_mnt_parent": "/mnt" }, "changed": false }

TASK [Run the storage role install base packages] ******************************
task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/tests/snapshot/tasks/setup.yml:19
Wednesday 12 November 2025 14:44:00 -0500 (0:00:00.076) 0:00:02.356 ****

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2
Wednesday 12 November 2025 14:44:00 -0500 (0:00:00.132) 0:00:02.488 ****
included: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node2

TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] ***
task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2
Wednesday 12 November 2025 14:44:00 -0500 (0:00:00.090) 0:00:02.578 ****
skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7
Wednesday 12 November 2025 14:44:00 -0500 (0:00:00.084) 0:00:02.663 ****
skipping: [managed-node2] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" }
skipping: [managed-node2] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" }
ok: [managed-node2] => (item=CentOS_7.yml) => { "ansible_facts": { "__storage_blivet_diskvolume_mkfs_option_map": { "ext2": "-F", "ext3": "-F", "ext4": "-F" }, "blivet_package_list": [ "python-enum34", "python-blivet3", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}" ] }, "ansible_included_var_files": [ "/tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_7.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_7.yml" }
skipping: [managed-node2] => (item=CentOS_7.9.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS_7.9.yml", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.storage : Check if system is ostree] ***********
task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25
Wednesday 12 November 2025 14:44:00 -0500 (0:00:00.116) 0:00:02.780 ****
ok: [managed-node2] => { "changed": false, "stat": { "exists": false } }

TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] ***
task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30
Wednesday 12 November 2025 14:44:00 -0500 (0:00:00.320) 0:00:03.101 ****
ok: [managed-node2] => { "ansible_facts": { "__storage_is_ostree": false }, "changed": false }

TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] ***
task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5
Wednesday 12 November 2025 14:44:00 -0500 (0:00:00.063) 0:00:03.164 ****
ok: [managed-node2] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false }

TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] ***
task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9
Wednesday 12 November 2025 14:44:00 -0500 (0:00:00.050) 0:00:03.215 ****
ok: [managed-node2] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false }

TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] ***
task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13
Wednesday 12 November 2025 14:44:01 -0500 (0:00:00.051) 0:00:03.267 ****
included: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node2

TASK [fedora.linux_system_roles.storage : Make sure blivet is available] *******
task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2
Wednesday 12 November 2025 14:44:01 -0500 (0:00:00.140) 0:00:03.408 ****
ok: [managed-node2] => { "changed": false, "rc": 0, "results": [ "python-enum34-1.0.4-1.el7.noarch providing python-enum34 is already installed", "1:python2-blivet3-3.1.3-3.el7.noarch providing python-blivet3 is already installed", "libblockdev-crypto-2.18-5.el7.x86_64 providing libblockdev-crypto is already installed", "libblockdev-dm-2.18-5.el7.x86_64 providing libblockdev-dm is already installed", "libblockdev-lvm-2.18-5.el7.x86_64 providing libblockdev-lvm is already installed", "libblockdev-mdraid-2.18-5.el7.x86_64 providing libblockdev-mdraid is already installed", "libblockdev-swap-2.18-5.el7.x86_64 providing libblockdev-swap is already installed", "libblockdev-2.18-5.el7.x86_64 providing libblockdev is already installed" ] }

TASK [fedora.linux_system_roles.storage : Show storage_pools] ******************
task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9
Wednesday 12 November 2025 14:44:02 -0500 (0:00:01.406) 0:00:04.815 ****
ok: [managed-node2] => { "storage_pools | d([])": [] }

TASK [fedora.linux_system_roles.storage : Show storage_volumes] ****************
task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14
Wednesday 12 November 2025 14:44:02 -0500 (0:00:00.146) 0:00:04.962 ****
ok: [managed-node2] => { "storage_volumes | d([])": [] }

TASK [fedora.linux_system_roles.storage : Get required packages] ***************
task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19
Wednesday 12 November 2025 14:44:02 -0500 (0:00:00.075) 0:00:05.037 ****
ok: [managed-node2] => { "actions": [], "changed": false, "crypts": [], "leaves": [], "mounts": [], "packages": [], "pools": [], "volumes": [] }

TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] ***
task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:32
Wednesday 12 November 2025 14:44:03 -0500 (0:00:00.998) 0:00:06.036 ****
included: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml for managed-node2

TASK [fedora.linux_system_roles.storage : Check if the COPR support packages should be installed] ***
task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:2
Wednesday 12 November 2025 14:44:03 -0500 (0:00:00.193) 0:00:06.229 ****

TASK [fedora.linux_system_roles.storage : Make sure COPR support packages are present] ***
task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:13
Wednesday 12 November 2025 14:44:04 -0500 (0:00:00.061) 0:00:06.290 ****
skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.storage : Enable COPRs] ************************
task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:19
Wednesday 12 November 2025 14:44:04 -0500 (0:00:00.098) 0:00:06.389 ****

TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] ***
task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:38
Wednesday 12 November 2025 14:44:04 -0500 (0:00:00.078) 0:00:06.467 ****
ok: [managed-node2] => { "changed": false, "rc": 0, "results": [ "kpartx-0.4.9-136.el7_9.x86_64 providing kpartx is already installed" ] }

TASK [fedora.linux_system_roles.storage : Get service facts] *******************
task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:52
Wednesday 12 November 2025 14:44:05 -0500 (0:00:00.836) 0:00:07.304 ****
ok: [managed-node2] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "arp-ethers.service": { "name": "arp-ethers.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "blivet.service": { "name": "blivet.service", "source": "systemd", "state": "inactive", "status": "static" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "brandbot.service": { "name": "brandbot.service", "source": "systemd", "state": "inactive", "status": "static" }, "chrony-dnssrv@.service": { "name": "chrony-dnssrv@.service", "source": "systemd", "state": "unknown", "status": "static" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "console-shell.service": { "name": "console-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" },
"cpupower.service": { "name": "cpupower.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.import1.service": { "name": "dbus-org.freedesktop.import1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "static" }, "dbus-org.freedesktop.machine1.service": { "name": "dbus-org.freedesktop.machine1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "running", "status": "static" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "running", "status": "static" }, "dmraid-activation.service": { "name": "dmraid-activation.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "ebtables.service": { "name": "ebtables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "unknown" }, "gssproxy.service": { "name": "gssproxy.service", 
"source": "systemd", "state": "running", "status": "disabled" }, "halt-local.service": { "name": "halt-local.service", "source": "systemd", "state": "inactive", "status": "static" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "iprdump.service": { "name": "iprdump.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "iprinit.service": { "name": "iprinit.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "iprupdate.service": { "name": "iprupdate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-lvmetad.service": { "name": "lvm2-lvmetad.service", "source": "systemd", "state": "running", "status": "static" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "lvm2-pvscan@.service": { "name": "lvm2-pvscan@.service", "source": "systemd", "state": "unknown", "status": "static" }, "lvm2-pvscan@8:0.service": { "name": "lvm2-pvscan@8:0.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "lvm2-pvscan@8:16.service": { "name": "lvm2-pvscan@8:16.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "lvm2-pvscan@8:32.service": { "name": "lvm2-pvscan@8:32.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "mdadm-grow-continue@.service": { "name": "mdadm-grow-continue@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdadm-last-resort@.service": { "name": "mdadm-last-resort@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdcheck_continue.service": { "name": "mdcheck_continue.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdcheck_start.service": { "name": "mdcheck_start.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmon@.service": { "name": "mdmon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdmonitor-oneshot.service": { "name": "mdmonitor-oneshot.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmonitor.service": { "name": "mdmonitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "messagebus.service": { "name": "messagebus.service", "source": "systemd", "state": "active", "status": "static" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "netconsole": { "name": "netconsole", "source": "sysv", "state": "stopped", "status": "disabled" }, "network": { "name": "network", "source": 
"sysv", "state": "running", "status": "enabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-config.service": { "name": "nfs-config.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-idmap.service": { "name": "nfs-idmap.service", "source": "systemd", "state": "inactive", "status": "static" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-lock.service": { "name": "nfs-lock.service", "source": "systemd", "state": "inactive", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-rquotad.service": { "name": "nfs-rquotad.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-secure.service": { "name": "nfs-secure.service", "source": "systemd", "state": "inactive", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs.service": { "name": "nfs.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfslock.service": { "name": "nfslock.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-halt.service": { "name": "plymouth-halt.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "plymouth-kexec.service": { "name": "plymouth-kexec.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "plymouth-poweroff.service": { "name": "plymouth-poweroff.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "plymouth-quit.service": { "name": "plymouth-quit.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "plymouth-read-write.service": { "name": "plymouth-read-write.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "plymouth-reboot.service": { "name": "plymouth-reboot.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "plymouth-switch-root.service": { "name": "plymouth-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "running", "status": "static" }, "postfix.service": { "name": "postfix.service", "source": "systemd", "state": "running", "status": "enabled" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon.service": { "name": "quotaon.service", "source": "systemd", "state": "inactive", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rdisc.service": { "name": "rdisc.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { 
"name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rhel-autorelabel-mark.service": { "name": "rhel-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rhel-autorelabel.service": { "name": "rhel-autorelabel.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rhel-configure.service": { "name": "rhel-configure.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rhel-dmesg.service": { "name": "rhel-dmesg.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rhel-domainname.service": { "name": "rhel-domainname.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rhel-import-state.service": { "name": "rhel-import-state.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rhel-loadmodules.service": { "name": "rhel-loadmodules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rhel-readonly.service": { "name": "rhel-readonly.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-rquotad.service": { "name": "rpc-rquotad.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpcgssd.service": { "name": "rpcgssd.service", "source": "systemd", "state": "inactive", "status": "static" }, "rpcidmapd.service": { "name": "rpcidmapd.service", "source": "systemd", "state": "inactive", "status": "static" }, "rsyncd.service": { "name": "rsyncd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyncd@.service": { "name": "rsyncd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-policy-migrate-local-changes@.service": { "name": "selinux-policy-migrate-local-changes@.service", "source": "systemd", "state": "unknown", "status": "static" }, "selinux-policy-migrate-local-changes@targeted.service": { "name": "selinux-policy-migrate-local-changes@targeted.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "unknown" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "static" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-plymouth.service": { "name": 
"systemd-ask-password-plymouth.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bootchart.service": { "name": "systemd-bootchart.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-resume@.service": { "name": "systemd-hibernate-resume@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-importd.service": { "name": "systemd-importd.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-machined.service": { "name": "systemd-machined.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-nspawn@.service": { "name": "systemd-nspawn@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": 
"static" }, "systemd-quotacheck.service": { "name": "systemd-quotacheck.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-readahead-collect.service": { "name": "systemd-readahead-collect.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-readahead-done.service": { "name": "systemd-readahead-done.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "systemd-readahead-drop.service": { "name": "systemd-readahead-drop.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "systemd-readahead-replay.service": { "name": "systemd-readahead-replay.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill@.service": { "name": "systemd-rfkill@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-shutdownd.service": { "name": "systemd-shutdownd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "target.service": { "name": "target.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "targetclid.service": { "name": "targetclid.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "teamd@.service": { "name": 
"teamd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "tuned.service": { "name": "tuned.service", "source": "systemd", "state": "running", "status": "enabled" }, "wpa_supplicant.service": { "name": "wpa_supplicant.service", "source": "systemd", "state": "inactive", "status": "disabled" } } }, "changed": false } TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] ***** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:58 Wednesday 12 November 2025 14:44:06 -0500 (0:00:01.187) 0:00:08.491 **** ok: [managed-node2] => { "ansible_facts": { "storage_cryptsetup_services": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:64 Wednesday 12 November 2025 14:44:06 -0500 (0:00:00.078) 0:00:08.570 **** TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:70 Wednesday 12 November 2025 14:44:06 -0500 (0:00:00.068) 0:00:08.638 **** ok: [managed-node2] => { "actions": [], "changed": false, "crypts": [], "leaves": [], "mounts": [], "packages": [], "pools": [], "volumes": [] } TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:85 Wednesday 12 November 2025 14:44:07 -0500 (0:00:00.629) 0:00:09.268 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ****** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:92 Wednesday 12 November 2025 14:44:07 -0500 (0:00:00.135) 0:00:09.403 **** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1762976547.0214868, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "72884e3f126482c2d28276ff7c57744fa95eff91", "ctime": 1762976535.3184993, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 263693, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1762976535.3174992, "nlink": 1, "path": "/etc/fstab", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 1229, "uid": 0, "version": "1805363463", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:97 Wednesday 12 November 2025 14:44:07 -0500 (0:00:00.660) 0:00:10.063 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:115 Wednesday 12 November 2025 
14:44:07 -0500 (0:00:00.063) 0:00:10.127 **** TASK [fedora.linux_system_roles.storage : Show blivet_output] ****************** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:121 Wednesday 12 November 2025 14:44:07 -0500 (0:00:00.088) 0:00:10.216 **** ok: [managed-node2] => { "blivet_output": { "actions": [], "changed": false, "crypts": [], "failed": false, "leaves": [], "mounts": [], "packages": [], "pools": [], "volumes": [] } } TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:130 Wednesday 12 November 2025 14:44:08 -0500 (0:00:00.102) 0:00:10.318 **** ok: [managed-node2] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:134 Wednesday 12 November 2025 14:44:08 -0500 (0:00:00.115) 0:00:10.434 **** ok: [managed-node2] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] ************** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:150 Wednesday 12 November 2025 14:44:08 -0500 (0:00:00.102) 0:00:10.536 **** TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:161 Wednesday 12 November 2025 14:44:08 -0500 (0:00:00.075) 0:00:10.612 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set up new/current mounts] *********** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:166 Wednesday 12 November 2025 14:44:08 -0500 (0:00:00.058) 0:00:10.670 **** TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:177 Wednesday 12 November 2025 14:44:08 -0500 (0:00:00.048) 0:00:10.719 **** TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:189 Wednesday 12 November 2025 14:44:08 -0500 (0:00:00.046) 0:00:10.765 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:197 Wednesday 12 November 2025 14:44:08 -0500 (0:00:00.076) 0:00:10.841 **** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1762976469.2355814, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1718879272.062, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 131079, "isblk": false, "ischr": false, "isdir": false, "isfifo": 
false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0600", "mtime": 1718879026.308, "nlink": 1, "path": "/etc/crypttab", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": "18446744072852913879", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:202 Wednesday 12 November 2025 14:44:09 -0500 (0:00:00.470) 0:00:11.312 **** TASK [fedora.linux_system_roles.storage : Update facts] ************************ task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:224 Wednesday 12 November 2025 14:44:09 -0500 (0:00:00.042) 0:00:11.354 **** ok: [managed-node2] TASK [Get unused disks] ******************************************************** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/tests/snapshot/tasks/setup.yml:25 Wednesday 12 November 2025 14:44:09 -0500 (0:00:00.850) 0:00:12.204 **** included: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/tests/snapshot/get_unused_disk.yml for managed-node2 TASK [Check if system is ostree] *********************************************** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/tests/snapshot/get_unused_disk.yml:5 Wednesday 12 November 2025 14:44:10 -0500 (0:00:00.246) 0:00:12.451 **** ok: [managed-node2] => { "changed": false, "stat": { "exists": false } } TASK [Set flag to indicate system is ostree] *********************************** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/tests/snapshot/get_unused_disk.yml:10 Wednesday 12 November 2025 14:44:10 -0500 (0:00:00.567) 0:00:13.019 **** ok: [managed-node2] => { "ansible_facts": { "__snapshot_is_ostree": false }, "changed": false } TASK [Ensure test packages] **************************************************** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/tests/snapshot/get_unused_disk.yml:14 Wednesday 12 November 2025 14:44:10 -0500 (0:00:00.101) 0:00:13.120 **** ok: [managed-node2] => { "changed": false, "rc": 0, "results": [ "util-linux-2.23.2-65.el7_9.1.x86_64 providing util-linux is already installed" ] } TASK [Find unused disks in the system] ***************************************** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/tests/snapshot/get_unused_disk.yml:23 Wednesday 12 November 2025 14:44:11 -0500 (0:00:01.067) 0:00:14.188 **** ok: [managed-node2] => { "changed": false, "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi", "sdj" ], "info": [ "Line: NAME=\"/dev/sda\" TYPE=\"disk\" SIZE=\"3221225472\" FSTYPE=\"\" LOG-SEC=\"512\"", "Line: NAME=\"/dev/sdb\" TYPE=\"disk\" SIZE=\"3221225472\" FSTYPE=\"\" LOG-SEC=\"512\"", "Line: NAME=\"/dev/sdc\" TYPE=\"disk\" SIZE=\"3221225472\" FSTYPE=\"\" LOG-SEC=\"512\"", "Line: NAME=\"/dev/sdd\" TYPE=\"disk\" SIZE=\"3221225472\" FSTYPE=\"\" LOG-SEC=\"512\"", "Line: NAME=\"/dev/sde\" TYPE=\"disk\" SIZE=\"3221225472\" FSTYPE=\"\" LOG-SEC=\"512\"", "Line: NAME=\"/dev/sdf\" TYPE=\"disk\" SIZE=\"3221225472\" FSTYPE=\"\" LOG-SEC=\"512\"", "Line: NAME=\"/dev/sdg\" TYPE=\"disk\" SIZE=\"3221225472\" 
FSTYPE=\"\" LOG-SEC=\"512\"", "Line: NAME=\"/dev/sdh\" TYPE=\"disk\" SIZE=\"3221225472\" FSTYPE=\"\" LOG-SEC=\"512\"", "Line: NAME=\"/dev/sdi\" TYPE=\"disk\" SIZE=\"3221225472\" FSTYPE=\"\" LOG-SEC=\"512\"", "Line: NAME=\"/dev/sdj\" TYPE=\"disk\" SIZE=\"3221225472\" FSTYPE=\"\" LOG-SEC=\"512\"", "Line: NAME=\"/dev/sdk\" TYPE=\"disk\" SIZE=\"3221225472\" FSTYPE=\"\" LOG-SEC=\"512\"", "Line: NAME=\"/dev/sdl\" TYPE=\"disk\" SIZE=\"3221225472\" FSTYPE=\"\" LOG-SEC=\"512\"", "Line: NAME=\"/dev/xvda\" TYPE=\"disk\" SIZE=\"268435456000\" FSTYPE=\"\" LOG-SEC=\"512\"", "Line: NAME=\"/dev/xvda1\" TYPE=\"part\" SIZE=\"268434390528\" FSTYPE=\"ext4\" LOG-SEC=\"512\"", "Line type [part] is not disk: NAME=\"/dev/xvda1\" TYPE=\"part\" SIZE=\"268434390528\" FSTYPE=\"ext4\" LOG-SEC=\"512\"", "filename [xvda1] is a partition", "Disk [/dev/xvda] attrs [{'fstype': '', 'type': 'disk', 'ssize': '512', 'size': '268435456000'}] has partitions" ] } TASK [Set unused_disks if necessary] ******************************************* task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/tests/snapshot/get_unused_disk.yml:31 Wednesday 12 November 2025 14:44:13 -0500 (0:00:01.648) 0:00:15.836 **** ok: [managed-node2] => { "ansible_facts": { "unused_disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi", "sdj" ] }, "changed": false } TASK [Print unused disks] ****************************************************** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/tests/snapshot/get_unused_disk.yml:36 Wednesday 12 November 2025 14:44:13 -0500 (0:00:00.130) 0:00:15.967 **** ok: [managed-node2] => { "unused_disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi", "sdj" ] } TASK [Print info from find_unused_disk] **************************************** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/tests/snapshot/get_unused_disk.yml:44 Wednesday 12 November 2025 14:44:13 -0500 (0:00:00.120) 0:00:16.087 **** skipping: [managed-node2] => {} TASK [Show disk information] *************************************************** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/tests/snapshot/get_unused_disk.yml:49 Wednesday 12 November 2025 14:44:13 -0500 (0:00:00.108) 0:00:16.196 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Exit playbook when there's not enough unused disks in the system] ******** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/tests/snapshot/get_unused_disk.yml:58 Wednesday 12 November 2025 14:44:14 -0500 (0:00:00.123) 0:00:16.319 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Create LVM logical volumes under volume groups] ************************** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/tests/snapshot/tasks/setup.yml:31 Wednesday 12 November 2025 14:44:14 -0500 (0:00:00.125) 0:00:16.445 **** TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2 Wednesday 12 November 2025 14:44:14 -0500 (0:00:00.168) 0:00:16.614 **** included: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node2 TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] *** task path: 
/tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2 Wednesday 12 November 2025 14:44:14 -0500 (0:00:00.129) 0:00:16.743 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7 Wednesday 12 November 2025 14:44:14 -0500 (0:00:00.168) 0:00:16.912 **** skipping: [managed-node2] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node2] => (item=CentOS_7.yml) => { "ansible_facts": { "__storage_blivet_diskvolume_mkfs_option_map": { "ext2": "-F", "ext3": "-F", "ext4": "-F" }, "blivet_package_list": [ "python-enum34", "python-blivet3", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}" ] }, "ansible_included_var_files": [ "/tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_7.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_7.yml" } skipping: [managed-node2] => (item=CentOS_7.9.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS_7.9.yml", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Check if system is ostree] *********** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25 Wednesday 12 November 2025 14:44:14 -0500 (0:00:00.232) 0:00:17.144 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30 Wednesday 12 November 2025 14:44:15 -0500 (0:00:00.151) 0:00:17.296 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5 Wednesday 12 November 2025 14:44:15 -0500 (0:00:00.110) 0:00:17.406 **** ok: [managed-node2] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9 Wednesday 12 November 2025 14:44:15 -0500 (0:00:00.119) 0:00:17.526 **** ok: [managed-node2] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13 Wednesday 12 November 2025 14:44:15 -0500 (0:00:00.100) 0:00:17.626 **** included: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node2 
TASK [fedora.linux_system_roles.storage : Make sure blivet is available] ******* task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2 Wednesday 12 November 2025 14:44:15 -0500 (0:00:00.150) 0:00:17.777 **** ok: [managed-node2] => { "changed": false, "rc": 0, "results": [ "python-enum34-1.0.4-1.el7.noarch providing python-enum34 is already installed", "1:python2-blivet3-3.1.3-3.el7.noarch providing python-blivet3 is already installed", "libblockdev-crypto-2.18-5.el7.x86_64 providing libblockdev-crypto is already installed", "libblockdev-dm-2.18-5.el7.x86_64 providing libblockdev-dm is already installed", "libblockdev-lvm-2.18-5.el7.x86_64 providing libblockdev-lvm is already installed", "libblockdev-mdraid-2.18-5.el7.x86_64 providing libblockdev-mdraid is already installed", "libblockdev-swap-2.18-5.el7.x86_64 providing libblockdev-swap is already installed", "libblockdev-2.18-5.el7.x86_64 providing libblockdev is already installed" ] } TASK [fedora.linux_system_roles.storage : Show storage_pools] ****************** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9 Wednesday 12 November 2025 14:44:16 -0500 (0:00:00.978) 0:00:18.755 **** ok: [managed-node2] => { "storage_pools | d([])": [ { "disks": [ "sda", "sdb", "sdc" ], "name": "test_vg1", "volumes": [ { "fs_type": "xfs", "name": "lv1", "size": "15%" }, { "fs_type": "xfs", "name": "lv2", "size": "50%" } ] }, { "disks": [ "sdd", "sde", "sdf" ], "name": "test_vg2", "volumes": [ { "fs_type": "xfs", "name": "lv3", "size": "10%" }, { "fs_type": "xfs", "name": "lv4", "size": "20%" } ] }, { "disks": [ "sdg", "sdh", "sdi", "sdj" ], "name": "test_vg3", "volumes": [ { "fs_type": "xfs", "name": "lv5", "size": "30%" }, { "fs_type": "xfs", "name": "lv6", "size": "25%" }, { "fs_type": "xfs", "name": "lv7", "size": "10%" }, { "fs_type": "xfs", "name": "lv8", "size": "10%" } ] } ] } TASK [fedora.linux_system_roles.storage : Show storage_volumes] **************** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14 Wednesday 12 November 2025 14:44:16 -0500 (0:00:00.106) 0:00:18.862 **** ok: [managed-node2] => { "storage_volumes | d([])": [] } TASK [fedora.linux_system_roles.storage : Get required packages] *************** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19 Wednesday 12 November 2025 14:44:16 -0500 (0:00:00.088) 0:00:18.950 **** ok: [managed-node2] => { "actions": [], "changed": false, "crypts": [], "leaves": [], "mounts": [], "packages": [ "lvm2", "xfsprogs" ], "pools": [], "volumes": [] } TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:32 Wednesday 12 November 2025 14:44:21 -0500 (0:00:04.319) 0:00:23.270 **** included: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml for managed-node2 TASK [fedora.linux_system_roles.storage : Check if the COPR support packages should be installed] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:2 Wednesday 12 November 2025 14:44:21 -0500 (0:00:00.133) 0:00:23.404 **** TASK [fedora.linux_system_roles.storage : Make sure COPR support packages are present] *** task path: 
/tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:13 Wednesday 12 November 2025 14:44:21 -0500 (0:00:00.098) 0:00:23.503 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Enable COPRs] ************************ task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:19 Wednesday 12 November 2025 14:44:21 -0500 (0:00:00.052) 0:00:23.556 **** TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:38 Wednesday 12 November 2025 14:44:21 -0500 (0:00:00.080) 0:00:23.636 **** ok: [managed-node2] => { "changed": false, "rc": 0, "results": [ "7:lvm2-2.02.187-6.el7_9.5.x86_64 providing lvm2 is already installed", "xfsprogs-4.5.0-22.el7.x86_64 providing xfsprogs is already installed", "kpartx-0.4.9-136.el7_9.x86_64 providing kpartx is already installed" ] } TASK [fedora.linux_system_roles.storage : Get service facts] ******************* task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:52 Wednesday 12 November 2025 14:44:22 -0500 (0:00:00.837) 0:00:24.474 **** ok: [managed-node2] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "arp-ethers.service": { "name": "arp-ethers.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "blivet.service": { "name": "blivet.service", "source": "systemd", "state": "inactive", "status": "static" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "brandbot.service": { "name": "brandbot.service", "source": "systemd", "state": "inactive", "status": "static" }, "chrony-dnssrv@.service": { "name": "chrony-dnssrv@.service", "source": "systemd", "state": "unknown", "status": "static" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, 
"console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "console-shell.service": { "name": "console-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cpupower.service": { "name": "cpupower.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.import1.service": { "name": "dbus-org.freedesktop.import1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "static" }, "dbus-org.freedesktop.machine1.service": { "name": "dbus-org.freedesktop.machine1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "running", "status": "static" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "running", "status": "static" }, "dmraid-activation.service": { "name": "dmraid-activation.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "ebtables.service": { "name": "ebtables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, 
"fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "unknown" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "halt-local.service": { "name": "halt-local.service", "source": "systemd", "state": "inactive", "status": "static" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "iprdump.service": { "name": "iprdump.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "iprinit.service": { "name": "iprinit.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "iprupdate.service": { "name": "iprupdate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-lvmetad.service": { "name": "lvm2-lvmetad.service", "source": "systemd", "state": "running", "status": "static" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "lvm2-pvscan@.service": { "name": "lvm2-pvscan@.service", "source": "systemd", "state": "unknown", "status": "static" }, "lvm2-pvscan@8:0.service": { "name": "lvm2-pvscan@8:0.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "lvm2-pvscan@8:16.service": { "name": "lvm2-pvscan@8:16.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "lvm2-pvscan@8:32.service": { "name": "lvm2-pvscan@8:32.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "mdadm-grow-continue@.service": { "name": "mdadm-grow-continue@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdadm-last-resort@.service": { "name": "mdadm-last-resort@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdcheck_continue.service": { "name": "mdcheck_continue.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdcheck_start.service": { "name": "mdcheck_start.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmon@.service": { "name": "mdmon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdmonitor-oneshot.service": { "name": "mdmonitor-oneshot.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmonitor.service": { "name": "mdmonitor.service", "source": "systemd", "state": "stopped", "status": 
"enabled" }, "messagebus.service": { "name": "messagebus.service", "source": "systemd", "state": "active", "status": "static" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "netconsole": { "name": "netconsole", "source": "sysv", "state": "stopped", "status": "disabled" }, "network": { "name": "network", "source": "sysv", "state": "running", "status": "enabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-config.service": { "name": "nfs-config.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-idmap.service": { "name": "nfs-idmap.service", "source": "systemd", "state": "inactive", "status": "static" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-lock.service": { "name": "nfs-lock.service", "source": "systemd", "state": "inactive", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-rquotad.service": { "name": "nfs-rquotad.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-secure.service": { "name": "nfs-secure.service", "source": "systemd", "state": "inactive", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs.service": { "name": "nfs.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfslock.service": { "name": "nfslock.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-halt.service": { "name": "plymouth-halt.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "plymouth-kexec.service": { "name": "plymouth-kexec.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "plymouth-poweroff.service": { "name": "plymouth-poweroff.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "plymouth-quit.service": { "name": "plymouth-quit.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "plymouth-read-write.service": { "name": "plymouth-read-write.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "plymouth-reboot.service": { "name": "plymouth-reboot.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "plymouth-switch-root.service": { "name": "plymouth-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "running", "status": "static" }, "postfix.service": { "name": "postfix.service", "source": "systemd", "state": "running", "status": "enabled" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon.service": { "name": "quotaon.service", "source": "systemd", "state": 
"inactive", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rdisc.service": { "name": "rdisc.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rhel-autorelabel-mark.service": { "name": "rhel-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rhel-autorelabel.service": { "name": "rhel-autorelabel.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rhel-configure.service": { "name": "rhel-configure.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rhel-dmesg.service": { "name": "rhel-dmesg.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rhel-domainname.service": { "name": "rhel-domainname.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rhel-import-state.service": { "name": "rhel-import-state.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rhel-loadmodules.service": { "name": "rhel-loadmodules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rhel-readonly.service": { "name": "rhel-readonly.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-rquotad.service": { "name": "rpc-rquotad.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpcgssd.service": { "name": "rpcgssd.service", "source": "systemd", "state": "inactive", "status": "static" }, "rpcidmapd.service": { "name": "rpcidmapd.service", "source": "systemd", "state": "inactive", "status": "static" }, "rsyncd.service": { "name": "rsyncd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyncd@.service": { "name": "rsyncd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-policy-migrate-local-changes@.service": { "name": "selinux-policy-migrate-local-changes@.service", "source": "systemd", "state": "unknown", "status": "static" }, "selinux-policy-migrate-local-changes@targeted.service": { "name": "selinux-policy-migrate-local-changes@targeted.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "unknown" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "static" }, "sshd.service": { "name": "sshd.service", 
"source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-plymouth.service": { "name": "systemd-ask-password-plymouth.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bootchart.service": { "name": "systemd-bootchart.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-resume@.service": { "name": "systemd-hibernate-resume@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-importd.service": { "name": "systemd-importd.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-machined.service": { "name": "systemd-machined.service", "source": "systemd", "state": "inactive", "status": "static" }, 
"systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-nspawn@.service": { "name": "systemd-nspawn@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-quotacheck.service": { "name": "systemd-quotacheck.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-readahead-collect.service": { "name": "systemd-readahead-collect.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-readahead-done.service": { "name": "systemd-readahead-done.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "systemd-readahead-drop.service": { "name": "systemd-readahead-drop.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "systemd-readahead-replay.service": { "name": "systemd-readahead-replay.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill@.service": { "name": "systemd-rfkill@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-shutdownd.service": { "name": "systemd-shutdownd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, 
"systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "target.service": { "name": "target.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "targetclid.service": { "name": "targetclid.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "teamd@.service": { "name": "teamd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "tuned.service": { "name": "tuned.service", "source": "systemd", "state": "running", "status": "enabled" }, "wpa_supplicant.service": { "name": "wpa_supplicant.service", "source": "systemd", "state": "inactive", "status": "disabled" } } }, "changed": false } TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] ***** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:58 Wednesday 12 November 2025 14:44:23 -0500 (0:00:01.088) 0:00:25.563 **** ok: [managed-node2] => { "ansible_facts": { "storage_cryptsetup_services": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:64 Wednesday 12 November 2025 14:44:23 -0500 (0:00:00.103) 0:00:25.667 **** TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:70 Wednesday 12 November 2025 14:44:23 -0500 (0:00:00.083) 0:00:25.750 **** changed: [managed-node2] => { "actions": [ { "action": "create format", "device": "/dev/sdj", "fs_type": "lvmpv" }, { "action": "create format", "device": "/dev/sdi", "fs_type": "lvmpv" }, { "action": "create format", "device": "/dev/sdh", "fs_type": "lvmpv" }, { "action": "create format", "device": "/dev/sdg", "fs_type": "lvmpv" }, { "action": "create device", "device": "/dev/test_vg3", "fs_type": null }, { "action": "create device", "device": "/dev/mapper/test_vg3-lv8", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/test_vg3-lv8", "fs_type": "xfs" }, { "action": "create device", "device": "/dev/mapper/test_vg3-lv7", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/test_vg3-lv7", "fs_type": "xfs" }, { "action": "create device", "device": "/dev/mapper/test_vg3-lv6", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/test_vg3-lv6", "fs_type": "xfs" }, { "action": "create device", "device": "/dev/mapper/test_vg3-lv5", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/test_vg3-lv5", "fs_type": "xfs" }, { "action": "create format", "device": "/dev/sdf", "fs_type": "lvmpv" }, { "action": "create format", "device": "/dev/sde", "fs_type": "lvmpv" }, { "action": "create format", "device": "/dev/sdd", "fs_type": "lvmpv" }, { "action": "create device", "device": "/dev/test_vg2", "fs_type": null }, { "action": "create device", "device": "/dev/mapper/test_vg2-lv4", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/test_vg2-lv4", "fs_type": "xfs" }, { "action": "create device", "device": "/dev/mapper/test_vg2-lv3", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/test_vg2-lv3", "fs_type": "xfs" }, { "action": "create format", "device": "/dev/sdc", "fs_type": "lvmpv" }, { "action": "create format", "device": "/dev/sdb", 
"fs_type": "lvmpv" }, { "action": "create format", "device": "/dev/sda", "fs_type": "lvmpv" }, { "action": "create device", "device": "/dev/test_vg1", "fs_type": null }, { "action": "create device", "device": "/dev/mapper/test_vg1-lv2", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/test_vg1-lv2", "fs_type": "xfs" }, { "action": "create device", "device": "/dev/mapper/test_vg1-lv1", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/test_vg1-lv1", "fs_type": "xfs" } ], "changed": true, "crypts": [], "leaves": [ "/dev/sdk", "/dev/sdl", "/dev/xvda1", "/dev/mapper/test_vg1-lv1", "/dev/mapper/test_vg1-lv2", "/dev/mapper/test_vg2-lv3", "/dev/mapper/test_vg2-lv4", "/dev/mapper/test_vg3-lv5", "/dev/mapper/test_vg3-lv6", "/dev/mapper/test_vg3-lv7", "/dev/mapper/test_vg3-lv8" ], "mounts": [], "packages": [ "xfsprogs", "e2fsprogs", "lvm2" ], "pools": [ { "disks": [ "sda", "sdb", "sdc" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "test_vg1", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/test_vg1-lv1", "_kernel_device": "/dev/dm-7", "_mount_id": "/dev/mapper/test_vg1-lv1", "_raw_device": "/dev/mapper/test_vg1-lv1", "_raw_kernel_device": "/dev/dm-7", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "", "mount_user": null, "name": "lv1", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "15%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/test_vg1-lv2", "_kernel_device": "/dev/dm-6", "_mount_id": "/dev/mapper/test_vg1-lv2", "_raw_device": "/dev/mapper/test_vg1-lv2", "_raw_kernel_device": "/dev/dm-6", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "", "mount_user": null, "name": "lv2", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "50%", "state": "present", "thin": false, 
"thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] }, { "disks": [ "sdd", "sde", "sdf" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "test_vg2", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/test_vg2-lv3", "_kernel_device": "/dev/dm-5", "_mount_id": "/dev/mapper/test_vg2-lv3", "_raw_device": "/dev/mapper/test_vg2-lv3", "_raw_kernel_device": "/dev/dm-5", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "", "mount_user": null, "name": "lv3", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "10%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/test_vg2-lv4", "_kernel_device": "/dev/dm-4", "_mount_id": "/dev/mapper/test_vg2-lv4", "_raw_device": "/dev/mapper/test_vg2-lv4", "_raw_kernel_device": "/dev/dm-4", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "", "mount_user": null, "name": "lv4", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "20%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] }, { "disks": [ "sdg", "sdh", "sdi", "sdj" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "test_vg3", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/test_vg3-lv5", "_kernel_device": "/dev/dm-3", "_mount_id": "/dev/mapper/test_vg3-lv5", "_raw_device": "/dev/mapper/test_vg3-lv5", "_raw_kernel_device": 
"/dev/dm-3", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "", "mount_user": null, "name": "lv5", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "30%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/test_vg3-lv6", "_kernel_device": "/dev/dm-2", "_mount_id": "/dev/mapper/test_vg3-lv6", "_raw_device": "/dev/mapper/test_vg3-lv6", "_raw_kernel_device": "/dev/dm-2", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "", "mount_user": null, "name": "lv6", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "25%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/test_vg3-lv7", "_kernel_device": "/dev/dm-1", "_mount_id": "/dev/mapper/test_vg3-lv7", "_raw_device": "/dev/mapper/test_vg3-lv7", "_raw_kernel_device": "/dev/dm-1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "", "mount_user": null, "name": "lv7", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "10%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/test_vg3-lv8", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/test_vg3-lv8", "_raw_device": "/dev/mapper/test_vg3-lv8", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, 
"encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "", "mount_user": null, "name": "lv8", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "10%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:85 Wednesday 12 November 2025 14:44:30 -0500 (0:00:07.433) 0:00:33.184 **** ok: [managed-node2] => { "changed": false, "cmd": [ "udevadm", "trigger", "--subsystem-match=block" ], "delta": "0:00:00.007115", "end": "2025-11-12 14:44:31.833160", "rc": 0, "start": "2025-11-12 14:44:31.826045" } TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ****** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:92 Wednesday 12 November 2025 14:44:31 -0500 (0:00:01.009) 0:00:34.194 **** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1762976547.0214868, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "72884e3f126482c2d28276ff7c57744fa95eff91", "ctime": 1762976535.3184993, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 263693, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1762976535.3174992, "nlink": 1, "path": "/etc/fstab", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 1229, "uid": 0, "version": "1805363463", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:97 Wednesday 12 November 2025 14:44:32 -0500 (0:00:00.395) 0:00:34.589 **** ok: [managed-node2] => { "backup": "", "changed": false } TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:115 Wednesday 12 November 2025 14:44:32 -0500 (0:00:00.588) 0:00:35.178 **** TASK [fedora.linux_system_roles.storage : Show blivet_output] ****************** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:121 Wednesday 12 November 2025 14:44:32 -0500 (0:00:00.049) 0:00:35.227 **** ok: [managed-node2] => { "blivet_output": { "actions": [ { "action": "create format", "device": "/dev/sdj", "fs_type": "lvmpv" }, { "action": "create format", "device": "/dev/sdi", "fs_type": "lvmpv" }, { "action": "create format", "device": "/dev/sdh", "fs_type": "lvmpv" }, { "action": "create format", "device": "/dev/sdg", 
"fs_type": "lvmpv" }, { "action": "create device", "device": "/dev/test_vg3", "fs_type": null }, { "action": "create device", "device": "/dev/mapper/test_vg3-lv8", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/test_vg3-lv8", "fs_type": "xfs" }, { "action": "create device", "device": "/dev/mapper/test_vg3-lv7", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/test_vg3-lv7", "fs_type": "xfs" }, { "action": "create device", "device": "/dev/mapper/test_vg3-lv6", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/test_vg3-lv6", "fs_type": "xfs" }, { "action": "create device", "device": "/dev/mapper/test_vg3-lv5", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/test_vg3-lv5", "fs_type": "xfs" }, { "action": "create format", "device": "/dev/sdf", "fs_type": "lvmpv" }, { "action": "create format", "device": "/dev/sde", "fs_type": "lvmpv" }, { "action": "create format", "device": "/dev/sdd", "fs_type": "lvmpv" }, { "action": "create device", "device": "/dev/test_vg2", "fs_type": null }, { "action": "create device", "device": "/dev/mapper/test_vg2-lv4", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/test_vg2-lv4", "fs_type": "xfs" }, { "action": "create device", "device": "/dev/mapper/test_vg2-lv3", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/test_vg2-lv3", "fs_type": "xfs" }, { "action": "create format", "device": "/dev/sdc", "fs_type": "lvmpv" }, { "action": "create format", "device": "/dev/sdb", "fs_type": "lvmpv" }, { "action": "create format", "device": "/dev/sda", "fs_type": "lvmpv" }, { "action": "create device", "device": "/dev/test_vg1", "fs_type": null }, { "action": "create device", "device": "/dev/mapper/test_vg1-lv2", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/test_vg1-lv2", "fs_type": "xfs" }, { "action": "create device", "device": "/dev/mapper/test_vg1-lv1", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/test_vg1-lv1", "fs_type": "xfs" } ], "changed": true, "crypts": [], "failed": false, "leaves": [ "/dev/sdk", "/dev/sdl", "/dev/xvda1", "/dev/mapper/test_vg1-lv1", "/dev/mapper/test_vg1-lv2", "/dev/mapper/test_vg2-lv3", "/dev/mapper/test_vg2-lv4", "/dev/mapper/test_vg3-lv5", "/dev/mapper/test_vg3-lv6", "/dev/mapper/test_vg3-lv7", "/dev/mapper/test_vg3-lv8" ], "mounts": [], "packages": [ "xfsprogs", "e2fsprogs", "lvm2" ], "pools": [ { "disks": [ "sda", "sdb", "sdc" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "test_vg1", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/test_vg1-lv1", "_kernel_device": "/dev/dm-7", "_mount_id": "/dev/mapper/test_vg1-lv1", "_raw_device": "/dev/mapper/test_vg1-lv1", "_raw_kernel_device": "/dev/dm-7", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": 
true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "", "mount_user": null, "name": "lv1", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "15%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/test_vg1-lv2", "_kernel_device": "/dev/dm-6", "_mount_id": "/dev/mapper/test_vg1-lv2", "_raw_device": "/dev/mapper/test_vg1-lv2", "_raw_kernel_device": "/dev/dm-6", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "", "mount_user": null, "name": "lv2", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "50%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] }, { "disks": [ "sdd", "sde", "sdf" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "test_vg2", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/test_vg2-lv3", "_kernel_device": "/dev/dm-5", "_mount_id": "/dev/mapper/test_vg2-lv3", "_raw_device": "/dev/mapper/test_vg2-lv3", "_raw_kernel_device": "/dev/dm-5", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "", "mount_user": null, "name": "lv3", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "10%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/test_vg2-lv4", "_kernel_device": "/dev/dm-4", "_mount_id": "/dev/mapper/test_vg2-lv4", "_raw_device": "/dev/mapper/test_vg2-lv4", "_raw_kernel_device": "/dev/dm-4", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": 
false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "", "mount_user": null, "name": "lv4", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "20%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] }, { "disks": [ "sdg", "sdh", "sdi", "sdj" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "test_vg3", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/test_vg3-lv5", "_kernel_device": "/dev/dm-3", "_mount_id": "/dev/mapper/test_vg3-lv5", "_raw_device": "/dev/mapper/test_vg3-lv5", "_raw_kernel_device": "/dev/dm-3", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "", "mount_user": null, "name": "lv5", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "30%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/test_vg3-lv6", "_kernel_device": "/dev/dm-2", "_mount_id": "/dev/mapper/test_vg3-lv6", "_raw_device": "/dev/mapper/test_vg3-lv6", "_raw_kernel_device": "/dev/dm-2", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "", "mount_user": null, "name": "lv6", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "25%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, 
"type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/test_vg3-lv7", "_kernel_device": "/dev/dm-1", "_mount_id": "/dev/mapper/test_vg3-lv7", "_raw_device": "/dev/mapper/test_vg3-lv7", "_raw_kernel_device": "/dev/dm-1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "", "mount_user": null, "name": "lv7", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "10%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/test_vg3-lv8", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/test_vg3-lv8", "_raw_device": "/dev/mapper/test_vg3-lv8", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "", "mount_user": null, "name": "lv8", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "10%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } } TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:130 Wednesday 12 November 2025 14:44:33 -0500 (0:00:00.145) 0:00:35.372 **** ok: [managed-node2] => { "ansible_facts": { "_storage_pools_list": [ { "disks": [ "sda", "sdb", "sdc" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "test_vg1", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/test_vg1-lv1", "_kernel_device": "/dev/dm-7", "_mount_id": "/dev/mapper/test_vg1-lv1", "_raw_device": "/dev/mapper/test_vg1-lv1", "_raw_kernel_device": "/dev/dm-7", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, 
"encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "", "mount_user": null, "name": "lv1", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "15%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/test_vg1-lv2", "_kernel_device": "/dev/dm-6", "_mount_id": "/dev/mapper/test_vg1-lv2", "_raw_device": "/dev/mapper/test_vg1-lv2", "_raw_kernel_device": "/dev/dm-6", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "", "mount_user": null, "name": "lv2", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "50%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] }, { "disks": [ "sdd", "sde", "sdf" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "test_vg2", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/test_vg2-lv3", "_kernel_device": "/dev/dm-5", "_mount_id": "/dev/mapper/test_vg2-lv3", "_raw_device": "/dev/mapper/test_vg2-lv3", "_raw_kernel_device": "/dev/dm-5", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "", "mount_user": null, "name": "lv3", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "10%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/test_vg2-lv4", "_kernel_device": "/dev/dm-4", "_mount_id": 
"/dev/mapper/test_vg2-lv4", "_raw_device": "/dev/mapper/test_vg2-lv4", "_raw_kernel_device": "/dev/dm-4", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "", "mount_user": null, "name": "lv4", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "20%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] }, { "disks": [ "sdg", "sdh", "sdi", "sdj" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "test_vg3", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/test_vg3-lv5", "_kernel_device": "/dev/dm-3", "_mount_id": "/dev/mapper/test_vg3-lv5", "_raw_device": "/dev/mapper/test_vg3-lv5", "_raw_kernel_device": "/dev/dm-3", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "", "mount_user": null, "name": "lv5", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "30%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/test_vg3-lv6", "_kernel_device": "/dev/dm-2", "_mount_id": "/dev/mapper/test_vg3-lv6", "_raw_device": "/dev/mapper/test_vg3-lv6", "_raw_kernel_device": "/dev/dm-2", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "", "mount_user": null, "name": "lv6", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, 
"raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "25%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/test_vg3-lv7", "_kernel_device": "/dev/dm-1", "_mount_id": "/dev/mapper/test_vg3-lv7", "_raw_device": "/dev/mapper/test_vg3-lv7", "_raw_kernel_device": "/dev/dm-1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "", "mount_user": null, "name": "lv7", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "10%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/test_vg3-lv8", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/test_vg3-lv8", "_raw_device": "/dev/mapper/test_vg3-lv8", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "", "mount_user": null, "name": "lv8", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "10%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ] }, "changed": false } TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:134 Wednesday 12 November 2025 14:44:33 -0500 (0:00:00.214) 0:00:35.587 **** ok: [managed-node2] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] ************** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:150 Wednesday 12 November 2025 14:44:33 -0500 (0:00:00.118) 0:00:35.706 **** TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:161 Wednesday 12 November 2025 14:44:33 -0500 (0:00:00.109) 0:00:35.815 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set up new/current mounts] 
*********** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:166 Wednesday 12 November 2025 14:44:33 -0500 (0:00:00.104) 0:00:35.919 **** TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:177 Wednesday 12 November 2025 14:44:33 -0500 (0:00:00.111) 0:00:36.031 **** TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:189 Wednesday 12 November 2025 14:44:33 -0500 (0:00:00.068) 0:00:36.100 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:197 Wednesday 12 November 2025 14:44:33 -0500 (0:00:00.065) 0:00:36.166 **** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1762976469.2355814, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1718879272.062, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 131079, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0600", "mtime": 1718879026.308, "nlink": 1, "path": "/etc/crypttab", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": "18446744072852913879", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:202 Wednesday 12 November 2025 14:44:34 -0500 (0:00:00.397) 0:00:36.564 **** TASK [fedora.linux_system_roles.storage : Update facts] ************************ task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:224 Wednesday 12 November 2025 14:44:34 -0500 (0:00:00.111) 0:00:36.675 **** ok: [managed-node2] TASK [Run the snapshot role to create snapshot LVs] **************************** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/tests/snapshot/tests_mount.yml:49 Wednesday 12 November 2025 14:44:35 -0500 (0:00:00.827) 0:00:37.502 **** TASK [fedora.linux_system_roles.snapshot : Set platform/version specific variables] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:3 Wednesday 12 November 2025 14:44:35 -0500 (0:00:00.145) 0:00:37.648 **** included: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml for managed-node2 TASK [fedora.linux_system_roles.snapshot : Ensure ansible_facts used by role] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:2 Wednesday 12 November 2025 14:44:35 -0500 (0:00:00.102) 0:00:37.751 **** skipping: 
[managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Check if system is ostree] ********** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:10 Wednesday 12 November 2025 14:44:35 -0500 (0:00:00.097) 0:00:37.848 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Set flag to indicate system is ostree] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:15 Wednesday 12 November 2025 14:44:35 -0500 (0:00:00.077) 0:00:37.925 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Set platform/version specific variables] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:19 Wednesday 12 November 2025 14:44:35 -0500 (0:00:00.077) 0:00:38.003 **** skipping: [managed-node2] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node2] => (item=CentOS_7.yml) => { "ansible_facts": { "__snapshot_python": "/usr/bin/python" }, "ansible_included_var_files": [ "/tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/vars/CentOS_7.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_7.yml" } skipping: [managed-node2] => (item=CentOS_7.9.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS_7.9.yml", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Enable copr if requested] *********** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:6 Wednesday 12 November 2025 14:44:35 -0500 (0:00:00.137) 0:00:38.140 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Ensure required packages are installed] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:10 Wednesday 12 November 2025 14:44:35 -0500 (0:00:00.064) 0:00:38.205 **** ok: [managed-node2] => { "changed": false, "rc": 0, "results": [ "7:lvm2-2.02.187-6.el7_9.5.x86_64 providing lvm2 is already installed", "util-linux-2.23.2-65.el7_9.1.x86_64 providing util-linux is already installed" ] } TASK [fedora.linux_system_roles.snapshot : Get snapm version] ****************** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:16 Wednesday 12 November 2025 14:44:36 -0500 (0:00:00.777) 0:00:38.983 **** fatal: [managed-node2]: FAILED! 
=> { "changed": false, "cmd": "snapm --version", "rc": 2 } MSG: [Errno 2] No such file or directory ...ignoring TASK [fedora.linux_system_roles.snapshot : Set snapm availability fact] ******** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:24 Wednesday 12 November 2025 14:44:37 -0500 (0:00:00.381) 0:00:39.364 **** ok: [managed-node2] => { "ansible_facts": { "__snapshot_snapm_available": false }, "changed": false } TASK [fedora.linux_system_roles.snapshot : Set snapm version] ****************** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:28 Wednesday 12 November 2025 14:44:37 -0500 (0:00:00.112) 0:00:39.477 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Set needs bootable support] ********* task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:36 Wednesday 12 November 2025 14:44:37 -0500 (0:00:00.060) 0:00:39.538 **** ok: [managed-node2] => { "ansible_facts": { "__snapshot_needs_bootable_support": false }, "changed": false } TASK [fedora.linux_system_roles.snapshot : Package snapm version must be 0.4 or later] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:40 Wednesday 12 November 2025 14:44:37 -0500 (0:00:00.063) 0:00:39.601 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Package snapm must available and be version 0.5 or later for bootable snapsets] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:49 Wednesday 12 November 2025 14:44:37 -0500 (0:00:00.070) 0:00:39.672 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } META: end_host conditional evaluated to false, continuing execution for managed-node2 TASK [fedora.linux_system_roles.snapshot : Run snapshot module snapshot] ******* task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:69 Wednesday 12 November 2025 14:44:37 -0500 (0:00:00.098) 0:00:39.770 **** changed: [managed-node2] => { "changed": true, "errors": "", "message": "", "return_code": 0 } TASK [fedora.linux_system_roles.snapshot : Print out response] ***************** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:96 Wednesday 12 November 2025 14:44:40 -0500 (0:00:03.333) 0:00:43.104 **** ok: [managed-node2] => { "snapshot_cmd": { "changed": true, "errors": "", "failed": false, "message": "", "msg": "", "return_code": 0 } } TASK [fedora.linux_system_roles.snapshot : Set result] ************************* task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:102 Wednesday 12 November 2025 14:44:41 -0500 (0:00:00.169) 0:00:43.273 **** ok: [managed-node2] => { "ansible_facts": { "snapshot_cmd": { "changed": true, "errors": "", "failed": false, "message": "", "msg": "", "return_code": 0 } }, "changed": false } TASK [fedora.linux_system_roles.snapshot : Set snapshot_facts to the JSON results] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:107 Wednesday 12 November 2025 14:44:41 -0500 (0:00:00.155) 0:00:43.429 
**** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Show errors] ************************ task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:112 Wednesday 12 November 2025 14:44:41 -0500 (0:00:00.091) 0:00:43.520 **** skipping: [managed-node2] => {}
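
The pass above and the pass below show the pattern this test exercises: the role is invoked once to create the snapshot LVs (the "Run snapshot module snapshot" task reports changed) and then re-invoked to verify them. Judging from the role documentation, the create invocation in tests_mount.yml probably resembles the following minimal sketch; the snapset name and the percentage are illustrative assumptions, not values read from this log:

- name: Run the snapshot role to create snapshot LVs
  hosts: all
  vars:
    snapshot_lvm_action: snapshot              # create snapshot LVs for the set
    snapshot_lvm_all_vgs: true                 # operate on every volume group on the host
    snapshot_lvm_percent_space_required: 15    # illustrative reserve size per snapshot
    snapshot_lvm_snapset_name: snapset1        # hypothetical snapset name
  roles:
    - fedora.linux_system_roles.snapshot

The verify pass that follows presumably re-runs the role with snapshot_lvm_action: check and snapshot_lvm_verify_only: true (again per the role documentation), which is why the "Run snapshot module check" task below reports ok rather than changed.

TASK [Verify the snapshot LVs are created] ************************************* task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/tests/snapshot/tests_mount.yml:58 Wednesday 12 November 2025 14:44:41 -0500 (0:00:00.104) 0:00:43.625 **** TASK [fedora.linux_system_roles.snapshot : Set platform/version specific variables] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:3 Wednesday 12 November 2025 14:44:41 -0500 (0:00:00.276) 0:00:43.901 **** included: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml for managed-node2 TASK [fedora.linux_system_roles.snapshot : Ensure ansible_facts used by role] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:2 Wednesday 12 November 2025 14:44:41 -0500 (0:00:00.162) 0:00:44.064 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Check if system is ostree] ********** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:10 Wednesday 12 November 2025 14:44:41 -0500 (0:00:00.050) 0:00:44.115 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Set flag to indicate system is ostree] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:15 Wednesday 12 November 2025 14:44:41 -0500 (0:00:00.050) 0:00:44.165 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Set platform/version specific variables] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:19 Wednesday 12 November 2025 14:44:41 -0500 (0:00:00.056) 0:00:44.221 **** skipping: [managed-node2] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node2] => (item=CentOS_7.yml) => { "ansible_facts": { "__snapshot_python": "/usr/bin/python" }, "ansible_included_var_files": [ "/tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/vars/CentOS_7.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_7.yml" } skipping: [managed-node2] => (item=CentOS_7.9.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS_7.9.yml", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Enable copr if requested] *********** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:6 Wednesday 12 November 2025 14:44:42 -0500 (0:00:00.161) 0:00:44.383 **** skipping: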
[managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Ensure required packages are installed] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:10 Wednesday 12 November 2025 14:44:42 -0500 (0:00:00.135) 0:00:44.518 **** ok: [managed-node2] => { "changed": false, "rc": 0, "results": [ "7:lvm2-2.02.187-6.el7_9.5.x86_64 providing lvm2 is already installed", "util-linux-2.23.2-65.el7_9.1.x86_64 providing util-linux is already installed" ] } TASK [fedora.linux_system_roles.snapshot : Get snapm version] ****************** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:16 Wednesday 12 November 2025 14:44:43 -0500 (0:00:00.794) 0:00:45.313 **** fatal: [managed-node2]: FAILED! => { "changed": false, "cmd": "snapm --version", "rc": 2 } MSG: [Errno 2] No such file or directory ...ignoring TASK [fedora.linux_system_roles.snapshot : Set snapm availability fact] ******** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:24 Wednesday 12 November 2025 14:44:43 -0500 (0:00:00.525) 0:00:45.838 **** ok: [managed-node2] => { "ansible_facts": { "__snapshot_snapm_available": false }, "changed": false } TASK [fedora.linux_system_roles.snapshot : Set snapm version] ****************** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:28 Wednesday 12 November 2025 14:44:43 -0500 (0:00:00.086) 0:00:45.925 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Set needs bootable support] ********* task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:36 Wednesday 12 November 2025 14:44:43 -0500 (0:00:00.061) 0:00:45.986 **** ok: [managed-node2] => { "ansible_facts": { "__snapshot_needs_bootable_support": false }, "changed": false } TASK [fedora.linux_system_roles.snapshot : Package snapm version must be 0.4 or later] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:40 Wednesday 12 November 2025 14:44:43 -0500 (0:00:00.111) 0:00:46.098 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Package snapm must be available and be version 0.5 or later for bootable snapsets] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:49 Wednesday 12 November 2025 14:44:43 -0500 (0:00:00.111) 0:00:46.210 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } META: end_host conditional evaluated to false, continuing execution for managed-node2 TASK [fedora.linux_system_roles.snapshot : Run snapshot module check] ********** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:69 Wednesday 12 November 2025 14:44:44 -0500 (0:00:00.152) 0:00:46.362 **** ok: [managed-node2] => { "changed": false, "errors": "", "message": "", "return_code": 0 } TASK [fedora.linux_system_roles.snapshot : Print out response] ***************** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:96 Wednesday 12 November 2025 14:44:46 -0500
(0:00:01.904) 0:00:48.267 **** ok: [managed-node2] => { "snapshot_cmd": { "changed": false, "errors": "", "failed": false, "message": "", "msg": "", "return_code": 0 } } TASK [fedora.linux_system_roles.snapshot : Set result] ************************* task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:102 Wednesday 12 November 2025 14:44:46 -0500 (0:00:00.109) 0:00:48.376 **** ok: [managed-node2] => { "ansible_facts": { "snapshot_cmd": { "changed": false, "errors": "", "failed": false, "message": "", "msg": "", "return_code": 0 } }, "changed": false } TASK [fedora.linux_system_roles.snapshot : Set snapshot_facts to the JSON results] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:107 Wednesday 12 November 2025 14:44:46 -0500 (0:00:00.115) 0:00:48.492 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Show errors] ************************ task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:112 Wednesday 12 November 2025 14:44:46 -0500 (0:00:00.078) 0:00:48.570 **** skipping: [managed-node2] => {} TASK [Mount the snapshot for lv1] ********************************************** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/tests/snapshot/tests_mount.yml:67 Wednesday 12 November 2025 14:44:46 -0500 (0:00:00.058) 0:00:48.629 **** TASK [fedora.linux_system_roles.snapshot : Set platform/version specific variables] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:3 Wednesday 12 November 2025 14:44:46 -0500 (0:00:00.183) 0:00:48.813 **** included: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml for managed-node2 TASK [fedora.linux_system_roles.snapshot : Ensure ansible_facts used by role] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:2 Wednesday 12 November 2025 14:44:46 -0500 (0:00:00.135) 0:00:48.948 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Check if system is ostree] ********** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:10 Wednesday 12 November 2025 14:44:46 -0500 (0:00:00.114) 0:00:49.062 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Set flag to indicate system is ostree] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:15 Wednesday 12 November 2025 14:44:47 -0500 (0:00:00.180) 0:00:49.243 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Set platform/version specific variables] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:19 Wednesday 12 November 2025 14:44:47 -0500 (0:00:00.073) 0:00:49.316 **** skipping: [managed-node2] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=CentOS.yml) => { 
"ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node2] => (item=CentOS_7.yml) => { "ansible_facts": { "__snapshot_python": "/usr/bin/python" }, "ansible_included_var_files": [ "/tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/vars/CentOS_7.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_7.yml" } skipping: [managed-node2] => (item=CentOS_7.9.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS_7.9.yml", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Enable copr if requested] *********** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:6 Wednesday 12 November 2025 14:44:47 -0500 (0:00:00.164) 0:00:49.481 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Ensure required packages are installed] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:10 Wednesday 12 November 2025 14:44:47 -0500 (0:00:00.090) 0:00:49.571 **** ok: [managed-node2] => { "changed": false, "rc": 0, "results": [ "7:lvm2-2.02.187-6.el7_9.5.x86_64 providing lvm2 is already installed", "util-linux-2.23.2-65.el7_9.1.x86_64 providing util-linux is already installed" ] } TASK [fedora.linux_system_roles.snapshot : Get snapm version] ****************** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:16 Wednesday 12 November 2025 14:44:48 -0500 (0:00:00.719) 0:00:50.290 **** fatal: [managed-node2]: FAILED! => { "changed": false, "cmd": "snapm --version", "rc": 2 } MSG: [Errno 2] No such file or directory ...ignoring TASK [fedora.linux_system_roles.snapshot : Set snapm availability fact] ******** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:24 Wednesday 12 November 2025 14:44:48 -0500 (0:00:00.628) 0:00:50.919 **** ok: [managed-node2] => { "ansible_facts": { "__snapshot_snapm_available": false }, "changed": false } TASK [fedora.linux_system_roles.snapshot : Set snapm version] ****************** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:28 Wednesday 12 November 2025 14:44:48 -0500 (0:00:00.101) 0:00:51.021 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Set needs bootable support] ********* task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:36 Wednesday 12 November 2025 14:44:48 -0500 (0:00:00.107) 0:00:51.128 **** ok: [managed-node2] => { "ansible_facts": { "__snapshot_needs_bootable_support": false }, "changed": false } TASK [fedora.linux_system_roles.snapshot : Package snapm version must be 0.4 or later] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:40 Wednesday 12 November 2025 14:44:49 -0500 (0:00:00.159) 0:00:51.288 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Package snapm must available and be version 0.5 or later for bootable snapsets] *** task path: 
/tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:49 Wednesday 12 November 2025 14:44:49 -0500 (0:00:00.127) 0:00:51.415 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } META: end_host conditional evaluated to false, continuing execution for managed-node2 TASK [fedora.linux_system_roles.snapshot : Run snapshot module mount] ********** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:69 Wednesday 12 November 2025 14:44:49 -0500 (0:00:00.124) 0:00:51.540 **** changed: [managed-node2] => { "changed": true, "errors": "", "message": "", "return_code": 0 } TASK [fedora.linux_system_roles.snapshot : Print out response] ***************** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:96 Wednesday 12 November 2025 14:44:50 -0500 (0:00:00.707) 0:00:52.248 **** ok: [managed-node2] => { "snapshot_cmd": { "changed": true, "errors": "", "failed": false, "message": "", "msg": "", "return_code": 0 } } TASK [fedora.linux_system_roles.snapshot : Set result] ************************* task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:102 Wednesday 12 November 2025 14:44:50 -0500 (0:00:00.077) 0:00:52.326 **** ok: [managed-node2] => { "ansible_facts": { "snapshot_cmd": { "changed": true, "errors": "", "failed": false, "message": "", "msg": "", "return_code": 0 } }, "changed": false } TASK [fedora.linux_system_roles.snapshot : Set snapshot_facts to the JSON results] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:107 Wednesday 12 November 2025 14:44:50 -0500 (0:00:00.072) 0:00:52.398 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Show errors] ************************ task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:112 Wednesday 12 November 2025 14:44:50 -0500 (0:00:00.085) 0:00:52.484 **** skipping: [managed-node2] => {} TASK [Assert changes for mount] ************************************************ task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/tests/snapshot/tests_mount.yml:78 Wednesday 12 November 2025 14:44:50 -0500 (0:00:00.123) 0:00:52.608 **** ok: [managed-node2] => { "changed": false } MSG: All assertions passed TASK [Mount the snapshot for lv2] ********************************************** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/tests/snapshot/tests_mount.yml:82 Wednesday 12 November 2025 14:44:50 -0500 (0:00:00.099) 0:00:52.707 **** TASK [fedora.linux_system_roles.snapshot : Set platform/version specific variables] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:3 Wednesday 12 November 2025 14:44:50 -0500 (0:00:00.297) 0:00:53.005 **** included: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml for managed-node2 TASK [fedora.linux_system_roles.snapshot : Ensure ansible_facts used by role] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:2 Wednesday 12 November 2025 14:44:50 -0500 (0:00:00.183) 0:00:53.189 **** skipping: [managed-node2] => { "changed": false, 
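
Each of these mount steps drives the role once per LV with the mount action. A minimal sketch of the kind of invocation involved, assuming the variable names documented in the snapshot role README (the snapset name and mount point are hypothetical, not taken from this log; the VG and LV names come from this test's setup):

- name: Mount the snapshot for lv1
  hosts: all
  vars:
    snapshot_lvm_action: mount                 # mount an existing snapshot LV
    snapshot_lvm_vg: test_vg1                  # volume group created by the setup above
    snapshot_lvm_lv: lv1                       # origin LV whose snapshot is mounted
    snapshot_lvm_snapset_name: snapset1        # hypothetical snapset name
    snapshot_lvm_mountpoint: /mnt/lv1_mp       # hypothetical mount point
    snapshot_lvm_mountpoint_create: true       # create the mount point if it does not exist
  roles:
    - fedora.linux_system_roles.snapshot

TASK [Mount the snapshot for lv2] ********************************************** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/tests/snapshot/tests_mount.yml:82 Wednesday 12 November 2025 14:44:50 -0500 (0:00:00.099) 0:00:52.707 **** TASK [fedora.linux_system_roles.snapshot : Set platform/version specific variables] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:3 Wednesday 12 November 2025 14:44:50 -0500 (0:00:00.297) 0:00:53.005 **** included: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml for managed-node2 TASK [fedora.linux_system_roles.snapshot : Ensure ansible_facts used by role] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:2 Wednesday 12 November 2025 14:44:50 -0500 (0:00:00.183) 0:00:53.189 **** skipping: [managed-node2] => { "changed": false,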
"skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Check if system is ostree] ********** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:10 Wednesday 12 November 2025 14:44:51 -0500 (0:00:00.116) 0:00:53.306 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Set flag to indicate system is ostree] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:15 Wednesday 12 November 2025 14:44:51 -0500 (0:00:00.093) 0:00:53.399 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Set platform/version specific variables] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:19 Wednesday 12 November 2025 14:44:51 -0500 (0:00:00.072) 0:00:53.472 **** skipping: [managed-node2] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node2] => (item=CentOS_7.yml) => { "ansible_facts": { "__snapshot_python": "/usr/bin/python" }, "ansible_included_var_files": [ "/tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/vars/CentOS_7.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_7.yml" } skipping: [managed-node2] => (item=CentOS_7.9.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS_7.9.yml", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Enable copr if requested] *********** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:6 Wednesday 12 November 2025 14:44:51 -0500 (0:00:00.174) 0:00:53.647 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Ensure required packages are installed] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:10 Wednesday 12 November 2025 14:44:51 -0500 (0:00:00.094) 0:00:53.741 **** ok: [managed-node2] => { "changed": false, "rc": 0, "results": [ "7:lvm2-2.02.187-6.el7_9.5.x86_64 providing lvm2 is already installed", "util-linux-2.23.2-65.el7_9.1.x86_64 providing util-linux is already installed" ] } TASK [fedora.linux_system_roles.snapshot : Get snapm version] ****************** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:16 Wednesday 12 November 2025 14:44:52 -0500 (0:00:00.847) 0:00:54.589 **** fatal: [managed-node2]: FAILED! 
=> { "changed": false, "cmd": "snapm --version", "rc": 2 } MSG: [Errno 2] No such file or directory ...ignoring TASK [fedora.linux_system_roles.snapshot : Set snapm availability fact] ******** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:24 Wednesday 12 November 2025 14:44:52 -0500 (0:00:00.361) 0:00:54.950 **** ok: [managed-node2] => { "ansible_facts": { "__snapshot_snapm_available": false }, "changed": false } TASK [fedora.linux_system_roles.snapshot : Set snapm version] ****************** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:28 Wednesday 12 November 2025 14:44:52 -0500 (0:00:00.072) 0:00:55.023 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Set needs bootable support] ********* task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:36 Wednesday 12 November 2025 14:44:52 -0500 (0:00:00.084) 0:00:55.108 **** ok: [managed-node2] => { "ansible_facts": { "__snapshot_needs_bootable_support": false }, "changed": false } TASK [fedora.linux_system_roles.snapshot : Package snapm version must be 0.4 or later] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:40 Wednesday 12 November 2025 14:44:52 -0500 (0:00:00.094) 0:00:55.203 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Package snapm must available and be version 0.5 or later for bootable snapsets] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:49 Wednesday 12 November 2025 14:44:53 -0500 (0:00:00.059) 0:00:55.262 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } META: end_host conditional evaluated to false, continuing execution for managed-node2 TASK [fedora.linux_system_roles.snapshot : Run snapshot module mount] ********** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:69 Wednesday 12 November 2025 14:44:53 -0500 (0:00:00.107) 0:00:55.369 **** changed: [managed-node2] => { "changed": true, "errors": "", "message": "", "return_code": 0 } TASK [fedora.linux_system_roles.snapshot : Print out response] ***************** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:96 Wednesday 12 November 2025 14:44:53 -0500 (0:00:00.675) 0:00:56.045 **** ok: [managed-node2] => { "snapshot_cmd": { "changed": true, "errors": "", "failed": false, "message": "", "msg": "", "return_code": 0 } } TASK [fedora.linux_system_roles.snapshot : Set result] ************************* task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:102 Wednesday 12 November 2025 14:44:53 -0500 (0:00:00.148) 0:00:56.193 **** ok: [managed-node2] => { "ansible_facts": { "snapshot_cmd": { "changed": true, "errors": "", "failed": false, "message": "", "msg": "", "return_code": 0 } }, "changed": false } TASK [fedora.linux_system_roles.snapshot : Set snapshot_facts to the JSON results] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:107 Wednesday 12 November 2025 14:44:54 -0500 (0:00:00.090) 0:00:56.284 
**** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Show errors] ************************ task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:112 Wednesday 12 November 2025 14:44:54 -0500 (0:00:00.062) 0:00:56.347 **** skipping: [managed-node2] => {} TASK [Mount the snapshot for lv7] ********************************************** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/tests/snapshot/tests_mount.yml:93 Wednesday 12 November 2025 14:44:54 -0500 (0:00:00.055) 0:00:56.402 **** TASK [fedora.linux_system_roles.snapshot : Set platform/version specific variables] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:3 Wednesday 12 November 2025 14:44:54 -0500 (0:00:00.148) 0:00:56.551 **** included: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml for managed-node2 TASK [fedora.linux_system_roles.snapshot : Ensure ansible_facts used by role] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:2 Wednesday 12 November 2025 14:44:54 -0500 (0:00:00.066) 0:00:56.617 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Check if system is ostree] ********** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:10 Wednesday 12 November 2025 14:44:54 -0500 (0:00:00.060) 0:00:56.678 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Set flag to indicate system is ostree] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:15 Wednesday 12 November 2025 14:44:54 -0500 (0:00:00.054) 0:00:56.732 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Set platform/version specific variables] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:19 Wednesday 12 November 2025 14:44:54 -0500 (0:00:00.057) 0:00:56.790 **** skipping: [managed-node2] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node2] => (item=CentOS_7.yml) => { "ansible_facts": { "__snapshot_python": "/usr/bin/python" }, "ansible_included_var_files": [ "/tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/vars/CentOS_7.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_7.yml" } skipping: [managed-node2] => (item=CentOS_7.9.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS_7.9.yml", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Enable copr if requested] *********** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:6 Wednesday 12 November 2025 14:44:54 -0500 (0:00:00.127) 0:00:56.917 **** skipping: 
[managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Ensure required packages are installed] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:10 Wednesday 12 November 2025 14:44:54 -0500 (0:00:00.067) 0:00:56.985 **** ok: [managed-node2] => { "changed": false, "rc": 0, "results": [ "7:lvm2-2.02.187-6.el7_9.5.x86_64 providing lvm2 is already installed", "util-linux-2.23.2-65.el7_9.1.x86_64 providing util-linux is already installed" ] } TASK [fedora.linux_system_roles.snapshot : Get snapm version] ****************** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:16 Wednesday 12 November 2025 14:44:55 -0500 (0:00:00.989) 0:00:57.975 **** fatal: [managed-node2]: FAILED! => { "changed": false, "cmd": "snapm --version", "rc": 2 } MSG: [Errno 2] No such file or directory ...ignoring TASK [fedora.linux_system_roles.snapshot : Set snapm availability fact] ******** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:24 Wednesday 12 November 2025 14:44:56 -0500 (0:00:00.439) 0:00:58.414 **** ok: [managed-node2] => { "ansible_facts": { "__snapshot_snapm_available": false }, "changed": false } TASK [fedora.linux_system_roles.snapshot : Set snapm version] ****************** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:28 Wednesday 12 November 2025 14:44:56 -0500 (0:00:00.121) 0:00:58.536 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Set needs bootable support] ********* task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:36 Wednesday 12 November 2025 14:44:56 -0500 (0:00:00.206) 0:00:58.742 **** ok: [managed-node2] => { "ansible_facts": { "__snapshot_needs_bootable_support": false }, "changed": false } TASK [fedora.linux_system_roles.snapshot : Package snapm version must be 0.4 or later] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:40 Wednesday 12 November 2025 14:44:56 -0500 (0:00:00.148) 0:00:58.891 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Package snapm must be available and be version 0.5 or later for bootable snapsets] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:49 Wednesday 12 November 2025 14:44:56 -0500 (0:00:00.091) 0:00:58.982 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } META: end_host conditional evaluated to false, continuing execution for managed-node2 TASK [fedora.linux_system_roles.snapshot : Run snapshot module mount] ********** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:69 Wednesday 12 November 2025 14:44:56 -0500 (0:00:00.122) 0:00:59.105 **** changed: [managed-node2] => { "changed": true, "errors": "", "message": "", "return_code": 0 } TASK [fedora.linux_system_roles.snapshot : Print out response] ***************** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:96 Wednesday 12 November 2025 14:44:57 -0500
(0:00:00.707) 0:00:59.812 **** ok: [managed-node2] => { "snapshot_cmd": { "changed": true, "errors": "", "failed": false, "message": "", "msg": "", "return_code": 0 } } TASK [fedora.linux_system_roles.snapshot : Set result] ************************* task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:102 Wednesday 12 November 2025 14:44:57 -0500 (0:00:00.102) 0:00:59.914 **** ok: [managed-node2] => { "ansible_facts": { "snapshot_cmd": { "changed": true, "errors": "", "failed": false, "message": "", "msg": "", "return_code": 0 } }, "changed": false } TASK [fedora.linux_system_roles.snapshot : Set snapshot_facts to the JSON results] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:107 Wednesday 12 November 2025 14:44:57 -0500 (0:00:00.080) 0:00:59.994 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Show errors] ************************ task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:112 Wednesday 12 November 2025 14:44:57 -0500 (0:00:00.070) 0:01:00.065 **** skipping: [managed-node2] => {} TASK [Mount the origin for lv6] ************************************************ task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/tests/snapshot/tests_mount.yml:104 Wednesday 12 November 2025 14:44:57 -0500 (0:00:00.067) 0:01:00.133 **** TASK [fedora.linux_system_roles.snapshot : Set platform/version specific variables] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:3 Wednesday 12 November 2025 14:44:58 -0500 (0:00:00.247) 0:01:00.380 **** included: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml for managed-node2 TASK [fedora.linux_system_roles.snapshot : Ensure ansible_facts used by role] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:2 Wednesday 12 November 2025 14:44:58 -0500 (0:00:00.071) 0:01:00.452 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Check if system is ostree] ********** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:10 Wednesday 12 November 2025 14:44:58 -0500 (0:00:00.056) 0:01:00.508 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Set flag to indicate system is ostree] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:15 Wednesday 12 November 2025 14:44:58 -0500 (0:00:00.040) 0:01:00.549 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Set platform/version specific variables] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:19 Wednesday 12 November 2025 14:44:58 -0500 (0:00:00.051) 0:01:00.600 **** skipping: [managed-node2] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=CentOS.yml) => { 
"ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node2] => (item=CentOS_7.yml) => { "ansible_facts": { "__snapshot_python": "/usr/bin/python" }, "ansible_included_var_files": [ "/tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/vars/CentOS_7.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_7.yml" } skipping: [managed-node2] => (item=CentOS_7.9.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS_7.9.yml", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Enable copr if requested] *********** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:6 Wednesday 12 November 2025 14:44:58 -0500 (0:00:00.136) 0:01:00.737 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Ensure required packages are installed] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:10 Wednesday 12 November 2025 14:44:58 -0500 (0:00:00.100) 0:01:00.837 **** ok: [managed-node2] => { "changed": false, "rc": 0, "results": [ "7:lvm2-2.02.187-6.el7_9.5.x86_64 providing lvm2 is already installed", "util-linux-2.23.2-65.el7_9.1.x86_64 providing util-linux is already installed" ] } TASK [fedora.linux_system_roles.snapshot : Get snapm version] ****************** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:16 Wednesday 12 November 2025 14:44:59 -0500 (0:00:01.025) 0:01:01.862 **** fatal: [managed-node2]: FAILED! => { "changed": false, "cmd": "snapm --version", "rc": 2 } MSG: [Errno 2] No such file or directory ...ignoring TASK [fedora.linux_system_roles.snapshot : Set snapm availability fact] ******** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:24 Wednesday 12 November 2025 14:45:00 -0500 (0:00:00.409) 0:01:02.271 **** ok: [managed-node2] => { "ansible_facts": { "__snapshot_snapm_available": false }, "changed": false } TASK [fedora.linux_system_roles.snapshot : Set snapm version] ****************** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:28 Wednesday 12 November 2025 14:45:00 -0500 (0:00:00.074) 0:01:02.346 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Set needs bootable support] ********* task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:36 Wednesday 12 November 2025 14:45:00 -0500 (0:00:00.057) 0:01:02.404 **** ok: [managed-node2] => { "ansible_facts": { "__snapshot_needs_bootable_support": false }, "changed": false } TASK [fedora.linux_system_roles.snapshot : Package snapm version must be 0.4 or later] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:40 Wednesday 12 November 2025 14:45:00 -0500 (0:00:00.088) 0:01:02.492 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Package snapm must available and be version 0.5 or later for bootable snapsets] *** task path: 
/tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:49 Wednesday 12 November 2025 14:45:00 -0500 (0:00:00.087) 0:01:02.580 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } META: end_host conditional evaluated to false, continuing execution for managed-node2 TASK [fedora.linux_system_roles.snapshot : Run snapshot module mount] ********** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:69 Wednesday 12 November 2025 14:45:00 -0500 (0:00:00.137) 0:01:02.718 **** changed: [managed-node2] => { "changed": true, "errors": "", "message": "", "return_code": 0 } TASK [fedora.linux_system_roles.snapshot : Print out response] ***************** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:96 Wednesday 12 November 2025 14:45:01 -0500 (0:00:00.630) 0:01:03.348 **** ok: [managed-node2] => { "snapshot_cmd": { "changed": true, "errors": "", "failed": false, "message": "", "msg": "", "return_code": 0 } } TASK [fedora.linux_system_roles.snapshot : Set result] ************************* task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:102 Wednesday 12 November 2025 14:45:01 -0500 (0:00:00.075) 0:01:03.424 **** ok: [managed-node2] => { "ansible_facts": { "snapshot_cmd": { "changed": true, "errors": "", "failed": false, "message": "", "msg": "", "return_code": 0 } }, "changed": false } TASK [fedora.linux_system_roles.snapshot : Set snapshot_facts to the JSON results] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:107 Wednesday 12 November 2025 14:45:01 -0500 (0:00:00.102) 0:01:03.526 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Show errors] ************************ task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:112 Wednesday 12 November 2025 14:45:01 -0500 (0:00:00.071) 0:01:03.598 **** skipping: [managed-node2] => {} TASK [Mount the snapshot for lv1 again for idempotence] ************************ task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/tests/snapshot/tests_mount.yml:116 Wednesday 12 November 2025 14:45:01 -0500 (0:00:00.111) 0:01:03.709 **** TASK [fedora.linux_system_roles.snapshot : Set platform/version specific variables] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:3 Wednesday 12 November 2025 14:45:01 -0500 (0:00:00.354) 0:01:04.064 **** included: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml for managed-node2 TASK [fedora.linux_system_roles.snapshot : Ensure ansible_facts used by role] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:2 Wednesday 12 November 2025 14:45:01 -0500 (0:00:00.120) 0:01:04.184 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Check if system is ostree] ********** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:10 Wednesday 12 November 2025 14:45:02 -0500 (0:00:00.069) 0:01:04.254 **** skipping: 
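
The lv6 pass just above differs from the earlier mounts in that it mounts the origin LV rather than its snapshot; per the role documentation that is selected with snapshot_lvm_mount_origin, roughly as in this sketch (snapset name and mount point again hypothetical):

- name: Mount the origin for lv6
  hosts: all
  vars:
    snapshot_lvm_action: mount
    snapshot_lvm_vg: test_vg3                  # lv6 belongs to test_vg3 in this test setup
    snapshot_lvm_lv: lv6
    snapshot_lvm_mount_origin: true            # mount the origin LV instead of the snapshot
    snapshot_lvm_snapset_name: snapset1        # hypothetical snapset name
    snapshot_lvm_mountpoint: /mnt/lv6_mp       # hypothetical mount point
  roles:
    - fedora.linux_system_roles.snapshot

The re-run for lv1 that follows exercises idempotence: mounting an already-mounted snapshot with the same parameters should change nothing, and the "Run snapshot module mount" task below accordingly reports ok with "changed": false.

TASK [Mount the snapshot for lv1 again for idempotence] ************************ task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/tests/snapshot/tests_mount.yml:116 Wednesday 12 November 2025 14:45:01 -0500 (0:00:00.111) 0:01:03.709 **** TASK [fedora.linux_system_roles.snapshot : Set platform/version specific variables] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:3 Wednesday 12 November 2025 14:45:01 -0500 (0:00:00.354) 0:01:04.064 **** included: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml for managed-node2 TASK [fedora.linux_system_roles.snapshot : Ensure ansible_facts used by role] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:2 Wednesday 12 November 2025 14:45:01 -0500 (0:00:00.120) 0:01:04.184 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Check if system is ostree] ********** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:10 Wednesday 12 November 2025 14:45:02 -0500 (0:00:00.069) 0:01:04.254 **** skipping: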
[managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Set flag to indicate system is ostree] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:15 Wednesday 12 November 2025 14:45:02 -0500 (0:00:00.052) 0:01:04.307 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Set platform/version specific variables] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:19 Wednesday 12 November 2025 14:45:02 -0500 (0:00:00.093) 0:01:04.400 **** skipping: [managed-node2] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node2] => (item=CentOS_7.yml) => { "ansible_facts": { "__snapshot_python": "/usr/bin/python" }, "ansible_included_var_files": [ "/tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/vars/CentOS_7.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_7.yml" } skipping: [managed-node2] => (item=CentOS_7.9.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS_7.9.yml", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Enable copr if requested] *********** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:6 Wednesday 12 November 2025 14:45:02 -0500 (0:00:00.092) 0:01:04.492 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Ensure required packages are installed] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:10 Wednesday 12 November 2025 14:45:02 -0500 (0:00:00.045) 0:01:04.538 **** ok: [managed-node2] => { "changed": false, "rc": 0, "results": [ "7:lvm2-2.02.187-6.el7_9.5.x86_64 providing lvm2 is already installed", "util-linux-2.23.2-65.el7_9.1.x86_64 providing util-linux is already installed" ] } TASK [fedora.linux_system_roles.snapshot : Get snapm version] ****************** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:16 Wednesday 12 November 2025 14:45:02 -0500 (0:00:00.694) 0:01:05.233 **** fatal: [managed-node2]: FAILED! 
=> { "changed": false, "cmd": "snapm --version", "rc": 2 } MSG: [Errno 2] No such file or directory ...ignoring TASK [fedora.linux_system_roles.snapshot : Set snapm availability fact] ******** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:24 Wednesday 12 November 2025 14:45:03 -0500 (0:00:00.451) 0:01:05.684 **** ok: [managed-node2] => { "ansible_facts": { "__snapshot_snapm_available": false }, "changed": false } TASK [fedora.linux_system_roles.snapshot : Set snapm version] ****************** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:28 Wednesday 12 November 2025 14:45:03 -0500 (0:00:00.081) 0:01:05.766 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Set needs bootable support] ********* task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:36 Wednesday 12 November 2025 14:45:03 -0500 (0:00:00.051) 0:01:05.818 **** ok: [managed-node2] => { "ansible_facts": { "__snapshot_needs_bootable_support": false }, "changed": false } TASK [fedora.linux_system_roles.snapshot : Package snapm version must be 0.4 or later] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:40 Wednesday 12 November 2025 14:45:03 -0500 (0:00:00.140) 0:01:05.959 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Package snapm must available and be version 0.5 or later for bootable snapsets] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:49 Wednesday 12 November 2025 14:45:03 -0500 (0:00:00.143) 0:01:06.102 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } META: end_host conditional evaluated to false, continuing execution for managed-node2 TASK [fedora.linux_system_roles.snapshot : Run snapshot module mount] ********** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:69 Wednesday 12 November 2025 14:45:04 -0500 (0:00:00.268) 0:01:06.370 **** ok: [managed-node2] => { "changed": false, "errors": "", "message": "", "return_code": 0 } TASK [fedora.linux_system_roles.snapshot : Print out response] ***************** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:96 Wednesday 12 November 2025 14:45:05 -0500 (0:00:00.871) 0:01:07.242 **** ok: [managed-node2] => { "snapshot_cmd": { "changed": false, "errors": "", "failed": false, "message": "", "msg": "", "return_code": 0 } } TASK [fedora.linux_system_roles.snapshot : Set result] ************************* task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:102 Wednesday 12 November 2025 14:45:05 -0500 (0:00:00.072) 0:01:07.315 **** ok: [managed-node2] => { "ansible_facts": { "snapshot_cmd": { "changed": false, "errors": "", "failed": false, "message": "", "msg": "", "return_code": 0 } }, "changed": false } TASK [fedora.linux_system_roles.snapshot : Set snapshot_facts to the JSON results] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:107 Wednesday 12 November 2025 14:45:05 -0500 (0:00:00.171) 0:01:07.486 
**** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Show errors] ************************ task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:112 Wednesday 12 November 2025 14:45:05 -0500 (0:00:00.097) 0:01:07.584 **** skipping: [managed-node2] => {} TASK [Assert no changes for mount] ********************************************* task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/tests/snapshot/tests_mount.yml:127 Wednesday 12 November 2025 14:45:05 -0500 (0:00:00.087) 0:01:07.672 **** ok: [managed-node2] => { "changed": false } MSG: All assertions passed TASK [Umount the snapshot for lv1] ********************************************* task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/tests/snapshot/tests_mount.yml:131 Wednesday 12 November 2025 14:45:05 -0500 (0:00:00.113) 0:01:07.786 **** TASK [fedora.linux_system_roles.snapshot : Set platform/version specific variables] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:3 Wednesday 12 November 2025 14:45:05 -0500 (0:00:00.330) 0:01:08.117 **** included: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml for managed-node2 TASK [fedora.linux_system_roles.snapshot : Ensure ansible_facts used by role] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:2 Wednesday 12 November 2025 14:45:06 -0500 (0:00:00.119) 0:01:08.237 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Check if system is ostree] ********** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:10 Wednesday 12 November 2025 14:45:06 -0500 (0:00:00.142) 0:01:08.379 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Set flag to indicate system is ostree] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:15 Wednesday 12 November 2025 14:45:06 -0500 (0:00:00.155) 0:01:08.534 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Set platform/version specific variables] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:19 Wednesday 12 November 2025 14:45:06 -0500 (0:00:00.129) 0:01:08.664 **** skipping: [managed-node2] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node2] => (item=CentOS_7.yml) => { "ansible_facts": { "__snapshot_python": "/usr/bin/python" }, "ansible_included_var_files": [ "/tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/vars/CentOS_7.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_7.yml" } skipping: [managed-node2] => (item=CentOS_7.9.yml) => { "ansible_loop_var": "item", "changed": false, "item": 
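The idempotence pass above re-runs the same mount through the role and then asserts that the registered snapshot_cmd fact reports no change. For reference, a minimal sketch of what such a test task pair looks like; the volume group, LV, snapset name, and mountpoint values are placeholders inferred from the task names, not taken from this log:

- name: Mount the snapshot for lv1 again for idempotence
  include_role:
    name: fedora.linux_system_roles.snapshot
  vars:
    snapshot_lvm_action: mount
    snapshot_lvm_vg: test_vg1                # assumed VG name
    snapshot_lvm_lv: lv1
    snapshot_lvm_snapset_name: snapset1      # assumed snapset name
    snapshot_lvm_mountpoint: /mnt/lv1_mnt    # assumed mountpoint

- name: Assert no changes for mount
  assert:
    that: not snapshot_cmd["changed"]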
"CentOS_7.9.yml", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Enable copr if requested] *********** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:6 Wednesday 12 November 2025 14:45:06 -0500 (0:00:00.171) 0:01:08.836 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Ensure required packages are installed] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:10 Wednesday 12 November 2025 14:45:06 -0500 (0:00:00.075) 0:01:08.911 **** ok: [managed-node2] => { "changed": false, "rc": 0, "results": [ "7:lvm2-2.02.187-6.el7_9.5.x86_64 providing lvm2 is already installed", "util-linux-2.23.2-65.el7_9.1.x86_64 providing util-linux is already installed" ] } TASK [fedora.linux_system_roles.snapshot : Get snapm version] ****************** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:16 Wednesday 12 November 2025 14:45:07 -0500 (0:00:00.906) 0:01:09.817 **** fatal: [managed-node2]: FAILED! => { "changed": false, "cmd": "snapm --version", "rc": 2 } MSG: [Errno 2] No such file or directory ...ignoring TASK [fedora.linux_system_roles.snapshot : Set snapm availability fact] ******** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:24 Wednesday 12 November 2025 14:45:08 -0500 (0:00:00.472) 0:01:10.290 **** ok: [managed-node2] => { "ansible_facts": { "__snapshot_snapm_available": false }, "changed": false } TASK [fedora.linux_system_roles.snapshot : Set snapm version] ****************** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:28 Wednesday 12 November 2025 14:45:08 -0500 (0:00:00.151) 0:01:10.442 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Set needs bootable support] ********* task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:36 Wednesday 12 November 2025 14:45:08 -0500 (0:00:00.122) 0:01:10.564 **** ok: [managed-node2] => { "ansible_facts": { "__snapshot_needs_bootable_support": false }, "changed": false } TASK [fedora.linux_system_roles.snapshot : Package snapm version must be 0.4 or later] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:40 Wednesday 12 November 2025 14:45:08 -0500 (0:00:00.067) 0:01:10.631 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Package snapm must available and be version 0.5 or later for bootable snapsets] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:49 Wednesday 12 November 2025 14:45:08 -0500 (0:00:00.119) 0:01:10.751 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } META: end_host conditional evaluated to false, continuing execution for managed-node2 TASK [fedora.linux_system_roles.snapshot : Run snapshot module umount] ********* task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:69 Wednesday 12 November 2025 14:45:08 -0500 (0:00:00.184) 
0:01:10.936 **** changed: [managed-node2] => { "changed": true, "errors": "", "message": "", "return_code": 0 } TASK [fedora.linux_system_roles.snapshot : Print out response] ***************** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:96 Wednesday 12 November 2025 14:45:09 -0500 (0:00:01.064) 0:01:12.001 **** ok: [managed-node2] => { "snapshot_cmd": { "changed": true, "errors": "", "failed": false, "message": "", "msg": "", "return_code": 0 } } TASK [fedora.linux_system_roles.snapshot : Set result] ************************* task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:102 Wednesday 12 November 2025 14:45:09 -0500 (0:00:00.138) 0:01:12.139 **** ok: [managed-node2] => { "ansible_facts": { "snapshot_cmd": { "changed": true, "errors": "", "failed": false, "message": "", "msg": "", "return_code": 0 } }, "changed": false } TASK [fedora.linux_system_roles.snapshot : Set snapshot_facts to the JSON results] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:107 Wednesday 12 November 2025 14:45:10 -0500 (0:00:00.105) 0:01:12.245 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Show errors] ************************ task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:112 Wednesday 12 November 2025 14:45:10 -0500 (0:00:00.081) 0:01:12.326 **** skipping: [managed-node2] => {} TASK [Assert changes for umount] *********************************************** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/tests/snapshot/tests_mount.yml:141 Wednesday 12 November 2025 14:45:10 -0500 (0:00:00.068) 0:01:12.395 **** ok: [managed-node2] => { "changed": false } MSG: All assertions passed TASK [Umount again to check idempotence] *************************************** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/tests/snapshot/tests_mount.yml:145 Wednesday 12 November 2025 14:45:10 -0500 (0:00:00.056) 0:01:12.452 **** TASK [fedora.linux_system_roles.snapshot : Set platform/version specific variables] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:3 Wednesday 12 November 2025 14:45:10 -0500 (0:00:00.285) 0:01:12.738 **** included: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml for managed-node2 TASK [fedora.linux_system_roles.snapshot : Ensure ansible_facts used by role] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:2 Wednesday 12 November 2025 14:45:10 -0500 (0:00:00.104) 0:01:12.842 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Check if system is ostree] ********** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:10 Wednesday 12 November 2025 14:45:10 -0500 (0:00:00.117) 0:01:12.959 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Set flag to indicate system is ostree] *** task path: 
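The umount pass reverses the earlier mount and is expected to report a change the first time it runs. A sketch of the likely driving task pair, under the same placeholder naming as above:

- name: Umount the snapshot for lv1
  include_role:
    name: fedora.linux_system_roles.snapshot
  vars:
    snapshot_lvm_action: umount
    snapshot_lvm_vg: test_vg1               # assumed VG name
    snapshot_lvm_lv: lv1
    snapshot_lvm_mountpoint: /mnt/lv1_mnt   # assumed mountpoint

- name: Assert changes for umount
  assert:
    that: snapshot_cmd["changed"]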
TASK [Umount again to check idempotence] ***************************************
task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/tests/snapshot/tests_mount.yml:145
Wednesday 12 November 2025 14:45:10 -0500 (0:00:00.285) 0:01:12.738 ****

TASK [fedora.linux_system_roles.snapshot : Set platform/version specific variables] ***
task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:3
Wednesday 12 November 2025 14:45:10 -0500 (0:00:00.104) 0:01:12.842 ****
included: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml for managed-node2

TASK [fedora.linux_system_roles.snapshot : Ensure ansible_facts used by role] ***
task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:2
Wednesday 12 November 2025 14:45:10 -0500 (0:00:00.117) 0:01:12.959 ****
skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.snapshot : Check if system is ostree] **********
task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:10
Wednesday 12 November 2025 14:45:10 -0500 (0:00:00.115) 0:01:13.075 ****
skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.snapshot : Set flag to indicate system is ostree] ***
task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:15
Wednesday 12 November 2025 14:45:10 -0500 (0:00:00.093) 0:01:13.169 ****
skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.snapshot : Set platform/version specific variables] ***
task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:19
Wednesday 12 November 2025 14:45:11 -0500 (0:00:00.153) 0:01:13.323 ****
skipping: [managed-node2] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" }
skipping: [managed-node2] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" }
ok: [managed-node2] => (item=CentOS_7.yml) => { "ansible_facts": { "__snapshot_python": "/usr/bin/python" }, "ansible_included_var_files": [ "/tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/vars/CentOS_7.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_7.yml" }
skipping: [managed-node2] => (item=CentOS_7.9.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS_7.9.yml", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.snapshot : Enable copr if requested] ***********
task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:6
Wednesday 12 November 2025 14:45:11 -0500 (0:00:00.066) 0:01:13.389 ****
skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.snapshot : Ensure required packages are installed] ***
task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:10
Wednesday 12 November 2025 14:45:12 -0500 (0:00:00.914) 0:01:14.304 ****
ok: [managed-node2] => { "changed": false, "rc": 0, "results": [ "7:lvm2-2.02.187-6.el7_9.5.x86_64 providing lvm2 is already installed", "util-linux-2.23.2-65.el7_9.1.x86_64 providing util-linux is already installed" ] }

TASK [fedora.linux_system_roles.snapshot : Get snapm version] ******************
task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:16
Wednesday 12 November 2025 14:45:12 -0500 (0:00:00.368) 0:01:14.672 ****
fatal: [managed-node2]: FAILED! => { "changed": false, "cmd": "snapm --version", "rc": 2 }
MSG:
[Errno 2] No such file or directory
...ignoring

TASK [fedora.linux_system_roles.snapshot : Set snapm availability fact] ********
task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:24
Wednesday 12 November 2025 14:45:12 -0500 (0:00:00.095) 0:01:14.768 ****
ok: [managed-node2] => { "ansible_facts": { "__snapshot_snapm_available": false }, "changed": false }

TASK [fedora.linux_system_roles.snapshot : Set snapm version] ******************
task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:28
Wednesday 12 November 2025 14:45:12 -0500 (0:00:00.107) 0:01:14.876 ****
skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.snapshot : Set needs bootable support] *********
task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:36
Wednesday 12 November 2025 14:45:12 -0500 (0:00:00.118) 0:01:14.994 ****
ok: [managed-node2] => { "ansible_facts": { "__snapshot_needs_bootable_support": false }, "changed": false }

TASK [fedora.linux_system_roles.snapshot : Package snapm version must be 0.4 or later] ***
task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:40
Wednesday 12 November 2025 14:45:12 -0500 (0:00:00.090) 0:01:15.085 ****
skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.snapshot : Package snapm must be available and be version 0.5 or later for bootable snapsets] ***
task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:49
Wednesday 12 November 2025 14:45:13 -0500 (0:00:00.293) 0:01:15.378 ****
skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" }
META: end_host conditional evaluated to false, continuing execution for managed-node2

TASK [fedora.linux_system_roles.snapshot : Run snapshot module umount] *********
task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:69
Wednesday 12 November 2025 14:45:14 -0500 (0:00:00.976) 0:01:16.355 ****
ok: [managed-node2] => { "changed": false, "errors": "", "message": "", "return_code": 0 }

TASK [fedora.linux_system_roles.snapshot : Print out response] *****************
task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:96
Wednesday 12 November 2025 14:45:14 -0500 (0:00:00.118) 0:01:16.474 ****
ok: [managed-node2] => { "snapshot_cmd": { "changed": false, "errors": "", "failed": false, "message": "", "msg": "", "return_code": 0 } }

TASK [fedora.linux_system_roles.snapshot : Set result] *************************
task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:102
Wednesday 12 November 2025 14:45:14 -0500 (0:00:00.200) 0:01:16.674 ****
ok: [managed-node2] => { "ansible_facts": { "snapshot_cmd": { "changed": false, "errors": "", "failed": false, "message": "", "msg": "", "return_code": 0 } }, "changed": false }

TASK [fedora.linux_system_roles.snapshot : Set snapshot_facts to the JSON results] ***
task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:107
Wednesday 12 November 2025 14:45:14 -0500 (0:00:00.144) 0:01:16.819 ****
skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.snapshot : Show errors] ************************
task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:112
Wednesday 12 November 2025 14:45:14 -0500 (0:00:00.141) 0:01:16.961 ****
skipping: [managed-node2] => {}

TASK [Assert no changes for umount] ********************************************
task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/tests/snapshot/tests_mount.yml:155
Wednesday 12 November 2025 14:45:14 -0500 (0:00:00.145) 0:01:17.106 ****
ok: [managed-node2] => { "changed": false }
MSG:
All assertions passed
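Both idempotence assertions key off the snapshot_cmd fact that the role registers after each run; on the repeated umount it comes back with changed: false. Its shape, rendered as YAML from the values logged in the run above:

snapshot_cmd:
  changed: false
  errors: ""
  failed: false
  message: ""
  msg: ""
  return_code: 0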
"CentOS_7.9.yml", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Enable copr if requested] *********** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:6 Wednesday 12 November 2025 14:45:15 -0500 (0:00:00.188) 0:01:18.112 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Ensure required packages are installed] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:10 Wednesday 12 November 2025 14:45:15 -0500 (0:00:00.058) 0:01:18.170 **** ok: [managed-node2] => { "changed": false, "rc": 0, "results": [ "7:lvm2-2.02.187-6.el7_9.5.x86_64 providing lvm2 is already installed", "util-linux-2.23.2-65.el7_9.1.x86_64 providing util-linux is already installed" ] } TASK [fedora.linux_system_roles.snapshot : Get snapm version] ****************** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:16 Wednesday 12 November 2025 14:45:16 -0500 (0:00:00.960) 0:01:19.131 **** fatal: [managed-node2]: FAILED! => { "changed": false, "cmd": "snapm --version", "rc": 2 } MSG: [Errno 2] No such file or directory ...ignoring TASK [fedora.linux_system_roles.snapshot : Set snapm availability fact] ******** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:24 Wednesday 12 November 2025 14:45:17 -0500 (0:00:00.388) 0:01:19.519 **** ok: [managed-node2] => { "ansible_facts": { "__snapshot_snapm_available": false }, "changed": false } TASK [fedora.linux_system_roles.snapshot : Set snapm version] ****************** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:28 Wednesday 12 November 2025 14:45:17 -0500 (0:00:00.067) 0:01:19.586 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Set needs bootable support] ********* task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:36 Wednesday 12 November 2025 14:45:17 -0500 (0:00:00.051) 0:01:19.638 **** ok: [managed-node2] => { "ansible_facts": { "__snapshot_needs_bootable_support": false }, "changed": false } TASK [fedora.linux_system_roles.snapshot : Package snapm version must be 0.4 or later] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:40 Wednesday 12 November 2025 14:45:17 -0500 (0:00:00.064) 0:01:19.703 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Package snapm must available and be version 0.5 or later for bootable snapsets] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:49 Wednesday 12 November 2025 14:45:17 -0500 (0:00:00.057) 0:01:19.761 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } META: end_host conditional evaluated to false, continuing execution for managed-node2 TASK [fedora.linux_system_roles.snapshot : Run snapshot module umount] ********* task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:69 Wednesday 12 November 2025 14:45:17 -0500 (0:00:00.102) 
0:01:19.863 **** changed: [managed-node2] => { "changed": true, "errors": "", "message": "", "return_code": 0 } TASK [fedora.linux_system_roles.snapshot : Print out response] ***************** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:96 Wednesday 12 November 2025 14:45:18 -0500 (0:00:00.627) 0:01:20.491 **** ok: [managed-node2] => { "snapshot_cmd": { "changed": true, "errors": "", "failed": false, "message": "", "msg": "", "return_code": 0 } } TASK [fedora.linux_system_roles.snapshot : Set result] ************************* task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:102 Wednesday 12 November 2025 14:45:18 -0500 (0:00:00.120) 0:01:20.611 **** ok: [managed-node2] => { "ansible_facts": { "snapshot_cmd": { "changed": true, "errors": "", "failed": false, "message": "", "msg": "", "return_code": 0 } }, "changed": false } TASK [fedora.linux_system_roles.snapshot : Set snapshot_facts to the JSON results] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:107 Wednesday 12 November 2025 14:45:18 -0500 (0:00:00.073) 0:01:20.685 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Show errors] ************************ task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:112 Wednesday 12 November 2025 14:45:18 -0500 (0:00:00.066) 0:01:20.751 **** skipping: [managed-node2] => {} TASK [Umount the snapshot for lv7] ********************************************* task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/tests/snapshot/tests_mount.yml:169 Wednesday 12 November 2025 14:45:18 -0500 (0:00:00.105) 0:01:20.856 **** TASK [fedora.linux_system_roles.snapshot : Set platform/version specific variables] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:3 Wednesday 12 November 2025 14:45:18 -0500 (0:00:00.351) 0:01:21.208 **** included: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml for managed-node2 TASK [fedora.linux_system_roles.snapshot : Ensure ansible_facts used by role] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:2 Wednesday 12 November 2025 14:45:19 -0500 (0:00:00.121) 0:01:21.329 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Check if system is ostree] ********** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:10 Wednesday 12 November 2025 14:45:19 -0500 (0:00:00.080) 0:01:21.409 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Set flag to indicate system is ostree] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:15 Wednesday 12 November 2025 14:45:19 -0500 (0:00:00.071) 0:01:21.481 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Set platform/version specific variables] *** task path: 
/tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:19 Wednesday 12 November 2025 14:45:19 -0500 (0:00:00.054) 0:01:21.535 **** skipping: [managed-node2] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node2] => (item=CentOS_7.yml) => { "ansible_facts": { "__snapshot_python": "/usr/bin/python" }, "ansible_included_var_files": [ "/tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/vars/CentOS_7.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_7.yml" } skipping: [managed-node2] => (item=CentOS_7.9.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS_7.9.yml", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Enable copr if requested] *********** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:6 Wednesday 12 November 2025 14:45:19 -0500 (0:00:00.139) 0:01:21.675 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Ensure required packages are installed] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:10 Wednesday 12 November 2025 14:45:19 -0500 (0:00:00.063) 0:01:21.738 **** ok: [managed-node2] => { "changed": false, "rc": 0, "results": [ "7:lvm2-2.02.187-6.el7_9.5.x86_64 providing lvm2 is already installed", "util-linux-2.23.2-65.el7_9.1.x86_64 providing util-linux is already installed" ] } TASK [fedora.linux_system_roles.snapshot : Get snapm version] ****************** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:16 Wednesday 12 November 2025 14:45:20 -0500 (0:00:00.726) 0:01:22.465 **** fatal: [managed-node2]: FAILED! 
=> { "changed": false, "cmd": "snapm --version", "rc": 2 } MSG: [Errno 2] No such file or directory ...ignoring TASK [fedora.linux_system_roles.snapshot : Set snapm availability fact] ******** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:24 Wednesday 12 November 2025 14:45:20 -0500 (0:00:00.389) 0:01:22.854 **** ok: [managed-node2] => { "ansible_facts": { "__snapshot_snapm_available": false }, "changed": false } TASK [fedora.linux_system_roles.snapshot : Set snapm version] ****************** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:28 Wednesday 12 November 2025 14:45:20 -0500 (0:00:00.073) 0:01:22.928 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Set needs bootable support] ********* task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:36 Wednesday 12 November 2025 14:45:20 -0500 (0:00:00.056) 0:01:22.985 **** ok: [managed-node2] => { "ansible_facts": { "__snapshot_needs_bootable_support": false }, "changed": false } TASK [fedora.linux_system_roles.snapshot : Package snapm version must be 0.4 or later] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:40 Wednesday 12 November 2025 14:45:20 -0500 (0:00:00.120) 0:01:23.105 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Package snapm must available and be version 0.5 or later for bootable snapsets] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:49 Wednesday 12 November 2025 14:45:20 -0500 (0:00:00.084) 0:01:23.190 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } META: end_host conditional evaluated to false, continuing execution for managed-node2 TASK [fedora.linux_system_roles.snapshot : Run snapshot module umount] ********* task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:69 Wednesday 12 November 2025 14:45:21 -0500 (0:00:00.097) 0:01:23.288 **** changed: [managed-node2] => { "changed": true, "errors": "", "message": "", "return_code": 0 } TASK [fedora.linux_system_roles.snapshot : Print out response] ***************** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:96 Wednesday 12 November 2025 14:45:21 -0500 (0:00:00.585) 0:01:23.873 **** ok: [managed-node2] => { "snapshot_cmd": { "changed": true, "errors": "", "failed": false, "message": "", "msg": "", "return_code": 0 } } TASK [fedora.linux_system_roles.snapshot : Set result] ************************* task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:102 Wednesday 12 November 2025 14:45:21 -0500 (0:00:00.046) 0:01:23.920 **** ok: [managed-node2] => { "ansible_facts": { "snapshot_cmd": { "changed": true, "errors": "", "failed": false, "message": "", "msg": "", "return_code": 0 } }, "changed": false } TASK [fedora.linux_system_roles.snapshot : Set snapshot_facts to the JSON results] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:107 Wednesday 12 November 2025 14:45:21 -0500 (0:00:00.092) 0:01:24.013 
**** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Show errors] ************************ task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:112 Wednesday 12 November 2025 14:45:21 -0500 (0:00:00.042) 0:01:24.055 **** skipping: [managed-node2] => {} TASK [Umount the origin for lv6] *********************************************** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/tests/snapshot/tests_mount.yml:179 Wednesday 12 November 2025 14:45:21 -0500 (0:00:00.045) 0:01:24.100 **** TASK [fedora.linux_system_roles.snapshot : Set platform/version specific variables] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:3 Wednesday 12 November 2025 14:45:22 -0500 (0:00:00.311) 0:01:24.412 **** included: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml for managed-node2 TASK [fedora.linux_system_roles.snapshot : Ensure ansible_facts used by role] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:2 Wednesday 12 November 2025 14:45:22 -0500 (0:00:00.068) 0:01:24.481 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Check if system is ostree] ********** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:10 Wednesday 12 November 2025 14:45:22 -0500 (0:00:00.050) 0:01:24.531 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Set flag to indicate system is ostree] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:15 Wednesday 12 November 2025 14:45:22 -0500 (0:00:00.042) 0:01:24.573 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Set platform/version specific variables] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:19 Wednesday 12 November 2025 14:45:22 -0500 (0:00:00.041) 0:01:24.615 **** skipping: [managed-node2] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node2] => (item=CentOS_7.yml) => { "ansible_facts": { "__snapshot_python": "/usr/bin/python" }, "ansible_included_var_files": [ "/tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/vars/CentOS_7.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_7.yml" } skipping: [managed-node2] => (item=CentOS_7.9.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS_7.9.yml", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Enable copr if requested] *********** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:6 Wednesday 12 November 2025 14:45:22 -0500 (0:00:00.091) 0:01:24.707 **** skipping: 
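The lv1, lv2, and lv7 umount cycles are structurally identical, differing only in the volume they target; the test repeats the block per volume. A loop over include_role, as in this sketch, would drive equivalent runs (all vg/lv/mountpoint values are placeholders):

- name: Umount the snapshot for each LV
  include_role:
    name: fedora.linux_system_roles.snapshot
  vars:
    snapshot_lvm_action: umount
    snapshot_lvm_vg: "{{ item.vg }}"
    snapshot_lvm_lv: "{{ item.lv }}"
    snapshot_lvm_mountpoint: "{{ item.mnt }}"
  loop:
    - { vg: test_vg1, lv: lv1, mnt: /mnt/lv1_mnt }   # all values assumed
    - { vg: test_vg1, lv: lv2, mnt: /mnt/lv2_mnt }
    - { vg: test_vg3, lv: lv7, mnt: /mnt/lv7_mnt }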
TASK [Umount the origin for lv6] ***********************************************
task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/tests/snapshot/tests_mount.yml:179
Wednesday 12 November 2025 14:45:22 -0500 (0:00:00.311) 0:01:24.412 ****

TASK [fedora.linux_system_roles.snapshot : Set platform/version specific variables] ***
task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:3
Wednesday 12 November 2025 14:45:22 -0500 (0:00:00.068) 0:01:24.481 ****
included: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml for managed-node2

TASK [fedora.linux_system_roles.snapshot : Ensure ansible_facts used by role] ***
task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:2
Wednesday 12 November 2025 14:45:22 -0500 (0:00:00.050) 0:01:24.531 ****
skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.snapshot : Check if system is ostree] **********
task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:10
Wednesday 12 November 2025 14:45:22 -0500 (0:00:00.042) 0:01:24.573 ****
skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.snapshot : Set flag to indicate system is ostree] ***
task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:15
Wednesday 12 November 2025 14:45:22 -0500 (0:00:00.041) 0:01:24.615 ****
skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.snapshot : Set platform/version specific variables] ***
task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:19
Wednesday 12 November 2025 14:45:22 -0500 (0:00:00.091) 0:01:24.707 ****
skipping: [managed-node2] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" }
skipping: [managed-node2] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" }
ok: [managed-node2] => (item=CentOS_7.yml) => { "ansible_facts": { "__snapshot_python": "/usr/bin/python" }, "ansible_included_var_files": [ "/tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/vars/CentOS_7.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_7.yml" }
skipping: [managed-node2] => (item=CentOS_7.9.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS_7.9.yml", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.snapshot : Enable copr if requested] ***********
task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:6
Wednesday 12 November 2025 14:45:22 -0500 (0:00:00.046) 0:01:24.753 ****
skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.snapshot : Ensure required packages are installed] ***
task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:10
Wednesday 12 November 2025 14:45:23 -0500 (0:00:00.993) 0:01:25.747 ****
ok: [managed-node2] => { "changed": false, "rc": 0, "results": [ "7:lvm2-2.02.187-6.el7_9.5.x86_64 providing lvm2 is already installed", "util-linux-2.23.2-65.el7_9.1.x86_64 providing util-linux is already installed" ] }

TASK [fedora.linux_system_roles.snapshot : Get snapm version] ******************
task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:16
Wednesday 12 November 2025 14:45:24 -0500 (0:00:00.730) 0:01:26.477 ****
fatal: [managed-node2]: FAILED! => { "changed": false, "cmd": "snapm --version", "rc": 2 }
MSG:
[Errno 2] No such file or directory
...ignoring

TASK [fedora.linux_system_roles.snapshot : Set snapm availability fact] ********
task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:24
Wednesday 12 November 2025 14:45:24 -0500 (0:00:00.091) 0:01:26.569 ****
ok: [managed-node2] => { "ansible_facts": { "__snapshot_snapm_available": false }, "changed": false }

TASK [fedora.linux_system_roles.snapshot : Set snapm version] ******************
task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:28
Wednesday 12 November 2025 14:45:24 -0500 (0:00:00.053) 0:01:26.623 ****
skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.snapshot : Set needs bootable support] *********
task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:36
Wednesday 12 November 2025 14:45:24 -0500 (0:00:00.072) 0:01:26.695 ****
ok: [managed-node2] => { "ansible_facts": { "__snapshot_needs_bootable_support": false }, "changed": false }

TASK [fedora.linux_system_roles.snapshot : Package snapm version must be 0.4 or later] ***
task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:40
Wednesday 12 November 2025 14:45:24 -0500 (0:00:00.063) 0:01:26.759 ****
skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.snapshot : Package snapm must be available and be version 0.5 or later for bootable snapsets] ***
task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:49
Wednesday 12 November 2025 14:45:24 -0500 (0:00:00.105) 0:01:26.865 ****
skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" }
META: end_host conditional evaluated to false, continuing execution for managed-node2

TASK [fedora.linux_system_roles.snapshot : Run snapshot module umount] *********
task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:69
Wednesday 12 November 2025 14:45:25 -0500 (0:00:00.526) 0:01:27.392 ****
changed: [managed-node2] => { "changed": true, "errors": "", "message": "", "return_code": 0 }

TASK [fedora.linux_system_roles.snapshot : Print out response] *****************
task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:96
Wednesday 12 November 2025 14:45:25 -0500 (0:00:00.048) 0:01:27.440 ****
ok: [managed-node2] => { "snapshot_cmd": { "changed": true, "errors": "", "failed": false, "message": "", "msg": "", "return_code": 0 } }

TASK [fedora.linux_system_roles.snapshot : Set result] *************************
task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:102
Wednesday 12 November 2025 14:45:25 -0500 (0:00:00.051) 0:01:27.491 ****
ok: [managed-node2] => { "ansible_facts": { "snapshot_cmd": { "changed": true, "errors": "", "failed": false, "message": "", "msg": "", "return_code": 0 } }, "changed": false }

TASK [fedora.linux_system_roles.snapshot : Set snapshot_facts to the JSON results] ***
task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:107
Wednesday 12 November 2025 14:45:25 -0500 (0:00:00.057) 0:01:27.549 ****
skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.snapshot : Show errors] ************************
task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:112
Wednesday 12 November 2025 14:45:25 -0500 (0:00:00.119) 0:01:27.669 ****
skipping: [managed-node2] => {}
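The lv6 step unmounts the origin volume itself rather than one of its snapshots; the action is the same umount, pointed at the origin's mountpoint. A sketch under the same placeholder naming (whether the vg/lv pair is also required for this case is an assumption):

- name: Umount the origin for lv6
  include_role:
    name: fedora.linux_system_roles.snapshot
  vars:
    snapshot_lvm_action: umount
    snapshot_lvm_vg: test_vg3               # assumed VG name
    snapshot_lvm_lv: lv6
    snapshot_lvm_mountpoint: /mnt/lv6_mnt   # assumed mountpoint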
"ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node2] => (item=CentOS_7.yml) => { "ansible_facts": { "__snapshot_python": "/usr/bin/python" }, "ansible_included_var_files": [ "/tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/vars/CentOS_7.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_7.yml" } skipping: [managed-node2] => (item=CentOS_7.9.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS_7.9.yml", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Enable copr if requested] *********** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:6 Wednesday 12 November 2025 14:45:26 -0500 (0:00:00.131) 0:01:28.471 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Ensure required packages are installed] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:10 Wednesday 12 November 2025 14:45:26 -0500 (0:00:00.129) 0:01:28.601 **** ok: [managed-node2] => { "changed": false, "rc": 0, "results": [ "7:lvm2-2.02.187-6.el7_9.5.x86_64 providing lvm2 is already installed", "util-linux-2.23.2-65.el7_9.1.x86_64 providing util-linux is already installed" ] } TASK [fedora.linux_system_roles.snapshot : Get snapm version] ****************** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:16 Wednesday 12 November 2025 14:45:27 -0500 (0:00:00.674) 0:01:29.275 **** fatal: [managed-node2]: FAILED! => { "changed": false, "cmd": "snapm --version", "rc": 2 } MSG: [Errno 2] No such file or directory ...ignoring TASK [fedora.linux_system_roles.snapshot : Set snapm availability fact] ******** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:24 Wednesday 12 November 2025 14:45:27 -0500 (0:00:00.300) 0:01:29.576 **** ok: [managed-node2] => { "ansible_facts": { "__snapshot_snapm_available": false }, "changed": false } TASK [fedora.linux_system_roles.snapshot : Set snapm version] ****************** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:28 Wednesday 12 November 2025 14:45:27 -0500 (0:00:00.061) 0:01:29.637 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Set needs bootable support] ********* task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:36 Wednesday 12 November 2025 14:45:27 -0500 (0:00:00.058) 0:01:29.696 **** ok: [managed-node2] => { "ansible_facts": { "__snapshot_needs_bootable_support": false }, "changed": false } TASK [fedora.linux_system_roles.snapshot : Package snapm version must be 0.4 or later] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:40 Wednesday 12 November 2025 14:45:27 -0500 (0:00:00.115) 0:01:29.811 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Package snapm must available and be version 0.5 or later for bootable snapsets] *** task path: 
/tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:49 Wednesday 12 November 2025 14:45:27 -0500 (0:00:00.077) 0:01:29.889 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } META: end_host conditional evaluated to false, continuing execution for managed-node2 TASK [fedora.linux_system_roles.snapshot : Run snapshot module remove] ********* task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:69 Wednesday 12 November 2025 14:45:27 -0500 (0:00:00.107) 0:01:29.996 **** changed: [managed-node2] => { "changed": true, "errors": "", "message": "", "return_code": 0 } TASK [fedora.linux_system_roles.snapshot : Print out response] ***************** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:96 Wednesday 12 November 2025 14:45:30 -0500 (0:00:02.860) 0:01:32.856 **** ok: [managed-node2] => { "snapshot_cmd": { "changed": true, "errors": "", "failed": false, "message": "", "msg": "", "return_code": 0 } } TASK [fedora.linux_system_roles.snapshot : Set result] ************************* task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:102 Wednesday 12 November 2025 14:45:30 -0500 (0:00:00.099) 0:01:32.956 **** ok: [managed-node2] => { "ansible_facts": { "snapshot_cmd": { "changed": true, "errors": "", "failed": false, "message": "", "msg": "", "return_code": 0 } }, "changed": false } TASK [fedora.linux_system_roles.snapshot : Set snapshot_facts to the JSON results] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:107 Wednesday 12 November 2025 14:45:30 -0500 (0:00:00.126) 0:01:33.082 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Show errors] ************************ task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:112 Wednesday 12 November 2025 14:45:30 -0500 (0:00:00.049) 0:01:33.132 **** skipping: [managed-node2] => {} TASK [Use the snapshot_lvm_verify option to make sure remove is done] ********** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/tests/snapshot/tests_mount.yml:196 Wednesday 12 November 2025 14:45:30 -0500 (0:00:00.058) 0:01:33.190 **** TASK [fedora.linux_system_roles.snapshot : Set platform/version specific variables] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:3 Wednesday 12 November 2025 14:45:31 -0500 (0:00:00.350) 0:01:33.541 **** included: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml for managed-node2 TASK [fedora.linux_system_roles.snapshot : Ensure ansible_facts used by role] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:2 Wednesday 12 November 2025 14:45:31 -0500 (0:00:00.071) 0:01:33.612 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Check if system is ostree] ********** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:10 Wednesday 12 November 2025 14:45:31 -0500 (0:00:00.052) 0:01:33.664 **** skipping: 
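The remove pass tears down the snapshot LVs in a single role invocation. A minimal sketch; the snapset name is a placeholder:

- name: Run the snapshot role to remove the snapshot LVs
  include_role:
    name: fedora.linux_system_roles.snapshot
  vars:
    snapshot_lvm_action: remove
    snapshot_lvm_snapset_name: snapset1     # assumed snapset name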
TASK [Use the snapshot_lvm_verify option to make sure remove is done] **********
task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/tests/snapshot/tests_mount.yml:196
Wednesday 12 November 2025 14:45:31 -0500 (0:00:00.350) 0:01:33.541 ****

TASK [fedora.linux_system_roles.snapshot : Set platform/version specific variables] ***
task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:3
Wednesday 12 November 2025 14:45:31 -0500 (0:00:00.071) 0:01:33.612 ****
included: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml for managed-node2

TASK [fedora.linux_system_roles.snapshot : Ensure ansible_facts used by role] ***
task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:2
Wednesday 12 November 2025 14:45:31 -0500 (0:00:00.052) 0:01:33.664 ****
skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.snapshot : Check if system is ostree] **********
task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:10
Wednesday 12 November 2025 14:45:31 -0500 (0:00:00.040) 0:01:33.705 ****
skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.snapshot : Set flag to indicate system is ostree] ***
task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:15
Wednesday 12 November 2025 14:45:31 -0500 (0:00:00.040) 0:01:33.746 ****
skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.snapshot : Set platform/version specific variables] ***
task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/set_vars.yml:19
Wednesday 12 November 2025 14:45:31 -0500 (0:00:00.093) 0:01:33.839 ****
skipping: [managed-node2] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" }
skipping: [managed-node2] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" }
ok: [managed-node2] => (item=CentOS_7.yml) => { "ansible_facts": { "__snapshot_python": "/usr/bin/python" }, "ansible_included_var_files": [ "/tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/vars/CentOS_7.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_7.yml" }
skipping: [managed-node2] => (item=CentOS_7.9.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS_7.9.yml", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.snapshot : Enable copr if requested] ***********
task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:6
Wednesday 12 November 2025 14:45:31 -0500 (0:00:00.038) 0:01:33.878 ****
skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.snapshot : Ensure required packages are installed] ***
task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:10
Wednesday 12 November 2025 14:45:32 -0500 (0:00:00.909) 0:01:34.787 ****
ok: [managed-node2] => { "changed": false, "rc": 0, "results": [ "7:lvm2-2.02.187-6.el7_9.5.x86_64 providing lvm2 is already installed", "util-linux-2.23.2-65.el7_9.1.x86_64 providing util-linux is already installed" ] }

TASK [fedora.linux_system_roles.snapshot : Get snapm version] ******************
task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:16
Wednesday 12 November 2025 14:45:32 -0500 (0:00:00.384) 0:01:35.172 ****
fatal: [managed-node2]: FAILED! => { "changed": false, "cmd": "snapm --version", "rc": 2 }
MSG:
[Errno 2] No such file or directory
...ignoring

TASK [fedora.linux_system_roles.snapshot : Set snapm availability fact] ********
task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:24
Wednesday 12 November 2025 14:45:33 -0500 (0:00:00.066) 0:01:35.239 ****
ok: [managed-node2] => { "ansible_facts": { "__snapshot_snapm_available": false }, "changed": false }

TASK [fedora.linux_system_roles.snapshot : Set snapm version] ******************
task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:28
Wednesday 12 November 2025 14:45:33 -0500 (0:00:00.204) 0:01:35.444 ****
skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.snapshot : Set needs bootable support] *********
task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:36
Wednesday 12 November 2025 14:45:33 -0500 (0:00:00.074) 0:01:35.519 ****
ok: [managed-node2] => { "ansible_facts": { "__snapshot_needs_bootable_support": false }, "changed": false }

TASK [fedora.linux_system_roles.snapshot : Package snapm version must be 0.4 or later] ***
task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:40
Wednesday 12 November 2025 14:45:33 -0500 (0:00:00.083) 0:01:35.602 ****
skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.snapshot : Package snapm must be available and be version 0.5 or later for bootable snapsets] ***
task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:49
Wednesday 12 November 2025 14:45:33 -0500 (0:00:00.107) 0:01:35.710 ****
skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" }
META: end_host conditional evaluated to false, continuing execution for managed-node2

TASK [fedora.linux_system_roles.snapshot : Run snapshot module remove] *********
task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:69
Wednesday 12 November 2025 14:45:34 -0500 (0:00:01.127) 0:01:36.837 ****
ok: [managed-node2] => { "changed": false, "errors": "", "message": "", "return_code": 0 }

TASK [fedora.linux_system_roles.snapshot : Print out response] *****************
task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:96
Wednesday 12 November 2025 14:45:34 -0500 (0:00:00.088) 0:01:36.926 ****
ok: [managed-node2] => { "snapshot_cmd": { "changed": false, "errors": "", "failed": false, "message": "", "msg": "", "return_code": 0 } }

TASK [fedora.linux_system_roles.snapshot : Set result] *************************
task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:102
Wednesday 12 November 2025 14:45:34 -0500 (0:00:00.085) 0:01:37.011 ****
ok: [managed-node2] => { "ansible_facts": { "snapshot_cmd": { "changed": false, "errors": "", "failed": false, "message": "", "msg": "", "return_code": 0 } }, "changed": false }

TASK [fedora.linux_system_roles.snapshot : Set snapshot_facts to the JSON results] ***
task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:107
Wednesday 12 November 2025 14:45:34 -0500 (0:00:00.063) 0:01:37.074 ****
skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.snapshot : Show errors] ************************
task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:112
Wednesday 12 November 2025 14:45:34 -0500 (0:00:00.079) 0:01:37.154 ****
skipping: [managed-node2] => {}
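The verify pass repeats the remove with the snapshot_lvm_verify option named in the task above; with the snapshots already gone it reports changed: false. A sketch, with the snapset name again a placeholder:

- name: Use the snapshot_lvm_verify option to make sure remove is done
  include_role:
    name: fedora.linux_system_roles.snapshot
  vars:
    snapshot_lvm_action: remove
    snapshot_lvm_verify: true
    snapshot_lvm_snapset_name: snapset1     # assumed snapset name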
**** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.snapshot : Show errors] ************************ task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:112 Wednesday 12 November 2025 14:45:34 -0500 (0:00:00.063) 0:01:37.074 **** skipping: [managed-node2] => {} TASK [Cleanup] ***************************************************************** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/tests/snapshot/tests_mount.yml:204 Wednesday 12 November 2025 14:45:34 -0500 (0:00:00.079) 0:01:37.154 **** included: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/tests/snapshot/tasks/cleanup.yml for managed-node2 TASK [Remove storage volumes] ************************************************** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/tests/snapshot/tasks/cleanup.yml:7 Wednesday 12 November 2025 14:45:35 -0500 (0:00:00.242) 0:01:37.397 **** TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2 Wednesday 12 November 2025 14:45:35 -0500 (0:00:00.105) 0:01:37.502 **** included: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node2 TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2 Wednesday 12 November 2025 14:45:35 -0500 (0:00:00.100) 0:01:37.603 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7 Wednesday 12 November 2025 14:45:35 -0500 (0:00:00.089) 0:01:37.692 **** skipping: [managed-node2] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node2] => (item=CentOS_7.yml) => { "ansible_facts": { "__storage_blivet_diskvolume_mkfs_option_map": { "ext2": "-F", "ext3": "-F", "ext4": "-F" }, "blivet_package_list": [ "python-enum34", "python-blivet3", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}" ] }, "ansible_included_var_files": [ "/tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_7.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_7.yml" } skipping: [managed-node2] => (item=CentOS_7.9.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS_7.9.yml", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Check if system is ostree] *********** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25 Wednesday 12 November 2025 14:45:35 -0500 (0:00:00.164) 0:01:37.857 **** skipping: [managed-node2] => { "changed": false, "skip_reason": 
"Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30 Wednesday 12 November 2025 14:45:35 -0500 (0:00:00.077) 0:01:37.935 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5 Wednesday 12 November 2025 14:45:35 -0500 (0:00:00.088) 0:01:38.023 **** ok: [managed-node2] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9 Wednesday 12 November 2025 14:45:35 -0500 (0:00:00.085) 0:01:38.109 **** ok: [managed-node2] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13 Wednesday 12 November 2025 14:45:35 -0500 (0:00:00.062) 0:01:38.171 **** included: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node2 TASK [fedora.linux_system_roles.storage : Make sure blivet is available] ******* task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2 Wednesday 12 November 2025 14:45:36 -0500 (0:00:00.137) 0:01:38.309 **** ok: [managed-node2] => { "changed": false, "rc": 0, "results": [ "python-enum34-1.0.4-1.el7.noarch providing python-enum34 is already installed", "1:python2-blivet3-3.1.3-3.el7.noarch providing python-blivet3 is already installed", "libblockdev-crypto-2.18-5.el7.x86_64 providing libblockdev-crypto is already installed", "libblockdev-dm-2.18-5.el7.x86_64 providing libblockdev-dm is already installed", "libblockdev-lvm-2.18-5.el7.x86_64 providing libblockdev-lvm is already installed", "libblockdev-mdraid-2.18-5.el7.x86_64 providing libblockdev-mdraid is already installed", "libblockdev-swap-2.18-5.el7.x86_64 providing libblockdev-swap is already installed", "libblockdev-2.18-5.el7.x86_64 providing libblockdev is already installed" ] } TASK [fedora.linux_system_roles.storage : Show storage_pools] ****************** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9 Wednesday 12 November 2025 14:45:36 -0500 (0:00:00.899) 0:01:39.209 **** ok: [managed-node2] => { "storage_pools | d([])": [ { "disks": [ "sda", "sdb", "sdc" ], "name": "test_vg1", "state": "absent", "volumes": [ { "name": "lv1", "state": "absent" }, { "name": "lv2", "state": "absent" } ] }, { "disks": [ "sdd", "sde", "sdf" ], "name": "test_vg2", "state": "absent", "volumes": [ { "name": "lv3", "state": "absent" }, { "name": "lv4", "state": "absent" } ] }, { "disks": [ "sdg", "sdh", "sdi", "sdj" ], "name": "test_vg3", "state": "absent", "volumes": [ { "name": "lv5", "state": "absent" }, { "name": "lv6", "state": "absent" }, { "name": "lv7", "state": "absent" }, { "name": "lv8", "state": "absent" } ] } ] } TASK [fedora.linux_system_roles.storage : Show storage_volumes] 
**************** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14 Wednesday 12 November 2025 14:45:37 -0500 (0:00:00.089) 0:01:39.299 **** ok: [managed-node2] => { "storage_volumes | d([])": [] } TASK [fedora.linux_system_roles.storage : Get required packages] *************** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19 Wednesday 12 November 2025 14:45:37 -0500 (0:00:00.062) 0:01:39.361 **** ok: [managed-node2] => { "actions": [], "changed": false, "crypts": [], "leaves": [], "mounts": [], "packages": [], "pools": [], "volumes": [] } TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:32 Wednesday 12 November 2025 14:45:42 -0500 (0:00:05.130) 0:01:44.492 **** included: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml for managed-node2 TASK [fedora.linux_system_roles.storage : Check if the COPR support packages should be installed] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:2 Wednesday 12 November 2025 14:45:42 -0500 (0:00:00.111) 0:01:44.603 **** TASK [fedora.linux_system_roles.storage : Make sure COPR support packages are present] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:13 Wednesday 12 November 2025 14:45:42 -0500 (0:00:00.122) 0:01:44.726 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Enable COPRs] ************************ task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:19 Wednesday 12 November 2025 14:45:42 -0500 (0:00:00.091) 0:01:44.817 **** TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:38 Wednesday 12 November 2025 14:45:42 -0500 (0:00:00.056) 0:01:44.874 **** ok: [managed-node2] => { "changed": false, "rc": 0, "results": [ "kpartx-0.4.9-136.el7_9.x86_64 providing kpartx is already installed" ] } TASK [fedora.linux_system_roles.storage : Get service facts] ******************* task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:52 Wednesday 12 November 2025 14:45:43 -0500 (0:00:00.683) 0:01:45.557 **** ok: [managed-node2] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "arp-ethers.service": { "name": "arp-ethers.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": 
"static" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "blivet.service": { "name": "blivet.service", "source": "systemd", "state": "inactive", "status": "static" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "brandbot.service": { "name": "brandbot.service", "source": "systemd", "state": "inactive", "status": "static" }, "chrony-dnssrv@.service": { "name": "chrony-dnssrv@.service", "source": "systemd", "state": "unknown", "status": "static" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "console-shell.service": { "name": "console-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cpupower.service": { "name": "cpupower.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.import1.service": { "name": "dbus-org.freedesktop.import1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "static" }, "dbus-org.freedesktop.machine1.service": { "name": "dbus-org.freedesktop.machine1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "running", "status": "static" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "running", "status": "static" }, "dmraid-activation.service": { "name": "dmraid-activation.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "dracut-cmdline.service": { "name": 
"dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "ebtables.service": { "name": "ebtables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "unknown" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "halt-local.service": { "name": "halt-local.service", "source": "systemd", "state": "inactive", "status": "static" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "iprdump.service": { "name": "iprdump.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "iprinit.service": { "name": "iprinit.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "iprupdate.service": { "name": "iprupdate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-lvmetad.service": { "name": "lvm2-lvmetad.service", "source": "systemd", "state": "running", "status": "static" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "lvm2-pvscan@.service": { "name": 
"lvm2-pvscan@.service", "source": "systemd", "state": "unknown", "status": "static" }, "lvm2-pvscan@8:0.service": { "name": "lvm2-pvscan@8:0.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "lvm2-pvscan@8:112.service": { "name": "lvm2-pvscan@8:112.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "lvm2-pvscan@8:128.service": { "name": "lvm2-pvscan@8:128.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "lvm2-pvscan@8:144.service": { "name": "lvm2-pvscan@8:144.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "lvm2-pvscan@8:16.service": { "name": "lvm2-pvscan@8:16.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "lvm2-pvscan@8:32.service": { "name": "lvm2-pvscan@8:32.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "lvm2-pvscan@8:48.service": { "name": "lvm2-pvscan@8:48.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "lvm2-pvscan@8:64.service": { "name": "lvm2-pvscan@8:64.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "lvm2-pvscan@8:80.service": { "name": "lvm2-pvscan@8:80.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "lvm2-pvscan@8:96.service": { "name": "lvm2-pvscan@8:96.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "mdadm-grow-continue@.service": { "name": "mdadm-grow-continue@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdadm-last-resort@.service": { "name": "mdadm-last-resort@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdcheck_continue.service": { "name": "mdcheck_continue.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdcheck_start.service": { "name": "mdcheck_start.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmon@.service": { "name": "mdmon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdmonitor-oneshot.service": { "name": "mdmonitor-oneshot.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmonitor.service": { "name": "mdmonitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "messagebus.service": { "name": "messagebus.service", "source": "systemd", "state": "active", "status": "static" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "netconsole": { "name": "netconsole", "source": "sysv", "state": "stopped", "status": "disabled" }, "network": { "name": "network", "source": "sysv", "state": "running", "status": "enabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-config.service": { "name": "nfs-config.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-idmap.service": { "name": "nfs-idmap.service", "source": "systemd", "state": "inactive", "status": "static" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-lock.service": { "name": "nfs-lock.service", "source": "systemd", "state": "inactive", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-rquotad.service": { "name": 
"nfs-rquotad.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-secure.service": { "name": "nfs-secure.service", "source": "systemd", "state": "inactive", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs.service": { "name": "nfs.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfslock.service": { "name": "nfslock.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-halt.service": { "name": "plymouth-halt.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "plymouth-kexec.service": { "name": "plymouth-kexec.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "plymouth-poweroff.service": { "name": "plymouth-poweroff.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "plymouth-quit.service": { "name": "plymouth-quit.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "plymouth-read-write.service": { "name": "plymouth-read-write.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "plymouth-reboot.service": { "name": "plymouth-reboot.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "plymouth-switch-root.service": { "name": "plymouth-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "running", "status": "static" }, "postfix.service": { "name": "postfix.service", "source": "systemd", "state": "running", "status": "enabled" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon.service": { "name": "quotaon.service", "source": "systemd", "state": "inactive", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rdisc.service": { "name": "rdisc.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rhel-autorelabel-mark.service": { "name": "rhel-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rhel-autorelabel.service": { "name": "rhel-autorelabel.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rhel-configure.service": { "name": "rhel-configure.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rhel-dmesg.service": { "name": "rhel-dmesg.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rhel-domainname.service": { "name": "rhel-domainname.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rhel-import-state.service": { "name": "rhel-import-state.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rhel-loadmodules.service": 
{ "name": "rhel-loadmodules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rhel-readonly.service": { "name": "rhel-readonly.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-rquotad.service": { "name": "rpc-rquotad.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpcgssd.service": { "name": "rpcgssd.service", "source": "systemd", "state": "inactive", "status": "static" }, "rpcidmapd.service": { "name": "rpcidmapd.service", "source": "systemd", "state": "inactive", "status": "static" }, "rsyncd.service": { "name": "rsyncd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyncd@.service": { "name": "rsyncd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-policy-migrate-local-changes@.service": { "name": "selinux-policy-migrate-local-changes@.service", "source": "systemd", "state": "unknown", "status": "static" }, "selinux-policy-migrate-local-changes@targeted.service": { "name": "selinux-policy-migrate-local-changes@targeted.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "unknown" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "static" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-plymouth.service": { "name": "systemd-ask-password-plymouth.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bootchart.service": { "name": "systemd-bootchart.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, 
"systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-resume@.service": { "name": "systemd-hibernate-resume@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-importd.service": { "name": "systemd-importd.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-machined.service": { "name": "systemd-machined.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-nspawn@.service": { "name": "systemd-nspawn@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-quotacheck.service": { "name": "systemd-quotacheck.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-readahead-collect.service": { "name": "systemd-readahead-collect.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-readahead-done.service": { "name": "systemd-readahead-done.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "systemd-readahead-drop.service": { "name": "systemd-readahead-drop.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "systemd-readahead-replay.service": { "name": "systemd-readahead-replay.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-reboot.service": { "name": 
"systemd-reboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill@.service": { "name": "systemd-rfkill@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-shutdownd.service": { "name": "systemd-shutdownd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "target.service": { "name": "target.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "targetclid.service": { "name": "targetclid.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "teamd@.service": { "name": "teamd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "tuned.service": { "name": "tuned.service", "source": "systemd", "state": "running", "status": "enabled" }, "wpa_supplicant.service": { "name": "wpa_supplicant.service", "source": "systemd", "state": "inactive", "status": "disabled" } } }, "changed": false } TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] ***** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:58 Wednesday 12 November 2025 14:45:44 -0500 (0:00:00.994) 0:01:46.551 **** ok: [managed-node2] => { "ansible_facts": { "storage_cryptsetup_services": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] *** task path: 
/tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:64 Wednesday 12 November 2025 14:45:44 -0500 (0:00:00.087) 0:01:46.639 **** TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:70 Wednesday 12 November 2025 14:45:44 -0500 (0:00:00.052) 0:01:46.691 **** changed: [managed-node2] => { "actions": [ { "action": "destroy format", "device": "/dev/mapper/test_vg3-lv8", "fs_type": "xfs" }, { "action": "destroy device", "device": "/dev/mapper/test_vg3-lv8", "fs_type": null }, { "action": "destroy format", "device": "/dev/mapper/test_vg3-lv7", "fs_type": "xfs" }, { "action": "destroy device", "device": "/dev/mapper/test_vg3-lv7", "fs_type": null }, { "action": "destroy format", "device": "/dev/mapper/test_vg3-lv6", "fs_type": "xfs" }, { "action": "destroy device", "device": "/dev/mapper/test_vg3-lv6", "fs_type": null }, { "action": "destroy format", "device": "/dev/mapper/test_vg3-lv5", "fs_type": "xfs" }, { "action": "destroy device", "device": "/dev/mapper/test_vg3-lv5", "fs_type": null }, { "action": "destroy device", "device": "/dev/test_vg3", "fs_type": null }, { "action": "destroy format", "device": "/dev/sdg", "fs_type": "lvmpv" }, { "action": "destroy format", "device": "/dev/sdi", "fs_type": "lvmpv" }, { "action": "destroy format", "device": "/dev/sdj", "fs_type": "lvmpv" }, { "action": "destroy format", "device": "/dev/sdh", "fs_type": "lvmpv" }, { "action": "destroy format", "device": "/dev/mapper/test_vg2-lv4", "fs_type": "xfs" }, { "action": "destroy device", "device": "/dev/mapper/test_vg2-lv4", "fs_type": null }, { "action": "destroy format", "device": "/dev/mapper/test_vg2-lv3", "fs_type": "xfs" }, { "action": "destroy device", "device": "/dev/mapper/test_vg2-lv3", "fs_type": null }, { "action": "destroy device", "device": "/dev/test_vg2", "fs_type": null }, { "action": "destroy format", "device": "/dev/sde", "fs_type": "lvmpv" }, { "action": "destroy format", "device": "/dev/sdd", "fs_type": "lvmpv" }, { "action": "destroy format", "device": "/dev/sdf", "fs_type": "lvmpv" }, { "action": "destroy format", "device": "/dev/mapper/test_vg1-lv2", "fs_type": "xfs" }, { "action": "destroy device", "device": "/dev/mapper/test_vg1-lv2", "fs_type": null }, { "action": "destroy format", "device": "/dev/mapper/test_vg1-lv1", "fs_type": "xfs" }, { "action": "destroy device", "device": "/dev/mapper/test_vg1-lv1", "fs_type": null }, { "action": "destroy device", "device": "/dev/test_vg1", "fs_type": null }, { "action": "destroy format", "device": "/dev/sdb", "fs_type": "lvmpv" }, { "action": "destroy format", "device": "/dev/sdc", "fs_type": "lvmpv" }, { "action": "destroy format", "device": "/dev/sda", "fs_type": "lvmpv" } ], "changed": true, "crypts": [], "leaves": [ "/dev/sda", "/dev/sdb", "/dev/sdk", "/dev/sdl", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/sdj", "/dev/xvda1" ], "mounts": [], "packages": [ "e2fsprogs" ], "pools": [ { "disks": [ "sda", "sdb", "sdc" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "test_vg1", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, 
"raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "absent", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/test_vg1-lv1", "_mount_id": "/dev/mapper/test_vg1-lv1", "_raw_device": "/dev/mapper/test_vg1-lv1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda", "sdb", "sdc" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": null, "mount_user": null, "name": "lv1", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": 1451229184, "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/test_vg1-lv2", "_mount_id": "/dev/mapper/test_vg1-lv2", "_raw_device": "/dev/mapper/test_vg1-lv2", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda", "sdb", "sdc" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": null, "mount_user": null, "name": "lv2", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": 4827643904, "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] }, { "disks": [ "sdd", "sde", "sdf" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "test_vg2", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "absent", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/test_vg2-lv3", "_mount_id": "/dev/mapper/test_vg2-lv3", "_raw_device": "/dev/mapper/test_vg2-lv3", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sdd", "sde", "sdf" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": null, "mount_user": null, "name": "lv3", "part_type": null, "raid_chunk_size": null, 
"raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": 968884224, "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/test_vg2-lv4", "_mount_id": "/dev/mapper/test_vg2-lv4", "_raw_device": "/dev/mapper/test_vg2-lv4", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sdd", "sde", "sdf" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": null, "mount_user": null, "name": "lv4", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": 1933574144, "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] }, { "disks": [ "sdg", "sdh", "sdi", "sdj" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "test_vg3", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "absent", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/test_vg3-lv5", "_mount_id": "/dev/mapper/test_vg3-lv5", "_raw_device": "/dev/mapper/test_vg3-lv5", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sdg", "sdh", "sdi", "sdj" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": null, "mount_user": null, "name": "lv5", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": 3862953984, "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/test_vg3-lv6", "_mount_id": "/dev/mapper/test_vg3-lv6", "_raw_device": "/dev/mapper/test_vg3-lv6", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sdg", "sdh", "sdi", "sdj" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", 
"mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": null, "mount_user": null, "name": "lv6", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": 3217031168, "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/test_vg3-lv7", "_mount_id": "/dev/mapper/test_vg3-lv7", "_raw_device": "/dev/mapper/test_vg3-lv7", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sdg", "sdh", "sdi", "sdj" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": null, "mount_user": null, "name": "lv7", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": 1287651328, "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/test_vg3-lv8", "_mount_id": "/dev/mapper/test_vg3-lv8", "_raw_device": "/dev/mapper/test_vg3-lv8", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sdg", "sdh", "sdi", "sdj" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": null, "mount_user": null, "name": "lv8", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": 1287651328, "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:85 Wednesday 12 November 2025 14:45:54 -0500 (0:00:09.936) 0:01:56.628 **** ok: [managed-node2] => { "changed": false, "cmd": [ "udevadm", "trigger", "--subsystem-match=block" ], "delta": "0:00:00.008820", "end": "2025-11-12 14:45:54.762520", "rc": 0, "start": "2025-11-12 14:45:54.753700" } TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ****** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:92 Wednesday 12 November 2025 14:45:54 -0500 (0:00:00.491) 0:01:57.119 **** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1762976547.0214868, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 
4096, "blocks": 8, "charset": "us-ascii", "checksum": "72884e3f126482c2d28276ff7c57744fa95eff91", "ctime": 1762976535.3184993, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 263693, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1762976535.3174992, "nlink": 1, "path": "/etc/fstab", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 1229, "uid": 0, "version": "1805363463", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:97 Wednesday 12 November 2025 14:45:55 -0500 (0:00:00.390) 0:01:57.510 **** ok: [managed-node2] => { "backup": "", "changed": false } TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:115 Wednesday 12 November 2025 14:45:55 -0500 (0:00:00.496) 0:01:58.006 **** TASK [fedora.linux_system_roles.storage : Show blivet_output] ****************** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:121 Wednesday 12 November 2025 14:45:55 -0500 (0:00:00.134) 0:01:58.141 **** ok: [managed-node2] => { "blivet_output": { "actions": [ { "action": "destroy format", "device": "/dev/mapper/test_vg3-lv8", "fs_type": "xfs" }, { "action": "destroy device", "device": "/dev/mapper/test_vg3-lv8", "fs_type": null }, { "action": "destroy format", "device": "/dev/mapper/test_vg3-lv7", "fs_type": "xfs" }, { "action": "destroy device", "device": "/dev/mapper/test_vg3-lv7", "fs_type": null }, { "action": "destroy format", "device": "/dev/mapper/test_vg3-lv6", "fs_type": "xfs" }, { "action": "destroy device", "device": "/dev/mapper/test_vg3-lv6", "fs_type": null }, { "action": "destroy format", "device": "/dev/mapper/test_vg3-lv5", "fs_type": "xfs" }, { "action": "destroy device", "device": "/dev/mapper/test_vg3-lv5", "fs_type": null }, { "action": "destroy device", "device": "/dev/test_vg3", "fs_type": null }, { "action": "destroy format", "device": "/dev/sdg", "fs_type": "lvmpv" }, { "action": "destroy format", "device": "/dev/sdi", "fs_type": "lvmpv" }, { "action": "destroy format", "device": "/dev/sdj", "fs_type": "lvmpv" }, { "action": "destroy format", "device": "/dev/sdh", "fs_type": "lvmpv" }, { "action": "destroy format", "device": "/dev/mapper/test_vg2-lv4", "fs_type": "xfs" }, { "action": "destroy device", "device": "/dev/mapper/test_vg2-lv4", "fs_type": null }, { "action": "destroy format", "device": "/dev/mapper/test_vg2-lv3", "fs_type": "xfs" }, { "action": "destroy device", "device": "/dev/mapper/test_vg2-lv3", "fs_type": null }, { "action": "destroy device", "device": "/dev/test_vg2", "fs_type": null }, { "action": "destroy format", "device": "/dev/sde", "fs_type": "lvmpv" }, { "action": "destroy format", "device": "/dev/sdd", "fs_type": "lvmpv" }, { "action": "destroy format", "device": "/dev/sdf", "fs_type": "lvmpv" }, { "action": "destroy format", "device": "/dev/mapper/test_vg1-lv2", "fs_type": "xfs" }, { "action": "destroy device", "device": "/dev/mapper/test_vg1-lv2", 
"fs_type": null }, { "action": "destroy format", "device": "/dev/mapper/test_vg1-lv1", "fs_type": "xfs" }, { "action": "destroy device", "device": "/dev/mapper/test_vg1-lv1", "fs_type": null }, { "action": "destroy device", "device": "/dev/test_vg1", "fs_type": null }, { "action": "destroy format", "device": "/dev/sdb", "fs_type": "lvmpv" }, { "action": "destroy format", "device": "/dev/sdc", "fs_type": "lvmpv" }, { "action": "destroy format", "device": "/dev/sda", "fs_type": "lvmpv" } ], "changed": true, "crypts": [], "failed": false, "leaves": [ "/dev/sda", "/dev/sdb", "/dev/sdk", "/dev/sdl", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/sdj", "/dev/xvda1" ], "mounts": [], "packages": [ "e2fsprogs" ], "pools": [ { "disks": [ "sda", "sdb", "sdc" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "test_vg1", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "absent", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/test_vg1-lv1", "_mount_id": "/dev/mapper/test_vg1-lv1", "_raw_device": "/dev/mapper/test_vg1-lv1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda", "sdb", "sdc" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": null, "mount_user": null, "name": "lv1", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": 1451229184, "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/test_vg1-lv2", "_mount_id": "/dev/mapper/test_vg1-lv2", "_raw_device": "/dev/mapper/test_vg1-lv2", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda", "sdb", "sdc" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": null, "mount_user": null, "name": "lv2", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": 4827643904, "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] }, { "disks": [ "sdd", "sde", "sdf" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, 
"encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "test_vg2", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "absent", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/test_vg2-lv3", "_mount_id": "/dev/mapper/test_vg2-lv3", "_raw_device": "/dev/mapper/test_vg2-lv3", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sdd", "sde", "sdf" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": null, "mount_user": null, "name": "lv3", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": 968884224, "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/test_vg2-lv4", "_mount_id": "/dev/mapper/test_vg2-lv4", "_raw_device": "/dev/mapper/test_vg2-lv4", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sdd", "sde", "sdf" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": null, "mount_user": null, "name": "lv4", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": 1933574144, "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] }, { "disks": [ "sdg", "sdh", "sdi", "sdj" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "test_vg3", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "absent", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/test_vg3-lv5", "_mount_id": "/dev/mapper/test_vg3-lv5", "_raw_device": "/dev/mapper/test_vg3-lv5", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sdg", "sdh", "sdi", "sdj" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": 
"", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": null, "mount_user": null, "name": "lv5", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": 3862953984, "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/test_vg3-lv6", "_mount_id": "/dev/mapper/test_vg3-lv6", "_raw_device": "/dev/mapper/test_vg3-lv6", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sdg", "sdh", "sdi", "sdj" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": null, "mount_user": null, "name": "lv6", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": 3217031168, "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/test_vg3-lv7", "_mount_id": "/dev/mapper/test_vg3-lv7", "_raw_device": "/dev/mapper/test_vg3-lv7", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sdg", "sdh", "sdi", "sdj" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": null, "mount_user": null, "name": "lv7", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": 1287651328, "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/test_vg3-lv8", "_mount_id": "/dev/mapper/test_vg3-lv8", "_raw_device": "/dev/mapper/test_vg3-lv8", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sdg", "sdh", "sdi", "sdj" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": null, "mount_user": null, "name": "lv8", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, 
"raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": 1287651328, "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } } TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:130 Wednesday 12 November 2025 14:45:56 -0500 (0:00:00.203) 0:01:58.345 **** ok: [managed-node2] => { "ansible_facts": { "_storage_pools_list": [ { "disks": [ "sda", "sdb", "sdc" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "test_vg1", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "absent", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/test_vg1-lv1", "_mount_id": "/dev/mapper/test_vg1-lv1", "_raw_device": "/dev/mapper/test_vg1-lv1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda", "sdb", "sdc" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": null, "mount_user": null, "name": "lv1", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": 1451229184, "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/test_vg1-lv2", "_mount_id": "/dev/mapper/test_vg1-lv2", "_raw_device": "/dev/mapper/test_vg1-lv2", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda", "sdb", "sdc" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": null, "mount_user": null, "name": "lv2", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": 4827643904, "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] }, { "disks": [ "sdd", "sde", "sdf" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, 
"encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "test_vg2", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "absent", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/test_vg2-lv3", "_mount_id": "/dev/mapper/test_vg2-lv3", "_raw_device": "/dev/mapper/test_vg2-lv3", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sdd", "sde", "sdf" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": null, "mount_user": null, "name": "lv3", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": 968884224, "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/test_vg2-lv4", "_mount_id": "/dev/mapper/test_vg2-lv4", "_raw_device": "/dev/mapper/test_vg2-lv4", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sdd", "sde", "sdf" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": null, "mount_user": null, "name": "lv4", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": 1933574144, "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] }, { "disks": [ "sdg", "sdh", "sdi", "sdj" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "test_vg3", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "absent", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/test_vg3-lv5", "_mount_id": "/dev/mapper/test_vg3-lv5", "_raw_device": "/dev/mapper/test_vg3-lv5", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sdg", "sdh", "sdi", "sdj" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": 
"uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": null, "mount_user": null, "name": "lv5", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": 3862953984, "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/test_vg3-lv6", "_mount_id": "/dev/mapper/test_vg3-lv6", "_raw_device": "/dev/mapper/test_vg3-lv6", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sdg", "sdh", "sdi", "sdj" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": null, "mount_user": null, "name": "lv6", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": 3217031168, "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/test_vg3-lv7", "_mount_id": "/dev/mapper/test_vg3-lv7", "_raw_device": "/dev/mapper/test_vg3-lv7", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sdg", "sdh", "sdi", "sdj" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": null, "mount_user": null, "name": "lv7", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": 1287651328, "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/test_vg3-lv8", "_mount_id": "/dev/mapper/test_vg3-lv8", "_raw_device": "/dev/mapper/test_vg3-lv8", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sdg", "sdh", "sdi", "sdj" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": null, "mount_user": null, "name": "lv8", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, 
"size": 1287651328, "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ] }, "changed": false } TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:134 Wednesday 12 November 2025 14:45:56 -0500 (0:00:00.147) 0:01:58.492 **** ok: [managed-node2] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] ************** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:150 Wednesday 12 November 2025 14:45:56 -0500 (0:00:00.081) 0:01:58.574 **** TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:161 Wednesday 12 November 2025 14:45:56 -0500 (0:00:00.070) 0:01:58.645 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set up new/current mounts] *********** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:166 Wednesday 12 November 2025 14:45:56 -0500 (0:00:00.060) 0:01:58.706 **** TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:177 Wednesday 12 November 2025 14:45:56 -0500 (0:00:00.051) 0:01:58.757 **** TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:189 Wednesday 12 November 2025 14:45:56 -0500 (0:00:00.056) 0:01:58.814 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:197 Wednesday 12 November 2025 14:45:56 -0500 (0:00:00.067) 0:01:58.881 **** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1762976469.2355814, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1718879272.062, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 131079, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0600", "mtime": 1718879026.308, "nlink": 1, "path": "/etc/crypttab", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": "18446744072852913879", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] *** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:202 Wednesday 12 November 2025 14:45:56 -0500 
(0:00:00.346) 0:01:59.227 **** TASK [fedora.linux_system_roles.storage : Update facts] ************************ task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:224 Wednesday 12 November 2025 14:45:57 -0500 (0:00:00.087) 0:01:59.315 **** ok: [managed-node2] TASK [Save unused_disk_return before verify] *********************************** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/tests/snapshot/tasks/cleanup.yml:30 Wednesday 12 November 2025 14:45:57 -0500 (0:00:00.901) 0:02:00.216 **** ok: [managed-node2] => { "ansible_facts": { "unused_disks_before": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi", "sdj" ] }, "changed": false } TASK [Verify that pools/volumes used in test are removed] ********************** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/tests/snapshot/tasks/cleanup.yml:34 Wednesday 12 November 2025 14:45:58 -0500 (0:00:00.049) 0:02:00.265 **** included: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/tests/snapshot/get_unused_disk.yml for managed-node2 TASK [Check if system is ostree] *********************************************** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/tests/snapshot/get_unused_disk.yml:5 Wednesday 12 November 2025 14:45:58 -0500 (0:00:00.176) 0:02:00.442 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set flag to indicate system is ostree] *********************************** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/tests/snapshot/get_unused_disk.yml:10 Wednesday 12 November 2025 14:45:58 -0500 (0:00:00.057) 0:02:00.499 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Ensure test packages] **************************************************** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/tests/snapshot/get_unused_disk.yml:14 Wednesday 12 November 2025 14:45:58 -0500 (0:00:00.056) 0:02:00.555 **** ok: [managed-node2] => { "changed": false, "rc": 0, "results": [ "util-linux-2.23.2-65.el7_9.1.x86_64 providing util-linux is already installed" ] } TASK [Find unused disks in the system] ***************************************** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/tests/snapshot/get_unused_disk.yml:23 Wednesday 12 November 2025 14:45:59 -0500 (0:00:00.812) 0:02:01.368 **** ok: [managed-node2] => { "changed": false, "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi", "sdj" ], "info": [ "Line: NAME=\"/dev/sda\" TYPE=\"disk\" SIZE=\"3221225472\" FSTYPE=\"\" LOG-SEC=\"512\"", "Line: NAME=\"/dev/sdb\" TYPE=\"disk\" SIZE=\"3221225472\" FSTYPE=\"\" LOG-SEC=\"512\"", "Line: NAME=\"/dev/sdc\" TYPE=\"disk\" SIZE=\"3221225472\" FSTYPE=\"\" LOG-SEC=\"512\"", "Line: NAME=\"/dev/sdd\" TYPE=\"disk\" SIZE=\"3221225472\" FSTYPE=\"\" LOG-SEC=\"512\"", "Line: NAME=\"/dev/sde\" TYPE=\"disk\" SIZE=\"3221225472\" FSTYPE=\"\" LOG-SEC=\"512\"", "Line: NAME=\"/dev/sdf\" TYPE=\"disk\" SIZE=\"3221225472\" FSTYPE=\"\" LOG-SEC=\"512\"", "Line: NAME=\"/dev/sdg\" TYPE=\"disk\" SIZE=\"3221225472\" FSTYPE=\"\" LOG-SEC=\"512\"", "Line: NAME=\"/dev/sdh\" TYPE=\"disk\" SIZE=\"3221225472\" FSTYPE=\"\" LOG-SEC=\"512\"", "Line: NAME=\"/dev/sdi\" TYPE=\"disk\" SIZE=\"3221225472\" FSTYPE=\"\" LOG-SEC=\"512\"", "Line: NAME=\"/dev/sdj\" TYPE=\"disk\" 
SIZE=\"3221225472\" FSTYPE=\"\" LOG-SEC=\"512\"", "Line: NAME=\"/dev/sdk\" TYPE=\"disk\" SIZE=\"3221225472\" FSTYPE=\"\" LOG-SEC=\"512\"", "Line: NAME=\"/dev/sdl\" TYPE=\"disk\" SIZE=\"3221225472\" FSTYPE=\"\" LOG-SEC=\"512\"", "Line: NAME=\"/dev/xvda\" TYPE=\"disk\" SIZE=\"268435456000\" FSTYPE=\"\" LOG-SEC=\"512\"", "Line: NAME=\"/dev/xvda1\" TYPE=\"part\" SIZE=\"268434390528\" FSTYPE=\"ext4\" LOG-SEC=\"512\"", "Line type [part] is not disk: NAME=\"/dev/xvda1\" TYPE=\"part\" SIZE=\"268434390528\" FSTYPE=\"ext4\" LOG-SEC=\"512\"", "filename [xvda1] is a partition", "Disk [/dev/xvda] attrs [{'fstype': '', 'type': 'disk', 'ssize': '512', 'size': '268435456000'}] has partitions" ] } TASK [Set unused_disks if necessary] ******************************************* task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/tests/snapshot/get_unused_disk.yml:31 Wednesday 12 November 2025 14:45:59 -0500 (0:00:00.627) 0:02:01.996 **** ok: [managed-node2] => { "ansible_facts": { "unused_disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi", "sdj" ] }, "changed": false } TASK [Print unused disks] ****************************************************** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/tests/snapshot/get_unused_disk.yml:36 Wednesday 12 November 2025 14:45:59 -0500 (0:00:00.116) 0:02:02.112 **** ok: [managed-node2] => { "unused_disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi", "sdj" ] } TASK [Print info from find_unused_disk] **************************************** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/tests/snapshot/get_unused_disk.yml:44 Wednesday 12 November 2025 14:45:59 -0500 (0:00:00.061) 0:02:02.174 **** skipping: [managed-node2] => {} TASK [Show disk information] *************************************************** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/tests/snapshot/get_unused_disk.yml:49 Wednesday 12 November 2025 14:45:59 -0500 (0:00:00.056) 0:02:02.230 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Exit playbook when there's not enough unused disks in the system] ******** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/tests/snapshot/get_unused_disk.yml:58 Wednesday 12 November 2025 14:46:00 -0500 (0:00:00.058) 0:02:02.289 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Debug why list of unused disks has changed] ****************************** task path: /tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/tests/snapshot/tasks/cleanup.yml:40 Wednesday 12 November 2025 14:46:00 -0500 (0:00:00.058) 0:02:02.348 **** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } META: ran handlers META: ran handlers PLAY RECAP ********************************************************************* managed-node2 : ok=214 changed=12 unreachable=0 failed=0 skipped=177 rescued=0 ignored=14 SYSTEM ROLES ERRORS BEGIN v1 [] SYSTEM ROLES ERRORS END v1 TASKS RECAP ******************************************************************** Wednesday 12 November 2025 14:46:00 -0500 (0:00:00.041) 0:02:02.390 **** =============================================================================== fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state --- 9.94s 
/tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:70
fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state --- 7.43s
/tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:70
fedora.linux_system_roles.storage : Get required packages --------------- 5.13s
/tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19
fedora.linux_system_roles.storage : Get required packages --------------- 4.32s
/tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19
fedora.linux_system_roles.snapshot : Run snapshot module snapshot ------- 3.33s
/tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:69
fedora.linux_system_roles.snapshot : Run snapshot module remove --------- 2.86s
/tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:69
fedora.linux_system_roles.snapshot : Run snapshot module umount --------- 2.04s
/tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:69
fedora.linux_system_roles.snapshot : Run snapshot module check ---------- 1.90s
/tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:69
fedora.linux_system_roles.snapshot : Ensure required packages are installed --- 1.82s
/tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:10
Find unused disks in the system ----------------------------------------- 1.65s
/tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/tests/snapshot/get_unused_disk.yml:23
fedora.linux_system_roles.snapshot : Run snapshot module mount ---------- 1.58s
/tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:69
Gathering Facts --------------------------------------------------------- 1.44s
/tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/tests/snapshot/tests_mount.yml:2
fedora.linux_system_roles.snapshot : Ensure required packages are installed --- 1.41s
/tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:10
fedora.linux_system_roles.storage : Make sure blivet is available ------- 1.41s
/tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2
fedora.linux_system_roles.storage : Get service facts ------------------- 1.19s
/tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:52
fedora.linux_system_roles.snapshot : Run snapshot module remove --------- 1.13s
/tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:69
fedora.linux_system_roles.storage : Get service facts ------------------- 1.09s
/tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:52
fedora.linux_system_roles.snapshot : Get snapm version ------------------ 1.08s
/tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:16
Ensure test packages ---------------------------------------------------- 1.07s
/tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/tests/snapshot/get_unused_disk.yml:14
fedora.linux_system_roles.snapshot : Ensure required packages are installed --- 1.03s
/tmp/collections-f1h/ansible_collections/fedora/linux_system_roles/roles/snapshot/tasks/main.yml:10
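Editor's note on the storage cleanup output above: the role's result lists every pool (test_vg1, test_vg2, test_vg3) and volume with "state": "absent", which is what the test asserts cleanup on. A minimal Python sketch for flattening that nested pools/volumes structure into per-volume rows and checking the state; the `flatten_volumes` helper and the trimmed `sample` dict are illustrative, not part of the role:

```python
def flatten_volumes(result):
    """Yield (vg, lv, size_bytes, state) for every volume in a storage-role
    result shaped like the pool lists above."""
    for pool in result.get("pools", []):
        for vol in pool.get("volumes", []):
            yield pool["name"], vol["name"], vol["size"], vol["state"]

# Trimmed sample copied from the 'test_vg1' entry in the log above.
sample = {"pools": [{"name": "test_vg1", "state": "absent", "volumes": [
    {"name": "lv1", "size": 1451229184, "state": "absent"},
    {"name": "lv2", "size": 4827643904, "state": "absent"},
]}]}

for vg, lv, size, state in flatten_volumes(sample):
    assert state == "absent", (vg, lv, state)  # cleanup really removed it
    print("%s/%s %d bytes %s" % (vg, lv, size, state))
```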
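The "Find unused disks in the system" task above reports one `Line: NAME=... TYPE=... SIZE=... FSTYPE=... LOG-SEC=...` record per block device, then drops partitions and any disk that owns partitions (here /dev/xvda). A rough sketch of that filter, assuming the same lsblk field list; this approximates the rules visible in the log and is not the test's actual find_unused_disk module:

```python
#!/usr/bin/env python
"""Approximate the unused-disk filter shown in the log above."""
import re
import subprocess

# Same field list that the task's "Line: ..." records show.
LSBLK_CMD = ["lsblk", "-p", "--pairs", "--bytes",
             "-o", "NAME,TYPE,SIZE,FSTYPE,LOG-SEC"]

def find_unused_disks():
    out = subprocess.check_output(LSBLK_CMD).decode()
    pair = re.compile(r'([A-Z-]+)="([^"]*)"')
    devices = [dict(pair.findall(line))
               for line in out.splitlines() if line.strip()]
    # A disk that owns a partition is in use (e.g. /dev/xvda above).
    # Assumes simple sdX / xvdaN names; NVMe-style parents need a real lookup.
    parents = {d["NAME"].rstrip("0123456789")
               for d in devices if d.get("TYPE") == "part"}
    return [d["NAME"] for d in devices
            if d.get("TYPE") == "disk" and not d.get("FSTYPE")
            and d["NAME"] not in parents]

if __name__ == "__main__":
    print(find_unused_disks())
```

On the node above this would return /dev/sda through /dev/sdl, matching the ten-plus-two unused disks the task found.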
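Finally, the PLAY RECAP condenses the run into per-host counters (ok=214 changed=12 unreachable=0 failed=0 skipped=177 rescued=0 ignored=14). A small sketch for post-processing recap lines when scraping logs like this one; the pass criterion (no failed tasks, no unreachable hosts) is the usual convention rather than something the log itself states:

```python
import re

def parse_recap(line):
    """Split 'host : ok=214 changed=12 ...' into (host, {counter: int})."""
    host, _, stats = line.partition(" : ")
    return host.strip(), {k: int(v)
                          for k, v in re.findall(r"(\w+)=(\d+)", stats)}

host, stats = parse_recap(
    "managed-node2 : ok=214 changed=12 unreachable=0 "
    "failed=0 skipped=177 rescued=0 ignored=14"
)
assert host == "managed-node2"
assert stats["failed"] == 0 and stats["unreachable"] == 0  # run passed
```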