[WARNING]: Collection infra.leapp does not support Ansible version 2.14.18
[WARNING]: running playbook inside collection infra.leapp
ansible-playbook [core 2.14.18]
  config file = /etc/ansible/ansible.cfg
  configured module search path = ['/root/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
  ansible python module location = /usr/lib/python3.9/site-packages/ansible
  ansible collection location = /root/.ansible/collections:/usr/share/ansible/collections
  executable location = /usr/bin/ansible-playbook
  python version = 3.9.25 (main, Nov 10 2025, 00:00:00) [GCC 11.5.0 20240719 (Red Hat 11.5.0-11)] (/usr/bin/python3)
  jinja version = 3.1.2
  libyaml = True
Using /etc/ansible/ansible.cfg as config file
Skipping callback 'default', as we already have a stdout callback.
Skipping callback 'minimal', as we already have a stdout callback.
Skipping callback 'oneline', as we already have a stdout callback.

PLAYBOOK: tests_default.yml ****************************************************
1 plays in /root/.ansible/collections/ansible_collections/infra/leapp/roles/upgrade/tests/tests_default.yml

PLAY [Test] ********************************************************************

TASK [Gathering Facts] *********************************************************
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/upgrade/tests/tests_default.yml:2
ok: [managed-node01]

TASK [Test | Run role upgrade] *************************************************
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/upgrade/tests/tests_default.yml:10

TASK [infra.leapp.common : Log directory exists] *******************************
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/tasks/main.yml:3
ok: [managed-node01] => {"changed": false, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/var/log/ripu", "secontext": "unconfined_u:object_r:var_log_t:s0", "size": 6, "state": "directory", "uid": 0}

TASK [infra.leapp.common : Check for existing log file] ************************
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/tasks/main.yml:11
ok: [managed-node01] => {"changed": false, "stat": {"exists": false}}

TASK [infra.leapp.common : Fail if log file already exists] ********************
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/tasks/main.yml:16
skipping: [managed-node01] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [infra.leapp.common : Create new log file] ********************************
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/tasks/main.yml:23
NOTIFIED HANDLER infra.leapp.common : Check for log file for managed-node01
NOTIFIED HANDLER infra.leapp.common : Add end time to log file for managed-node01
NOTIFIED HANDLER infra.leapp.common : Slurp ripu.log file for managed-node01
NOTIFIED HANDLER infra.leapp.common : Decode ripu.log file for managed-node01
NOTIFIED HANDLER infra.leapp.common : Rename log file for managed-node01
changed: [managed-node01] => {"changed": true, "checksum": "55c341dd95b03c623ee3f502302a711c5dd0d48d", "dest": "/var/log/ripu/ripu.log", "gid": 0, "group": "root", "md5sum": "27e5ffccc479f4f8e84870af87185bf1", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:var_log_t:s0", "size": 61, "src": "/root/.ansible/tmp/ansible-tmp-1764684579.770085-6673-131986221758858/source", "state": "file", "uid": 0}
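The three log-file tasks above form a create-once guard: stat /var/log/ripu/ripu.log, fail if a previous run left one behind, then create it and notify the end-of-run handlers that fire at the bottom of this transcript. A minimal sketch of that pattern in playbook YAML; the task names match the output, but the module arguments, file content, and failure message are assumptions, not the collection's actual source:

    - name: Check for existing log file
      ansible.builtin.stat:
        path: /var/log/ripu/ripu.log
      register: log_file

    - name: Fail if log file already exists
      ansible.builtin.fail:
        msg: /var/log/ripu/ripu.log already exists, so an upgrade may be in progress  # assumed wording
      when: log_file.stat.exists

    - name: Create new log file
      ansible.builtin.copy:
        dest: /var/log/ripu/ripu.log
        content: "Upgrade started {{ ansible_date_time.iso8601 }}\n"  # assumed content (61 bytes in this run)
        mode: "0644"
      notify: Check for log file  # plus the other four handlers notified in the log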
TASK [infra.leapp.common : /etc/ansible/facts.d directory exists] **************
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/tasks/main.yml:35
ok: [managed-node01] => {"changed": false, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/ansible/facts.d", "secontext": "unconfined_u:object_r:etc_t:s0", "size": 57, "state": "directory", "uid": 0}

TASK [infra.leapp.common : Capture current ansible_facts for validation after upgrade] ***
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/tasks/main.yml:43
changed: [managed-node01] => {"changed": true, "checksum": "f707305b7335be0eadedd49fbf8476398e1e8eb8", "dest": "/etc/ansible/facts.d/pre_ripu.fact", "gid": 0, "group": "root", "md5sum": "7dfbf4b837886daf5e37ec03461f06a8", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:etc_t:s0", "size": 11997, "src": "/root/.ansible/tmp/ansible-tmp-1764684580.6538463-6701-122775170740860/source", "state": "file", "uid": 0}

TASK [infra.leapp.common : Capture a list of non-rhel versioned packages] ******
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/tasks/main.yml:51
ok: [managed-node01] => {"changed": false, "cmd": "set -o pipefail; export PATH=$PATH; rpm -qa | grep -ve '[\\.|+]el7' | grep -vE '^(gpg-pubkey|libmodulemd|katello-ca-consumer)' | sort", "delta": "0:00:00.376143", "end": "2025-12-02 09:09:41.822807", "failed_when_result": false, "msg": "", "rc": 0, "start": "2025-12-02 09:09:41.446664", "stderr": "", "stderr_lines": [], "stdout": "epel-release-7-14.noarch\ntps-devel-2.44.50-1.noarch", "stdout_lines": ["epel-release-7-14.noarch", "tps-devel-2.44.50-1.noarch"]}

TASK [infra.leapp.common : Create fact with the non-rhel versioned packages list] ***
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/tasks/main.yml:65
ok: [managed-node01] => {"ansible_facts": {"non_rhel_packages": ["epel-release-7-14.noarch", "tps-devel-2.44.50-1.noarch"]}, "changed": false}

TASK [infra.leapp.common : Capture the list of non-rhel versioned packages in a separate fact file] ***
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/tasks/main.yml:69
ok: [managed-node01] => {"changed": false, "checksum": "6d36b22d9c2b2f366fc090edfbac427c77d524a5", "dest": "/etc/ansible/facts.d/non_rhel_packages.fact", "gid": 0, "group": "root", "mode": "0644", "owner": "root", "path": "/etc/ansible/facts.d/non_rhel_packages.fact", "secontext": "system_u:object_r:etc_t:s0", "size": 58, "state": "file", "uid": 0}

TASK [infra.leapp.upgrade : Include tasks for upgrade using redhat-upgrade-tool] ***
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/upgrade/tasks/main.yml:9
skipping: [managed-node01] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [infra.leapp.upgrade : Include tasks for leapp upgrade] *******************
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/upgrade/tasks/main.yml:13
[WARNING]: Collection community.general does not support Ansible version 2.14.18
included: /root/.ansible/collections/ansible_collections/infra/leapp/roles/upgrade/tasks/leapp-upgrade.yml for managed-node01

TASK [leapp-upgrade | Run parse_leapp_report to check for inhibitors] **********
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/upgrade/tasks/leapp-upgrade.yml:2
skipping: [managed-node01] => {"changed": false, "skip_reason": "Conditional result was False"}
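Before handing over to the upgrade role, the common role records which installed RPMs are not Red Hat el7 builds: the rpm -qa pipeline above (shown verbatim in the task result) drops anything with a .el7/+el7 release tag plus a few known exceptions, and the survivors (here epel-release and tps-devel) are stored both as an in-play fact and as a local fact file. A sketch of that chain, with the pipeline copied from the log and the remaining arguments assumed:

    - name: Capture a list of non-rhel versioned packages
      ansible.builtin.shell: |
        set -o pipefail; export PATH=$PATH; rpm -qa | grep -ve '[\.|+]el7' | grep -vE '^(gpg-pubkey|libmodulemd|katello-ca-consumer)' | sort
      register: non_rhel_rpms
      changed_when: false
      failed_when: false  # assumed: grep exits 1 when every package matches the el7 filter

    - name: Create fact with the non-rhel versioned packages list
      ansible.builtin.set_fact:
        non_rhel_packages: "{{ non_rhel_rpms.stdout_lines }}"

    - name: Capture the list of non-rhel versioned packages in a separate fact file
      ansible.builtin.copy:
        dest: /etc/ansible/facts.d/non_rhel_packages.fact
        content: "{{ non_rhel_packages | to_json }}"  # assumed: a JSON list matches the 58-byte file in the log
        mode: "0644"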
TASK [infra.leapp.upgrade : leapp-upgrade | Verify no inhibitor results found during preupgrade] ***
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/upgrade/tasks/leapp-upgrade.yml:8
skipping: [managed-node01] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [infra.leapp.upgrade : leapp-upgrade | Register to leapp activation key] ***
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/upgrade/tasks/leapp-upgrade.yml:14
skipping: [managed-node01] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [leapp-upgrade | Include custom_local_repos for local_repos_pre_leapp] ****
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/upgrade/tasks/leapp-upgrade.yml:25
skipping: [managed-node01] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [infra.leapp.upgrade : leapp-upgrade | Install packages for upgrade from RHEL 7] ***
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/upgrade/tasks/leapp-upgrade.yml:33
fatal: [managed-node01]: FAILED! => {"changed": false, "msg": "\n\n One of the configured repositories failed (Unknown),\n and yum doesn't have enough cached data to continue. At this point the only\n safe thing yum can do is fail. There are a few ways to work \"fix\" this:\n\n 1. Contact the upstream for the repository and get them to fix the problem.\n\n 2. Reconfigure the baseurl/etc. for the repository, to point to a working\n upstream. This is most often useful if you are using a newer\n distribution release than is supported by the repository (and the\n packages for the previous distribution release still work).\n\n 3. Run the command with the repository temporarily disabled\n yum --disablerepo=<repoid> ...\n\n 4. Disable the repository permanently, so yum won't use it by default. Yum\n will then just ignore the repository until you permanently enable it\n again or use --enablerepo for temporary usage:\n\n yum-config-manager --disable <repoid>\n or\n subscription-manager repos --disable=<repoid>\n\n 5. Configure the failing repository to be skipped, if it is unavailable.\n Note that yum will try to contact the repo. when it runs most commands,\n so will have to try and fail each time (and thus. yum will be be much\n slower). If it is a very temporary problem though, this is often a nice\n compromise:\n\n yum-config-manager --save --setopt=<repoid>.skip_if_unavailable=true\n\nCannot find a valid baseurl for repo: rhel\n", "rc": 1, "results": []}
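This is the real failure of the run, and it happens before leapp is even involved: installing the leapp-upgrade package fails because the host's rhel yum repository has no valid baseurl, so yum cannot build its cache. Judging from the module_args echoed in the next task's output, the failing task is equivalent to this sketch (package name, repo, and state are read straight from those arguments; everything else is left at module defaults):

    - name: leapp-upgrade | Install packages for upgrade from RHEL 7
      ansible.builtin.yum:
        name: leapp-upgrade
        state: latest
        enablerepo:
          - rhel-7-server-extras-rpms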
TASK [Test | Check error] ******************************************************
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/upgrade/tests/tests_default.yml:14
ok: [managed-node01] => {
    "msg": "errors {\n \"_ansible_no_log\": false,\n \"_ansible_parsed\": true,\n \"changed\": false,\n \"failed\": true,\n \"invocation\": {\n \"module_args\": {\n \"allow_downgrade\": false,\n \"autoremove\": false,\n \"bugfix\": false,\n \"cacheonly\": false,\n \"conf_file\": null,\n \"disable_excludes\": null,\n \"disable_gpg_check\": false,\n \"disable_plugin\": [],\n \"disablerepo\": [],\n \"download_dir\": null,\n \"download_only\": false,\n \"enable_plugin\": [],\n \"enablerepo\": [\n \"rhel-7-server-extras-rpms\"\n ],\n \"exclude\": [],\n \"install_repoquery\": true,\n \"install_weak_deps\": true,\n \"installroot\": \"/\",\n \"list\": null,\n \"lock_timeout\": 30,\n \"name\": [\n \"leapp-upgrade\"\n ],\n \"releasever\": null,\n \"security\": false,\n \"skip_broken\": false,\n \"sslverify\": true,\n \"state\": \"latest\",\n \"update_cache\": false,\n \"update_only\": false,\n \"use_backend\": \"auto\",\n \"validate_certs\": true\n }\n },\n \"msg\": \"\\n\\n One of the configured repositories failed (Unknown),\\n and yum doesn't have enough cached data to continue. At this point the only\\n safe thing yum can do is fail. There are a few ways to work \\\"fix\\\" this:\\n\\n 1. Contact the upstream for the repository and get them to fix the problem.\\n\\n 2. Reconfigure the baseurl/etc. for the repository, to point to a working\\n upstream. This is most often useful if you are using a newer\\n distribution release than is supported by the repository (and the\\n packages for the previous distribution release still work).\\n\\n 3. Run the command with the repository temporarily disabled\\n yum --disablerepo=<repoid> ...\\n\\n 4. Disable the repository permanently, so yum won't use it by default. Yum\\n will then just ignore the repository until you permanently enable it\\n again or use --enablerepo for temporary usage:\\n\\n yum-config-manager --disable <repoid>\\n or\\n subscription-manager repos --disable=<repoid>\\n\\n 5. Configure the failing repository to be skipped, if it is unavailable.\\n Note that yum will try to contact the repo. when it runs most commands,\\n so will have to try and fail each time (and thus. yum will be be much\\n slower). If it is a very temporary problem though, this is often a nice\\n compromise:\\n\\n yum-config-manager --save --setopt=<repoid>.skip_if_unavailable=true\\n\\nCannot find a valid baseurl for repo: rhel\\n\",\n \"rc\": 1,\n \"results\": []\n}"
}
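The yum error text above already enumerates the fixes. On a throwaway test node where the rhel repo is known to be unreachable, option 5 (marking the repository skip-if-unavailable) is the least invasive; a hedged one-task sketch, assuming yum-config-manager is installed and that the failing repo id really is rhel, as the last line of the error reports:

    - name: Let yum proceed even when the rhel repo is unreachable
      ansible.builtin.command:
        cmd: yum-config-manager --save --setopt=rhel.skip_if_unavailable=true
        # repo id "rhel" taken from "Cannot find a valid baseurl for repo: rhel" above
      changed_when: true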
TASK [Test | Ensure correct error] *********************************************
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/upgrade/tests/tests_default.yml:18
skipping: [managed-node01] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [Cleanup | Remove log files] **********************************************
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/upgrade/tests/tests_default.yml:27
changed: [managed-node01] => {"changed": true, "cmd": "set -euxo pipefail\nrm -f /var/log/leapp/leapp-upgrade.log\nrm -f /var/log/ripu/ripu.log*\n", "delta": "0:00:00.004462", "end": "2025-12-02 09:09:43.599644", "msg": "", "rc": 0, "start": "2025-12-02 09:09:43.595182", "stderr": "+ rm -f /var/log/leapp/leapp-upgrade.log\n+ rm -f /var/log/ripu/ripu.log", "stderr_lines": ["+ rm -f /var/log/leapp/leapp-upgrade.log", "+ rm -f /var/log/ripu/ripu.log"], "stdout": "", "stdout_lines": []}

RUNNING HANDLER [infra.leapp.common : Check for log file] **********************
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/handlers/main.yml:3
ok: [managed-node01] => {"changed": false, "stat": {"exists": false}}

RUNNING HANDLER [infra.leapp.common : Add end time to log file] ****************
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/handlers/main.yml:9
skipping: [managed-node01] => {"changed": false, "skip_reason": "Conditional result was False"}

RUNNING HANDLER [infra.leapp.common : Slurp ripu.log file] *********************
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/handlers/main.yml:19
skipping: [managed-node01] => {"changed": false, "skip_reason": "Conditional result was False"}

RUNNING HANDLER [infra.leapp.common : Decode ripu.log file] ********************
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/handlers/main.yml:26
skipping: [managed-node01] => {"changed": false, "skip_reason": "Conditional result was False"}

RUNNING HANDLER [infra.leapp.common : Rename log file] *************************
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/handlers/main.yml:32
skipping: [managed-node01] => {"changed": false, "skip_reason": "Conditional result was False"}

PLAY RECAP *********************************************************************
managed-node01             : ok=13   changed=3    unreachable=0    failed=0    skipped=11   rescued=1    ignored=0
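The recap's rescued=1 alongside failed=0 explains why a run with a fatal task still ends green: tests_default.yml evidently wraps the role in a block whose rescue feeds the failure into the Test | Check error and Test | Ensure correct error tasks, with the log cleanup running regardless. A minimal sketch of that harness; the cleanup command is verbatim from the log, while the block structure and the use of ansible_failed_result are assumptions:

    - name: Test
      hosts: all
      tasks:
        - name: Test | Run role upgrade
          block:
            - ansible.builtin.include_role:
                name: infra.leapp.upgrade
          rescue:
            - name: Test | Check error
              ansible.builtin.debug:
                msg: "errors {{ ansible_failed_result | to_nice_json }}"  # assumed formatting
          always:
            - name: Cleanup | Remove log files
              ansible.builtin.shell: |
                set -euxo pipefail
                rm -f /var/log/leapp/leapp-upgrade.log
                rm -f /var/log/ripu/ripu.log*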