[
{
    "branch": "main",
    "index": "0",
    "phase": "pre",
    "playbook": "vexxhost.dev/zuul-config/playbooks/base/pre.yaml",
    "plays": [
        {
            "play": {
                "duration": {
                    "end": "2026-02-16T22:54:13.467225Z",
                    "start": "2026-02-16T22:54:11.032716Z"
                },
                "id": "0242ac17-0010-bb2d-7573-000000000002",
                "name": "localhost"
            },
            "tasks": [
                {
                    "hosts": {
                        "localhost": {
                            "action": "gather_facts",
                            "ansible_facts": {
                                "ansible_apparmor": {
                                    "status": "disabled"
                                },
                                "ansible_architecture": "x86_64",
                                "ansible_bios_date": "NA",
                                "ansible_bios_vendor": "NA",
                                "ansible_bios_version": "NA",
                                "ansible_board_asset_tag": "NA",
                                "ansible_board_name": "NA",
                                "ansible_board_serial": "NA",
                                "ansible_board_vendor": "NA",
                                "ansible_board_version": "NA",
                                "ansible_chassis_asset_tag": "NA",
                                "ansible_chassis_serial": "NA",
                                "ansible_chassis_vendor": "NA",
                                "ansible_chassis_version": "NA",
                                "ansible_cmdline": {
                                    "BOOT_IMAGE": "/boot/vmlinuz-5.15.0-130-generic",
                                    "console": "ttyS0",
                                    "ro": true,
                                    "root": "UUID=5a569d86-b935-46dd-ae79-7a72a25b6a4c"
                                },
                                "ansible_date_time": {
                                    "date": "2026-02-16",
                                    "day": "16",
                                    "epoch": "1771282451",
                                    "epoch_int": "1771282451",
                                    "hour": "22",
                                    "iso8601": "2026-02-16T22:54:11Z",
                                    "iso8601_basic": "20260216T225411649573",
                                    "iso8601_basic_short": "20260216T225411",
                                    "iso8601_micro": "2026-02-16T22:54:11.649573Z",
                                    "minute": "54",
                                    "month": "02",
                                    "second": "11",
                                    "time": "22:54:11",
                                    "tz": "UTC",
                                    "tz_dst": "UTC",
                                    "tz_offset": "+0000",
                                    "weekday": "Monday",
                                    "weekday_number": "1",
                                    "weeknumber": "07",
                                    "year": "2026"
                                },
                                "ansible_devices": {},
                                "ansible_distribution": "Debian",
                                "ansible_distribution_major_version": "12",
                                "ansible_distribution_release": "bookworm",
                                "ansible_distribution_version": "12",
                                "ansible_dns": {
                                    "nameservers": [
                                        "127.0.0.11"
                                    ],
                                    "options": {
                                        "edns0": true,
                                        "ndots": "0",
                                        "trust-ad": true
                                    },
                                    "search": [
                                        "openstacklocal"
                                    ]
                                },
                                "ansible_domain": "",
                                "ansible_effective_group_id": 0,
                                "ansible_effective_user_id": 0,
                                "ansible_env": {
                                    "ANSIBLE_CONFIG": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/ansible/pre_playbook_0/ansible.cfg",
                                    "DEBIAN_FRONTEND": "noninteractive",
                                    "GPG_KEY": "A035C8C19219BA821ECEA86B64E628F8D684696D",
                                    "HOME": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/work",
                                    "HOSTNAME": "3a2793d2bd32",
                                    "LANG": "C.UTF-8",
                                    "PATH": "/usr/local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin",
                                    "PWD": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/trusted/project_0/vexxhost.dev/zuul-config/playbooks/base",
                                    "PYTHONPATH": "/var/lib/zuul/ansible/9",
                                    "PYTHON_SHA256": "8fb5f9fbc7609fa822cb31549884575db7fd9657cbffb89510b5d7975963a83a",
                                    "PYTHON_VERSION": "3.11.13",
                                    "SSH_AGENT_PID": "51687",
                                    "SSH_AUTH_SOCK": "/tmp/ssh-IBu6KrtI1nhT/agent.51686",
                                    "TMP": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/work/tmp",
                                    "ZUUL_ANSIBLE_SPLIT_STREAMS": "False",
                                    "ZUUL_JOBDIR": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954",
                                    "ZUUL_JOB_FAILURE_OUTPUT": "[]",
                                    "ZUUL_JOB_LOG_CONFIG": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/ansible/logging.json",
                                    "ZUUL_OUTPUT_MAX_BYTES": "1073741824"
                                },
                                "ansible_fibre_channel_wwn": [],
                                "ansible_fips": false,
                                "ansible_form_factor": "NA",
                                "ansible_fqdn": "3a2793d2bd32",
                                "ansible_hostname": "3a2793d2bd32",
                                "ansible_hostnqn": "",
                                "ansible_is_chroot": false,
                                "ansible_iscsi_iqn": "",
                                "ansible_kernel": "5.15.0-130-generic",
                                "ansible_kernel_version": "#140-Ubuntu SMP Wed Dec 18 17:59:53 UTC 2024",
                                "ansible_loadavg": {
                                    "15m": 0.7451171875,
                                    "1m": 1.47021484375,
                                    "5m": 1.0048828125
                                },
                                "ansible_local": {},
                                "ansible_lsb": {},
                                "ansible_lvm": "N/A",
                                "ansible_machine": "x86_64",
                                "ansible_memfree_mb": 3270,
                                "ansible_memory_mb": {
                                    "nocache": {
                                        "free": 17967,
                                        "used": 14123
                                    },
                                    "real": {
                                        "free": 3270,
                                        "total": 32090,
                                        "used": 28820
                                    },
                                    "swap": {
                                        "cached": 0,
                                        "free": 0,
                                        "total": 0,
                                        "used": 0
                                    }
                                },
                                "ansible_memtotal_mb": 32090,
                                "ansible_mounts": [
                                    {
                                        "block_available": 10060685,
                                        "block_size": 4096,
                                        "block_total": 40601580,
                                        "block_used": 30540895,
                                        "device": "/dev/vda1",
                                        "fstype": "ext4",
                                        "inode_available": 16867596,
                                        "inode_total": 20643840,
                                        "inode_used": 3776244,
                                        "mount": "/etc/resolv.conf",
                                        "options": "ro,nosuid,nodev,relatime,discard,errors=remount-ro,bind",
                                        "size_available": 41208565760,
                                        "size_total": 166304071680,
                                        "uuid": "N/A"
                                    },
                                    {
                                        "block_available": 10060685,
                                        "block_size": 4096,
                                        "block_total": 40601580,
                                        "block_used": 30540895,
                                        "device": "/dev/vda1",
                                        "fstype": "ext4",
                                        "inode_available": 16867596,
                                        "inode_total": 20643840,
                                        "inode_used": 3776244,
                                        "mount": "/etc/hosts",
                                        "options": "ro,nosuid,nodev,relatime,discard,errors=remount-ro,bind",
                                        "size_available": 41208565760,
                                        "size_total": 166304071680,
                                        "uuid": "N/A"
                                    },
                                    {
                                        "block_available": 10060685,
                                        "block_size": 4096,
                                        "block_total": 40601580,
                                        "block_used": 30540895,
                                        "device": "/dev/vda1",
                                        "fstype": "ext4",
                                        "inode_available": 16867596,
                                        "inode_total": 20643840,
                                        "inode_used": 3776244,
                                        "mount": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/work",
                                        "options": "rw,nosuid,nodev,relatime,discard,errors=remount-ro,bind",
                                        "size_available": 41208565760,
                                        "size_total": 166304071680,
                                        "uuid": "N/A"
                                    },
                                    {
                                        "block_available": 10060685,
                                        "block_size": 4096,
                                        "block_total": 40601580,
                                        "block_used": 30540895,
                                        "device": "/dev/vda1",
                                        "fstype": "ext4",
                                        "inode_available": 16867596,
                                        "inode_total": 20643840,
                                        "inode_used": 3776244,
                                        "mount": "/var/lib/zuul/ansible/9",
                                        "options": "ro,nosuid,nodev,relatime,discard,errors=remount-ro,bind",
                                        "size_available": 41208565760,
                                        "size_total": 166304071680,
                                        "uuid": "N/A"
                                    },
                                    {
                                        "block_available": 10060685,
                                        "block_size": 4096,
                                        "block_total": 40601580,
                                        "block_used": 30540895,
                                        "device": "/dev/vda1",
                                        "fstype": "ext4",
                                        "inode_available": 16867596,
                                        "inode_total": 20643840,
                                        "inode_used": 3776244,
                                        "mount": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/ansible",
                                        "options": "ro,nosuid,nodev,relatime,discard,errors=remount-ro,bind",
                                        "size_available": 41208565760,
                                        "size_total": 166304071680,
                                        "uuid": "N/A"
                                    },
                                    {
                                        "block_available": 10060685,
                                        "block_size": 4096,
                                        "block_total": 40601580,
                                        "block_used": 30540895,
                                        "device": "/dev/vda1",
                                        "fstype": "ext4",
                                        "inode_available": 16867596,
                                        "inode_total": 20643840,
                                        "inode_used": 3776244,
                                        "mount": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/trusted",
                                        "options": "ro,nosuid,nodev,relatime,discard,errors=remount-ro,bind",
                                        "size_available": 41208565760,
                                        "size_total": 166304071680,
                                        "uuid": "N/A"
                                    },
                                    {
                                        "block_available": 10060685,
                                        "block_size": 4096,
                                        "block_total": 40601580,
                                        "block_used": 30540895,
                                        "device": "/dev/vda1",
                                        "fstype": "ext4",
                                        "inode_available": 16867596,
                                        "inode_total": 20643840,
                                        "inode_used": 3776244,
                                        "mount": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/untrusted",
                                        "options": "ro,nosuid,nodev,relatime,discard,errors=remount-ro,bind",
                                        "size_available": 41208565760,
                                        "size_total": 166304071680,
                                        "uuid": "N/A"
                                    },
                                    {
                                        "block_available": 10060685,
                                        "block_size": 4096,
                                        "block_total": 40601580,
                                        "block_used": 30540895,
                                        "device": "/dev/vda1",
                                        "fstype": "ext4",
                                        "inode_available": 16867596,
                                        "inode_total": 20643840,
                                        "inode_used": 3776244,
                                        "mount": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/ansible/pre_playbook_0",
                                        "options": "ro,nosuid,nodev,relatime,discard,errors=remount-ro,bind",
                                        "size_available": 41208565760,
                                        "size_total": 166304071680,
                                        "uuid": "N/A"
                                    },
                                    {
                                        "block_available": 10060685,
                                        "block_size": 4096,
                                        "block_total": 40601580,
                                        "block_used": 30540895,
                                        "device": "/dev/vda1",
                                        "fstype": "ext4",
                                        "inode_available": 16867596,
                                        "inode_total": 20643840,
                                        "inode_used": 3776244,
                                        "mount": "/etc/zuul/site-variables.yaml",
                                        "options": "ro,nosuid,nodev,relatime,discard,errors=remount-ro,bind",
                                        "size_available": 41208565760,
                                        "size_total": 166304071680,
                                        "uuid": "N/A"
                                    },
                                    {
                                        "block_available": 53529411,
                                        "block_size": 4096,
                                        "block_total": 263940717,
                                        "block_used": 210411306,
                                        "device": "/dev/vdb",
                                        "fstype": "ext4",
                                        "inode_available": 18244922,
                                        "inode_total": 67108864,
                                        "inode_used": 48863942,
                                        "mount": "/srv/static/logs",
                                        "options": "rw,nosuid,nodev,relatime,discard",
                                        "size_available": 219256467456,
                                        "size_total": 1081101176832,
                                        "uuid": "N/A"
                                    },
                                    {
                                        "block_available": 10060685,
                                        "block_size": 4096,
                                        "block_total": 40601580,
                                        "block_used": 30540895,
                                        "device": "/dev/vda1",
                                        "fstype": "ext4",
                                        "inode_available": 16867596,
                                        "inode_total": 20643840,
                                        "inode_used": 3776244,
                                        "mount": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/.ansible",
                                        "options": "rw,nosuid,nodev,relatime,discard,errors=remount-ro,bind",
                                        "size_available": 41208565760,
                                        "size_total": 166304071680,
                                        "uuid": "N/A"
                                    }
                                ],
                                "ansible_nodename": "3a2793d2bd32",
                                "ansible_os_family": "Debian",
                                "ansible_pkg_mgr": "apt",
                                "ansible_proc_cmdline": {
                                    "BOOT_IMAGE": "/boot/vmlinuz-5.15.0-130-generic",
                                    "console": [
                                        "tty1",
                                        "ttyS0"
                                    ],
                                    "ro": true,
                                    "root": "UUID=5a569d86-b935-46dd-ae79-7a72a25b6a4c"
                                },
                                "ansible_processor": [
                                    "0",
                                    "AuthenticAMD",
                                    "AMD EPYC-Rome Processor",
                                    "1",
                                    "AuthenticAMD",
                                    "AMD EPYC-Rome Processor",
                                    "2",
                                    "AuthenticAMD",
                                    "AMD EPYC-Rome Processor",
                                    "3",
                                    "AuthenticAMD",
                                    "AMD EPYC-Rome Processor",
                                    "4",
                                    "AuthenticAMD",
                                    "AMD EPYC-Rome Processor",
                                    "5",
                                    "AuthenticAMD",
                                    "AMD EPYC-Rome Processor",
                                    "6",
                                    "AuthenticAMD",
                                    "AMD EPYC-Rome Processor",
                                    "7",
                                    "AuthenticAMD",
                                    "AMD EPYC-Rome Processor",
                                    "8",
                                    "AuthenticAMD",
                                    "AMD EPYC-Rome Processor",
                                    "9",
                                    "AuthenticAMD",
                                    "AMD EPYC-Rome Processor",
                                    "10",
                                    "AuthenticAMD",
                                    "AMD EPYC-Rome Processor",
                                    "11",
                                    "AuthenticAMD",
                                    "AMD EPYC-Rome Processor",
                                    "12",
                                    "AuthenticAMD",
                                    "AMD EPYC-Rome Processor",
                                    "13",
                                    "AuthenticAMD",
                                    "AMD EPYC-Rome Processor",
                                    "14",
                                    "AuthenticAMD",
                                    "AMD EPYC-Rome Processor",
                                    "15",
                                    "AuthenticAMD",
                                    "AMD EPYC-Rome Processor"
                                ],
                                "ansible_processor_cores": 1,
                                "ansible_processor_count": 16,
                                "ansible_processor_nproc": 16,
                                "ansible_processor_threads_per_core": 1,
                                "ansible_processor_vcpus": 16,
                                "ansible_product_name": "NA",
                                "ansible_product_serial": "NA",
                                "ansible_product_uuid": "NA",
                                "ansible_product_version": "NA",
                                "ansible_python": {
                                    "executable": "/usr/local/lib/zuul/ansible/9/bin/python",
                                    "has_sslcontext": true,
                                    "type": "cpython",
                                    "version": {
                                        "major": 3,
                                        "micro": 13,
                                        "minor": 11,
                                        "releaselevel": "final",
                                        "serial": 0
                                    },
                                    "version_info": [
                                        3,
                                        11,
                                        13,
                                        "final",
                                        0
                                    ]
                                },
                                "ansible_python_version": "3.11.13",
                                "ansible_real_group_id": 0,
                                "ansible_real_user_id": 0,
                                "ansible_selinux": {
                                    "status": "disabled"
                                },
                                "ansible_selinux_python_present": true,
                                "ansible_service_mgr": "bwrap",
                                "ansible_swapfree_mb": 0,
                                "ansible_swaptotal_mb": 0,
                                "ansible_system": "Linux",
                                "ansible_system_capabilities": "N/A",
                                "ansible_system_capabilities_enforced": "N/A",
                                "ansible_system_vendor": "NA",
                                "ansible_uptime_seconds": 34946181,
                                "ansible_user_dir": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/work",
                                "ansible_user_gecos": "root",
                                "ansible_user_gid": 0,
                                "ansible_user_id": "root",
                                "ansible_user_shell": "/bin/bash",
                                "ansible_user_uid": 0,
                                "ansible_userspace_architecture": "x86_64",
                                "ansible_userspace_bits": "64",
                                "ansible_virtualization_role": "host",
                                "ansible_virtualization_tech_guest": [],
                                "ansible_virtualization_tech_host": [
                                    "kvm"
                                ],
                                "ansible_virtualization_type": "kvm",
                                "gather_subset": [
                                    "all"
                                ],
                                "module_setup": true
                            },
                            "changed": false,
                            "deprecations": [],
                            "warnings": []
                        }
                    },
                    "task": {
                        "duration": {
                            "end": "2026-02-16T22:54:11.964936Z",
                            "start": "2026-02-16T22:54:11.041139Z"
                        },
                        "id": "0242ac17-0010-bb2d-7573-00000000002a",
                        "name": "Gathering Facts"
                    }
                },
                {
                    "hosts": {
                        "localhost": {
                            "action": "include_role",
                            "changed": false,
                            "include_args": {
                                "name": "set-zuul-log-path-fact"
                            }
                        }
                    },
                    "role": {
                        "id": "0242ac17-0010-bb2d-7573-000000000006",
                        "name": "emit-job-header",
                        "path": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/trusted/project_1/opendev.org/zuul/zuul-jobs/roles/emit-job-header"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-02-16T22:54:12.000015Z",
                            "start": "2026-02-16T22:54:11.969192Z"
                        },
                        "id": "0242ac17-0010-bb2d-7573-000000000008",
                        "name": "Setup log path fact"
                    }
                },
                {
                    "hosts": {
                        "localhost": {
                            "action": "set_fact",
                            "ansible_facts": {
                                "zuul_log_path": "753/oss/75357a43b83d49128104b6fefcadd954"
                            },
                            "changed": false
                        }
                    },
                    "role": {
                        "id": "0242ac17-0010-bb2d-7573-000000000054",
                        "name": "set-zuul-log-path-fact",
                        "path": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/trusted/project_1/opendev.org/zuul/zuul-jobs/roles/set-zuul-log-path-fact"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-02-16T22:54:12.056557Z",
                            "start": "2026-02-16T22:54:12.023941Z"
                        },
                        "id": "0242ac17-0010-bb2d-7573-000000000056",
                        "name": "Set log path for a build"
                    }
                },
                {
                    "hosts": {
                        "localhost": {
                            "action": "debug",
                            "changed": false,
                            "msg": "# Job Information\nAnsible Version: 2.16.15\nJob: atmosphere-molecule-csi-rbd\nPipeline: check\nExecutor: 3a2793d2bd32\nTriggered by: https://github.com/vexxhost/atmosphere/pull/3590\nEvent ID: 4ee4dcd0-0b8a-11f1-9c24-f03c3431c989\n"
                        }
                    },
                    "role": {
                        "id": "0242ac17-0010-bb2d-7573-000000000006",
                        "name": "emit-job-header",
                        "path": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/trusted/project_1/opendev.org/zuul/zuul-jobs/roles/emit-job-header"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-02-16T22:54:12.106614Z",
                            "start": "2026-02-16T22:54:12.062601Z"
                        },
                        "id": "0242ac17-0010-bb2d-7573-000000000009",
                        "name": "Print job information"
                    }
                },
                {
                    "hosts": {
                        "localhost": {
                            "action": "debug",
                            "changed": false,
                            "msg": "All items completed",
                            "results": [
                                {
                                    "ansible_loop_var": "zj_item",
                                    "changed": false,
                                    "failed": false,
                                    "msg": "# Node Information\nInventory Hostname: instance\nHostname: np0000155839\nUsername: zuul\nDistro: Ubuntu 22.04\nProvider: yul1\nRegion: ca-ymq-1\nLabel: ubuntu-jammy\nProduct Name: OpenStack Nova\nInterface IP: 199.204.45.216\n",
                                    "zj_item": "instance"
                                }
                            ]
                        }
                    },
                    "role": {
                        "id": "0242ac17-0010-bb2d-7573-000000000006",
                        "name": "emit-job-header",
                        "path": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/trusted/project_1/opendev.org/zuul/zuul-jobs/roles/emit-job-header"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-02-16T22:54:12.217587Z",
                            "start": "2026-02-16T22:54:12.110321Z"
                        },
                        "id": "0242ac17-0010-bb2d-7573-00000000000a",
                        "name": "Print node information"
                    }
                },
                {
                    "hosts": {
                        "localhost": {
                            "action": "file",
                            "changed": true,
                            "diff": {
                                "after": {
                                    "path": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/work/logs/zuul-info",
                                    "state": "directory"
                                },
                                "before": {
                                    "path": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/work/logs/zuul-info",
                                    "state": "absent"
                                }
                            },
                            "gid": 0,
                            "group": "root",
                            "invocation": {
                                "module_args": {
                                    "_diff_peek": null,
                                    "_original_basename": null,
                                    "access_time": null,
                                    "access_time_format": "%Y%m%d%H%M.%S",
                                    "attributes": null,
                                    "follow": true,
                                    "force": false,
                                    "group": null,
                                    "mode": 493,
                                    "modification_time": null,
                                    "modification_time_format": "%Y%m%d%H%M.%S",
                                    "owner": null,
                                    "path": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/work/logs/zuul-info",
                                    "recurse": false,
                                    "selevel": null,
                                    "serole": null,
                                    "setype": null,
                                    "seuser": null,
                                    "src": null,
                                    "state": "directory",
                                    "unsafe_writes": false
                                }
                            },
                            "mode": "0755",
                            "owner": "root",
                            "path": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/work/logs/zuul-info",
                            "size": 4096,
                            "state": "directory",
                            "uid": 0
                        }
                    },
                    "role": {
                        "id": "0242ac17-0010-bb2d-7573-00000000000d",
                        "name": "log-inventory",
                        "path": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/trusted/project_1/opendev.org/zuul/zuul-jobs/roles/log-inventory"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-02-16T22:54:12.619593Z",
                            "start": "2026-02-16T22:54:12.228024Z"
                        },
                        "id": "0242ac17-0010-bb2d-7573-00000000000f",
                        "name": "Ensure Zuul Ansible directory exists"
                    }
                },
                {
                    "hosts": {
                        "localhost": {
                            "action": "copy",
                            "changed": true,
                            "checksum": "d64abd4e4656d6252b498914fd4ed3e783f5b32b",
                            "dest": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/work/logs/zuul-info/inventory.yaml",
                            "diff": [],
                            "gid": 0,
                            "group": "root",
                            "invocation": {
                                "module_args": {
                                    "_original_basename": "inventory.yaml",
                                    "attributes": null,
                                    "backup": false,
                                    "checksum": "d64abd4e4656d6252b498914fd4ed3e783f5b32b",
                                    "content": null,
                                    "dest": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/work/logs/zuul-info",
                                    "directory_mode": null,
                                    "follow": false,
                                    "force": true,
                                    "group": null,
                                    "local_follow": null,
                                    "mode": 420,
                                    "owner": null,
                                    "remote_src": null,
                                    "selevel": null,
                                    "serole": null,
                                    "setype": null,
                                    "seuser": null,
                                    "src": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/work/.ansible/tmp/ansible-tmp-1771282452.6599739-84-191467754468128/source",
                                    "unsafe_writes": false,
                                    "validate": null
                                }
                            },
                            "md5sum": "e618f71cf61aa7cf9f24a24dc565dad7",
                            "mode": "0644",
                            "owner": "root",
                            "size": 27864,
                            "src": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/work/.ansible/tmp/ansible-tmp-1771282452.6599739-84-191467754468128/source",
                            "state": "file",
                            "uid": 0
                        }
                    },
                    "role": {
                        "id": "0242ac17-0010-bb2d-7573-00000000000d",
                        "name": "log-inventory",
                        "path": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/trusted/project_1/opendev.org/zuul/zuul-jobs/roles/log-inventory"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-02-16T22:54:13.467225Z",
                            "start": "2026-02-16T22:54:12.628636Z"
                        },
                        "id": "0242ac17-0010-bb2d-7573-000000000010",
                        "name": "Copy ansible inventory to logs dir"
                    }
                }
            ]
        },
        {
            "play": {
                "duration": {
                    "end": "2026-02-16T22:54:20.526136Z",
                    "start": "2026-02-16T22:54:13.477044Z"
                },
                "id": "0242ac17-0010-bb2d-7573-000000000011",
                "name": "all"
            },
            "tasks": [
                {
                    "hosts": {
                        "instance": {
                            "action": "stat",
                            "changed": false,
                            "failed_when_result": false,
                            "invocation": {
                                "module_args": {
                                    "checksum_algorithm": "sha1",
                                    "follow": false,
                                    "get_attributes": true,
                                    "get_checksum": true,
                                    "get_mime": true,
                                    "path": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/work/75357a43b83d49128104b6fefcadd954_id_rsa"
                                }
                            },
                            "stat": {
                                "exists": false
                            }
                        }
                    },
                    "role": {
                        "id": "0242ac17-0010-bb2d-7573-000000000016",
                        "name": "add-build-sshkey",
                        "path": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/trusted/project_1/opendev.org/zuul/zuul-jobs/roles/add-build-sshkey"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-02-16T22:54:13.746265Z",
                            "start": "2026-02-16T22:54:13.488809Z"
                        },
                        "id": "0242ac17-0010-bb2d-7573-000000000018",
                        "name": "Check to see if ssh key was already created for this build"
                    }
                },
                {
                    "hosts": {
                        "instance": {
                            "action": "include_tasks",
                            "changed": false,
                            "include": "create-key-and-replace.yaml",
                            "include_args": {}
                        }
                    },
                    "role": {
                        "id": "0242ac17-0010-bb2d-7573-000000000016",
                        "name": "add-build-sshkey",
                        "path": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/trusted/project_1/opendev.org/zuul/zuul-jobs/roles/add-build-sshkey"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-02-16T22:54:13.798210Z",
                            "start": "2026-02-16T22:54:13.760215Z"
                        },
                        "id": "0242ac17-0010-bb2d-7573-000000000019",
                        "name": "Create a new key in workspace based on build UUID"
                    }
                },
                {
                    "hosts": {
                        "instance": {
                            "action": "command",
                            "changed": true,
                            "cmd": [
                                "ssh-keygen",
                                "-t",
                                "rsa",
                                "-N",
                                "",
                                "-C",
                                "zuul-build-sshkey",
                                "-f",
                                "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/work/75357a43b83d49128104b6fefcadd954_id_rsa",
                                "-b",
                                "3072"
                            ],
                            "delta": "0:00:00.289395",
                            "end": "2026-02-16 22:54:14.520733",
                            "invocation": {
                                "module_args": {
                                    "_raw_params": "ssh-keygen -t rsa -N '' -C 'zuul-build-sshkey' -f /var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/work/75357a43b83d49128104b6fefcadd954_id_rsa -b 3072",
                                    "_uses_shell": false,
                                    "argv": null,
                                    "chdir": null,
                                    "creates": null,
                                    "executable": null,
                                    "expand_argument_vars": true,
                                    "removes": null,
                                    "stdin": null,
                                    "stdin_add_newline": true,
                                    "strip_empty_ends": true,
                                    "zuul_ansible_split_streams": false,
                                    "zuul_log_id": "0242ac17-0010-bb2d-7573-00000000009c-0-instance",
                                    "zuul_no_log": false,
                                    "zuul_output_max_bytes": 1073741824
                                }
                            },
                            "msg": "",
                            "rc": 0,
                            "start": "2026-02-16 22:54:14.231338",
                            "stderr": "",
                            "stderr_lines": [],
                            "stdout": "Generating public/private rsa key pair.\nYour identification has been saved in /var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/work/75357a43b83d49128104b6fefcadd954_id_rsa\nYour public key has been saved in /var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/work/75357a43b83d49128104b6fefcadd954_id_rsa.pub\nThe key fingerprint is:\nSHA256:9PCWcF49SIGZ2EDGSxGWUih7zbbY1UxcYTTZa/3r3XI zuul-build-sshkey\nThe key's randomart image is:\n+---[RSA 3072]----+\n|       =OB =o+*+ |\n|    . oo= =.o+...|\n|     o =+..+o o o|\n|    . ..=B.oo  +.|\n|     . +So*   . .|\n|      . o.      .|\n|                .|\n|              ..E|\n|              .+o|\n+----[SHA256]-----+",
                            "stdout_lines": [
                                "Generating public/private rsa key pair.",
                                "Your identification has been saved in /var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/work/75357a43b83d49128104b6fefcadd954_id_rsa",
                                "Your public key has been saved in /var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/work/75357a43b83d49128104b6fefcadd954_id_rsa.pub",
                                "The key fingerprint is:",
                                "SHA256:9PCWcF49SIGZ2EDGSxGWUih7zbbY1UxcYTTZa/3r3XI zuul-build-sshkey",
                                "The key's randomart image is:",
                                "+---[RSA 3072]----+",
                                "|       =OB =o+*+ |",
                                "|    . oo= =.o+...|",
                                "|     o =+..+o o o|",
                                "|    . ..=B.oo  +.|",
                                "|     . +So*   . .|",
                                "|      . o.      .|",
                                "|                .|",
                                "|              ..E|",
                                "|              .+o|",
                                "+----[SHA256]-----+"
                            ],
                            "zuul_log_id": "0242ac17-0010-bb2d-7573-00000000009c-0-instance"
                        }
                    },
                    "role": {
                        "id": "0242ac17-0010-bb2d-7573-000000000016",
                        "name": "add-build-sshkey",
                        "path": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/trusted/project_1/opendev.org/zuul/zuul-jobs/roles/add-build-sshkey"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-02-16T22:54:14.580769Z",
                            "start": "2026-02-16T22:54:13.825999Z"
                        },
                        "id": "0242ac17-0010-bb2d-7573-00000000009c",
                        "name": "Create Temp SSH key"
                    }
                },
                {
                    "hosts": {
                        "instance": {
                            "action": "include_tasks",
                            "changed": false,
                            "include": "remote-linux.yaml",
                            "include_args": {}
                        }
                    },
                    "role": {
                        "id": "0242ac17-0010-bb2d-7573-000000000016",
                        "name": "add-build-sshkey",
                        "path": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/trusted/project_1/opendev.org/zuul/zuul-jobs/roles/add-build-sshkey"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-02-16T22:54:14.625566Z",
                            "start": "2026-02-16T22:54:14.587839Z"
                        },
                        "id": "0242ac17-0010-bb2d-7573-00000000009d",
                        "name": "Remote setup ssh keys (linux)"
                    }
                },
                {
                    "hosts": {
                        "instance": {
                            "action": "lineinfile",
                            "changed": false,
                            "false_condition": "zuul_build_sshkey_cleanup",
                            "skip_reason": "Conditional result was False",
                            "skipped": true
                        }
                    },
                    "role": {
                        "id": "0242ac17-0010-bb2d-7573-000000000016",
                        "name": "add-build-sshkey",
                        "path": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/trusted/project_1/opendev.org/zuul/zuul-jobs/roles/add-build-sshkey"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-02-16T22:54:14.673279Z",
                            "start": "2026-02-16T22:54:14.646419Z"
                        },
                        "id": "0242ac17-0010-bb2d-7573-0000000000bc",
                        "name": "Remove previously added zuul-build-sshkey"
                    }
                },
                {
                    "hosts": {
                        "instance": {
                            "action": "authorized_key",
                            "changed": true,
                            "comment": null,
                            "exclusive": false,
                            "follow": false,
                            "invocation": {
                                "module_args": {
                                    "changed": true,
                                    "comment": null,
                                    "exclusive": false,
                                    "follow": false,
                                    "key": "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQCTykJAbMtx1dNz/ZyMSQFCrkGfX64qNr5A+sjyGnCBB7FKVfB7o64ewfKHHWrTu7j8fpcYb6DJJCLKfAlSJYUAzpPJp43tT3McrEL0n3D00ym+132moJaCzUBvYIXcT1aawKrL121JBYtfm5++ux4DaX5PHHJ8i2cFwMdOQNgBK4xGkWk/ZQiK70S/fLx97OUPQobV9VLKj6lXablW4KAoK415b6DCsNrzb42vcp8IyU51m4N9C3sF8jDBKX48GU+IV1qZ+woEB8M0JzdbXQJPqKwJ9iQbRO1ORMyqHRlRJeC+HU8yOmbapOB0Lq0r+sw7/X9Ln81zIMVpeIati1lWqlZy45fKkHzimoh0DKgowUTWimUkSBD5eF5CnsId3hBKoIcfXVepX3eSTaYXIZu1brxfLGLwWUQa5Hgnq7aO7kVvrFxPWuDRxgDpeDDdyWsPeQb2TijNwYbfGrpnDR1A1Uw8aMj9hBGgzQ/KPDwnMUJH4i+H8I+Ps5vj70wsAz8= zuul-build-sshkey",
                                    "key_options": null,
                                    "keyfile": "/home/zuul/.ssh/authorized_keys",
                                    "manage_dir": true,
                                    "path": null,
                                    "state": "present",
                                    "user": "zuul",
                                    "validate_certs": true
                                }
                            },
                            "key": "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQCTykJAbMtx1dNz/ZyMSQFCrkGfX64qNr5A+sjyGnCBB7FKVfB7o64ewfKHHWrTu7j8fpcYb6DJJCLKfAlSJYUAzpPJp43tT3McrEL0n3D00ym+132moJaCzUBvYIXcT1aawKrL121JBYtfm5++ux4DaX5PHHJ8i2cFwMdOQNgBK4xGkWk/ZQiK70S/fLx97OUPQobV9VLKj6lXablW4KAoK415b6DCsNrzb42vcp8IyU51m4N9C3sF8jDBKX48GU+IV1qZ+woEB8M0JzdbXQJPqKwJ9iQbRO1ORMyqHRlRJeC+HU8yOmbapOB0Lq0r+sw7/X9Ln81zIMVpeIati1lWqlZy45fKkHzimoh0DKgowUTWimUkSBD5eF5CnsId3hBKoIcfXVepX3eSTaYXIZu1brxfLGLwWUQa5Hgnq7aO7kVvrFxPWuDRxgDpeDDdyWsPeQb2TijNwYbfGrpnDR1A1Uw8aMj9hBGgzQ/KPDwnMUJH4i+H8I+Ps5vj70wsAz8= zuul-build-sshkey",
                            "key_options": null,
                            "keyfile": "/home/zuul/.ssh/authorized_keys",
                            "manage_dir": true,
                            "path": null,
                            "state": "present",
                            "user": "zuul",
                            "validate_certs": true
                        }
                    },
                    "role": {
                        "id": "0242ac17-0010-bb2d-7573-000000000016",
                        "name": "add-build-sshkey",
                        "path": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/trusted/project_1/opendev.org/zuul/zuul-jobs/roles/add-build-sshkey"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-02-16T22:54:15.174842Z",
                            "start": "2026-02-16T22:54:14.683101Z"
                        },
                        "id": "0242ac17-0010-bb2d-7573-0000000000bd",
                        "name": "Enable access via build key on all nodes"
                    }
                },
                {
                    "hosts": {
                        "instance": {
                            "action": "file",
                            "changed": false,
                            "diff": {
                                "after": {
                                    "path": "/home/zuul/.ssh"
                                },
                                "before": {
                                    "path": "/home/zuul/.ssh"
                                }
                            },
                            "gid": 1000,
                            "group": "zuul",
                            "invocation": {
                                "module_args": {
                                    "_diff_peek": null,
                                    "_original_basename": null,
                                    "access_time": null,
                                    "access_time_format": "%Y%m%d%H%M.%S",
                                    "attributes": null,
                                    "follow": true,
                                    "force": false,
                                    "group": null,
                                    "mode": 448,
                                    "modification_time": null,
                                    "modification_time_format": "%Y%m%d%H%M.%S",
                                    "owner": null,
                                    "path": "/home/zuul/.ssh",
                                    "recurse": false,
                                    "selevel": null,
                                    "serole": null,
                                    "setype": null,
                                    "seuser": null,
                                    "src": null,
                                    "state": "directory",
                                    "unsafe_writes": false
                                }
                            },
                            "mode": "0700",
                            "owner": "zuul",
                            "path": "/home/zuul/.ssh",
                            "size": 4096,
                            "state": "directory",
                            "uid": 1000
                        }
                    },
                    "role": {
                        "id": "0242ac17-0010-bb2d-7573-000000000016",
                        "name": "add-build-sshkey",
                        "path": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/trusted/project_1/opendev.org/zuul/zuul-jobs/roles/add-build-sshkey"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-02-16T22:54:15.411227Z",
                            "start": "2026-02-16T22:54:15.181810Z"
                        },
                        "id": "0242ac17-0010-bb2d-7573-0000000000be",
                        "name": "Make sure user has a .ssh"
                    }
                },
                {
                    "hosts": {
                        "instance": {
                            "action": "copy",
                            "changed": true,
                            "checksum": "c01e93c282fb6bf6131838ed121192752e258131",
                            "dest": "/home/zuul/.ssh/id_rsa",
                            "diff": [],
                            "gid": 1000,
                            "group": "zuul",
                            "invocation": {
                                "module_args": {
                                    "_original_basename": "75357a43b83d49128104b6fefcadd954_id_rsa",
                                    "attributes": null,
                                    "backup": false,
                                    "checksum": "c01e93c282fb6bf6131838ed121192752e258131",
                                    "content": null,
                                    "dest": "/home/zuul/.ssh/id_rsa",
                                    "directory_mode": null,
                                    "follow": false,
                                    "force": false,
                                    "group": null,
                                    "local_follow": null,
                                    "mode": 384,
                                    "owner": null,
                                    "remote_src": null,
                                    "selevel": null,
                                    "serole": null,
                                    "setype": null,
                                    "seuser": null,
                                    "src": "/home/zuul/.ansible/tmp/ansible-tmp-1771282455.461022-190-204082486961052/source",
                                    "unsafe_writes": false,
                                    "validate": null
                                }
                            },
                            "md5sum": "e56c33735d1df9abe14f4ca15c952e8b",
                            "mode": "0600",
                            "owner": "zuul",
                            "size": 2602,
                            "src": "/home/zuul/.ansible/tmp/ansible-tmp-1771282455.461022-190-204082486961052/source",
                            "state": "file",
                            "uid": 1000
                        }
                    },
                    "role": {
                        "id": "0242ac17-0010-bb2d-7573-000000000016",
                        "name": "add-build-sshkey",
                        "path": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/trusted/project_1/opendev.org/zuul/zuul-jobs/roles/add-build-sshkey"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-02-16T22:54:15.877560Z",
                            "start": "2026-02-16T22:54:15.417537Z"
                        },
                        "id": "0242ac17-0010-bb2d-7573-0000000000bf",
                        "name": "Install build private key as SSH key on all nodes"
                    }
                },
                {
                    "hosts": {
                        "instance": {
                            "action": "copy",
                            "changed": true,
                            "checksum": "3e4c10f7f12aff2ca092ac44fb5ec1a023208160",
                            "dest": "/home/zuul/.ssh/id_rsa.pub",
                            "diff": [],
                            "gid": 1000,
                            "group": "zuul",
                            "invocation": {
                                "module_args": {
                                    "_original_basename": "75357a43b83d49128104b6fefcadd954_id_rsa.pub",
                                    "attributes": null,
                                    "backup": false,
                                    "checksum": "3e4c10f7f12aff2ca092ac44fb5ec1a023208160",
                                    "content": null,
                                    "dest": "/home/zuul/.ssh/id_rsa.pub",
                                    "directory_mode": null,
                                    "follow": false,
                                    "force": false,
                                    "group": null,
                                    "local_follow": null,
                                    "mode": 420,
                                    "owner": null,
                                    "remote_src": null,
                                    "selevel": null,
                                    "serole": null,
                                    "setype": null,
                                    "seuser": null,
                                    "src": "/home/zuul/.ansible/tmp/ansible-tmp-1771282455.925339-200-106913441199555/source",
                                    "unsafe_writes": false,
                                    "validate": null
                                }
                            },
                            "md5sum": "98d06cec8ff9c8a40a237a23bb87a466",
                            "mode": "0644",
                            "owner": "zuul",
                            "size": 571,
                            "src": "/home/zuul/.ansible/tmp/ansible-tmp-1771282455.925339-200-106913441199555/source",
                            "state": "file",
                            "uid": 1000
                        }
                    },
                    "role": {
                        "id": "0242ac17-0010-bb2d-7573-000000000016",
                        "name": "add-build-sshkey",
                        "path": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/trusted/project_1/opendev.org/zuul/zuul-jobs/roles/add-build-sshkey"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-02-16T22:54:16.341589Z",
                            "start": "2026-02-16T22:54:15.883598Z"
                        },
                        "id": "0242ac17-0010-bb2d-7573-0000000000c0",
                        "name": "Install build public key as SSH key on all nodes"
                    }
                },
                {
                    "hosts": {
                        "instance": {
                            "action": "include_tasks",
                            "changed": false,
                            "false_condition": "ansible_os_family == \"Windows\"",
                            "skip_reason": "Conditional result was False",
                            "skipped": true
                        }
                    },
                    "role": {
                        "id": "0242ac17-0010-bb2d-7573-000000000016",
                        "name": "add-build-sshkey",
                        "path": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/trusted/project_1/opendev.org/zuul/zuul-jobs/roles/add-build-sshkey"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-02-16T22:54:16.373245Z",
                            "start": "2026-02-16T22:54:16.348043Z"
                        },
                        "id": "0242ac17-0010-bb2d-7573-00000000009e",
                        "name": "Remote setup ssh keys (windows)"
                    }
                },
                {
                    "hosts": {
                        "instance": {
                            "action": "sshagent_remove_keys",
                            "changed": true,
                            "invocation": {
                                "module_args": {
                                    "remove": "^(?!\\(stdin\\)).*"
                                }
                            },
                            "removed": [
                                "/etc/zuul/id_rsa"
                            ]
                        }
                    },
                    "role": {
                        "id": "0242ac17-0010-bb2d-7573-0000000000a3",
                        "name": "remove-zuul-sshkey",
                        "path": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/trusted/project_1/opendev.org/zuul/zuul-jobs/roles/remove-zuul-sshkey"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-02-16T22:54:16.752865Z",
                            "start": "2026-02-16T22:54:16.383958Z"
                        },
                        "id": "0242ac17-0010-bb2d-7573-0000000000a5",
                        "name": "Remove master key from local agent"
                    }
                },
                {
                    "hosts": {
                        "instance": {
                            "action": "command",
                            "changed": true,
                            "cmd": [
                                "ssh-add",
                                "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/work/75357a43b83d49128104b6fefcadd954_id_rsa"
                            ],
                            "delta": "0:00:00.015140",
                            "end": "2026-02-16 22:54:17.031039",
                            "invocation": {
                                "module_args": {
                                    "_raw_params": "ssh-add /var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/work/75357a43b83d49128104b6fefcadd954_id_rsa",
                                    "_uses_shell": false,
                                    "argv": null,
                                    "chdir": null,
                                    "creates": null,
                                    "executable": null,
                                    "expand_argument_vars": true,
                                    "removes": null,
                                    "stdin": null,
                                    "stdin_add_newline": true,
                                    "strip_empty_ends": true,
                                    "zuul_ansible_split_streams": false,
                                    "zuul_log_id": "0242ac17-0010-bb2d-7573-0000000000ab-0-instance",
                                    "zuul_no_log": false,
                                    "zuul_output_max_bytes": 1073741824
                                }
                            },
                            "msg": "",
                            "rc": 0,
                            "start": "2026-02-16 22:54:17.015899",
                            "stderr": "",
                            "stderr_lines": [],
                            "stdout": "Identity added: /var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/work/75357a43b83d49128104b6fefcadd954_id_rsa (zuul-build-sshkey)",
                            "stdout_lines": [
                                "Identity added: /var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/work/75357a43b83d49128104b6fefcadd954_id_rsa (zuul-build-sshkey)"
                            ],
                            "zuul_log_id": "0242ac17-0010-bb2d-7573-0000000000ab-0-instance"
                        }
                    },
                    "role": {
                        "id": "0242ac17-0010-bb2d-7573-000000000016",
                        "name": "add-build-sshkey",
                        "path": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/trusted/project_1/opendev.org/zuul/zuul-jobs/roles/add-build-sshkey"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-02-16T22:54:17.083950Z",
                            "start": "2026-02-16T22:54:16.770828Z"
                        },
                        "id": "0242ac17-0010-bb2d-7573-0000000000ab",
                        "name": "Add back temp key"
                    }
                },
                {
                    "hosts": {
                        "instance": {
                            "action": "ping",
                            "changed": false,
                            "invocation": {
                                "module_args": {
                                    "data": "pong"
                                }
                            },
                            "ping": "pong"
                        }
                    },
                    "role": {
                        "id": "0242ac17-0010-bb2d-7573-000000000016",
                        "name": "add-build-sshkey",
                        "path": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/trusted/project_1/opendev.org/zuul/zuul-jobs/roles/add-build-sshkey"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-02-16T22:54:17.419977Z",
                            "start": "2026-02-16T22:54:17.088592Z"
                        },
                        "id": "0242ac17-0010-bb2d-7573-0000000000ac",
                        "name": "Verify we can still SSH to all nodes"
                    }
                },
                {
                    "hosts": {
                        "instance": {
                            "action": "command",
                            "changed": false,
                            "false_condition": "ansible_os_family == \"Windows\"",
                            "skip_reason": "Conditional result was False",
                            "skipped": true
                        }
                    },
                    "role": {
                        "id": "0242ac17-0010-bb2d-7573-000000000016",
                        "name": "add-build-sshkey",
                        "path": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/trusted/project_1/opendev.org/zuul/zuul-jobs/roles/add-build-sshkey"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-02-16T22:54:17.452182Z",
                            "start": "2026-02-16T22:54:17.426848Z"
                        },
                        "id": "0242ac17-0010-bb2d-7573-0000000000ad",
                        "name": "Verify we can still SSH to all nodes (windows)"
                    }
                },
                {
                    "hosts": {
                        "instance": {
                            "action": "zuul_console",
                            "changed": false,
                            "invocation": {
                                "module_args": {
                                    "path": "/tmp/console-{log_uuid}.log",
                                    "port": 19885,
                                    "state": "present"
                                }
                            }
                        }
                    },
                    "role": {
                        "id": "0242ac17-0010-bb2d-7573-00000000001b",
                        "name": "prepare-workspace",
                        "path": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/trusted/project_1/opendev.org/zuul/zuul-jobs/roles/prepare-workspace"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-02-16T22:54:17.805215Z",
                            "start": "2026-02-16T22:54:17.466602Z"
                        },
                        "id": "0242ac17-0010-bb2d-7573-00000000001d",
                        "name": "Start zuul_console daemon."
                    }
                },
                {
                    "hosts": {
                        "instance": {
                            "action": "synchronize",
                            "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
                            "changed": true
                        }
                    },
                    "role": {
                        "id": "0242ac17-0010-bb2d-7573-00000000001b",
                        "name": "prepare-workspace",
                        "path": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/trusted/project_1/opendev.org/zuul/zuul-jobs/roles/prepare-workspace"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-02-16T22:54:19.405142Z",
                            "start": "2026-02-16T22:54:17.812721Z"
                        },
                        "id": "0242ac17-0010-bb2d-7573-00000000001e",
                        "name": "Synchronize src repos to workspace directory."
                    }
                },
                {
                    "hosts": {
                        "instance": {
                            "action": "file",
                            "changed": false,
                            "msg": "All items completed",
                            "results": [
                                {
                                    "ansible_loop_var": "zj_output_dir",
                                    "changed": false,
                                    "failed": false,
                                    "invocation": {
                                        "module_args": {
                                            "_diff_peek": null,
                                            "_original_basename": null,
                                            "access_time": null,
                                            "access_time_format": "%Y%m%d%H%M.%S",
                                            "attributes": null,
                                            "follow": true,
                                            "force": false,
                                            "group": null,
                                            "mode": null,
                                            "modification_time": null,
                                            "modification_time_format": "%Y%m%d%H%M.%S",
                                            "owner": null,
                                            "path": "/home/zuul/zuul-output/logs",
                                            "recurse": false,
                                            "selevel": null,
                                            "serole": null,
                                            "setype": null,
                                            "seuser": null,
                                            "src": null,
                                            "state": "absent",
                                            "unsafe_writes": false
                                        }
                                    },
                                    "path": "/home/zuul/zuul-output/logs",
                                    "state": "absent",
                                    "zj_output_dir": "logs"
                                },
                                {
                                    "ansible_loop_var": "zj_output_dir",
                                    "changed": false,
                                    "failed": false,
                                    "invocation": {
                                        "module_args": {
                                            "_diff_peek": null,
                                            "_original_basename": null,
                                            "access_time": null,
                                            "access_time_format": "%Y%m%d%H%M.%S",
                                            "attributes": null,
                                            "follow": true,
                                            "force": false,
                                            "group": null,
                                            "mode": null,
                                            "modification_time": null,
                                            "modification_time_format": "%Y%m%d%H%M.%S",
                                            "owner": null,
                                            "path": "/home/zuul/zuul-output/artifacts",
                                            "recurse": false,
                                            "selevel": null,
                                            "serole": null,
                                            "setype": null,
                                            "seuser": null,
                                            "src": null,
                                            "state": "absent",
                                            "unsafe_writes": false
                                        }
                                    },
                                    "path": "/home/zuul/zuul-output/artifacts",
                                    "state": "absent",
                                    "zj_output_dir": "artifacts"
                                },
                                {
                                    "ansible_loop_var": "zj_output_dir",
                                    "changed": false,
                                    "failed": false,
                                    "invocation": {
                                        "module_args": {
                                            "_diff_peek": null,
                                            "_original_basename": null,
                                            "access_time": null,
                                            "access_time_format": "%Y%m%d%H%M.%S",
                                            "attributes": null,
                                            "follow": true,
                                            "force": false,
                                            "group": null,
                                            "mode": null,
                                            "modification_time": null,
                                            "modification_time_format": "%Y%m%d%H%M.%S",
                                            "owner": null,
                                            "path": "/home/zuul/zuul-output/docs",
                                            "recurse": false,
                                            "selevel": null,
                                            "serole": null,
                                            "setype": null,
                                            "seuser": null,
                                            "src": null,
                                            "state": "absent",
                                            "unsafe_writes": false
                                        }
                                    },
                                    "path": "/home/zuul/zuul-output/docs",
                                    "state": "absent",
                                    "zj_output_dir": "docs"
                                }
                            ]
                        }
                    },
                    "role": {
                        "id": "0242ac17-0010-bb2d-7573-000000000020",
                        "name": "ensure-output-dirs",
                        "path": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/trusted/project_1/opendev.org/zuul/zuul-jobs/roles/ensure-output-dirs"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-02-16T22:54:19.932053Z",
                            "start": "2026-02-16T22:54:19.413706Z"
                        },
                        "id": "0242ac17-0010-bb2d-7573-000000000022",
                        "name": "Empty Zuul Output directories by removing them"
                    }
                },
                {
                    "hosts": {
                        "instance": {
                            "action": "file",
                            "changed": true,
                            "msg": "All items completed",
                            "results": [
                                {
                                    "ansible_loop_var": "zj_output_dir",
                                    "changed": true,
                                    "diff": {
                                        "after": {
                                            "mode": "0755",
                                            "path": "/home/zuul/zuul-output/logs",
                                            "state": "directory"
                                        },
                                        "before": {
                                            "mode": "0775",
                                            "path": "/home/zuul/zuul-output/logs",
                                            "state": "absent"
                                        }
                                    },
                                    "failed": false,
                                    "gid": 1000,
                                    "group": "zuul",
                                    "invocation": {
                                        "module_args": {
                                            "_diff_peek": null,
                                            "_original_basename": null,
                                            "access_time": null,
                                            "access_time_format": "%Y%m%d%H%M.%S",
                                            "attributes": null,
                                            "follow": true,
                                            "force": false,
                                            "group": null,
                                            "mode": 493,
                                            "modification_time": null,
                                            "modification_time_format": "%Y%m%d%H%M.%S",
                                            "owner": null,
                                            "path": "/home/zuul/zuul-output/logs",
                                            "recurse": false,
                                            "selevel": null,
                                            "serole": null,
                                            "setype": null,
                                            "seuser": null,
                                            "src": null,
                                            "state": "directory",
                                            "unsafe_writes": false
                                        }
                                    },
                                    "mode": "0755",
                                    "owner": "zuul",
                                    "path": "/home/zuul/zuul-output/logs",
                                    "size": 4096,
                                    "state": "directory",
                                    "uid": 1000,
                                    "zj_output_dir": "logs"
                                },
                                {
                                    "ansible_loop_var": "zj_output_dir",
                                    "changed": true,
                                    "diff": {
                                        "after": {
                                            "mode": "0755",
                                            "path": "/home/zuul/zuul-output/artifacts",
                                            "state": "directory"
                                        },
                                        "before": {
                                            "mode": "0775",
                                            "path": "/home/zuul/zuul-output/artifacts",
                                            "state": "absent"
                                        }
                                    },
                                    "failed": false,
                                    "gid": 1000,
                                    "group": "zuul",
                                    "invocation": {
                                        "module_args": {
                                            "_diff_peek": null,
                                            "_original_basename": null,
                                            "access_time": null,
                                            "access_time_format": "%Y%m%d%H%M.%S",
                                            "attributes": null,
                                            "follow": true,
                                            "force": false,
                                            "group": null,
                                            "mode": 493,
                                            "modification_time": null,
                                            "modification_time_format": "%Y%m%d%H%M.%S",
                                            "owner": null,
                                            "path": "/home/zuul/zuul-output/artifacts",
                                            "recurse": false,
                                            "selevel": null,
                                            "serole": null,
                                            "setype": null,
                                            "seuser": null,
                                            "src": null,
                                            "state": "directory",
                                            "unsafe_writes": false
                                        }
                                    },
                                    "mode": "0755",
                                    "owner": "zuul",
                                    "path": "/home/zuul/zuul-output/artifacts",
                                    "size": 4096,
                                    "state": "directory",
                                    "uid": 1000,
                                    "zj_output_dir": "artifacts"
                                },
                                {
                                    "ansible_loop_var": "zj_output_dir",
                                    "changed": true,
                                    "diff": {
                                        "after": {
                                            "mode": "0755",
                                            "path": "/home/zuul/zuul-output/docs",
                                            "state": "directory"
                                        },
                                        "before": {
                                            "mode": "0775",
                                            "path": "/home/zuul/zuul-output/docs",
                                            "state": "absent"
                                        }
                                    },
                                    "failed": false,
                                    "gid": 1000,
                                    "group": "zuul",
                                    "invocation": {
                                        "module_args": {
                                            "_diff_peek": null,
                                            "_original_basename": null,
                                            "access_time": null,
                                            "access_time_format": "%Y%m%d%H%M.%S",
                                            "attributes": null,
                                            "follow": true,
                                            "force": false,
                                            "group": null,
                                            "mode": 493,
                                            "modification_time": null,
                                            "modification_time_format": "%Y%m%d%H%M.%S",
                                            "owner": null,
                                            "path": "/home/zuul/zuul-output/docs",
                                            "recurse": false,
                                            "selevel": null,
                                            "serole": null,
                                            "setype": null,
                                            "seuser": null,
                                            "src": null,
                                            "state": "directory",
                                            "unsafe_writes": false
                                        }
                                    },
                                    "mode": "0755",
                                    "owner": "zuul",
                                    "path": "/home/zuul/zuul-output/docs",
                                    "size": 4096,
                                    "state": "directory",
                                    "uid": 1000,
                                    "zj_output_dir": "docs"
                                }
                            ]
                        }
                    },
                    "role": {
                        "id": "0242ac17-0010-bb2d-7573-000000000020",
                        "name": "ensure-output-dirs",
                        "path": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/trusted/project_1/opendev.org/zuul/zuul-jobs/roles/ensure-output-dirs"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-02-16T22:54:20.526136Z",
                            "start": "2026-02-16T22:54:19.940014Z"
                        },
                        "id": "0242ac17-0010-bb2d-7573-000000000024",
                        "name": "Ensure Zuul Output directories exist"
                    }
                }
            ]
        }
    ],
    "stats": {
        "instance": {
            "changed": 8,
            "failures": 0,
            "ignored": 0,
            "ok": 15,
            "rescued": 0,
            "skipped": 3,
            "unreachable": 0
        },
        "localhost": {
            "changed": 2,
            "failures": 0,
            "ignored": 0,
            "ok": 6,
            "rescued": 0,
            "skipped": 0,
            "unreachable": 0
        }
    },
    "trusted": true
},
{
    "branch": "main",
    "index": "1",
    "phase": "pre",
    "playbook": "github.com/vexxhost/zuul-jobs/playbooks/molecule/pre.yaml",
    "plays": [
        {
            "play": {
                "duration": {
                    "end": "2026-02-16T22:54:23.988553Z",
                    "start": "2026-02-16T22:54:21.267839Z"
                },
                "id": "0242ac17-0010-98f7-58cd-000000000002",
                "name": "all"
            },
            "tasks": [
                {
                    "hosts": {
                        "instance": {
                            "action": "ansible.builtin.unarchive",
                            "changed": true,
                            "dest": "/usr/local/bin",
                            "extract_results": {
                                "cmd": [
                                    "/usr/bin/tar",
                                    "--extract",
                                    "-C",
                                    "/usr/local/bin",
                                    "-z",
                                    "--show-transformed-names",
                                    "--strip-components=1",
                                    "-f",
                                    "/home/zuul/.ansible/tmp/ansible-tmp-1771282461.3120344-5-97357555608140/uv-x86_64-unknown-linux-gnu4klfx57q.tar.gz"
                                ],
                                "err": "",
                                "out": "",
                                "rc": 0
                            },
                            "gid": 0,
                            "group": "root",
                            "handler": "TgzArchive",
                            "invocation": {
                                "module_args": {
                                    "attributes": null,
                                    "copy": true,
                                    "creates": "/usr/local/bin/uv",
                                    "decrypt": true,
                                    "dest": "/usr/local/bin",
                                    "exclude": [],
                                    "extra_opts": [
                                        "--strip-components=1"
                                    ],
                                    "group": null,
                                    "include": [],
                                    "io_buffer_size": 65536,
                                    "keep_newer": false,
                                    "list_files": false,
                                    "mode": null,
                                    "owner": null,
                                    "remote_src": true,
                                    "selevel": null,
                                    "serole": null,
                                    "setype": null,
                                    "seuser": null,
                                    "src": "https://github.com/astral-sh/uv/releases/download/0.8.13/uv-x86_64-unknown-linux-gnu.tar.gz",
                                    "unsafe_writes": false,
                                    "validate_certs": true
                                }
                            },
                            "mode": "0755",
                            "owner": "root",
                            "size": 4096,
                            "src": "/home/zuul/.ansible/tmp/ansible-tmp-1771282461.3120344-5-97357555608140/uv-x86_64-unknown-linux-gnu4klfx57q.tar.gz",
                            "state": "directory",
                            "uid": 0
                        }
                    },
                    "role": {
                        "id": "0242ac17-0010-98f7-58cd-000000000005",
                        "name": "setup-uv",
                        "path": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/untrusted/project_0/github.com/vexxhost/zuul-jobs/roles/setup-uv"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-02-16T22:54:23.442025Z",
                            "start": "2026-02-16T22:54:21.279474Z"
                        },
                        "id": "0242ac17-0010-98f7-58cd-000000000007",
                        "name": "Extract archive"
                    }
                },
                {
                    "hosts": {
                        "instance": {
                            "action": "ansible.builtin.command",
                            "changed": true,
                            "cmd": [
                                "/usr/local/bin/uv",
                                "--version"
                            ],
                            "delta": "0:00:00.011930",
                            "end": "2026-02-16 22:54:23.998097",
                            "invocation": {
                                "module_args": {
                                    "_raw_params": "/usr/local/bin/uv --version",
                                    "_uses_shell": false,
                                    "argv": null,
                                    "chdir": null,
                                    "creates": null,
                                    "executable": null,
                                    "expand_argument_vars": true,
                                    "removes": null,
                                    "stdin": null,
                                    "stdin_add_newline": true,
                                    "strip_empty_ends": true,
                                    "zuul_ansible_split_streams": false,
                                    "zuul_log_id": "0242ac17-0010-98f7-58cd-000000000008-1-instance",
                                    "zuul_no_log": false,
                                    "zuul_output_max_bytes": 1073741824
                                }
                            },
                            "msg": "",
                            "rc": 0,
                            "start": "2026-02-16 22:54:23.986167",
                            "stderr": "",
                            "stderr_lines": [],
                            "stdout": "uv 0.8.13",
                            "stdout_lines": [
                                "uv 0.8.13"
                            ],
                            "zuul_log_id": "0242ac17-0010-98f7-58cd-000000000008-1-instance"
                        }
                    },
                    "role": {
                        "id": "0242ac17-0010-98f7-58cd-000000000005",
                        "name": "setup-uv",
                        "path": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/untrusted/project_0/github.com/vexxhost/zuul-jobs/roles/setup-uv"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-02-16T22:54:23.988553Z",
                            "start": "2026-02-16T22:54:23.471190Z"
                        },
                        "id": "0242ac17-0010-98f7-58cd-000000000008",
                        "name": "Print version"
                    }
                }
            ]
        }
    ],
    "stats": {
        "instance": {
            "changed": 2,
            "failures": 0,
            "ignored": 0,
            "ok": 2,
            "rescued": 0,
            "skipped": 0,
            "unreachable": 0
        }
    },
    "trusted": false
},
{
    "branch": "stable/2024.1",
    "index": "2",
    "phase": "pre",
    "playbook": "github.com/vexxhost/atmosphere/test-playbooks/molecule/pre.yml",
    "plays": [
        {
            "play": {
                "duration": {
                    "end": "2026-02-16T22:54:33.581896Z",
                    "start": "2026-02-16T22:54:24.693162Z"
                },
                "id": "0242ac17-0010-de89-2415-000000000002",
                "name": "all"
            },
            "tasks": [
                {
                    "hosts": {
                        "instance": {
                            "action": "ansible.builtin.apt",
                            "cache_update_time": 1771282470,
                            "cache_updated": true,
                            "changed": true,
                            "diff": {},
                            "invocation": {
                                "module_args": {
                                    "allow_change_held_packages": false,
                                    "allow_downgrade": false,
                                    "allow_unauthenticated": false,
                                    "autoclean": false,
                                    "autoremove": false,
                                    "cache_valid_time": 0,
                                    "clean": false,
                                    "deb": null,
                                    "default_release": null,
                                    "dpkg_options": "force-confdef,force-confold",
                                    "fail_on_autoremove": false,
                                    "force": false,
                                    "force_apt_get": false,
                                    "install_recommends": null,
                                    "lock_timeout": 60,
                                    "name": "jq",
                                    "only_upgrade": false,
                                    "package": [
                                        "jq"
                                    ],
                                    "policy_rc_d": null,
                                    "purge": false,
                                    "state": "present",
                                    "update_cache": true,
                                    "update_cache_retries": 5,
                                    "update_cache_retry_max_delay": 12,
                                    "upgrade": null
                                }
                            },
                            "stderr": "debconf: delaying package configuration, since apt-utils is not installed\n",
                            "stderr_lines": [
                                "debconf: delaying package configuration, since apt-utils is not installed"
                            ],
                            "stdout": "Reading package lists...\nBuilding dependency tree...\nReading state information...\nThe following additional packages will be installed:\n  libjq1 libonig5\nThe following NEW packages will be installed:\n  jq libjq1 libonig5\n0 upgraded, 3 newly installed, 0 to remove and 2 not upgraded.\nNeed to get 357 kB of archives.\nAfter this operation, 1087 kB of additional disk space will be used.\nGet:1 http://nova.clouds.archive.ubuntu.com/ubuntu jammy/main amd64 libonig5 amd64 6.9.7.1-2build1 [172 kB]\nGet:2 http://nova.clouds.archive.ubuntu.com/ubuntu jammy-updates/main amd64 libjq1 amd64 1.6-2.1ubuntu3.1 [133 kB]\nGet:3 http://nova.clouds.archive.ubuntu.com/ubuntu jammy-updates/main amd64 jq amd64 1.6-2.1ubuntu3.1 [52.5 kB]\nFetched 357 kB in 0s (2386 kB/s)\nSelecting previously unselected package libonig5:amd64.\r\n(Reading database ... \r(Reading database ... 5%\r(Reading database ... 10%\r(Reading database ... 15%\r(Reading database ... 20%\r(Reading database ... 25%\r(Reading database ... 30%\r(Reading database ... 35%\r(Reading database ... 40%\r(Reading database ... 45%\r(Reading database ... 50%\r(Reading database ... 55%\r(Reading database ... 60%\r(Reading database ... 65%\r(Reading database ... 70%\r(Reading database ... 75%\r(Reading database ... 80%\r(Reading database ... 85%\r(Reading database ... 90%\r(Reading database ... 95%\r(Reading database ... 100%\r(Reading database ... 
30406 files and directories currently installed.)\r\nPreparing to unpack .../libonig5_6.9.7.1-2build1_amd64.deb ...\r\nUnpacking libonig5:amd64 (6.9.7.1-2build1) ...\r\nSelecting previously unselected package libjq1:amd64.\r\nPreparing to unpack .../libjq1_1.6-2.1ubuntu3.1_amd64.deb ...\r\nUnpacking libjq1:amd64 (1.6-2.1ubuntu3.1) ...\r\nSelecting previously unselected package jq.\r\nPreparing to unpack .../jq_1.6-2.1ubuntu3.1_amd64.deb ...\r\nUnpacking jq (1.6-2.1ubuntu3.1) ...\r\nSetting up libonig5:amd64 (6.9.7.1-2build1) ...\r\nSetting up libjq1:amd64 (1.6-2.1ubuntu3.1) ...\r\nSetting up jq (1.6-2.1ubuntu3.1) ...\r\nProcessing triggers for libc-bin (2.35-0ubuntu3.13) ...\r\n",
                            "stdout_lines": [
                                "Reading package lists...",
                                "Building dependency tree...",
                                "Reading state information...",
                                "The following additional packages will be installed:",
                                "  libjq1 libonig5",
                                "The following NEW packages will be installed:",
                                "  jq libjq1 libonig5",
                                "0 upgraded, 3 newly installed, 0 to remove and 2 not upgraded.",
                                "Need to get 357 kB of archives.",
                                "After this operation, 1087 kB of additional disk space will be used.",
                                "Get:1 http://nova.clouds.archive.ubuntu.com/ubuntu jammy/main amd64 libonig5 amd64 6.9.7.1-2build1 [172 kB]",
                                "Get:2 http://nova.clouds.archive.ubuntu.com/ubuntu jammy-updates/main amd64 libjq1 amd64 1.6-2.1ubuntu3.1 [133 kB]",
                                "Get:3 http://nova.clouds.archive.ubuntu.com/ubuntu jammy-updates/main amd64 jq amd64 1.6-2.1ubuntu3.1 [52.5 kB]",
                                "Fetched 357 kB in 0s (2386 kB/s)",
                                "Selecting previously unselected package libonig5:amd64.",
                                "(Reading database ... ",
                                "(Reading database ... 5%",
                                "(Reading database ... 10%",
                                "(Reading database ... 15%",
                                "(Reading database ... 20%",
                                "(Reading database ... 25%",
                                "(Reading database ... 30%",
                                "(Reading database ... 35%",
                                "(Reading database ... 40%",
                                "(Reading database ... 45%",
                                "(Reading database ... 50%",
                                "(Reading database ... 55%",
                                "(Reading database ... 60%",
                                "(Reading database ... 65%",
                                "(Reading database ... 70%",
                                "(Reading database ... 75%",
                                "(Reading database ... 80%",
                                "(Reading database ... 85%",
                                "(Reading database ... 90%",
                                "(Reading database ... 95%",
                                "(Reading database ... 100%",
                                "(Reading database ... 30406 files and directories currently installed.)",
                                "Preparing to unpack .../libonig5_6.9.7.1-2build1_amd64.deb ...",
                                "Unpacking libonig5:amd64 (6.9.7.1-2build1) ...",
                                "Selecting previously unselected package libjq1:amd64.",
                                "Preparing to unpack .../libjq1_1.6-2.1ubuntu3.1_amd64.deb ...",
                                "Unpacking libjq1:amd64 (1.6-2.1ubuntu3.1) ...",
                                "Selecting previously unselected package jq.",
                                "Preparing to unpack .../jq_1.6-2.1ubuntu3.1_amd64.deb ...",
                                "Unpacking jq (1.6-2.1ubuntu3.1) ...",
                                "Setting up libonig5:amd64 (6.9.7.1-2build1) ...",
                                "Setting up libjq1:amd64 (1.6-2.1ubuntu3.1) ...",
                                "Setting up jq (1.6-2.1ubuntu3.1) ...",
                                "Processing triggers for libc-bin (2.35-0ubuntu3.13) ..."
                            ]
                        }
                    },
                    "task": {
                        "duration": {
                            "end": "2026-02-16T22:54:33.581896Z",
                            "start": "2026-02-16T22:54:24.705066Z"
                        },
                        "id": "0242ac17-0010-de89-2415-000000000004",
                        "name": "Install \"jq\" for log collection"
                    }
                }
            ]
        }
    ],
    "stats": {
        "instance": {
            "changed": 1,
            "failures": 0,
            "ignored": 0,
            "ok": 1,
            "rescued": 0,
            "skipped": 0,
            "unreachable": 0
        }
    },
    "trusted": false
},
{
    "branch": "main",
    "index": "0",
    "phase": "run",
    "playbook": "github.com/vexxhost/zuul-jobs/playbooks/molecule/run.yaml",
    "plays": [
        {
            "play": {
                "duration": {
                    "end": "2026-02-16T23:06:47.903223Z",
                    "start": "2026-02-16T22:54:34.288405Z"
                },
                "id": "0242ac17-0010-434f-163f-000000000002",
                "name": "all"
            },
            "tasks": [
                {
                    "hosts": {
                        "instance": {
                            "action": "ansible.builtin.copy",
                            "changed": true,
                            "checksum": "d64abd4e4656d6252b498914fd4ed3e783f5b32b",
                            "dest": "src/github.com/vexxhost/atmosphere/inventory.yaml",
                            "diff": [],
                            "gid": 1000,
                            "group": "zuul",
                            "invocation": {
                                "module_args": {
                                    "_original_basename": "inventory.yaml",
                                    "attributes": null,
                                    "backup": false,
                                    "checksum": "d64abd4e4656d6252b498914fd4ed3e783f5b32b",
                                    "content": null,
                                    "dest": "src/github.com/vexxhost/atmosphere",
                                    "directory_mode": null,
                                    "follow": false,
                                    "force": true,
                                    "group": null,
                                    "local_follow": null,
                                    "mode": 420,
                                    "owner": null,
                                    "remote_src": null,
                                    "selevel": null,
                                    "serole": null,
                                    "setype": null,
                                    "seuser": null,
                                    "src": "/home/zuul/.ansible/tmp/ansible-tmp-1771282474.3378396-5-200534648365387/source",
                                    "unsafe_writes": false,
                                    "validate": null
                                }
                            },
                            "md5sum": "e618f71cf61aa7cf9f24a24dc565dad7",
                            "mode": "0644",
                            "owner": "zuul",
                            "size": 27864,
                            "src": "/home/zuul/.ansible/tmp/ansible-tmp-1771282474.3378396-5-200534648365387/source",
                            "state": "file",
                            "uid": 1000
                        }
                    },
                    "task": {
                        "duration": {
                            "end": "2026-02-16T22:54:35.115263Z",
                            "start": "2026-02-16T22:54:34.299332Z"
                        },
                        "id": "0242ac17-0010-434f-163f-000000000004",
                        "name": "Copy inventory file for Zuul"
                    }
                },
                {
                    "hosts": {
                        "instance": {
                            "action": "ansible.builtin.replace",
                            "changed": true,
                            "invocation": {
                                "module_args": {
                                    "after": null,
                                    "attributes": null,
                                    "backup": false,
                                    "before": null,
                                    "encoding": "utf-8",
                                    "group": null,
                                    "mode": null,
                                    "owner": null,
                                    "path": "src/github.com/vexxhost/atmosphere/inventory.yaml",
                                    "regexp": "(^\\s*ansible_host:\\s*).*$",
                                    "replace": "\\1\"{{ nodepool.private_ipv4 }}\"",
                                    "selevel": null,
                                    "serole": null,
                                    "setype": null,
                                    "seuser": null,
                                    "unsafe_writes": false,
                                    "validate": null
                                }
                            },
                            "msg": "1 replacements made",
                            "rc": 0
                        }
                    },
                    "task": {
                        "duration": {
                            "end": "2026-02-16T22:54:35.394440Z",
                            "start": "2026-02-16T22:54:35.123565Z"
                        },
                        "id": "0242ac17-0010-434f-163f-000000000005",
                        "name": "Switch \"ansible_host\" to private IP"
                    }
                },
                {
                    "hosts": {
                        "instance": {
                            "action": "ansible.builtin.command",
                            "changed": true,
                            "cmd": [
                                "uv",
                                "run",
                                "molecule",
                                "test",
                                "--destroy",
                                "never",
                                "-s",
                                "csi"
                            ],
                            "delta": "0:12:11.843175",
                            "end": "2026-02-16 23:06:47.705491",
                            "invocation": {
                                "module_args": {
                                    "_raw_params": "uv run molecule test --destroy never -s csi",
                                    "_uses_shell": false,
                                    "argv": null,
                                    "chdir": "src/github.com/vexxhost/atmosphere",
                                    "creates": null,
                                    "executable": null,
                                    "expand_argument_vars": true,
                                    "removes": null,
                                    "stdin": null,
                                    "stdin_add_newline": true,
                                    "strip_empty_ends": true,
                                    "zuul_ansible_split_streams": false,
                                    "zuul_log_id": "0242ac17-0010-434f-163f-000000000006-1-instance",
                                    "zuul_no_log": false,
                                    "zuul_output_max_bytes": 1073741824
                                }
                            },
                            "msg": "",
                            "rc": 0,
                            "start": "2026-02-16 22:54:35.862316",
                            "stderr": "",
                            "stderr_lines": [],
                            "stdout": "Using CPython 3.10.12 interpreter at: /usr/bin/python3\nCreating virtual environment at: .venv\n   Building atmosphere @ file:///home/zuul/src/github.com/vexxhost/atmosphere\nDownloading cryptography (4.2MiB)\nDownloading pygments (1.2MiB)\nDownloading ansible-core (2.1MiB)\nDownloading kubernetes (1.9MiB)\nDownloading netaddr (2.2MiB)\nDownloading openstacksdk (1.7MiB)\nDownloading setuptools (1.1MiB)\nDownloading rjsonnet (1.2MiB)\n   Building pyperclip==1.9.0\n Downloading rjsonnet\n Downloading pygments\n Downloading netaddr\n Downloading cryptography\n Downloading setuptools\n Downloading kubernetes\n Downloading ansible-core\n Downloading openstacksdk\n      Built pyperclip==1.9.0\n      Built atmosphere @ file:///home/zuul/src/github.com/vexxhost/atmosphere\nInstalled 79 packages in 62ms\nWARNING  Molecule scenarios should migrate to 'extensions/molecule'\nINFO     [csi > discovery] scenario test matrix: dependency, cleanup, destroy, syntax, create, prepare, converge, idempotence, side_effect, verify, cleanup, destroy\nINFO     [csi > prerun] Performing prerun with role_name_check=0...\nINFO     [csi > dependency] Executing\nWARNING  [csi > dependency] Missing roles requirements file: requirements.yml\nWARNING  [csi > dependency] Missing collections requirements file: collections.yml\nWARNING  [csi > dependency] Executed: 2 missing (Remove from test_sequence to suppress)\nINFO     [csi > cleanup] Executing\nWARNING  [csi > cleanup] Executed: Missing playbook (Remove from test_sequence to suppress)\nINFO     [csi > destroy] Executing\nWARNING  [csi > destroy] Skipping, '--destroy=never' requested.\nINFO     [csi > destroy] Executed: Successful\nINFO     [csi > syntax] Executing\n\nplaybook: /home/zuul/src/github.com/vexxhost/atmosphere/molecule/csi/converge.yml\nINFO     [csi > syntax] Executed: Successful\nINFO     [csi > create] Executing\nWARNING  [csi > create] Executed: Missing playbook (Remove from test_sequence to 
suppress)\nINFO     [csi > prepare] Executing\n\nPLAY [Prepare] *****************************************************************\n\nTASK [Gathering Facts] *********************************************************\nMonday 16 February 2026  22:55:18 +0000 (0:00:00.027)       0:00:00.027 *******\n[WARNING]: Platform linux on host instance is using the discovered Python\ninterpreter at /usr/bin/python3.10, but future installation of another Python\ninterpreter could change the meaning of that path. See\nhttps://docs.ansible.com/ansible-\ncore/2.17/reference_appendices/interpreter_discovery.html for more information.\nok: [instance]\n\nTASK [Configure short hostname] ************************************************\nMonday 16 February 2026  22:55:19 +0000 (0:00:01.045)       0:00:01.073 *******\nchanged: [instance]\n\nTASK [Ensure hostname inside hosts file] ***************************************\nMonday 16 February 2026  22:55:19 +0000 (0:00:00.633)       0:00:01.706 *******\n[WARNING]: Module remote_tmp /root/.ansible/tmp did not exist and was created\nwith a mode of 0700, this may cause issues when running as another user. 
To\navoid this, create the remote_tmp dir with the correct permissions manually\nchanged: [instance]\n\nTASK [Purge \"snapd\" package] ***************************************************\nMonday 16 February 2026  22:55:20 +0000 (0:00:00.263)       0:00:01.969 *******\nok: [instance]\n\nPLAY [Create devices for Ceph] *************************************************\n\nTASK [Gathering Facts] *********************************************************\nMonday 16 February 2026  22:55:20 +0000 (0:00:00.730)       0:00:02.700 *******\nok: [instance]\n\nTASK [Install depedencies] *****************************************************\nMonday 16 February 2026  22:55:21 +0000 (0:00:00.653)       0:00:03.354 *******\nchanged: [instance]\n\nTASK [Start up service] ********************************************************\nMonday 16 February 2026  22:55:40 +0000 (0:00:19.601)       0:00:22.956 *******\nok: [instance]\n\nTASK [Generate lvm.conf] *******************************************************\nMonday 16 February 2026  22:55:41 +0000 (0:00:00.519)       0:00:23.475 *******\nok: [instance]\n\nTASK [Write /etc/lvm/lvm.conf] *************************************************\nMonday 16 February 2026  22:55:41 +0000 (0:00:00.277)       0:00:23.753 *******\nchanged: [instance]\n\nTASK [Get list of all loopback devices] ****************************************\nMonday 16 February 2026  22:55:42 +0000 (0:00:00.568)       0:00:24.321 *******\nok: [instance]\n\nTASK [Fail if there is any existing loopback devices] **************************\nMonday 16 February 2026  22:55:42 +0000 (0:00:00.175)       0:00:24.497 *******\nskipping: [instance]\n\nTASK [Create devices for Ceph] *************************************************\nMonday 16 February 2026  22:55:42 +0000 (0:00:00.018)       0:00:24.516 *******\nchanged: [instance] => (item=osd0)\nchanged: [instance] => (item=osd1)\nchanged: [instance] => (item=osd2)\n\nTASK [Set permissions on loopback devices] 
*************************************\nMonday 16 February 2026  22:55:43 +0000 (0:00:00.519)       0:00:25.035 *******\nchanged: [instance] => (item=osd0)\nchanged: [instance] => (item=osd1)\nchanged: [instance] => (item=osd2)\n\nTASK [Start loop devices] ******************************************************\nMonday 16 February 2026  22:55:43 +0000 (0:00:00.590)       0:00:25.626 *******\nchanged: [instance] => (item=osd0)\nchanged: [instance] => (item=osd1)\nchanged: [instance] => (item=osd2)\n\nTASK [Create a volume group for each loop device] ******************************\nMonday 16 February 2026  22:55:44 +0000 (0:00:00.636)       0:00:26.262 *******\nchanged: [instance] => (item=osd0)\nchanged: [instance] => (item=osd1)\nchanged: [instance] => (item=osd2)\n\nTASK [Create a logical volume for each loop device] ****************************\nMonday 16 February 2026  22:55:46 +0000 (0:00:02.453)       0:00:28.715 *******\nchanged: [instance] => (item=ceph-instance-osd0)\nchanged: [instance] => (item=ceph-instance-osd1)\nchanged: [instance] => (item=ceph-instance-osd2)\n\nPLAY RECAP *********************************************************************\ninstance                   : ok=15   changed=9    unreachable=0    failed=0    skipped=1    rescued=0    ignored=0\n\nMonday 16 February 2026  22:55:48 +0000 (0:00:01.655)       0:00:30.371 *******\n===============================================================================\nInstall depedencies ---------------------------------------------------- 19.60s\nCreate a volume group for each loop device ------------------------------ 2.45s\nCreate a logical volume for each loop device ---------------------------- 1.66s\nGathering Facts --------------------------------------------------------- 1.05s\nPurge \"snapd\" package --------------------------------------------------- 0.73s\nGathering Facts --------------------------------------------------------- 0.65s\nStart loop devices 
------------------------------------------------------ 0.64s\nConfigure short hostname ------------------------------------------------ 0.63s\nSet permissions on loopback devices ------------------------------------- 0.59s\nWrite /etc/lvm/lvm.conf ------------------------------------------------- 0.57s\nStart up service -------------------------------------------------------- 0.52s\nCreate devices for Ceph ------------------------------------------------- 0.52s\nGenerate lvm.conf ------------------------------------------------------- 0.28s\nEnsure hostname inside hosts file --------------------------------------- 0.26s\nGet list of all loopback devices ---------------------------------------- 0.18s\nFail if there is any existing loopback devices -------------------------- 0.02s\nINFO     [csi > prepare] Executed: Successful\nINFO     [csi > converge] Executing\n\nPLAY [Debug CSI driver value] **************************************************\n\nTASK [Gathering Facts] *********************************************************\nMonday 16 February 2026  22:55:49 +0000 (0:00:00.035)       0:00:00.035 *******\n[WARNING]: Platform linux on host instance is using the discovered Python\ninterpreter at /usr/bin/python3.10, but future installation of another Python\ninterpreter could change the meaning of that path. 
See\nhttps://docs.ansible.com/ansible-\ncore/2.17/reference_appendices/interpreter_discovery.html for more information.\nok: [instance]\n\nTASK [Display CSI driver value and environment variable] ***********************\nMonday 16 February 2026  22:55:50 +0000 (0:00:00.899)       0:00:00.935 *******\nok: [instance] => {\n    \"msg\": \"csi_driver=rbd, MOLECULE_CSI_DRIVER=\"\n}\n\nPLAY [all] *********************************************************************\n\nTASK [Gathering Facts] *********************************************************\nMonday 16 February 2026  22:55:50 +0000 (0:00:00.036)       0:00:00.971 *******\nok: [instance]\n\nTASK [Set a fact with the \"atmosphere_images\" for other plays] *****************\nMonday 16 February 2026  22:55:51 +0000 (0:00:00.803)       0:00:01.774 *******\nok: [instance]\n\nPLAY [Deploy Ceph monitors & managers] *****************************************\n\nTASK [Gathering Facts] *********************************************************\nMonday 16 February 2026  22:55:51 +0000 (0:00:00.170)       0:00:01.944 *******\nok: [instance]\n\nTASK [vexxhost.containers.forget_package : Forget package] *********************\nMonday 16 February 2026  22:55:52 +0000 (0:00:00.813)       0:00:02.758 *******\nok: [instance]\n\nTASK [vexxhost.containers.package : Update state for tar] **********************\nMonday 16 February 2026  22:55:52 +0000 (0:00:00.271)       0:00:03.030 *******\nskipping: [instance]\n\nTASK [vexxhost.containers.directory : Create directory (/var/lib/downloads)] ***\nMonday 16 February 2026  22:55:52 +0000 (0:00:00.038)       0:00:03.068 *******\nchanged: [instance]\n\nTASK [vexxhost.containers.download_artifact : Starting download of file] *******\nMonday 16 February 2026  22:55:53 +0000 (0:00:00.271)       0:00:03.340 *******\nok: [instance] => {\n    \"msg\": \"https://github.com/opencontainers/runc/releases/download/v1.4.0/runc.amd64\"\n}\n\nTASK [vexxhost.containers.download_artifact : Download item] 
*******************\nMonday 16 February 2026  22:55:53 +0000 (0:00:00.058)       0:00:03.399 *******\nchanged: [instance]\n\nTASK [vexxhost.containers.download_artifact : Extract archive] *****************\nMonday 16 February 2026  22:55:53 +0000 (0:00:00.607)       0:00:04.006 *******\nskipping: [instance]\n\nTASK [vexxhost.containers.package : Update state for tar] **********************\nMonday 16 February 2026  22:55:53 +0000 (0:00:00.050)       0:00:04.056 *******\nskipping: [instance]\n\nTASK [vexxhost.containers.forget_package : Forget package] *********************\nMonday 16 February 2026  22:55:53 +0000 (0:00:00.048)       0:00:04.105 *******\nok: [instance]\n\nTASK [vexxhost.containers.package : Update state for tar] **********************\nMonday 16 February 2026  22:55:54 +0000 (0:00:00.191)       0:00:04.297 *******\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Starting download of file] *******\nMonday 16 February 2026  22:55:55 +0000 (0:00:01.086)       0:00:05.384 *******\nok: [instance] => {\n    \"msg\": \"https://github.com/containerd/containerd/releases/download/v2.2.0/containerd-2.2.0-linux-amd64.tar.gz\"\n}\n\nTASK [vexxhost.containers.download_artifact : Download item] *******************\nMonday 16 February 2026  22:55:55 +0000 (0:00:00.061)       0:00:05.445 *******\nchanged: [instance]\n\nTASK [vexxhost.containers.download_artifact : Extract archive] *****************\nMonday 16 February 2026  22:55:55 +0000 (0:00:00.729)       0:00:06.175 *******\nchanged: [instance]\n\nTASK [vexxhost.containers.containerd : Install SELinux packages] ***************\nMonday 16 February 2026  22:55:58 +0000 (0:00:02.779)       0:00:08.955 *******\nskipping: [instance]\n\nTASK [vexxhost.containers.containerd : Set SELinux to permissive at runtime] ***\nMonday 16 February 2026  22:55:58 +0000 (0:00:00.034)       0:00:08.989 *******\nskipping: [instance]\n\nTASK [vexxhost.containers.containerd : Persist SELinux permissive mode] 
********\nMonday 16 February 2026  22:55:58 +0000 (0:00:00.027)       0:00:09.017 *******\nskipping: [instance]\n\nTASK [vexxhost.containers.containerd : Install AppArmor packages] **************\nMonday 16 February 2026  22:55:58 +0000 (0:00:00.030)       0:00:09.047 *******\nchanged: [instance]\n\nTASK [vexxhost.containers.containerd : Create systemd service file for containerd] ***\nMonday 16 February 2026  22:56:03 +0000 (0:00:04.639)       0:00:13.687 *******\nchanged: [instance]\n\nTASK [vexxhost.containers.containerd : Create folders for configuration] *******\nMonday 16 February 2026  22:56:03 +0000 (0:00:00.485)       0:00:14.172 *******\nchanged: [instance] => (item={'path': '/etc/containerd'})\nchanged: [instance] => (item={'path': '/var/lib/containerd', 'mode': '0o700'})\nchanged: [instance] => (item={'path': '/run/containerd', 'mode': '0o711'})\nchanged: [instance] => (item={'path': '/run/containerd/io.containerd.grpc.v1.cri', 'mode': '0o700'})\nchanged: [instance] => (item={'path': '/run/containerd/io.containerd.sandbox.controller.v1.shim', 'mode': '0o700'})\n\nTASK [vexxhost.containers.containerd : Create containerd config file] **********\nMonday 16 February 2026  22:56:04 +0000 (0:00:00.859)       0:00:15.031 *******\nchanged: [instance]\n\nTASK [vexxhost.containers.containerd : Force any restarts if necessary] ********\nMonday 16 February 2026  22:56:05 +0000 (0:00:00.483)       0:00:15.515 *******\n\nRUNNING HANDLER [vexxhost.containers.containerd : Reload systemd] **************\nMonday 16 February 2026  22:56:05 +0000 (0:00:00.017)       0:00:15.532 *******\nok: [instance]\n\nRUNNING HANDLER [vexxhost.containers.containerd : Restart containerd] **********\nMonday 16 February 2026  22:56:06 +0000 (0:00:00.953)       0:00:16.486 *******\nchanged: [instance]\n\nTASK [vexxhost.containers.containerd : Enable and start service] ***************\nMonday 16 February 2026  22:56:06 +0000 (0:00:00.564)       0:00:17.050 *******\nchanged: 
[instance]\n\nTASK [vexxhost.containers.forget_package : Forget package] *********************\nMonday 16 February 2026  22:56:07 +0000 (0:00:00.559)       0:00:17.610 *******\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Starting download of file] *******\nMonday 16 February 2026  22:56:07 +0000 (0:00:00.197)       0:00:17.807 *******\nok: [instance] => {\n    \"msg\": \"https://download.docker.com/linux/static/stable/x86_64/docker-24.0.9.tgz\"\n}\n\nTASK [vexxhost.containers.download_artifact : Download item] *******************\nMonday 16 February 2026  22:56:07 +0000 (0:00:00.053)       0:00:17.861 *******\nchanged: [instance]\n\nTASK [vexxhost.containers.download_artifact : Extract archive] *****************\nMonday 16 February 2026  22:56:08 +0000 (0:00:00.755)       0:00:18.617 *******\nchanged: [instance]\n\nTASK [vexxhost.containers.docker : Install AppArmor packages] ******************\nMonday 16 February 2026  22:56:12 +0000 (0:00:04.272)       0:00:22.889 *******\nok: [instance]\n\nTASK [vexxhost.containers.docker : Ensure group \"docker\" exists] ***************\nMonday 16 February 2026  22:56:13 +0000 (0:00:00.925)       0:00:23.815 *******\nchanged: [instance]\n\nTASK [vexxhost.containers.docker : Create systemd service file for docker] *****\nMonday 16 February 2026  22:56:13 +0000 (0:00:00.319)       0:00:24.135 *******\nchanged: [instance]\n\nTASK [vexxhost.containers.docker : Create folders for configuration] ***********\nMonday 16 February 2026  22:56:14 +0000 (0:00:00.411)       0:00:24.547 *******\nchanged: [instance] => (item={'path': '/etc/docker'})\nchanged: [instance] => (item={'path': '/var/lib/docker', 'mode': '0o710'})\nchanged: [instance] => (item={'path': '/run/docker', 'mode': '0o711'})\n\nTASK [vexxhost.containers.docker : Create systemd socket file for docker] ******\nMonday 16 February 2026  22:56:14 +0000 (0:00:00.526)       0:00:25.073 *******\nchanged: [instance]\n\nTASK [vexxhost.containers.docker : Create 
docker daemon config file] ***********\nMonday 16 February 2026  22:56:15 +0000 (0:00:00.431)       0:00:25.504 *******\nchanged: [instance]\n\nTASK [vexxhost.containers.docker : Force any restarts if necessary] ************\nMonday 16 February 2026  22:56:15 +0000 (0:00:00.401)       0:00:25.905 *******\n\nRUNNING HANDLER [vexxhost.containers.containerd : Reload systemd] **************\nMonday 16 February 2026  22:56:15 +0000 (0:00:00.016)       0:00:25.922 *******\nok: [instance]\n\nRUNNING HANDLER [vexxhost.containers.docker : Restart docker] ******************\nMonday 16 February 2026  22:56:16 +0000 (0:00:00.773)       0:00:26.696 *******\nchanged: [instance]\n\nTASK [vexxhost.containers.docker : Enable and start service] *******************\nMonday 16 February 2026  22:56:17 +0000 (0:00:00.839)       0:00:27.536 *******\nchanged: [instance]\n\nTASK [vexxhost.ceph.cephadm : Gather variables for each operating system] ******\nMonday 16 February 2026  22:56:17 +0000 (0:00:00.528)       0:00:28.064 *******\nok: [instance] => (item=/home/zuul/.ansible/collections/ansible_collections/vexxhost/ceph/roles/cephadm/vars/ubuntu-22.04.yml)\n\nTASK [vexxhost.ceph.cephadm : Install packages] ********************************\nMonday 16 February 2026  22:56:17 +0000 (0:00:00.053)       0:00:28.118 *******\nchanged: [instance]\n\nTASK [vexxhost.ceph.cephadm : Ensure services are started] *********************\nMonday 16 February 2026  22:56:23 +0000 (0:00:05.130)       0:00:33.248 *******\nok: [instance] => (item=chronyd)\nok: [instance] => (item=sshd)\n\nTASK [vexxhost.ceph.cephadm : Download \"cephadm\"] ******************************\nMonday 16 February 2026  22:56:23 +0000 (0:00:00.623)       0:00:33.872 *******\nchanged: [instance]\n\nTASK [vexxhost.ceph.cephadm : Remove cephadm from old path] ********************\nMonday 16 February 2026  22:56:24 +0000 (0:00:01.283)       0:00:35.155 *******\nok: [instance]\n\nTASK [vexxhost.ceph.cephadm : Ensure \"cephadm\" user is 
present] ****************\nMonday 16 February 2026  22:56:25 +0000 (0:00:00.189)       0:00:35.344 *******\nchanged: [instance]\n\nTASK [vexxhost.ceph.cephadm : Allow \"cephadm\" user to have passwordless sudo] ***\nMonday 16 February 2026  22:56:25 +0000 (0:00:00.428)       0:00:35.773 *******\nchanged: [instance]\n\nTASK [vexxhost.ceph.mon : Get `cephadm ls` status] *****************************\nMonday 16 February 2026  22:56:25 +0000 (0:00:00.320)       0:00:36.093 *******\nok: [instance]\n\nTASK [vexxhost.ceph.mon : Parse the `cephadm ls` output] ***********************\nMonday 16 February 2026  22:56:27 +0000 (0:00:01.603)       0:00:37.696 *******\nok: [instance]\n\nTASK [vexxhost.ceph.mon : Assimilate existing configs in `ceph.conf`] **********\nMonday 16 February 2026  22:56:27 +0000 (0:00:00.042)       0:00:37.738 *******\nskipping: [instance]\n\nTASK [vexxhost.ceph.mon : Adopt monitor to cluster] ****************************\nMonday 16 February 2026  22:56:27 +0000 (0:00:00.028)       0:00:37.767 *******\nskipping: [instance]\n\nTASK [vexxhost.ceph.mon : Adopt manager to cluster] ****************************\nMonday 16 February 2026  22:56:27 +0000 (0:00:00.026)       0:00:37.794 *******\nskipping: [instance]\n\nTASK [vexxhost.ceph.mon : Enable \"cephadm\" mgr module] *************************\nMonday 16 February 2026  22:56:27 +0000 (0:00:00.028)       0:00:37.823 *******\nskipping: [instance]\n\nTASK [vexxhost.ceph.mon : Set orchestrator backend to \"cephadm\"] ***************\nMonday 16 February 2026  22:56:27 +0000 (0:00:00.027)       0:00:37.850 *******\nskipping: [instance]\n\nTASK [vexxhost.ceph.mon : Use `cephadm` user for cephadm] **********************\nMonday 16 February 2026  22:56:27 +0000 (0:00:00.033)       0:00:37.884 *******\nskipping: [instance]\n\nTASK [vexxhost.ceph.mon : Generate \"cephadm\" key] ******************************\nMonday 16 February 2026  22:56:27 +0000 (0:00:00.029)       0:00:37.913 *******\nskipping: 
[instance]\n\nTASK [vexxhost.ceph.mon : Set Ceph Monitor IP address] *************************\nMonday 16 February 2026  22:56:27 +0000 (0:00:00.028)       0:00:37.942 *******\nok: [instance]\n\nTASK [vexxhost.ceph.mon : Check if any node is bootstrapped] *******************\nMonday 16 February 2026  22:56:27 +0000 (0:00:00.097)       0:00:38.039 *******\nok: [instance] => (item=instance)\n\nTASK [vexxhost.ceph.mon : Select pre-existing bootstrap node if exists] ********\nMonday 16 February 2026  22:56:28 +0000 (0:00:00.188)       0:00:38.227 *******\nok: [instance]\n\nTASK [vexxhost.ceph.mon : Bootstrap cluster] ***********************************\nMonday 16 February 2026  22:56:28 +0000 (0:00:00.040)       0:00:38.268 *******\nincluded: /home/zuul/.ansible/collections/ansible_collections/vexxhost/ceph/roles/mon/tasks/bootstrap-ceph.yml for instance\n\nTASK [vexxhost.ceph.mon : Generate temporary file for \"ceph.conf\"] *************\nMonday 16 February 2026  22:56:28 +0000 (0:00:00.056)       0:00:38.325 *******\nchanged: [instance]\n\nTASK [vexxhost.ceph.mon : Include extra configuration values] ******************\nMonday 16 February 2026  22:56:28 +0000 (0:00:00.289)       0:00:38.614 *******\nchanged: [instance] => (item={'option': 'mon allow pool size one', 'section': 'global', 'value': True})\nchanged: [instance] => (item={'option': 'osd crush chooseleaf type', 'section': 'global', 'value': 0})\nchanged: [instance] => (item={'option': 'auth allow insecure global id reclaim', 'section': 'mon', 'value': False})\n\nTASK [vexxhost.ceph.mon : Run Bootstrap coomand] *******************************\nMonday 16 February 2026  22:56:29 +0000 (0:00:00.646)       0:00:39.261 *******\nok: [instance]\n\nTASK [vexxhost.ceph.mon : Remove temporary file for \"ceph.conf\"] ***************\nMonday 16 February 2026  22:57:53 +0000 (0:01:24.772)       0:02:04.033 *******\nchanged: [instance]\n\nTASK [vexxhost.ceph.mon : Set bootstrap node] 
**********************************\nMonday 16 February 2026  22:57:54 +0000 (0:00:00.208)       0:02:04.242 *******\nok: [instance]\n\nTASK [Install Ceph host] *******************************************************\nMonday 16 February 2026  22:57:54 +0000 (0:00:00.036)       0:02:04.279 *******\nincluded: vexxhost.ceph.cephadm_host for instance\n\nTASK [vexxhost.ceph.cephadm_host : Get public SSH key for \"cephadm\" user] ******\nMonday 16 February 2026  22:57:54 +0000 (0:00:00.076)       0:02:04.355 *******\nok: [instance]\n\nTASK [vexxhost.ceph.cephadm_host : Set fact with public SSH key for \"cephadm\" user] ***\nMonday 16 February 2026  22:57:55 +0000 (0:00:01.492)       0:02:05.848 *******\nok: [instance] => (item=instance)\n\nTASK [vexxhost.ceph.cephadm_host : Set authorized key for \"cephadm\"] ***********\nMonday 16 February 2026  22:57:55 +0000 (0:00:00.052)       0:02:05.900 *******\nok: [instance]\n\nTASK [vexxhost.ceph.cephadm_host : Add new host to Ceph] ***********************\nMonday 16 February 2026  22:57:56 +0000 (0:00:00.370)       0:02:06.271 *******\nok: [instance]\n\nTASK [vexxhost.ceph.mon : Configure \"mon\" label for monitors] ******************\nMonday 16 February 2026  22:57:57 +0000 (0:00:01.761)       0:02:08.033 *******\nok: [instance]\n\nTASK [vexxhost.ceph.mon : Validate monitor exist] ******************************\nMonday 16 February 2026  22:57:59 +0000 (0:00:01.466)       0:02:09.499 *******\nok: [instance]\n\nTASK [Install Ceph host] *******************************************************\nMonday 16 February 2026  22:58:09 +0000 (0:00:09.980)       0:02:19.480 *******\nincluded: vexxhost.ceph.cephadm_host for instance\n\nTASK [vexxhost.ceph.cephadm_host : Get public SSH key for \"cephadm\" user] ******\nMonday 16 February 2026  22:58:09 +0000 (0:00:00.080)       0:02:19.560 *******\nskipping: [instance]\n\nTASK [vexxhost.ceph.cephadm_host : Set fact with public SSH key for \"cephadm\" user] ***\nMonday 16 February 2026  
22:58:09 +0000 (0:00:00.049)       0:02:19.610 *******\nskipping: [instance] => (item=instance)\nskipping: [instance]\n\nTASK [vexxhost.ceph.cephadm_host : Set authorized key for \"cephadm\"] ***********\nMonday 16 February 2026  22:58:09 +0000 (0:00:00.043)       0:02:19.653 *******\nok: [instance]\n\nTASK [vexxhost.ceph.cephadm_host : Add new host to Ceph] ***********************\nMonday 16 February 2026  22:58:09 +0000 (0:00:00.263)       0:02:19.917 *******\nok: [instance]\n\nTASK [vexxhost.ceph.mgr : Configure \"mgr\" label for managers] ******************\nMonday 16 February 2026  22:58:11 +0000 (0:00:02.244)       0:02:22.162 *******\nok: [instance]\n\nTASK [vexxhost.ceph.mgr : Validate manager exist] ******************************\nMonday 16 February 2026  22:58:13 +0000 (0:00:01.413)       0:02:23.576 *******\nok: [instance]\n\nTASK [vexxhost.ceph.mgr : Enable the Ceph Manager prometheus module] ***********\nMonday 16 February 2026  22:58:14 +0000 (0:00:01.310)       0:02:24.886 *******\nok: [instance]\n\nPLAY [Deploy Ceph OSDs] ********************************************************\n\nTASK [Gathering Facts] *********************************************************\nMonday 16 February 2026  22:58:17 +0000 (0:00:02.307)       0:02:27.193 *******\nok: [instance]\n\nTASK [vexxhost.containers.forget_package : Forget package] *********************\nMonday 16 February 2026  22:58:17 +0000 (0:00:00.785)       0:02:27.979 *******\nok: [instance]\n\nTASK [vexxhost.containers.package : Update state for tar] **********************\nMonday 16 February 2026  22:58:17 +0000 (0:00:00.198)       0:02:28.178 *******\nskipping: [instance]\n\nTASK [vexxhost.containers.directory : Create directory (/var/lib/downloads)] ***\nMonday 16 February 2026  22:58:18 +0000 (0:00:00.030)       0:02:28.208 *******\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Starting download of file] *******\nMonday 16 February 2026  22:58:18 +0000 (0:00:00.203)       0:02:28.412 
*******\nok: [instance] => {\n    \"msg\": \"https://github.com/opencontainers/runc/releases/download/v1.4.0/runc.amd64\"\n}\n\nTASK [vexxhost.containers.download_artifact : Download item] *******************\nMonday 16 February 2026  22:58:18 +0000 (0:00:00.044)       0:02:28.456 *******\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Extract archive] *****************\nMonday 16 February 2026  22:58:18 +0000 (0:00:00.280)       0:02:28.737 *******\nskipping: [instance]\n\nTASK [vexxhost.containers.package : Update state for tar] **********************\nMonday 16 February 2026  22:58:18 +0000 (0:00:00.035)       0:02:28.772 *******\nskipping: [instance]\n\nTASK [vexxhost.containers.forget_package : Forget package] *********************\nMonday 16 February 2026  22:58:18 +0000 (0:00:00.037)       0:02:28.809 *******\nok: [instance]\n\nTASK [vexxhost.containers.package : Update state for tar] **********************\nMonday 16 February 2026  22:58:18 +0000 (0:00:00.188)       0:02:28.998 *******\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Starting download of file] *******\nMonday 16 February 2026  22:58:19 +0000 (0:00:00.979)       0:02:29.977 *******\nok: [instance] => {\n    \"msg\": \"https://github.com/containerd/containerd/releases/download/v2.2.0/containerd-2.2.0-linux-amd64.tar.gz\"\n}\n\nTASK [vexxhost.containers.download_artifact : Download item] *******************\nMonday 16 February 2026  22:58:19 +0000 (0:00:00.052)       0:02:30.029 *******\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Extract archive] *****************\nMonday 16 February 2026  22:58:20 +0000 (0:00:00.314)       0:02:30.344 *******\nok: [instance]\n\nTASK [vexxhost.containers.containerd : Install SELinux packages] ***************\nMonday 16 February 2026  22:58:22 +0000 (0:00:01.900)       0:02:32.244 *******\nskipping: [instance]\n\nTASK [vexxhost.containers.containerd : Set SELinux to permissive at runtime] ***\nMonday 16 
February 2026  22:58:22 +0000 (0:00:00.022)       0:02:32.266 *******\nskipping: [instance]\n\nTASK [vexxhost.containers.containerd : Persist SELinux permissive mode] ********\nMonday 16 February 2026  22:58:22 +0000 (0:00:00.021)       0:02:32.288 *******\nskipping: [instance]\n\nTASK [vexxhost.containers.containerd : Install AppArmor packages] **************\nMonday 16 February 2026  22:58:22 +0000 (0:00:00.022)       0:02:32.310 *******\nok: [instance]\n\nTASK [vexxhost.containers.containerd : Create systemd service file for containerd] ***\nMonday 16 February 2026  22:58:23 +0000 (0:00:00.951)       0:02:33.262 *******\nok: [instance]\n\nTASK [vexxhost.containers.containerd : Create folders for configuration] *******\nMonday 16 February 2026  22:58:23 +0000 (0:00:00.395)       0:02:33.657 *******\nok: [instance] => (item={'path': '/etc/containerd'})\nok: [instance] => (item={'path': '/var/lib/containerd', 'mode': '0o700'})\nok: [instance] => (item={'path': '/run/containerd', 'mode': '0o711'})\nok: [instance] => (item={'path': '/run/containerd/io.containerd.grpc.v1.cri', 'mode': '0o700'})\nok: [instance] => (item={'path': '/run/containerd/io.containerd.sandbox.controller.v1.shim', 'mode': '0o700'})\n\nTASK [vexxhost.containers.containerd : Create containerd config file] **********\nMonday 16 February 2026  22:58:24 +0000 (0:00:00.875)       0:02:34.533 *******\nok: [instance]\n\nTASK [vexxhost.containers.containerd : Force any restarts if necessary] ********\nMonday 16 February 2026  22:58:24 +0000 (0:00:00.598)       0:02:35.132 *******\n\nTASK [vexxhost.containers.containerd : Enable and start service] ***************\nMonday 16 February 2026  22:58:24 +0000 (0:00:00.032)       0:02:35.165 *******\nok: [instance]\n\nTASK [vexxhost.containers.forget_package : Forget package] *********************\nMonday 16 February 2026  22:58:25 +0000 (0:00:00.385)       0:02:35.550 *******\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Starting download of 
file] *******\nMonday 16 February 2026  22:58:25 +0000 (0:00:00.200)       0:02:35.750 *******\nok: [instance] => {\n    \"msg\": \"https://download.docker.com/linux/static/stable/x86_64/docker-24.0.9.tgz\"\n}\n\nTASK [vexxhost.containers.download_artifact : Download item] *******************\nMonday 16 February 2026  22:58:25 +0000 (0:00:00.048)       0:02:35.798 *******\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Extract archive] *****************\nMonday 16 February 2026  22:58:25 +0000 (0:00:00.320)       0:02:36.119 *******\nok: [instance]\n\nTASK [vexxhost.containers.docker : Install AppArmor packages] ******************\nMonday 16 February 2026  22:58:28 +0000 (0:00:03.013)       0:02:39.132 *******\nok: [instance]\n\nTASK [vexxhost.containers.docker : Ensure group \"docker\" exists] ***************\nMonday 16 February 2026  22:58:30 +0000 (0:00:01.056)       0:02:40.188 *******\nok: [instance]\n\nTASK [vexxhost.containers.docker : Create systemd service file for docker] *****\nMonday 16 February 2026  22:58:30 +0000 (0:00:00.174)       0:02:40.363 *******\nok: [instance]\n\nTASK [vexxhost.containers.docker : Create folders for configuration] ***********\nMonday 16 February 2026  22:58:30 +0000 (0:00:00.426)       0:02:40.789 *******\nok: [instance] => (item={'path': '/etc/docker'})\nok: [instance] => (item={'path': '/var/lib/docker', 'mode': '0o710'})\nok: [instance] => (item={'path': '/run/docker', 'mode': '0o711'})\n\nTASK [vexxhost.containers.docker : Create systemd socket file for docker] ******\nMonday 16 February 2026  22:58:31 +0000 (0:00:00.585)       0:02:41.374 *******\nok: [instance]\n\nTASK [vexxhost.containers.docker : Create docker daemon config file] ***********\nMonday 16 February 2026  22:58:31 +0000 (0:00:00.445)       0:02:41.820 *******\nok: [instance]\n\nTASK [vexxhost.containers.docker : Force any restarts if necessary] ************\nMonday 16 February 2026  22:58:32 +0000 (0:00:00.383)       0:02:42.203 
*******\n\nTASK [vexxhost.containers.docker : Enable and start service] *******************\nMonday 16 February 2026  22:58:32 +0000 (0:00:00.016)       0:02:42.219 *******\nok: [instance]\n\nTASK [vexxhost.ceph.cephadm : Gather variables for each operating system] ******\nMonday 16 February 2026  22:58:32 +0000 (0:00:00.358)       0:02:42.577 *******\nok: [instance] => (item=/home/zuul/.ansible/collections/ansible_collections/vexxhost/ceph/roles/cephadm/vars/ubuntu-22.04.yml)\n\nTASK [vexxhost.ceph.cephadm : Install packages] ********************************\nMonday 16 February 2026  22:58:32 +0000 (0:00:00.050)       0:02:42.628 *******\nok: [instance]\n\nTASK [vexxhost.ceph.cephadm : Ensure services are started] *********************\nMonday 16 February 2026  22:58:33 +0000 (0:00:00.994)       0:02:43.623 *******\nok: [instance] => (item=chronyd)\nok: [instance] => (item=sshd)\n\nTASK [vexxhost.ceph.cephadm : Download \"cephadm\"] ******************************\nMonday 16 February 2026  22:58:34 +0000 (0:00:00.631)       0:02:44.254 *******\nok: [instance]\n\nTASK [vexxhost.ceph.cephadm : Remove cephadm from old path] ********************\nMonday 16 February 2026  22:58:34 +0000 (0:00:00.773)       0:02:45.028 *******\nok: [instance]\n\nTASK [vexxhost.ceph.cephadm : Ensure \"cephadm\" user is present] ****************\nMonday 16 February 2026  22:58:35 +0000 (0:00:00.184)       0:02:45.212 *******\nok: [instance]\n\nTASK [vexxhost.ceph.cephadm : Allow \"cephadm\" user to have passwordless sudo] ***\nMonday 16 February 2026  22:58:35 +0000 (0:00:00.229)       0:02:45.442 *******\nok: [instance]\n\nTASK [vexxhost.ceph.osd : Get monitor status] **********************************\nMonday 16 February 2026  22:58:35 +0000 (0:00:00.196)       0:02:45.639 *******\nok: [instance] => (item=instance)\n\nTASK [vexxhost.ceph.osd : Select admin host] ***********************************\nMonday 16 February 2026  22:58:35 +0000 (0:00:00.215)       0:02:45.854 *******\nok: 
[instance]\n\nTASK [vexxhost.ceph.osd : Get `cephadm ls` status] *****************************\nMonday 16 February 2026  22:58:35 +0000 (0:00:00.047)       0:02:45.902 *******\nok: [instance]\n\nTASK [vexxhost.ceph.osd : Parse the `cephadm ls` output] ***********************\nMonday 16 February 2026  22:58:40 +0000 (0:00:05.274)       0:02:51.177 *******\nok: [instance]\n\nTASK [Install Ceph host] *******************************************************\nMonday 16 February 2026  22:58:41 +0000 (0:00:00.045)       0:02:51.222 *******\nincluded: vexxhost.ceph.cephadm_host for instance\n\nTASK [vexxhost.ceph.cephadm_host : Get public SSH key for \"cephadm\" user] ******\nMonday 16 February 2026  22:58:41 +0000 (0:00:00.057)       0:02:51.280 *******\nskipping: [instance]\n\nTASK [vexxhost.ceph.cephadm_host : Set fact with public SSH key for \"cephadm\" user] ***\nMonday 16 February 2026  22:58:41 +0000 (0:00:00.037)       0:02:51.318 *******\nskipping: [instance] => (item=instance)\nskipping: [instance]\n\nTASK [vexxhost.ceph.cephadm_host : Set authorized key for \"cephadm\"] ***********\nMonday 16 February 2026  22:58:41 +0000 (0:00:00.040)       0:02:51.359 *******\nok: [instance]\n\nTASK [vexxhost.ceph.cephadm_host : Add new host to Ceph] ***********************\nMonday 16 February 2026  22:58:41 +0000 (0:00:00.247)       0:02:51.606 *******\nok: [instance]\n\nTASK [vexxhost.ceph.osd : Adopt OSDs to cluster] *******************************\nMonday 16 February 2026  22:58:43 +0000 (0:00:01.756)       0:02:53.362 *******\nskipping: [instance]\n\nTASK [vexxhost.ceph.osd : Wait until OSD added to cephadm] *********************\nMonday 16 February 2026  22:58:43 +0000 (0:00:00.026)       0:02:53.389 *******\nskipping: [instance]\n\nTASK [vexxhost.ceph.osd : Ensure all OSDs are non-legacy] **********************\nMonday 16 February 2026  22:58:43 +0000 (0:00:00.023)       0:02:53.412 *******\nok: [instance]\n\nTASK [vexxhost.ceph.osd : Get `ceph-volume lvm list` status] 
*******************\nMonday 16 February 2026  22:58:48 +0000 (0:00:05.291)       0:02:58.704 *******\nok: [instance]\n\nTASK [vexxhost.ceph.osd : Install OSDs] ****************************************\nMonday 16 February 2026  22:58:58 +0000 (0:00:09.906)       0:03:08.610 *******\nok: [instance] => (item=/dev/ceph-instance-osd0/data)\nok: [instance] => (item=/dev/ceph-instance-osd1/data)\nok: [instance] => (item=/dev/ceph-instance-osd2/data)\n\nTASK [vexxhost.ceph.osd : Get mon dump] ****************************************\nMonday 16 February 2026  23:00:19 +0000 (0:01:20.818)       0:04:29.429 *******\nok: [instance]\n\nTASK [vexxhost.ceph.osd : Mark require osd release] ****************************\nMonday 16 February 2026  23:00:20 +0000 (0:00:01.547)       0:04:30.976 *******\nok: [instance]\n\nTASK [vexxhost.ceph.osd : Wait for all OSD to be running] **********************\nMonday 16 February 2026  23:00:22 +0000 (0:00:01.418)       0:04:32.395 *******\nincluded: /home/zuul/.ansible/collections/ansible_collections/vexxhost/ceph/roles/osd/tasks/check-osds.yml for instance\n\nTASK [vexxhost.ceph.osd : Set the retry count] *********************************\nMonday 16 February 2026  23:00:22 +0000 (0:00:00.063)       0:04:32.458 *******\nok: [instance]\n\nTASK [vexxhost.ceph.osd : Get `ceph orch ps`] **********************************\nMonday 16 February 2026  23:00:22 +0000 (0:00:00.057)       0:04:32.516 *******\nok: [instance]\n\nTASK [vexxhost.ceph.osd : OSD daemon list] *************************************\nMonday 16 February 2026  23:00:23 +0000 (0:00:01.507)       0:04:34.024 *******\nok: [instance]\n\nTASK [vexxhost.ceph.osd : Fail if any OSD not running] *************************\nMonday 16 February 2026  23:00:23 +0000 (0:00:00.038)       0:04:34.063 *******\nskipping: [instance] => (item=1)\nskipping: [instance] => (item=1)\nskipping: [instance] => (item=1)\nskipping: [instance]\n\nTASK [vexxhost.ceph.osd : Fail if any duplicate OSD ID] 
************************\nMonday 16 February 2026  23:00:23 +0000 (0:00:00.044)       0:04:34.108 *******\nskipping: [instance]\n\nPLAY [all] *********************************************************************\n\nTASK [Ensure RBD kernel module is loaded] **************************************\nMonday 16 February 2026  23:00:23 +0000 (0:00:00.040)       0:04:34.148 *******\nchanged: [instance]\n\nPLAY [all] *********************************************************************\n\nTASK [Gathering Facts] *********************************************************\nMonday 16 February 2026  23:00:24 +0000 (0:00:00.368)       0:04:34.517 *******\nok: [instance]\n\nTASK [vexxhost.atmosphere.sysctl : Configure sysctl values] ********************\nMonday 16 February 2026  23:00:25 +0000 (0:00:00.881)       0:04:35.398 *******\nchanged: [instance] => (item={'name': 'fs.aio-max-nr', 'value': 1048576})\nchanged: [instance] => (item={'name': 'net.ipv4.tcp_timestamps', 'value': 0})\nchanged: [instance] => (item={'name': 'net.ipv4.tcp_sack', 'value': 1})\nchanged: [instance] => (item={'name': 'net.core.netdev_budget', 'value': 1000})\nchanged: [instance] => (item={'name': 'net.core.netdev_max_backlog', 'value': 250000})\nchanged: [instance] => (item={'name': 'net.core.rmem_max', 'value': 4194304})\nchanged: [instance] => (item={'name': 'net.core.wmem_max', 'value': 4194304})\nchanged: [instance] => (item={'name': 'net.core.rmem_default', 'value': 4194304})\nchanged: [instance] => (item={'name': 'net.core.wmem_default', 'value': 4194304})\nchanged: [instance] => (item={'name': 'net.core.optmem_max', 'value': 4194304})\nchanged: [instance] => (item={'name': 'net.ipv4.tcp_rmem', 'value': '4096 87380 4194304'})\nchanged: [instance] => (item={'name': 'net.ipv4.tcp_wmem', 'value': '4096 65536 4194304'})\nchanged: [instance] => (item={'name': 'net.ipv4.tcp_low_latency', 'value': 1})\nchanged: [instance] => (item={'name': 'net.ipv4.tcp_adv_win_scale', 'value': 1})\nchanged: [instance] => 
(item={'name': 'net.ipv4.neigh.default.gc_thresh1', 'value': 128})\nchanged: [instance] => (item={'name': 'net.ipv4.neigh.default.gc_thresh2', 'value': 28872})\nchanged: [instance] => (item={'name': 'net.ipv4.neigh.default.gc_thresh3', 'value': 32768})\nchanged: [instance] => (item={'name': 'net.ipv6.neigh.default.gc_thresh1', 'value': 128})\nchanged: [instance] => (item={'name': 'net.ipv6.neigh.default.gc_thresh2', 'value': 28872})\nchanged: [instance] => (item={'name': 'net.ipv6.neigh.default.gc_thresh3', 'value': 32768})\n\nTASK [vexxhost.atmosphere.ethtool : Create folder for persistent configuration] ***\nMonday 16 February 2026  23:00:28 +0000 (0:00:03.614)       0:04:39.013 *******\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.ethtool : Install persistent \"ethtool\" tuning] *******\nMonday 16 February 2026  23:00:29 +0000 (0:00:00.180)       0:04:39.193 *******\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.ethtool : Run \"ethtool\" tuning] **********************\nMonday 16 February 2026  23:00:29 +0000 (0:00:00.413)       0:04:39.607 *******\nok: [instance]\n\nTASK [Set a fact with the \"atmosphere_images\" for other plays] *****************\nMonday 16 February 2026  23:00:29 +0000 (0:00:00.212)       0:04:39.819 *******\nok: [instance]\n\nPLAY [Configure Kubernetes VIP] ************************************************\n\nTASK [Gathering Facts] *********************************************************\nMonday 16 February 2026  23:00:29 +0000 (0:00:00.044)       0:04:39.864 *******\nok: [instance]\n\nTASK [vexxhost.containers.directory : Create directory (/etc/kubernetes/manifests)] ***\nMonday 16 February 2026  23:00:30 +0000 (0:00:00.831)       0:04:40.696 *******\nchanged: [instance]\n\nTASK [vexxhost.kubernetes.kube_vip : Uninstall legacy HA stack] ****************\nMonday 16 February 2026  23:00:30 +0000 (0:00:00.185)       0:04:40.881 *******\nok: [instance] => (item=/etc/keepalived/keepalived.conf)\nok: [instance] => 
(item=/etc/keepalived/check_apiserver.sh)\nok: [instance] => (item=/etc/kubernetes/manifests/keepalived.yaml)\nok: [instance] => (item=/etc/haproxy/haproxy.cfg)\nok: [instance] => (item=/etc/kubernetes/manifests/haproxy.yaml)\n\nTASK [vexxhost.kubernetes.kube_vip : Switch API server to run on port 6443] ****\nMonday 16 February 2026  23:00:31 +0000 (0:00:00.859)       0:04:41.741 *******\nfailed: [instance] (item=/etc/kubernetes/manifests/kube-apiserver.yaml) => {\"ansible_loop_var\": \"item\", \"changed\": false, \"item\": \"/etc/kubernetes/manifests/kube-apiserver.yaml\", \"msg\": \"Path /etc/kubernetes/manifests/kube-apiserver.yaml does not exist !\", \"rc\": 257}\nfailed: [instance] (item=/etc/kubernetes/controller-manager.conf) => {\"ansible_loop_var\": \"item\", \"changed\": false, \"item\": \"/etc/kubernetes/controller-manager.conf\", \"msg\": \"Path /etc/kubernetes/controller-manager.conf does not exist !\", \"rc\": 257}\nfailed: [instance] (item=/etc/kubernetes/scheduler.conf) => {\"ansible_loop_var\": \"item\", \"changed\": false, \"item\": \"/etc/kubernetes/scheduler.conf\", \"msg\": \"Path /etc/kubernetes/scheduler.conf does not exist !\", \"rc\": 257}\n...ignoring\n\nTASK [vexxhost.kubernetes.kube_vip : Check if super-admin.conf exists] *********\nMonday 16 February 2026  23:00:32 +0000 (0:00:00.478)       0:04:42.220 *******\nok: [instance]\n\nTASK [vexxhost.kubernetes.kube_vip : Check if kubeadm has already run] *********\nMonday 16 February 2026  23:00:32 +0000 (0:00:00.164)       0:04:42.384 *******\nok: [instance]\n\nTASK [vexxhost.kubernetes.kube_vip : Set fact with KUBECONFIG path] ************\nMonday 16 February 2026  23:00:32 +0000 (0:00:00.197)       0:04:42.582 *******\nok: [instance]\n\nTASK [vexxhost.kubernetes.kube_vip : Set fact with KUBECONFIG path (with super-admin.conf)] ***\nMonday 16 February 2026  23:00:32 +0000 (0:00:00.029)       0:04:42.612 *******\nok: [instance]\n\nTASK [vexxhost.kubernetes.kube_vip : Upload Kubernetes 
manifest] ***************\nMonday 16 February 2026  23:00:32 +0000 (0:00:00.035)       0:04:42.647 *******\nok: [instance]\n\nTASK [vexxhost.kubernetes.kube_vip : Ensure kube-vip configuration file] *******\nMonday 16 February 2026  23:00:32 +0000 (0:00:00.516)       0:04:43.163 *******\nok: [instance]\n\nTASK [vexxhost.kubernetes.kube_vip : Flush handlers] ***************************\nMonday 16 February 2026  23:00:33 +0000 (0:00:00.188)       0:04:43.352 *******\n\nPLAY [Install Kubernetes] ******************************************************\n\nTASK [Gathering Facts] *********************************************************\nMonday 16 February 2026  23:00:33 +0000 (0:00:00.095)       0:04:43.447 *******\nok: [instance]\n\nTASK [vexxhost.containers.forget_package : Forget package] *********************\nMonday 16 February 2026  23:00:34 +0000 (0:00:00.822)       0:04:44.269 *******\nok: [instance]\n\nTASK [vexxhost.containers.package : Update state for tar] **********************\nMonday 16 February 2026  23:00:34 +0000 (0:00:00.205)       0:04:44.475 *******\nskipping: [instance]\n\nTASK [vexxhost.containers.directory : Create directory (/var/lib/downloads)] ***\nMonday 16 February 2026  23:00:34 +0000 (0:00:00.040)       0:04:44.515 *******\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Starting download of file] *******\nMonday 16 February 2026  23:00:34 +0000 (0:00:00.199)       0:04:44.715 *******\nok: [instance] => {\n    \"msg\": \"https://github.com/opencontainers/runc/releases/download/v1.4.0/runc.amd64\"\n}\n\nTASK [vexxhost.containers.download_artifact : Download item] *******************\nMonday 16 February 2026  23:00:34 +0000 (0:00:00.048)       0:04:44.763 *******\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Extract archive] *****************\nMonday 16 February 2026  23:00:34 +0000 (0:00:00.291)       0:04:45.055 *******\nskipping: [instance]\n\nTASK [vexxhost.containers.forget_package : Forget package] 
*********************\nMonday 16 February 2026  23:00:34 +0000 (0:00:00.035)       0:04:45.091 *******\nok: [instance]\n\nTASK [vexxhost.containers.package : Update state for tar] **********************\nMonday 16 February 2026  23:00:35 +0000 (0:00:00.195)       0:04:45.286 *******\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Starting download of file] *******\nMonday 16 February 2026  23:00:36 +0000 (0:00:01.037)       0:04:46.324 *******\nok: [instance] => {\n    \"msg\": \"https://github.com/containerd/containerd/releases/download/v2.2.0/containerd-2.2.0-linux-amd64.tar.gz\"\n}\n\nTASK [vexxhost.containers.download_artifact : Download item] *******************\nMonday 16 February 2026  23:00:36 +0000 (0:00:00.061)       0:04:46.385 *******\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Extract archive] *****************\nMonday 16 February 2026  23:00:36 +0000 (0:00:00.320)       0:04:46.705 *******\nok: [instance]\n\nTASK [vexxhost.containers.containerd : Install SELinux packages] ***************\nMonday 16 February 2026  23:00:38 +0000 (0:00:01.904)       0:04:48.610 *******\nskipping: [instance]\n\nTASK [vexxhost.containers.containerd : Set SELinux to permissive at runtime] ***\nMonday 16 February 2026  23:00:38 +0000 (0:00:00.032)       0:04:48.642 *******\nskipping: [instance]\n\nTASK [vexxhost.containers.containerd : Persist SELinux permissive mode] ********\nMonday 16 February 2026  23:00:38 +0000 (0:00:00.030)       0:04:48.672 *******\nskipping: [instance]\n\nTASK [vexxhost.containers.containerd : Install AppArmor packages] **************\nMonday 16 February 2026  23:00:38 +0000 (0:00:00.026)       0:04:48.699 *******\nok: [instance]\n\nTASK [vexxhost.containers.containerd : Create systemd service file for containerd] ***\nMonday 16 February 2026  23:00:39 +0000 (0:00:00.973)       0:04:49.672 *******\nok: [instance]\n\nTASK [vexxhost.containers.containerd : Create folders for configuration] *******\nMonday 16 
February 2026  23:00:39 +0000 (0:00:00.403)       0:04:50.076 *******\nok: [instance] => (item={'path': '/etc/containerd'})\nok: [instance] => (item={'path': '/var/lib/containerd', 'mode': '0o700'})\nok: [instance] => (item={'path': '/run/containerd', 'mode': '0o711'})\nok: [instance] => (item={'path': '/run/containerd/io.containerd.grpc.v1.cri', 'mode': '0o700'})\nok: [instance] => (item={'path': '/run/containerd/io.containerd.sandbox.controller.v1.shim', 'mode': '0o700'})\n\nTASK [vexxhost.containers.containerd : Create containerd config file] **********\nMonday 16 February 2026  23:00:40 +0000 (0:00:00.886)       0:04:50.962 *******\nok: [instance]\n\nTASK [vexxhost.containers.containerd : Force any restarts if necessary] ********\nMonday 16 February 2026  23:00:41 +0000 (0:00:00.443)       0:04:51.406 *******\n\nTASK [vexxhost.containers.containerd : Enable and start service] ***************\nMonday 16 February 2026  23:00:41 +0000 (0:00:00.006)       0:04:51.413 *******\nok: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes_upgrade_check : Retrieve the \"kubeadm-config\" ConfigMap] ***\nMonday 16 February 2026  23:00:41 +0000 (0:00:00.332)       0:04:51.745 *******\nfatal: [instance]: FAILED! => {\"changed\": false, \"msg\": \"Failed to import the required Python library (kubernetes) on instance's Python /usr/bin/python3.10. Please read the module documentation and install it in the appropriate location. 
If the required library is installed, but Ansible is using the wrong Python interpreter, please consult the documentation on ansible_python_interpreter\"}\n...ignoring\n\nTASK [vexxhost.kubernetes.kubernetes_upgrade_check : Parse the ClusterConfiguration] ***\nMonday 16 February 2026  23:00:42 +0000 (0:00:00.532)       0:04:52.278 *******\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes_upgrade_check : Retrieve the current Kubernetes version] ***\nMonday 16 February 2026  23:00:42 +0000 (0:00:00.032)       0:04:52.310 *******\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes_upgrade_check : Extract major, minor, and patch versions] ***\nMonday 16 February 2026  23:00:42 +0000 (0:00:00.036)       0:04:52.346 *******\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes_upgrade_check : Fail if we're jumping more than one minor version] ***\nMonday 16 February 2026  23:00:42 +0000 (0:00:00.033)       0:04:52.380 *******\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes_upgrade_check : Set fact if we need to upgrade] ***\nMonday 16 February 2026  23:00:42 +0000 (0:00:00.034)       0:04:52.415 *******\nskipping: [instance]\n\nTASK [vexxhost.containers.forget_package : Forget package] *********************\nMonday 16 February 2026  23:00:42 +0000 (0:00:00.045)       0:04:52.460 *******\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Starting download of file] *******\nMonday 16 February 2026  23:00:42 +0000 (0:00:00.192)       0:04:52.652 *******\nok: [instance] => {\n    \"msg\": \"https://dl.k8s.io/release/v1.28.13/bin/linux/amd64/kubeadm\"\n}\n\nTASK [vexxhost.containers.download_artifact : Download item] *******************\nMonday 16 February 2026  23:00:42 +0000 (0:00:00.035)       0:04:52.688 *******\nchanged: [instance]\n\nTASK [vexxhost.containers.download_artifact : Extract archive] *****************\nMonday 16 February 2026  23:00:43 +0000 (0:00:00.842)       0:04:53.531 *******\nskipping: 
[instance]\n\nTASK [vexxhost.containers.forget_package : Forget package] *********************\nMonday 16 February 2026  23:00:43 +0000 (0:00:00.041)       0:04:53.572 *******\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Starting download of file] *******\nMonday 16 February 2026  23:00:43 +0000 (0:00:00.189)       0:04:53.762 *******\nok: [instance] => {\n    \"msg\": \"https://dl.k8s.io/release/v1.28.13/bin/linux/amd64/kubectl\"\n}\n\nTASK [vexxhost.containers.download_artifact : Download item] *******************\nMonday 16 February 2026  23:00:43 +0000 (0:00:00.035)       0:04:53.798 *******\nchanged: [instance]\n\nTASK [vexxhost.containers.download_artifact : Extract archive] *****************\nMonday 16 February 2026  23:00:44 +0000 (0:00:00.807)       0:04:54.605 *******\nskipping: [instance]\n\nTASK [vexxhost.containers.containerd : Install SELinux packages] ***************\nMonday 16 February 2026  23:00:44 +0000 (0:00:00.053)       0:04:54.659 *******\nskipping: [instance]\n\nTASK [vexxhost.containers.containerd : Set SELinux to permissive at runtime] ***\nMonday 16 February 2026  23:00:44 +0000 (0:00:00.028)       0:04:54.687 *******\nskipping: [instance]\n\nTASK [vexxhost.containers.containerd : Persist SELinux permissive mode] ********\nMonday 16 February 2026  23:00:44 +0000 (0:00:00.035)       0:04:54.722 *******\nskipping: [instance]\n\nTASK [vexxhost.containers.containerd : Install AppArmor packages] **************\nMonday 16 February 2026  23:00:44 +0000 (0:00:00.031)       0:04:54.754 *******\nok: [instance]\n\nTASK [vexxhost.containers.containerd : Create systemd service file for containerd] ***\nMonday 16 February 2026  23:00:45 +0000 (0:00:00.934)       0:04:55.689 *******\nok: [instance]\n\nTASK [vexxhost.containers.containerd : Create folders for configuration] *******\nMonday 16 February 2026  23:00:45 +0000 (0:00:00.390)       0:04:56.079 *******\nok: [instance] => (item={'path': '/etc/containerd'})\nok: [instance] => 
(item={'path': '/var/lib/containerd', 'mode': '0o700'})\nok: [instance] => (item={'path': '/run/containerd', 'mode': '0o711'})\nok: [instance] => (item={'path': '/run/containerd/io.containerd.grpc.v1.cri', 'mode': '0o700'})\nok: [instance] => (item={'path': '/run/containerd/io.containerd.sandbox.controller.v1.shim', 'mode': '0o700'})\n\nTASK [vexxhost.containers.containerd : Create containerd config file] **********\nMonday 16 February 2026  23:00:46 +0000 (0:00:00.854)       0:04:56.934 *******\nok: [instance]\n\nTASK [vexxhost.containers.containerd : Force any restarts if necessary] ********\nMonday 16 February 2026  23:00:47 +0000 (0:00:00.450)       0:04:57.385 *******\n\nTASK [vexxhost.containers.containerd : Enable and start service] ***************\nMonday 16 February 2026  23:00:47 +0000 (0:00:00.008)       0:04:57.393 *******\nok: [instance]\n\nTASK [vexxhost.containers.forget_package : Forget package] *********************\nMonday 16 February 2026  23:00:47 +0000 (0:00:00.341)       0:04:57.734 *******\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Starting download of file] *******\nMonday 16 February 2026  23:00:47 +0000 (0:00:00.197)       0:04:57.932 *******\nok: [instance] => {\n    \"msg\": \"https://github.com/kubernetes-sigs/cri-tools/releases/download/v1.34.0/crictl-v1.34.0-linux-amd64.tar.gz\"\n}\n\nTASK [vexxhost.containers.download_artifact : Download item] *******************\nMonday 16 February 2026  23:00:47 +0000 (0:00:00.043)       0:04:57.976 *******\nchanged: [instance]\n\nTASK [vexxhost.containers.download_artifact : Extract archive] *****************\nMonday 16 February 2026  23:00:48 +0000 (0:00:00.602)       0:04:58.579 *******\nchanged: [instance]\n\nTASK [vexxhost.containers.download_artifact : Starting download of file] *******\nMonday 16 February 2026  23:00:49 +0000 (0:00:01.333)       0:04:59.912 *******\nok: [instance] => {\n    \"msg\": 
\"https://github.com/kubernetes-sigs/cri-tools/releases/download/v1.34.0/critest-v1.34.0-linux-amd64.tar.gz\"\n}\n\nTASK [vexxhost.containers.download_artifact : Download item] *******************\nMonday 16 February 2026  23:00:49 +0000 (0:00:00.048)       0:04:59.961 *******\nchanged: [instance]\n\nTASK [vexxhost.containers.download_artifact : Extract archive] *****************\nMonday 16 February 2026  23:00:50 +0000 (0:00:00.589)       0:05:00.550 *******\nchanged: [instance]\n\nTASK [vexxhost.containers.cri_tools : Create crictl config] ********************\nMonday 16 February 2026  23:00:51 +0000 (0:00:01.349)       0:05:01.900 *******\nchanged: [instance]\n\nTASK [vexxhost.containers.directory : Create directory (/opt/cni/bin)] *********\nMonday 16 February 2026  23:00:52 +0000 (0:00:00.412)       0:05:02.312 *******\nchanged: [instance]\n\nTASK [vexxhost.containers.forget_package : Forget package] *********************\nMonday 16 February 2026  23:00:52 +0000 (0:00:00.188)       0:05:02.501 *******\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Starting download of file] *******\nMonday 16 February 2026  23:00:52 +0000 (0:00:00.194)       0:05:02.695 *******\nok: [instance] => {\n    \"msg\": \"https://github.com/containernetworking/plugins/releases/download/v1.8.0/cni-plugins-linux-amd64-v1.8.0.tgz\"\n}\n\nTASK [vexxhost.containers.download_artifact : Download item] *******************\nMonday 16 February 2026  23:00:52 +0000 (0:00:00.054)       0:05:02.750 *******\nchanged: [instance]\n\nTASK [vexxhost.containers.download_artifact : Extract archive] *****************\nMonday 16 February 2026  23:00:53 +0000 (0:00:00.814)       0:05:03.564 *******\nchanged: [instance]\n\nTASK [vexxhost.containers.cni_plugins : Gather variables for each operating system] ***\nMonday 16 February 2026  23:00:56 +0000 (0:00:02.633)       0:05:06.198 *******\nok: [instance] => 
(item=/home/zuul/.ansible/collections/ansible_collections/vexxhost/containers/roles/cni_plugins/vars/debian.yml)\n\nTASK [vexxhost.containers.cni_plugins : Install additional packages] ***********\nMonday 16 February 2026  23:00:56 +0000 (0:00:00.045)       0:05:06.243 *******\nchanged: [instance]\n\nTASK [vexxhost.containers.cni_plugins : Ensure IPv6 is enabled] ****************\nMonday 16 February 2026  23:00:58 +0000 (0:00:02.873)       0:05:09.117 *******\nchanged: [instance]\n\nTASK [vexxhost.containers.cni_plugins : Enable kernel modules on-boot] *********\nMonday 16 February 2026  23:00:59 +0000 (0:00:00.197)       0:05:09.314 *******\nchanged: [instance]\n\nTASK [vexxhost.containers.cni_plugins : Enable kernel modules in runtime] ******\nMonday 16 February 2026  23:00:59 +0000 (0:00:00.415)       0:05:09.729 *******\nchanged: [instance] => (item=br_netfilter)\nok: [instance] => (item=ip_tables)\nchanged: [instance] => (item=ip6_tables)\nchanged: [instance] => (item=nf_conntrack)\n\nTASK [vexxhost.containers.forget_package : Forget package] *********************\nMonday 16 February 2026  23:01:00 +0000 (0:00:00.778)       0:05:10.508 *******\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Starting download of file] *******\nMonday 16 February 2026  23:01:00 +0000 (0:00:00.188)       0:05:10.697 *******\nok: [instance] => {\n    \"msg\": \"https://dl.k8s.io/release/v1.28.13/bin/linux/amd64/kubelet\"\n}\n\nTASK [vexxhost.containers.download_artifact : Download item] *******************\nMonday 16 February 2026  23:01:00 +0000 (0:00:00.041)       0:05:10.738 *******\nchanged: [instance]\n\nTASK [vexxhost.containers.download_artifact : Extract archive] *****************\nMonday 16 February 2026  23:01:02 +0000 (0:00:01.560)       0:05:12.299 *******\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.kubelet : Gather variables for each operating system] ***\nMonday 16 February 2026  23:01:02 +0000 (0:00:00.038)       0:05:12.337 *******\nok: 
[instance] => (item=/home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/kubelet/vars/debian.yml)\n\nTASK [vexxhost.kubernetes.kubelet : Install coreutils] *************************\nMonday 16 February 2026  23:01:02 +0000 (0:00:00.051)       0:05:12.389 *******\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.kubelet : Install additional packages] ***************\nMonday 16 February 2026  23:01:02 +0000 (0:00:00.031)       0:05:12.420 *******\nchanged: [instance]\n\nTASK [vexxhost.kubernetes.kubelet : Configure sysctl values] *******************\nMonday 16 February 2026  23:01:05 +0000 (0:00:03.543)       0:05:15.964 *******\nchanged: [instance] => (item={'name': 'net.ipv4.ip_forward', 'value': 1})\nchanged: [instance] => (item={'name': 'net.bridge.bridge-nf-call-iptables', 'value': 1})\nchanged: [instance] => (item={'name': 'net.bridge.bridge-nf-call-ip6tables', 'value': 1})\nchanged: [instance] => (item={'name': 'net.ipv4.conf.all.rp_filter', 'value': 0})\nchanged: [instance] => (item={'name': 'fs.inotify.max_queued_events', 'value': 1048576})\nchanged: [instance] => (item={'name': 'fs.inotify.max_user_instances', 'value': 8192})\nchanged: [instance] => (item={'name': 'fs.inotify.max_user_watches', 'value': 1048576})\n\nTASK [vexxhost.kubernetes.kubelet : Create folders for kubernetes configuration] ***\nMonday 16 February 2026  23:01:06 +0000 (0:00:01.190)       0:05:17.154 *******\nchanged: [instance] => (item=/etc/systemd/system/kubelet.service.d)\nok: [instance] => (item=/etc/kubernetes)\nok: [instance] => (item=/etc/kubernetes/manifests)\n\nTASK [vexxhost.kubernetes.kubelet : Add kubelet systemd service config] ********\nMonday 16 February 2026  23:01:07 +0000 (0:00:00.523)       0:05:17.678 *******\nchanged: [instance]\n\nTASK [vexxhost.kubernetes.kubelet : Add kubeadm dropin for kubelet systemd service config] ***\nMonday 16 February 2026  23:01:07 +0000 (0:00:00.401)       0:05:18.080 *******\nchanged: [instance]\n\nTASK 
[vexxhost.kubernetes.kubelet : Check swap status] *************************\nMonday 16 February 2026  23:01:08 +0000 (0:00:00.400)       0:05:18.480 *******\nok: [instance]\n\nTASK [vexxhost.kubernetes.kubelet : Disable swap] ******************************\nMonday 16 February 2026  23:01:08 +0000 (0:00:00.197)       0:05:18.678 *******\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.kubelet : Remove swapfile from /etc/fstab] ***********\nMonday 16 February 2026  23:01:08 +0000 (0:00:00.031)       0:05:18.710 *******\nok: [instance] => (item=swap)\nok: [instance] => (item=none)\n\nTASK [vexxhost.kubernetes.kubelet : Create noswap systemd service config file] ***\nMonday 16 February 2026  23:01:08 +0000 (0:00:00.455)       0:05:19.165 *******\nchanged: [instance]\n\nTASK [vexxhost.kubernetes.kubelet : Enable noswap service] *********************\nMonday 16 February 2026  23:01:09 +0000 (0:00:00.399)       0:05:19.565 *******\nchanged: [instance]\n\nTASK [vexxhost.kubernetes.kubelet : Force any restarts if necessary] ***********\nMonday 16 February 2026  23:01:09 +0000 (0:00:00.586)       0:05:20.152 *******\n\nRUNNING HANDLER [vexxhost.kubernetes.kubelet : Reload systemd] *****************\nMonday 16 February 2026  23:01:09 +0000 (0:00:00.008)       0:05:20.160 *******\nok: [instance]\n\nTASK [vexxhost.kubernetes.kubelet : Enable and start kubelet service] **********\nMonday 16 February 2026  23:01:10 +0000 (0:00:00.788)       0:05:20.948 *******\nchanged: [instance]\n\nTASK [vexxhost.kubernetes.kubelet : Ensure availability of dbus on Debian] *****\nMonday 16 February 2026  23:01:11 +0000 (0:00:00.619)       0:05:21.568 *******\nok: [instance]\n\nTASK [vexxhost.kubernetes.kubelet : Configure short hostname] ******************\nMonday 16 February 2026  23:01:12 +0000 (0:00:00.997)       0:05:22.565 *******\nok: [instance]\n\nTASK [vexxhost.kubernetes.kubelet : Ensure hostname inside hosts file] *********\nMonday 16 February 2026  23:01:13 +0000 (0:00:00.737)       
0:05:23.303 *******\nok: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Remove kubernetes repository] ***********\nMonday 16 February 2026  23:01:13 +0000 (0:00:00.187)       0:05:23.491 *******\nok: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Setup control plane] ********************\nMonday 16 February 2026  23:01:13 +0000 (0:00:00.443)       0:05:23.934 *******\nincluded: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/kubernetes/tasks/control-plane.yml for instance\n\nTASK [vexxhost.kubernetes.kubernetes : Bootstrap cluster] **********************\nMonday 16 February 2026  23:01:13 +0000 (0:00:00.071)       0:05:24.006 *******\nincluded: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/kubernetes/tasks/bootstrap-cluster.yml for instance\n\nTASK [vexxhost.kubernetes.kubernetes : Check if any control plane is bootstrapped] ***\nMonday 16 February 2026  23:01:13 +0000 (0:00:00.074)       0:05:24.081 *******\nok: [instance] => (item=instance)\n\nTASK [vexxhost.kubernetes.kubernetes : Pick node from pre-existing cluster] ****\nMonday 16 February 2026  23:01:14 +0000 (0:00:00.187)       0:05:24.268 *******\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Select first node to initialize cluster] ***\nMonday 16 February 2026  23:01:14 +0000 (0:00:00.041)       0:05:24.309 *******\nok: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Print selected bootstrap node] **********\nMonday 16 February 2026  23:01:14 +0000 (0:00:00.049)       0:05:24.359 *******\nok: [instance] => {\n    \"msg\": \"instance\"\n}\n\nTASK [vexxhost.kubernetes.kubernetes : Upload cluster configuration for bootstrap node] ***\nMonday 16 February 2026  23:01:14 +0000 (0:00:00.038)       0:05:24.397 *******\nchanged: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Create pki folder] **********************\nMonday 16 February 2026  23:01:14 +0000 (0:00:00.488)       0:05:24.885 *******\nskipping: 
[instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Create kubernetes ca key] ***************\nMonday 16 February 2026  23:01:14 +0000 (0:00:00.029)       0:05:24.915 *******\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Create kubernetes ca cert] **************\nMonday 16 February 2026  23:01:14 +0000 (0:00:00.029)       0:05:24.944 *******\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Create etcd-ca key] *********************\nMonday 16 February 2026  23:01:14 +0000 (0:00:00.033)       0:05:24.978 *******\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Create etcd-ca cert] ********************\nMonday 16 February 2026  23:01:14 +0000 (0:00:00.030)       0:05:25.008 *******\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Create front-proxy-ca key] **************\nMonday 16 February 2026  23:01:14 +0000 (0:00:00.031)       0:05:25.040 *******\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Create front-proxy-ca cert] *************\nMonday 16 February 2026  23:01:14 +0000 (0:00:00.030)       0:05:25.070 *******\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Initialize cluster] *********************\nMonday 16 February 2026  23:01:14 +0000 (0:00:00.031)       0:05:25.101 *******\nchanged: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Check if the node is already part of the cluster] ***\nMonday 16 February 2026  23:01:40 +0000 (0:00:25.460)       0:05:50.562 *******\nok: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Join cluster] ***************************\nMonday 16 February 2026  23:01:40 +0000 (0:00:00.217)       0:05:50.779 *******\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Create folder for admin configuration] ***\nMonday 16 February 2026  23:01:40 +0000 (0:00:00.033)       0:05:50.812 *******\nchanged: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Creating a symlink for admin configuration file] ***\nMonday 16 
February 2026  23:01:40 +0000 (0:00:00.192)       0:05:51.005 *******\nchanged: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Add bash autocomplete for kubectl] ******\nMonday 16 February 2026  23:01:41 +0000 (0:00:00.204)       0:05:51.210 *******\nchanged: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Install PIP] ****************************\nMonday 16 February 2026  23:01:41 +0000 (0:00:00.228)       0:05:51.438 *******\nchanged: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Install Kubernetes Python package using pip on supported systems] ***\nMonday 16 February 2026  23:01:45 +0000 (0:00:04.019)       0:05:55.458 *******\nchanged: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Enable EPEL repository] *****************\nMonday 16 February 2026  23:01:48 +0000 (0:00:03.229)       0:05:58.688 *******\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Install Kubernetes Python package using package manager on supported systems] ***\nMonday 16 February 2026  23:01:48 +0000 (0:00:00.035)       0:05:58.723 *******\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Allow workload on control plane node] ***\nMonday 16 February 2026  23:01:48 +0000 (0:00:00.037)       0:05:58.761 *******\nchanged: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Remove kube-proxy resources] ************\nMonday 16 February 2026  23:01:49 +0000 (0:00:00.726)       0:05:59.487 *******\nskipping: [instance] => (item=DaemonSet)\nskipping: [instance] => (item=ConfigMap)\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Start an upgrade] ***********************\nMonday 16 February 2026  23:01:49 +0000 (0:00:00.036)       0:05:59.523 *******\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Check if the Kubernetes API services is up to date] ***\nMonday 16 February 2026  23:01:49 +0000 (0:00:00.026)       0:05:59.550 *******\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Trigger an upgrade of 
the Kubernetes API services] ***\nMonday 16 February 2026  23:01:49 +0000 (0:00:00.037)       0:05:59.588 *******\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Setup nodes] ****************************\nMonday 16 February 2026  23:01:49 +0000 (0:00:00.029)       0:05:59.617 *******\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Upgrade & restart Kubelet node for upgrade (if needed)] ***\nMonday 16 February 2026  23:01:49 +0000 (0:00:00.027)       0:05:59.644 *******\nok: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Set node selector for CoreDNS components] ***\nMonday 16 February 2026  23:01:49 +0000 (0:00:00.383)       0:06:00.028 *******\nchanged: [instance]\n\nPLAY [Install control-plane components] ****************************************\n\nTASK [Gathering Facts] *********************************************************\nMonday 16 February 2026  23:01:50 +0000 (0:00:01.010)       0:06:01.039 *******\nok: [instance]\n\nTASK [vexxhost.containers.forget_package : Forget package] *********************\nMonday 16 February 2026  23:01:51 +0000 (0:00:00.884)       0:06:01.923 *******\nok: [instance]\n\nTASK [vexxhost.containers.package : Update state for tar] **********************\nMonday 16 February 2026  23:01:51 +0000 (0:00:00.197)       0:06:02.120 *******\nok: [instance]\n\nTASK [vexxhost.containers.directory : Create directory (/var/lib/downloads)] ***\nMonday 16 February 2026  23:01:52 +0000 (0:00:01.051)       0:06:03.172 *******\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Starting download of file] *******\nMonday 16 February 2026  23:01:53 +0000 (0:00:00.216)       0:06:03.389 *******\nok: [instance] => {\n    \"msg\": \"https://get.helm.sh/helm-v3.11.2-linux-amd64.tar.gz\"\n}\n\nTASK [vexxhost.containers.download_artifact : Download item] *******************\nMonday 16 February 2026  23:01:53 +0000 (0:00:00.046)       0:06:03.435 *******\nchanged: [instance]\n\nTASK 
[vexxhost.containers.download_artifact : Extract archive] *****************\nMonday 16 February 2026  23:01:53 +0000 (0:00:00.539)       0:06:03.974 *******\nchanged: [instance]\n\nTASK [vexxhost.kubernetes.helm : Remove Helm repository] ***********************\nMonday 16 February 2026  23:01:55 +0000 (0:00:01.493)       0:06:05.467 *******\nok: [instance]\n\nTASK [vexxhost.kubernetes.helm : Add bash autocomplete for helm] ***************\nMonday 16 February 2026  23:01:55 +0000 (0:00:00.363)       0:06:05.831 *******\nchanged: [instance]\n\nTASK [vexxhost.kubernetes.helm : Get Helm plugins dir] *************************\nMonday 16 February 2026  23:01:55 +0000 (0:00:00.171)       0:06:06.003 *******\nok: [instance]\n\nTASK [vexxhost.kubernetes.helm : Create Helm plugins directory if it does not exist] ***\nMonday 16 February 2026  23:01:56 +0000 (0:00:00.232)       0:06:06.236 *******\nchanged: [instance]\n\nTASK [vexxhost.kubernetes.helm : Remove Helm diff plugin installed by kubernetes.core.helm_plugin] ***\nMonday 16 February 2026  23:01:56 +0000 (0:00:00.195)       0:06:06.431 *******\nok: [instance]\n\nTASK [Install plugin] **********************************************************\nMonday 16 February 2026  23:01:56 +0000 (0:00:00.192)       0:06:06.624 *******\nincluded: vexxhost.containers.download_artifact for instance\n\nTASK [vexxhost.containers.download_artifact : Starting download of file] *******\nMonday 16 February 2026  23:01:56 +0000 (0:00:00.045)       0:06:06.669 *******\nok: [instance] => {\n    \"msg\": \"https://github.com/databus23/helm-diff/releases/download/v3.8.1/helm-diff-linux-amd64.tgz\"\n}\n\nTASK [vexxhost.containers.download_artifact : Download item] *******************\nMonday 16 February 2026  23:01:56 +0000 (0:00:00.040)       0:06:06.710 *******\nchanged: [instance]\n\nTASK [vexxhost.containers.download_artifact : Extract archive] *****************\nMonday 16 February 2026  23:01:57 +0000 (0:00:00.684)       0:06:07.394 
*******\nchanged: [instance]\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***\nMonday 16 February 2026  23:01:59 +0000 (0:00:01.851)       0:06:09.246 *******\nincluded: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************\nMonday 16 February 2026  23:01:59 +0000 (0:00:00.036)       0:06:09.282 *******\nchanged: [instance]\n\nTASK [vexxhost.kubernetes.cilium : Get current Kubernetes version] *************\nMonday 16 February 2026  23:01:59 +0000 (0:00:00.558)       0:06:09.840 *******\nok: [instance]\n\nTASK [vexxhost.kubernetes.cilium : Deploy Helm chart] **************************\nMonday 16 February 2026  23:02:00 +0000 (0:00:00.750)       0:06:10.590 *******\nchanged: [instance]\n\nPLAY [all] *********************************************************************\n\nTASK [Gathering Facts] *********************************************************\nMonday 16 February 2026  23:02:02 +0000 (0:00:02.012)       0:06:12.603 *******\nok: [instance]\n\nTASK [vexxhost.atmosphere.kubernetes_node_labels : Add labels to node] *********\nMonday 16 February 2026  23:02:03 +0000 (0:00:00.893)       0:06:13.497 *******\nchanged: [instance]\n\nPLAY [all] *********************************************************************\n\nTASK [Uninstall unattended-upgrades] *******************************************\nMonday 16 February 2026  23:02:04 +0000 (0:00:00.915)       0:06:14.412 *******\nok: [instance]\n\nPLAY [controllers] *************************************************************\n\nTASK [Gathering Facts] *********************************************************\nMonday 16 February 2026  23:02:04 +0000 (0:00:00.760)       0:06:15.172 *******\nok: [instance]\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***\nMonday 16 February 2026  
23:02:06 +0000 (0:00:01.040)       0:06:16.213 *******\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.local_path_provisioner : Deploy Helm chart] **********\nMonday 16 February 2026  23:02:06 +0000 (0:00:00.026)       0:06:16.240 *******\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***\nMonday 16 February 2026  23:02:06 +0000 (0:00:00.030)       0:06:16.270 *******\nincluded: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************\nMonday 16 February 2026  23:02:06 +0000 (0:00:00.033)       0:06:16.304 *******\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.ceph_csi_rbd : Collect \"ceph mon dump\" output from a monitor] ***\nMonday 16 February 2026  23:02:06 +0000 (0:00:00.426)       0:06:16.731 *******\nok: [instance]\n\nTASK [vexxhost.atmosphere.ceph_csi_rbd : Generate fact with list of Ceph monitors] ***\nMonday 16 February 2026  23:02:21 +0000 (0:00:14.752)       0:06:31.484 *******\nok: [instance]\n\nTASK [vexxhost.atmosphere.ceph_csi_rbd : Create Ceph pool] *********************\nMonday 16 February 2026  23:02:21 +0000 (0:00:00.074)       0:06:31.559 *******\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.ceph_csi_rbd : Create user client.kube] **************\nMonday 16 February 2026  23:02:55 +0000 (0:00:34.410)       0:07:05.969 *******\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.ceph_csi_rbd : Retrieve keyring for client.kube] *****\nMonday 16 February 2026  23:02:58 +0000 (0:00:02.521)       0:07:08.490 *******\nok: [instance]\n\nTASK [vexxhost.atmosphere.ceph_csi_rbd : Store keyring inside fact] ************\nMonday 16 February 2026  23:02:59 +0000 (0:00:01.155)       0:07:09.646 *******\nok: [instance]\n\nTASK [vexxhost.atmosphere.ceph_csi_rbd : Deploy Helm chart] ********************\nMonday 16 February 2026  23:02:59 
+0000 (0:00:00.037)       0:07:09.684 *******\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.powerstore_csi : Clone PowerStore CSI from GitHub] ***\nMonday 16 February 2026  23:03:01 +0000 (0:00:01.785)       0:07:11.469 *******\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.powerstore_csi : Create Secret] **********************\nMonday 16 February 2026  23:03:01 +0000 (0:00:00.031)       0:07:11.501 *******\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.powerstore_csi : Create StorageClass] ****************\nMonday 16 February 2026  23:03:01 +0000 (0:00:00.036)       0:07:11.538 *******\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.powerstore_csi : Deploy Helm chart] ******************\nMonday 16 February 2026  23:03:01 +0000 (0:00:00.037)       0:07:11.575 *******\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.multipathd : Add backports PPA] **********************\nMonday 16 February 2026  23:03:01 +0000 (0:00:00.056)       0:07:11.632 *******\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.multipathd : Install the multipathd package] *********\nMonday 16 February 2026  23:03:01 +0000 (0:00:00.022)       0:07:11.655 *******\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.multipathd : Install the configuration file] *********\nMonday 16 February 2026  23:03:01 +0000 (0:00:00.024)       0:07:11.679 *******\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.portworx : Detect if InitiatorName is set] ***********\nMonday 16 February 2026  23:03:01 +0000 (0:00:00.024)       0:07:11.704 *******\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.portworx : Generate a new InitiatorName] *************\nMonday 16 February 2026  23:03:01 +0000 (0:00:00.023)       0:07:11.727 *******\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.portworx : Write the new InitiatorName] **************\nMonday 16 February 2026  23:03:01 +0000 (0:00:00.030)       0:07:11.758 *******\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.portworx : Create namespace] 
*************************\nMonday 16 February 2026  23:03:01 +0000 (0:00:00.025)       0:07:11.783 *******\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.portworx : Install Portworx] *************************\nMonday 16 February 2026  23:03:01 +0000 (0:00:00.030)       0:07:11.814 *******\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.portworx : Wait till the CRDs are created] ***********\nMonday 16 February 2026  23:03:01 +0000 (0:00:00.027)       0:07:11.841 *******\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.portworx : Create Portworx Storage Cluster] **********\nMonday 16 February 2026  23:03:01 +0000 (0:00:00.024)       0:07:11.865 *******\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.storpool_csi : Deploy CSI RBAC] **********************\nMonday 16 February 2026  23:03:01 +0000 (0:00:00.037)       0:07:11.903 *******\nskipping: [instance] => (item={'name': 'controllerplugin'})\nskipping: [instance] => (item={'name': 'nodeplugin'})\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.storpool_csi : Deploy CSI] ***************************\nMonday 16 February 2026  23:03:01 +0000 (0:00:00.038)       0:07:11.941 *******\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.storpool_csi : Create StorageClass] ******************\nMonday 16 February 2026  23:03:01 +0000 (0:00:00.029)       0:07:11.970 *******\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.ibm_block_csi_driver : Deploy CSI] *******************\nMonday 16 February 2026  23:03:01 +0000 (0:00:00.041)       0:07:12.012 *******\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.ibm_block_csi_driver : Create Secret] ****************\nMonday 16 February 2026  23:03:01 +0000 (0:00:00.029)       0:07:12.042 *******\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.ibm_block_csi_driver : Create StorageClass] **********\nMonday 16 February 2026  23:03:01 +0000 (0:00:00.030)       0:07:12.073 *******\nskipping: [instance]\n\nPLAY RECAP 
*********************************************************************\ninstance                   : ok=244  changed=76   unreachable=0    failed=0    skipped=83   rescued=0    ignored=2\n\nMonday 16 February 2026  23:03:01 +0000 (0:00:00.033)       0:07:12.106 *******\n===============================================================================\nvexxhost.ceph.mon : Run Bootstrap coomand ------------------------------ 84.77s\nvexxhost.ceph.osd : Install OSDs --------------------------------------- 80.82s\nvexxhost.atmosphere.ceph_csi_rbd : Create Ceph pool -------------------- 34.41s\nvexxhost.kubernetes.kubernetes : Initialize cluster -------------------- 25.46s\nvexxhost.atmosphere.ceph_csi_rbd : Collect \"ceph mon dump\" output from a monitor -- 14.75s\nvexxhost.ceph.mon : Validate monitor exist ------------------------------ 9.98s\nvexxhost.ceph.osd : Get `ceph-volume lvm list` status ------------------- 9.91s\nvexxhost.ceph.osd : Ensure all OSDs are non-legacy ---------------------- 5.29s\nvexxhost.ceph.osd : Get `cephadm ls` status ----------------------------- 5.27s\nvexxhost.ceph.cephadm : Install packages -------------------------------- 5.13s\nvexxhost.containers.containerd : Install AppArmor packages -------------- 4.64s\nvexxhost.containers.download_artifact : Extract archive ----------------- 4.27s\nvexxhost.kubernetes.kubernetes : Install PIP ---------------------------- 4.02s\nvexxhost.atmosphere.sysctl : Configure sysctl values -------------------- 3.61s\nvexxhost.kubernetes.kubelet : Install additional packages --------------- 3.54s\nvexxhost.kubernetes.kubernetes : Install Kubernetes Python package using pip on supported systems --- 3.23s\nvexxhost.containers.download_artifact : Extract archive ----------------- 3.01s\nvexxhost.containers.cni_plugins : Install additional packages ----------- 2.87s\nvexxhost.containers.download_artifact : Extract archive ----------------- 2.78s\nvexxhost.containers.download_artifact : Extract archive 
----------------- 2.63s\nINFO     [csi > converge] Executed: Successful\nINFO     [csi > idempotence] Executing\n\nPLAY [Debug CSI driver value] **************************************************\n\nTASK [Gathering Facts] *********************************************************\nMonday 16 February 2026  23:03:03 +0000 (0:00:00.044)       0:00:00.044 *******\n[WARNING]: Platform linux on host instance is using the discovered Python\ninterpreter at /usr/bin/python3.10, but future installation of another Python\ninterpreter could change the meaning of that path. See\nhttps://docs.ansible.com/ansible-\ncore/2.17/reference_appendices/interpreter_discovery.html for more information.\nok: [instance]\n\nTASK [Display CSI driver value and environment variable] ***********************\nMonday 16 February 2026  23:03:04 +0000 (0:00:01.264)       0:00:01.308 *******\nok: [instance] => {\n    \"msg\": \"csi_driver=rbd, MOLECULE_CSI_DRIVER=\"\n}\n\nPLAY [all] *********************************************************************\n\nTASK [Gathering Facts] *********************************************************\nMonday 16 February 2026  23:03:05 +0000 (0:00:00.048)       0:00:01.356 *******\nok: [instance]\n\nTASK [Set a fact with the \"atmosphere_images\" for other plays] *****************\nMonday 16 February 2026  23:03:06 +0000 (0:00:00.993)       0:00:02.350 *******\nok: [instance]\n\nPLAY [Deploy Ceph monitors & managers] *****************************************\n\nTASK [Gathering Facts] *********************************************************\nMonday 16 February 2026  23:03:06 +0000 (0:00:00.208)       0:00:02.559 *******\nok: [instance]\n\nTASK [vexxhost.containers.forget_package : Forget package] *********************\nMonday 16 February 2026  23:03:07 +0000 (0:00:00.974)       0:00:03.533 *******\nok: [instance]\n\nTASK [vexxhost.containers.package : Update state for tar] **********************\nMonday 16 February 2026  23:03:07 +0000 (0:00:00.318)       0:00:03.851 
*******\nskipping: [instance]\n\nTASK [vexxhost.containers.directory : Create directory (/var/lib/downloads)] ***\nMonday 16 February 2026  23:03:07 +0000 (0:00:00.042)       0:00:03.893 *******\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Starting download of file] *******\nMonday 16 February 2026  23:03:07 +0000 (0:00:00.302)       0:00:04.196 *******\nok: [instance] => {\n    \"msg\": \"https://github.com/opencontainers/runc/releases/download/v1.4.0/runc.amd64\"\n}\n\nTASK [vexxhost.containers.download_artifact : Download item] *******************\nMonday 16 February 2026  23:03:07 +0000 (0:00:00.063)       0:00:04.260 *******\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Extract archive] *****************\nMonday 16 February 2026  23:03:08 +0000 (0:00:00.509)       0:00:04.769 *******\nskipping: [instance]\n\nTASK [vexxhost.containers.package : Update state for tar] **********************\nMonday 16 February 2026  23:03:08 +0000 (0:00:00.054)       0:00:04.824 *******\nskipping: [instance]\n\nTASK [vexxhost.containers.forget_package : Forget package] *********************\nMonday 16 February 2026  23:03:08 +0000 (0:00:00.054)       0:00:04.878 *******\nok: [instance]\n\nTASK [vexxhost.containers.package : Update state for tar] **********************\nMonday 16 February 2026  23:03:08 +0000 (0:00:00.240)       0:00:05.119 *******\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Starting download of file] *******\nMonday 16 February 2026  23:03:10 +0000 (0:00:01.230)       0:00:06.350 *******\nok: [instance] => {\n    \"msg\": \"https://github.com/containerd/containerd/releases/download/v2.2.0/containerd-2.2.0-linux-amd64.tar.gz\"\n}\n\nTASK [vexxhost.containers.download_artifact : Download item] *******************\nMonday 16 February 2026  23:03:10 +0000 (0:00:00.064)       0:00:06.415 *******\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Extract archive] *****************\nMonday 16 
February 2026  23:03:10 +0000 (0:00:00.421)       0:00:06.836 *******\nok: [instance]\n\nTASK [vexxhost.containers.containerd : Install SELinux packages] ***************\nMonday 16 February 2026  23:03:12 +0000 (0:00:02.497)       0:00:09.334 *******\nskipping: [instance]\n\nTASK [vexxhost.containers.containerd : Set SELinux to permissive at runtime] ***\nMonday 16 February 2026  23:03:13 +0000 (0:00:00.033)       0:00:09.368 *******\nskipping: [instance]\n\nTASK [vexxhost.containers.containerd : Persist SELinux permissive mode] ********\nMonday 16 February 2026  23:03:13 +0000 (0:00:00.032)       0:00:09.400 *******\nskipping: [instance]\n\nTASK [vexxhost.containers.containerd : Install AppArmor packages] **************\nMonday 16 February 2026  23:03:13 +0000 (0:00:00.033)       0:00:09.434 *******\nok: [instance]\n\nTASK [vexxhost.containers.containerd : Create systemd service file for containerd] ***\nMonday 16 February 2026  23:03:14 +0000 (0:00:01.012)       0:00:10.446 *******\nok: [instance]\n\nTASK [vexxhost.containers.containerd : Create folders for configuration] *******\nMonday 16 February 2026  23:03:14 +0000 (0:00:00.468)       0:00:10.915 *******\nok: [instance] => (item={'path': '/etc/containerd'})\nok: [instance] => (item={'path': '/var/lib/containerd', 'mode': '0o700'})\nok: [instance] => (item={'path': '/run/containerd', 'mode': '0o711'})\nok: [instance] => (item={'path': '/run/containerd/io.containerd.grpc.v1.cri', 'mode': '0o700'})\nok: [instance] => (item={'path': '/run/containerd/io.containerd.sandbox.controller.v1.shim', 'mode': '0o700'})\n\nTASK [vexxhost.containers.containerd : Create containerd config file] **********\nMonday 16 February 2026  23:03:15 +0000 (0:00:00.935)       0:00:11.850 *******\nok: [instance]\n\nTASK [vexxhost.containers.containerd : Force any restarts if necessary] ********\nMonday 16 February 2026  23:03:16 +0000 (0:00:00.500)       0:00:12.351 *******\n\nTASK [vexxhost.containers.containerd : Enable and start 
service] ***************\nMonday 16 February 2026  23:03:16 +0000 (0:00:00.021)       0:00:12.372 *******\nok: [instance]\n\nTASK [vexxhost.containers.forget_package : Forget package] *********************\nMonday 16 February 2026  23:03:16 +0000 (0:00:00.610)       0:00:12.983 *******\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Starting download of file] *******\nMonday 16 February 2026  23:03:16 +0000 (0:00:00.200)       0:00:13.183 *******\nok: [instance] => {\n    \"msg\": \"https://download.docker.com/linux/static/stable/x86_64/docker-24.0.9.tgz\"\n}\n\nTASK [vexxhost.containers.download_artifact : Download item] *******************\nMonday 16 February 2026  23:03:16 +0000 (0:00:00.077)       0:00:13.261 *******\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Extract archive] *****************\nMonday 16 February 2026  23:03:17 +0000 (0:00:01.054)       0:00:14.315 *******\nok: [instance]\n\nTASK [vexxhost.containers.docker : Install AppArmor packages] ******************\nMonday 16 February 2026  23:03:21 +0000 (0:00:03.796)       0:00:18.112 *******\nok: [instance]\n\nTASK [vexxhost.containers.docker : Ensure group \"docker\" exists] ***************\nMonday 16 February 2026  23:03:22 +0000 (0:00:01.184)       0:00:19.296 *******\nok: [instance]\n\nTASK [vexxhost.containers.docker : Create systemd service file for docker] *****\nMonday 16 February 2026  23:03:23 +0000 (0:00:00.356)       0:00:19.653 *******\nok: [instance]\n\nTASK [vexxhost.containers.docker : Create folders for configuration] ***********\nMonday 16 February 2026  23:03:23 +0000 (0:00:00.425)       0:00:20.078 *******\nok: [instance] => (item={'path': '/etc/docker'})\nok: [instance] => (item={'path': '/var/lib/docker', 'mode': '0o710'})\nok: [instance] => (item={'path': '/run/docker', 'mode': '0o711'})\n\nTASK [vexxhost.containers.docker : Create systemd socket file for docker] ******\nMonday 16 February 2026  23:03:24 +0000 (0:00:00.613)       0:00:20.692 
*******\nok: [instance]\n\nTASK [vexxhost.containers.docker : Create docker daemon config file] ***********\nMonday 16 February 2026  23:03:24 +0000 (0:00:00.461)       0:00:21.153 *******\nok: [instance]\n\nTASK [vexxhost.containers.docker : Force any restarts if necessary] ************\nMonday 16 February 2026  23:03:25 +0000 (0:00:00.468)       0:00:21.622 *******\n\nTASK [vexxhost.containers.docker : Enable and start service] *******************\nMonday 16 February 2026  23:03:25 +0000 (0:00:00.030)       0:00:21.653 *******\nok: [instance]\n\nTASK [vexxhost.ceph.cephadm : Gather variables for each operating system] ******\nMonday 16 February 2026  23:03:25 +0000 (0:00:00.397)       0:00:22.050 *******\nok: [instance] => (item=/home/zuul/.ansible/collections/ansible_collections/vexxhost/ceph/roles/cephadm/vars/ubuntu-22.04.yml)\n\nTASK [vexxhost.ceph.cephadm : Install packages] ********************************\nMonday 16 February 2026  23:03:25 +0000 (0:00:00.049)       0:00:22.100 *******\nok: [instance]\n\nTASK [vexxhost.ceph.cephadm : Ensure services are started] *********************\nMonday 16 February 2026  23:03:26 +0000 (0:00:01.060)       0:00:23.161 *******\nok: [instance] => (item=chronyd)\nok: [instance] => (item=sshd)\n\nTASK [vexxhost.ceph.cephadm : Download \"cephadm\"] ******************************\nMonday 16 February 2026  23:03:27 +0000 (0:00:00.818)       0:00:23.979 *******\nok: [instance]\n\nTASK [vexxhost.ceph.cephadm : Remove cephadm from old path] ********************\nMonday 16 February 2026  23:03:27 +0000 (0:00:00.313)       0:00:24.293 *******\nok: [instance]\n\nTASK [vexxhost.ceph.cephadm : Ensure \"cephadm\" user is present] ****************\nMonday 16 February 2026  23:03:28 +0000 (0:00:00.247)       0:00:24.540 *******\nok: [instance]\n\nTASK [vexxhost.ceph.cephadm : Allow \"cephadm\" user to have passwordless sudo] ***\nMonday 16 February 2026  23:03:28 +0000 (0:00:00.410)       0:00:24.951 *******\nok: [instance]\n\nTASK 
[vexxhost.ceph.mon : Get `cephadm ls` status] *****************************\nMonday 16 February 2026  23:03:28 +0000 (0:00:00.378)       0:00:25.330 *******\nok: [instance]\n\nTASK [vexxhost.ceph.mon : Parse the `cephadm ls` output] ***********************\nMonday 16 February 2026  23:03:34 +0000 (0:00:05.677)       0:00:31.007 *******\nok: [instance]\n\nTASK [vexxhost.ceph.mon : Assimilate existing configs in `ceph.conf`] **********\nMonday 16 February 2026  23:03:34 +0000 (0:00:00.058)       0:00:31.066 *******\nskipping: [instance]\n\nTASK [vexxhost.ceph.mon : Adopt monitor to cluster] ****************************\nMonday 16 February 2026  23:03:34 +0000 (0:00:00.038)       0:00:31.105 *******\nskipping: [instance]\n\nTASK [vexxhost.ceph.mon : Adopt manager to cluster] ****************************\nMonday 16 February 2026  23:03:34 +0000 (0:00:00.042)       0:00:31.148 *******\nskipping: [instance]\n\nTASK [vexxhost.ceph.mon : Enable \"cephadm\" mgr module] *************************\nMonday 16 February 2026  23:03:34 +0000 (0:00:00.030)       0:00:31.179 *******\nskipping: [instance]\n\nTASK [vexxhost.ceph.mon : Set orchestrator backend to \"cephadm\"] ***************\nMonday 16 February 2026  23:03:34 +0000 (0:00:00.037)       0:00:31.216 *******\nskipping: [instance]\n\nTASK [vexxhost.ceph.mon : Use `cephadm` user for cephadm] **********************\nMonday 16 February 2026  23:03:34 +0000 (0:00:00.039)       0:00:31.255 *******\nskipping: [instance]\n\nTASK [vexxhost.ceph.mon : Generate \"cephadm\" key] ******************************\nMonday 16 February 2026  23:03:34 +0000 (0:00:00.034)       0:00:31.290 *******\nskipping: [instance]\n\nTASK [vexxhost.ceph.mon : Set Ceph Monitor IP address] *************************\nMonday 16 February 2026  23:03:34 +0000 (0:00:00.037)       0:00:31.328 *******\nok: [instance]\n\nTASK [vexxhost.ceph.mon : Check if any node is bootstrapped] *******************\nMonday 16 February 2026  23:03:35 +0000 (0:00:00.080)       
0:00:31.408 *******\nok: [instance] => (item=instance)\n\nTASK [vexxhost.ceph.mon : Select pre-existing bootstrap node if exists] ********\nMonday 16 February 2026  23:03:35 +0000 (0:00:00.271)       0:00:31.680 *******\nok: [instance]\n\nTASK [vexxhost.ceph.mon : Bootstrap cluster] ***********************************\nMonday 16 February 2026  23:03:35 +0000 (0:00:00.048)       0:00:31.729 *******\nskipping: [instance]\n\nTASK [Install Ceph host] *******************************************************\nMonday 16 February 2026  23:03:35 +0000 (0:00:00.045)       0:00:31.775 *******\nincluded: vexxhost.ceph.cephadm_host for instance\n\nTASK [vexxhost.ceph.cephadm_host : Get public SSH key for \"cephadm\" user] ******\nMonday 16 February 2026  23:03:35 +0000 (0:00:00.082)       0:00:31.858 *******\nok: [instance]\n\nTASK [vexxhost.ceph.cephadm_host : Set fact with public SSH key for \"cephadm\" user] ***\nMonday 16 February 2026  23:03:37 +0000 (0:00:01.706)       0:00:33.564 *******\nok: [instance] => (item=instance)\n\nTASK [vexxhost.ceph.cephadm_host : Set authorized key for \"cephadm\"] ***********\nMonday 16 February 2026  23:03:37 +0000 (0:00:00.065)       0:00:33.630 *******\nok: [instance]\n\nTASK [vexxhost.ceph.cephadm_host : Add new host to Ceph] ***********************\nMonday 16 February 2026  23:03:37 +0000 (0:00:00.404)       0:00:34.035 *******\nok: [instance]\n\nTASK [vexxhost.ceph.mon : Configure \"mon\" label for monitors] ******************\nMonday 16 February 2026  23:03:39 +0000 (0:00:02.000)       0:00:36.035 *******\nok: [instance]\n\nTASK [vexxhost.ceph.mon : Validate monitor exist] ******************************\nMonday 16 February 2026  23:03:41 +0000 (0:00:01.620)       0:00:37.656 *******\nok: [instance]\n\nTASK [Install Ceph host] *******************************************************\nMonday 16 February 2026  23:03:51 +0000 (0:00:10.205)       0:00:47.861 *******\nincluded: vexxhost.ceph.cephadm_host for instance\n\nTASK 
[vexxhost.ceph.cephadm_host : Get public SSH key for \"cephadm\" user] ******\nMonday 16 February 2026  23:03:51 +0000 (0:00:00.085)       0:00:47.946 *******\nskipping: [instance]\n\nTASK [vexxhost.ceph.cephadm_host : Set fact with public SSH key for \"cephadm\" user] ***\nMonday 16 February 2026  23:03:51 +0000 (0:00:00.051)       0:00:47.998 *******\nskipping: [instance] => (item=instance)\nskipping: [instance]\n\nTASK [vexxhost.ceph.cephadm_host : Set authorized key for \"cephadm\"] ***********\nMonday 16 February 2026  23:03:51 +0000 (0:00:00.058)       0:00:48.057 *******\nok: [instance]\n\nTASK [vexxhost.ceph.cephadm_host : Add new host to Ceph] ***********************\nMonday 16 February 2026  23:03:52 +0000 (0:00:00.284)       0:00:48.341 *******\nok: [instance]\n\nTASK [vexxhost.ceph.mgr : Configure \"mgr\" label for managers] ******************\nMonday 16 February 2026  23:03:54 +0000 (0:00:02.058)       0:00:50.399 *******\nok: [instance]\n\nTASK [vexxhost.ceph.mgr : Validate manager exist] ******************************\nMonday 16 February 2026  23:03:55 +0000 (0:00:01.636)       0:00:52.035 *******\nok: [instance]\n\nTASK [vexxhost.ceph.mgr : Enable the Ceph Manager prometheus module] ***********\nMonday 16 February 2026  23:03:57 +0000 (0:00:01.456)       0:00:53.492 *******\nok: [instance]\n\nPLAY [Deploy Ceph OSDs] ********************************************************\n\nTASK [Gathering Facts] *********************************************************\nMonday 16 February 2026  23:03:59 +0000 (0:00:01.959)       0:00:55.452 *******\nok: [instance]\n\nTASK [vexxhost.containers.forget_package : Forget package] *********************\nMonday 16 February 2026  23:04:00 +0000 (0:00:01.075)       0:00:56.527 *******\nok: [instance]\n\nTASK [vexxhost.containers.package : Update state for tar] **********************\nMonday 16 February 2026  23:04:00 +0000 (0:00:00.232)       0:00:56.760 *******\nskipping: [instance]\n\nTASK [vexxhost.containers.directory 
: Create directory (/var/lib/downloads)] ***\nMonday 16 February 2026  23:04:00 +0000 (0:00:00.046)       0:00:56.806 *******\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Starting download of file] *******\nMonday 16 February 2026  23:04:00 +0000 (0:00:00.217)       0:00:57.024 *******\nok: [instance] => {\n    \"msg\": \"https://github.com/opencontainers/runc/releases/download/v1.4.0/runc.amd64\"\n}\n\nTASK [vexxhost.containers.download_artifact : Download item] *******************\nMonday 16 February 2026  23:04:00 +0000 (0:00:00.066)       0:00:57.090 *******\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Extract archive] *****************\nMonday 16 February 2026  23:04:01 +0000 (0:00:00.323)       0:00:57.414 *******\nskipping: [instance]\n\nTASK [vexxhost.containers.package : Update state for tar] **********************\nMonday 16 February 2026  23:04:01 +0000 (0:00:00.050)       0:00:57.464 *******\nskipping: [instance]\n\nTASK [vexxhost.containers.forget_package : Forget package] *********************\nMonday 16 February 2026  23:04:01 +0000 (0:00:00.050)       0:00:57.515 *******\nok: [instance]\n\nTASK [vexxhost.containers.package : Update state for tar] **********************\nMonday 16 February 2026  23:04:01 +0000 (0:00:00.221)       0:00:57.737 *******\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Starting download of file] *******\nMonday 16 February 2026  23:04:02 +0000 (0:00:01.071)       0:00:58.808 *******\nok: [instance] => {\n    \"msg\": \"https://github.com/containerd/containerd/releases/download/v2.2.0/containerd-2.2.0-linux-amd64.tar.gz\"\n}\n\nTASK [vexxhost.containers.download_artifact : Download item] *******************\nMonday 16 February 2026  23:04:02 +0000 (0:00:00.079)       0:00:58.888 *******\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Extract archive] *****************\nMonday 16 February 2026  23:04:02 +0000 (0:00:00.356)       0:00:59.244 
*******\nok: [instance]\n\nTASK [vexxhost.containers.containerd : Install SELinux packages] ***************\nMonday 16 February 2026  23:04:05 +0000 (0:00:02.095)       0:01:01.339 *******\nskipping: [instance]\n\nTASK [vexxhost.containers.containerd : Set SELinux to permissive at runtime] ***\nMonday 16 February 2026  23:04:05 +0000 (0:00:00.032)       0:01:01.371 *******\nskipping: [instance]\n\nTASK [vexxhost.containers.containerd : Persist SELinux permissive mode] ********\nMonday 16 February 2026  23:04:05 +0000 (0:00:00.038)       0:01:01.410 *******\nskipping: [instance]\n\nTASK [vexxhost.containers.containerd : Install AppArmor packages] **************\nMonday 16 February 2026  23:04:05 +0000 (0:00:00.030)       0:01:01.441 *******\nok: [instance]\n\nTASK [vexxhost.containers.containerd : Create systemd service file for containerd] ***\nMonday 16 February 2026  23:04:06 +0000 (0:00:01.081)       0:01:02.522 *******\nok: [instance]\n\nTASK [vexxhost.containers.containerd : Create folders for configuration] *******\nMonday 16 February 2026  23:04:06 +0000 (0:00:00.445)       0:01:02.968 *******\nok: [instance] => (item={'path': '/etc/containerd'})\nok: [instance] => (item={'path': '/var/lib/containerd', 'mode': '0o700'})\nok: [instance] => (item={'path': '/run/containerd', 'mode': '0o711'})\nok: [instance] => (item={'path': '/run/containerd/io.containerd.grpc.v1.cri', 'mode': '0o700'})\nok: [instance] => (item={'path': '/run/containerd/io.containerd.sandbox.controller.v1.shim', 'mode': '0o700'})\n\nTASK [vexxhost.containers.containerd : Create containerd config file] **********\nMonday 16 February 2026  23:04:07 +0000 (0:00:00.930)       0:01:03.898 *******\nok: [instance]\n\nTASK [vexxhost.containers.containerd : Force any restarts if necessary] ********\nMonday 16 February 2026  23:04:08 +0000 (0:00:00.465)       0:01:04.363 *******\n\nTASK [vexxhost.containers.containerd : Enable and start service] ***************\nMonday 16 February 2026  23:04:08 +0000 
(0:00:00.021)       0:01:04.385 *******\nok: [instance]\n\nTASK [vexxhost.containers.forget_package : Forget package] *********************\nMonday 16 February 2026  23:04:08 +0000 (0:00:00.389)       0:01:04.774 *******\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Starting download of file] *******\nMonday 16 February 2026  23:04:08 +0000 (0:00:00.228)       0:01:05.002 *******\nok: [instance] => {\n    \"msg\": \"https://download.docker.com/linux/static/stable/x86_64/docker-24.0.9.tgz\"\n}\n\nTASK [vexxhost.containers.download_artifact : Download item] *******************\nMonday 16 February 2026  23:04:08 +0000 (0:00:00.049)       0:01:05.052 *******\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Extract archive] *****************\nMonday 16 February 2026  23:04:09 +0000 (0:00:00.370)       0:01:05.422 *******\nok: [instance]\n\nTASK [vexxhost.containers.docker : Install AppArmor packages] ******************\nMonday 16 February 2026  23:04:12 +0000 (0:00:03.315)       0:01:08.738 *******\nok: [instance]\n\nTASK [vexxhost.containers.docker : Ensure group \"docker\" exists] ***************\nMonday 16 February 2026  23:04:13 +0000 (0:00:01.065)       0:01:09.803 *******\nok: [instance]\n\nTASK [vexxhost.containers.docker : Create systemd service file for docker] *****\nMonday 16 February 2026  23:04:13 +0000 (0:00:00.196)       0:01:10.000 *******\nok: [instance]\n\nTASK [vexxhost.containers.docker : Create folders for configuration] ***********\nMonday 16 February 2026  23:04:14 +0000 (0:00:00.403)       0:01:10.404 *******\nok: [instance] => (item={'path': '/etc/docker'})\nok: [instance] => (item={'path': '/var/lib/docker', 'mode': '0o710'})\nok: [instance] => (item={'path': '/run/docker', 'mode': '0o711'})\n\nTASK [vexxhost.containers.docker : Create systemd socket file for docker] ******\nMonday 16 February 2026  23:04:14 +0000 (0:00:00.557)       0:01:10.962 *******\nok: [instance]\n\nTASK [vexxhost.containers.docker : 
Create docker daemon config file] ***********\nMonday 16 February 2026  23:04:15 +0000 (0:00:00.411)       0:01:11.373 *******\nok: [instance]\n\nTASK [vexxhost.containers.docker : Force any restarts if necessary] ************\nMonday 16 February 2026  23:04:15 +0000 (0:00:00.410)       0:01:11.784 *******\n\nTASK [vexxhost.containers.docker : Enable and start service] *******************\nMonday 16 February 2026  23:04:15 +0000 (0:00:00.021)       0:01:11.805 *******\nok: [instance]\n\nTASK [vexxhost.ceph.cephadm : Gather variables for each operating system] ******\nMonday 16 February 2026  23:04:15 +0000 (0:00:00.381)       0:01:12.186 *******\nok: [instance] => (item=/home/zuul/.ansible/collections/ansible_collections/vexxhost/ceph/roles/cephadm/vars/ubuntu-22.04.yml)\n\nTASK [vexxhost.ceph.cephadm : Install packages] ********************************\nMonday 16 February 2026  23:04:15 +0000 (0:00:00.065)       0:01:12.252 *******\nok: [instance]\n\nTASK [vexxhost.ceph.cephadm : Ensure services are started] *********************\nMonday 16 February 2026  23:04:17 +0000 (0:00:01.106)       0:01:13.358 *******\nok: [instance] => (item=chronyd)\nok: [instance] => (item=sshd)\n\nTASK [vexxhost.ceph.cephadm : Download \"cephadm\"] ******************************\nMonday 16 February 2026  23:04:17 +0000 (0:00:00.667)       0:01:14.026 *******\nok: [instance]\n\nTASK [vexxhost.ceph.cephadm : Remove cephadm from old path] ********************\nMonday 16 February 2026  23:04:17 +0000 (0:00:00.297)       0:01:14.324 *******\nok: [instance]\n\nTASK [vexxhost.ceph.cephadm : Ensure \"cephadm\" user is present] ****************\nMonday 16 February 2026  23:04:18 +0000 (0:00:00.216)       0:01:14.541 *******\nok: [instance]\n\nTASK [vexxhost.ceph.cephadm : Allow \"cephadm\" user to have passwordless sudo] ***\nMonday 16 February 2026  23:04:18 +0000 (0:00:00.298)       0:01:14.839 *******\nok: [instance]\n\nTASK [vexxhost.ceph.osd : Get monitor status] 
**********************************\nMonday 16 February 2026  23:04:18 +0000 (0:00:00.251)       0:01:15.090 *******\nok: [instance] => (item=instance)\n\nTASK [vexxhost.ceph.osd : Select admin host] ***********************************\nMonday 16 February 2026  23:04:18 +0000 (0:00:00.240)       0:01:15.331 *******\nok: [instance]\n\nTASK [vexxhost.ceph.osd : Get `cephadm ls` status] *****************************\nMonday 16 February 2026  23:04:19 +0000 (0:00:00.054)       0:01:15.385 *******\nok: [instance]\n\nTASK [vexxhost.ceph.osd : Parse the `cephadm ls` output] ***********************\nMonday 16 February 2026  23:04:24 +0000 (0:00:05.458)       0:01:20.844 *******\nok: [instance]\n\nTASK [Install Ceph host] *******************************************************\nMonday 16 February 2026  23:04:24 +0000 (0:00:00.059)       0:01:20.903 *******\nincluded: vexxhost.ceph.cephadm_host for instance\n\nTASK [vexxhost.ceph.cephadm_host : Get public SSH key for \"cephadm\" user] ******\nMonday 16 February 2026  23:04:24 +0000 (0:00:00.075)       0:01:20.978 *******\nskipping: [instance]\n\nTASK [vexxhost.ceph.cephadm_host : Set fact with public SSH key for \"cephadm\" user] ***\nMonday 16 February 2026  23:04:24 +0000 (0:00:00.054)       0:01:21.032 *******\nskipping: [instance] => (item=instance)\nskipping: [instance]\n\nTASK [vexxhost.ceph.cephadm_host : Set authorized key for \"cephadm\"] ***********\nMonday 16 February 2026  23:04:24 +0000 (0:00:00.053)       0:01:21.086 *******\nok: [instance]\n\nTASK [vexxhost.ceph.cephadm_host : Add new host to Ceph] ***********************\nMonday 16 February 2026  23:04:25 +0000 (0:00:00.275)       0:01:21.361 *******\nok: [instance]\n\nTASK [vexxhost.ceph.osd : Adopt OSDs to cluster] *******************************\nMonday 16 February 2026  23:04:27 +0000 (0:00:02.293)       0:01:23.655 *******\nskipping: [instance] => (item=osd.2)\nskipping: [instance] => (item=osd.1)\nskipping: [instance] => (item=osd.0)\nskipping: 
[instance]\n\nTASK [vexxhost.ceph.osd : Wait until OSD added to cephadm] *********************\nMonday 16 February 2026  23:04:37 +0000 (0:00:10.060)       0:01:33.716 *******\nskipping: [instance] => (item=osd.2)\nskipping: [instance] => (item=osd.1)\nskipping: [instance] => (item=osd.0)\nskipping: [instance]\n\nTASK [vexxhost.ceph.osd : Ensure all OSDs are non-legacy] **********************\nMonday 16 February 2026  23:04:37 +0000 (0:00:00.085)       0:01:33.802 *******\nok: [instance]\n\nTASK [vexxhost.ceph.osd : Get `ceph-volume lvm list` status] *******************\nMonday 16 February 2026  23:04:42 +0000 (0:00:05.435)       0:01:39.237 *******\nok: [instance]\n\nTASK [vexxhost.ceph.osd : Install OSDs] ****************************************\nMonday 16 February 2026  23:04:53 +0000 (0:00:10.410)       0:01:49.648 *******\nskipping: [instance] => (item=/dev/ceph-instance-osd0/data)\nskipping: [instance] => (item=/dev/ceph-instance-osd1/data)\nskipping: [instance] => (item=/dev/ceph-instance-osd2/data)\nskipping: [instance]\n\nTASK [vexxhost.ceph.osd : Get mon dump] ****************************************\nMonday 16 February 2026  23:04:53 +0000 (0:00:00.078)       0:01:49.727 *******\nok: [instance]\n\nTASK [vexxhost.ceph.osd : Mark require osd release] ****************************\nMonday 16 February 2026  23:04:55 +0000 (0:00:01.930)       0:01:51.658 *******\nok: [instance]\n\nTASK [vexxhost.ceph.osd : Wait for all OSD to be running] **********************\nMonday 16 February 2026  23:04:56 +0000 (0:00:01.486)       0:01:53.145 *******\nincluded: /home/zuul/.ansible/collections/ansible_collections/vexxhost/ceph/roles/osd/tasks/check-osds.yml for instance\n\nTASK [vexxhost.ceph.osd : Set the retry count] *********************************\nMonday 16 February 2026  23:04:56 +0000 (0:00:00.071)       0:01:53.216 *******\nok: [instance]\n\nTASK [vexxhost.ceph.osd : Get `ceph orch ps`] **********************************\nMonday 16 February 2026  23:04:56 +0000 
(0:00:00.061)       0:01:53.277 *******\nok: [instance]\n\nTASK [vexxhost.ceph.osd : OSD daemon list] *************************************\nMonday 16 February 2026  23:04:58 +0000 (0:00:01.459)       0:01:54.736 *******\nok: [instance]\n\nTASK [vexxhost.ceph.osd : Fail if any OSD not running] *************************\nMonday 16 February 2026  23:04:58 +0000 (0:00:00.045)       0:01:54.782 *******\nskipping: [instance] => (item=1)\nskipping: [instance] => (item=1)\nskipping: [instance] => (item=1)\nskipping: [instance]\n\nTASK [vexxhost.ceph.osd : Fail if any duplicate OSD ID] ************************\nMonday 16 February 2026  23:04:58 +0000 (0:00:00.046)       0:01:54.828 *******\nskipping: [instance]\n\nPLAY [all] *********************************************************************\n\nTASK [Ensure RBD kernel module is loaded] **************************************\nMonday 16 February 2026  23:04:58 +0000 (0:00:00.066)       0:01:54.895 *******\nok: [instance]\n\nPLAY [all] *********************************************************************\n\nTASK [Gathering Facts] *********************************************************\nMonday 16 February 2026  23:04:58 +0000 (0:00:00.336)       0:01:55.231 *******\nok: [instance]\n\nTASK [vexxhost.atmosphere.sysctl : Configure sysctl values] ********************\nMonday 16 February 2026  23:05:00 +0000 (0:00:01.137)       0:01:56.368 *******\nok: [instance] => (item={'name': 'fs.aio-max-nr', 'value': 1048576})\nok: [instance] => (item={'name': 'net.ipv4.tcp_timestamps', 'value': 0})\nok: [instance] => (item={'name': 'net.ipv4.tcp_sack', 'value': 1})\nok: [instance] => (item={'name': 'net.core.netdev_budget', 'value': 1000})\nok: [instance] => (item={'name': 'net.core.netdev_max_backlog', 'value': 250000})\nok: [instance] => (item={'name': 'net.core.rmem_max', 'value': 4194304})\nok: [instance] => (item={'name': 'net.core.wmem_max', 'value': 4194304})\nok: [instance] => (item={'name': 'net.core.rmem_default', 'value': 
4194304})\nok: [instance] => (item={'name': 'net.core.wmem_default', 'value': 4194304})\nok: [instance] => (item={'name': 'net.core.optmem_max', 'value': 4194304})\nok: [instance] => (item={'name': 'net.ipv4.tcp_rmem', 'value': '4096 87380 4194304'})\nok: [instance] => (item={'name': 'net.ipv4.tcp_wmem', 'value': '4096 65536 4194304'})\nok: [instance] => (item={'name': 'net.ipv4.tcp_low_latency', 'value': 1})\nok: [instance] => (item={'name': 'net.ipv4.tcp_adv_win_scale', 'value': 1})\nok: [instance] => (item={'name': 'net.ipv4.neigh.default.gc_thresh1', 'value': 128})\nok: [instance] => (item={'name': 'net.ipv4.neigh.default.gc_thresh2', 'value': 28872})\nok: [instance] => (item={'name': 'net.ipv4.neigh.default.gc_thresh3', 'value': 32768})\nok: [instance] => (item={'name': 'net.ipv6.neigh.default.gc_thresh1', 'value': 128})\nok: [instance] => (item={'name': 'net.ipv6.neigh.default.gc_thresh2', 'value': 28872})\nok: [instance] => (item={'name': 'net.ipv6.neigh.default.gc_thresh3', 'value': 32768})\n\nTASK [vexxhost.atmosphere.ethtool : Create folder for persistent configuration] ***\nMonday 16 February 2026  23:05:03 +0000 (0:00:03.659)       0:02:00.027 *******\nok: [instance]\n\nTASK [vexxhost.atmosphere.ethtool : Install persistent \"ethtool\" tuning] *******\nMonday 16 February 2026  23:05:03 +0000 (0:00:00.236)       0:02:00.264 *******\nok: [instance]\n\nTASK [vexxhost.atmosphere.ethtool : Run \"ethtool\" tuning] **********************\nMonday 16 February 2026  23:05:04 +0000 (0:00:00.405)       0:02:00.669 *******\nok: [instance]\n\nTASK [Set a fact with the \"atmosphere_images\" for other plays] *****************\nMonday 16 February 2026  23:05:04 +0000 (0:00:00.266)       0:02:00.936 *******\nok: [instance]\n\nPLAY [Configure Kubernetes VIP] ************************************************\n\nTASK [Gathering Facts] *********************************************************\nMonday 16 February 2026  23:05:04 +0000 (0:00:00.058)       0:02:00.995 
*******\nok: [instance]\n\nTASK [vexxhost.containers.directory : Create directory (/etc/kubernetes/manifests)] ***\nMonday 16 February 2026  23:05:05 +0000 (0:00:01.030)       0:02:02.026 *******\nok: [instance]\n\nTASK [vexxhost.kubernetes.kube_vip : Uninstall legacy HA stack] ****************\nMonday 16 February 2026  23:05:05 +0000 (0:00:00.200)       0:02:02.226 *******\nok: [instance] => (item=/etc/keepalived/keepalived.conf)\nok: [instance] => (item=/etc/keepalived/check_apiserver.sh)\nok: [instance] => (item=/etc/kubernetes/manifests/keepalived.yaml)\nok: [instance] => (item=/etc/haproxy/haproxy.cfg)\nok: [instance] => (item=/etc/kubernetes/manifests/haproxy.yaml)\n\nTASK [vexxhost.kubernetes.kube_vip : Switch API server to run on port 6443] ****\nMonday 16 February 2026  23:05:06 +0000 (0:00:00.925)       0:02:03.151 *******\nok: [instance] => (item=/etc/kubernetes/manifests/kube-apiserver.yaml)\nok: [instance] => (item=/etc/kubernetes/controller-manager.conf)\nok: [instance] => (item=/etc/kubernetes/scheduler.conf)\n\nTASK [vexxhost.kubernetes.kube_vip : Check if super-admin.conf exists] *********\nMonday 16 February 2026  23:05:07 +0000 (0:00:00.525)       0:02:03.676 *******\nok: [instance]\n\nTASK [vexxhost.kubernetes.kube_vip : Check if kubeadm has already run] *********\nMonday 16 February 2026  23:05:07 +0000 (0:00:00.182)       0:02:03.859 *******\nok: [instance]\n\nTASK [vexxhost.kubernetes.kube_vip : Set fact with KUBECONFIG path] ************\nMonday 16 February 2026  23:05:07 +0000 (0:00:00.187)       0:02:04.047 *******\nok: [instance]\n\nTASK [vexxhost.kubernetes.kube_vip : Set fact with KUBECONFIG path (with super-admin.conf)] ***\nMonday 16 February 2026  23:05:07 +0000 (0:00:00.030)       0:02:04.077 *******\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.kube_vip : Upload Kubernetes manifest] ***************\nMonday 16 February 2026  23:05:07 +0000 (0:00:00.039)       0:02:04.117 *******\nok: [instance]\n\nTASK 
[vexxhost.kubernetes.kube_vip : Ensure kube-vip configuration file] *******\nMonday 16 February 2026  23:05:08 +0000 (0:00:00.477)       0:02:04.594 *******\nok: [instance]\n\nTASK [vexxhost.kubernetes.kube_vip : Flush handlers] ***************************\nMonday 16 February 2026  23:05:08 +0000 (0:00:00.197)       0:02:04.791 *******\n\nPLAY [Install Kubernetes] ******************************************************\n\nTASK [Gathering Facts] *********************************************************\nMonday 16 February 2026  23:05:08 +0000 (0:00:00.108)       0:02:04.899 *******\nok: [instance]\n\nTASK [vexxhost.containers.forget_package : Forget package] *********************\nMonday 16 February 2026  23:05:09 +0000 (0:00:01.060)       0:02:05.960 *******\nok: [instance]\n\nTASK [vexxhost.containers.package : Update state for tar] **********************\nMonday 16 February 2026  23:05:09 +0000 (0:00:00.220)       0:02:06.181 *******\nskipping: [instance]\n\nTASK [vexxhost.containers.directory : Create directory (/var/lib/downloads)] ***\nMonday 16 February 2026  23:05:09 +0000 (0:00:00.041)       0:02:06.222 *******\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Starting download of file] *******\nMonday 16 February 2026  23:05:10 +0000 (0:00:00.229)       0:02:06.451 *******\nok: [instance] => {\n    \"msg\": \"https://github.com/opencontainers/runc/releases/download/v1.4.0/runc.amd64\"\n}\n\nTASK [vexxhost.containers.download_artifact : Download item] *******************\nMonday 16 February 2026  23:05:10 +0000 (0:00:00.056)       0:02:06.507 *******\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Extract archive] *****************\nMonday 16 February 2026  23:05:10 +0000 (0:00:00.317)       0:02:06.825 *******\nskipping: [instance]\n\nTASK [vexxhost.containers.forget_package : Forget package] *********************\nMonday 16 February 2026  23:05:10 +0000 (0:00:00.062)       0:02:06.887 *******\nok: [instance]\n\nTASK 
[vexxhost.containers.package : Update state for tar] **********************\nMonday 16 February 2026  23:05:10 +0000 (0:00:00.216)       0:02:07.104 *******\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Starting download of file] *******\nMonday 16 February 2026  23:05:11 +0000 (0:00:01.012)       0:02:08.116 *******\nok: [instance] => {\n    \"msg\": \"https://github.com/containerd/containerd/releases/download/v2.2.0/containerd-2.2.0-linux-amd64.tar.gz\"\n}\n\nTASK [vexxhost.containers.download_artifact : Download item] *******************\nMonday 16 February 2026  23:05:11 +0000 (0:00:00.064)       0:02:08.181 *******\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Extract archive] *****************\nMonday 16 February 2026  23:05:12 +0000 (0:00:00.331)       0:02:08.512 *******\nok: [instance]\n\nTASK [vexxhost.containers.containerd : Install SELinux packages] ***************\nMonday 16 February 2026  23:05:14 +0000 (0:00:02.040)       0:02:10.554 *******\nskipping: [instance]\n\nTASK [vexxhost.containers.containerd : Set SELinux to permissive at runtime] ***\nMonday 16 February 2026  23:05:14 +0000 (0:00:00.037)       0:02:10.591 *******\nskipping: [instance]\n\nTASK [vexxhost.containers.containerd : Persist SELinux permissive mode] ********\nMonday 16 February 2026  23:05:14 +0000 (0:00:00.033)       0:02:10.625 *******\nskipping: [instance]\n\nTASK [vexxhost.containers.containerd : Install AppArmor packages] **************\nMonday 16 February 2026  23:05:14 +0000 (0:00:00.030)       0:02:10.655 *******\nok: [instance]\n\nTASK [vexxhost.containers.containerd : Create systemd service file for containerd] ***\nMonday 16 February 2026  23:05:15 +0000 (0:00:01.071)       0:02:11.726 *******\nok: [instance]\n\nTASK [vexxhost.containers.containerd : Create folders for configuration] *******\nMonday 16 February 2026  23:05:15 +0000 (0:00:00.426)       0:02:12.152 *******\nok: [instance] => (item={'path': '/etc/containerd'})\nok: 
[instance] => (item={'path': '/var/lib/containerd', 'mode': '0o700'})\nok: [instance] => (item={'path': '/run/containerd', 'mode': '0o711'})\nok: [instance] => (item={'path': '/run/containerd/io.containerd.grpc.v1.cri', 'mode': '0o700'})\nok: [instance] => (item={'path': '/run/containerd/io.containerd.sandbox.controller.v1.shim', 'mode': '0o700'})\n\nTASK [vexxhost.containers.containerd : Create containerd config file] **********\nMonday 16 February 2026  23:05:16 +0000 (0:00:00.970)       0:02:13.122 *******\nok: [instance]\n\nTASK [vexxhost.containers.containerd : Force any restarts if necessary] ********\nMonday 16 February 2026  23:05:17 +0000 (0:00:00.498)       0:02:13.620 *******\n\nTASK [vexxhost.containers.containerd : Enable and start service] ***************\nMonday 16 February 2026  23:05:17 +0000 (0:00:00.010)       0:02:13.631 *******\nok: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes_upgrade_check : Retrieve the \"kubeadm-config\" ConfigMap] ***\nMonday 16 February 2026  23:05:17 +0000 (0:00:00.351)       0:02:13.982 *******\nok: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes_upgrade_check : Parse the ClusterConfiguration] ***\nMonday 16 February 2026  23:05:18 +0000 (0:00:00.873)       0:02:14.856 *******\nok: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes_upgrade_check : Retrieve the current Kubernetes version] ***\nMonday 16 February 2026  23:05:18 +0000 (0:00:00.052)       0:02:14.909 *******\nok: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes_upgrade_check : Extract major, minor, and patch versions] ***\nMonday 16 February 2026  23:05:18 +0000 (0:00:00.047)       0:02:14.956 *******\nok: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes_upgrade_check : Fail if we're jumping more than one minor version] ***\nMonday 16 February 2026  23:05:18 +0000 (0:00:00.060)       0:02:15.016 *******\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes_upgrade_check : Set fact if we need to upgrade] ***\nMonday 16 February 2026  
23:05:18 +0000 (0:00:00.037)       0:02:15.053 *******\nok: [instance]\n\nTASK [vexxhost.containers.forget_package : Forget package] *********************\nMonday 16 February 2026  23:05:18 +0000 (0:00:00.054)       0:02:15.108 *******\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Starting download of file] *******\nMonday 16 February 2026  23:05:18 +0000 (0:00:00.211)       0:02:15.319 *******\nok: [instance] => {\n    \"msg\": \"https://dl.k8s.io/release/v1.28.13/bin/linux/amd64/kubeadm\"\n}\n\nTASK [vexxhost.containers.download_artifact : Download item] *******************\nMonday 16 February 2026  23:05:19 +0000 (0:00:00.050)       0:02:15.370 *******\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Extract archive] *****************\nMonday 16 February 2026  23:05:19 +0000 (0:00:00.471)       0:02:15.841 *******\nskipping: [instance]\n\nTASK [vexxhost.containers.forget_package : Forget package] *********************\nMonday 16 February 2026  23:05:19 +0000 (0:00:00.050)       0:02:15.892 *******\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Starting download of file] *******\nMonday 16 February 2026  23:05:19 +0000 (0:00:00.219)       0:02:16.112 *******\nok: [instance] => {\n    \"msg\": \"https://dl.k8s.io/release/v1.28.13/bin/linux/amd64/kubectl\"\n}\n\nTASK [vexxhost.containers.download_artifact : Download item] *******************\nMonday 16 February 2026  23:05:19 +0000 (0:00:00.049)       0:02:16.162 *******\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Extract archive] *****************\nMonday 16 February 2026  23:05:20 +0000 (0:00:00.638)       0:02:16.801 *******\nskipping: [instance]\n\nTASK [vexxhost.containers.containerd : Install SELinux packages] ***************\nMonday 16 February 2026  23:05:20 +0000 (0:00:00.075)       0:02:16.876 *******\nskipping: [instance]\n\nTASK [vexxhost.containers.containerd : Set SELinux to permissive at runtime] ***\nMonday 16 February 2026  
23:05:20 +0000 (0:00:00.037)       0:02:16.914 *******\nskipping: [instance]\n\nTASK [vexxhost.containers.containerd : Persist SELinux permissive mode] ********\nMonday 16 February 2026  23:05:20 +0000 (0:00:00.032)       0:02:16.947 *******\nskipping: [instance]\n\nTASK [vexxhost.containers.containerd : Install AppArmor packages] **************\nMonday 16 February 2026  23:05:20 +0000 (0:00:00.034)       0:02:16.981 *******\nok: [instance]\n\nTASK [vexxhost.containers.containerd : Create systemd service file for containerd] ***\nMonday 16 February 2026  23:05:21 +0000 (0:00:01.064)       0:02:18.046 *******\nok: [instance]\n\nTASK [vexxhost.containers.containerd : Create folders for configuration] *******\nMonday 16 February 2026  23:05:22 +0000 (0:00:00.422)       0:02:18.468 *******\nok: [instance] => (item={'path': '/etc/containerd'})\nok: [instance] => (item={'path': '/var/lib/containerd', 'mode': '0o700'})\nok: [instance] => (item={'path': '/run/containerd', 'mode': '0o711'})\nok: [instance] => (item={'path': '/run/containerd/io.containerd.grpc.v1.cri', 'mode': '0o700'})\nok: [instance] => (item={'path': '/run/containerd/io.containerd.sandbox.controller.v1.shim', 'mode': '0o700'})\n\nTASK [vexxhost.containers.containerd : Create containerd config file] **********\nMonday 16 February 2026  23:05:23 +0000 (0:00:00.934)       0:02:19.402 *******\nok: [instance]\n\nTASK [vexxhost.containers.containerd : Force any restarts if necessary] ********\nMonday 16 February 2026  23:05:23 +0000 (0:00:00.496)       0:02:19.899 *******\n\nTASK [vexxhost.containers.containerd : Enable and start service] ***************\nMonday 16 February 2026  23:05:23 +0000 (0:00:00.009)       0:02:19.909 *******\nok: [instance]\n\nTASK [vexxhost.containers.forget_package : Forget package] *********************\nMonday 16 February 2026  23:05:23 +0000 (0:00:00.366)       0:02:20.275 *******\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Starting download of file] 
*******\nMonday 16 February 2026  23:05:24 +0000 (0:00:00.261)       0:02:20.536 *******\nok: [instance] => {\n    \"msg\": \"https://github.com/kubernetes-sigs/cri-tools/releases/download/v1.34.0/crictl-v1.34.0-linux-amd64.tar.gz\"\n}\n\nTASK [vexxhost.containers.download_artifact : Download item] *******************\nMonday 16 February 2026  23:05:24 +0000 (0:00:00.065)       0:02:20.601 *******\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Extract archive] *****************\nMonday 16 February 2026  23:05:24 +0000 (0:00:00.341)       0:02:20.943 *******\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Starting download of file] *******\nMonday 16 February 2026  23:05:25 +0000 (0:00:01.345)       0:02:22.288 *******\nok: [instance] => {\n    \"msg\": \"https://github.com/kubernetes-sigs/cri-tools/releases/download/v1.34.0/critest-v1.34.0-linux-amd64.tar.gz\"\n}\n\nTASK [vexxhost.containers.download_artifact : Download item] *******************\nMonday 16 February 2026  23:05:26 +0000 (0:00:00.065)       0:02:22.354 *******\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Extract archive] *****************\nMonday 16 February 2026  23:05:26 +0000 (0:00:00.425)       0:02:22.779 *******\nok: [instance]\n\nTASK [vexxhost.containers.cri_tools : Create crictl config] ********************\nMonday 16 February 2026  23:05:27 +0000 (0:00:01.316)       0:02:24.096 *******\nok: [instance]\n\nTASK [vexxhost.containers.directory : Create directory (/opt/cni/bin)] *********\nMonday 16 February 2026  23:05:28 +0000 (0:00:00.402)       0:02:24.498 *******\nok: [instance]\n\nTASK [vexxhost.containers.forget_package : Forget package] *********************\nMonday 16 February 2026  23:05:28 +0000 (0:00:00.241)       0:02:24.739 *******\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Starting download of file] *******\nMonday 16 February 2026  23:05:28 +0000 (0:00:00.218)       0:02:24.958 *******\nok: [instance] => 
{\n    \"msg\": \"https://github.com/containernetworking/plugins/releases/download/v1.8.0/cni-plugins-linux-amd64-v1.8.0.tgz\"\n}\n\nTASK [vexxhost.containers.download_artifact : Download item] *******************\nMonday 16 February 2026  23:05:28 +0000 (0:00:00.079)       0:02:25.038 *******\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Extract archive] *****************\nMonday 16 February 2026  23:05:29 +0000 (0:00:00.463)       0:02:25.502 *******\nok: [instance]\n\nTASK [vexxhost.containers.cni_plugins : Gather variables for each operating system] ***\nMonday 16 February 2026  23:05:31 +0000 (0:00:02.426)       0:02:27.928 *******\nok: [instance] => (item=/home/zuul/.ansible/collections/ansible_collections/vexxhost/containers/roles/cni_plugins/vars/debian.yml)\n\nTASK [vexxhost.containers.cni_plugins : Install additional packages] ***********\nMonday 16 February 2026  23:05:31 +0000 (0:00:00.061)       0:02:27.990 *******\nok: [instance]\n\nTASK [vexxhost.containers.cni_plugins : Ensure IPv6 is enabled] ****************\nMonday 16 February 2026  23:05:32 +0000 (0:00:01.112)       0:02:29.102 *******\nok: [instance]\n\nTASK [vexxhost.containers.cni_plugins : Enable kernel modules on-boot] *********\nMonday 16 February 2026  23:05:32 +0000 (0:00:00.222)       0:02:29.324 *******\nok: [instance]\n\nTASK [vexxhost.containers.cni_plugins : Enable kernel modules in runtime] ******\nMonday 16 February 2026  23:05:33 +0000 (0:00:00.440)       0:02:29.765 *******\nok: [instance] => (item=br_netfilter)\nok: [instance] => (item=ip_tables)\nok: [instance] => (item=ip6_tables)\nok: [instance] => (item=nf_conntrack)\n\nTASK [vexxhost.containers.forget_package : Forget package] *********************\nMonday 16 February 2026  23:05:34 +0000 (0:00:00.756)       0:02:30.522 *******\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Starting download of file] *******\nMonday 16 February 2026  23:05:34 +0000 (0:00:00.215)       0:02:30.737 
*******\nok: [instance] => {\n    \"msg\": \"https://dl.k8s.io/release/v1.28.13/bin/linux/amd64/kubelet\"\n}\n\nTASK [vexxhost.containers.download_artifact : Download item] *******************\nMonday 16 February 2026  23:05:34 +0000 (0:00:00.049)       0:02:30.787 *******\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Extract archive] *****************\nMonday 16 February 2026  23:05:35 +0000 (0:00:00.599)       0:02:31.387 *******\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.kubelet : Gather variables for each operating system] ***\nMonday 16 February 2026  23:05:35 +0000 (0:00:00.046)       0:02:31.433 *******\nok: [instance] => (item=/home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/kubelet/vars/debian.yml)\n\nTASK [vexxhost.kubernetes.kubelet : Install coreutils] *************************\nMonday 16 February 2026  23:05:35 +0000 (0:00:00.058)       0:02:31.491 *******\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.kubelet : Install additional packages] ***************\nMonday 16 February 2026  23:05:35 +0000 (0:00:00.029)       0:02:31.521 *******\nok: [instance]\n\nTASK [vexxhost.kubernetes.kubelet : Configure sysctl values] *******************\nMonday 16 February 2026  23:05:36 +0000 (0:00:01.077)       0:02:32.598 *******\nok: [instance] => (item={'name': 'net.ipv4.ip_forward', 'value': 1})\nok: [instance] => (item={'name': 'net.bridge.bridge-nf-call-iptables', 'value': 1})\nok: [instance] => (item={'name': 'net.bridge.bridge-nf-call-ip6tables', 'value': 1})\nok: [instance] => (item={'name': 'net.ipv4.conf.all.rp_filter', 'value': 0})\nok: [instance] => (item={'name': 'fs.inotify.max_queued_events', 'value': 1048576})\nok: [instance] => (item={'name': 'fs.inotify.max_user_instances', 'value': 8192})\nok: [instance] => (item={'name': 'fs.inotify.max_user_watches', 'value': 1048576})\n\nTASK [vexxhost.kubernetes.kubelet : Create folders for kubernetes configuration] ***\nMonday 16 February 2026  23:05:37 
+0000 (0:00:01.273)       0:02:33.872 *******\nok: [instance] => (item=/etc/systemd/system/kubelet.service.d)\nok: [instance] => (item=/etc/kubernetes)\nok: [instance] => (item=/etc/kubernetes/manifests)\n\nTASK [vexxhost.kubernetes.kubelet : Add kubelet systemd service config] ********\nMonday 16 February 2026  23:05:38 +0000 (0:00:00.535)       0:02:34.407 *******\nok: [instance]\n\nTASK [vexxhost.kubernetes.kubelet : Add kubeadm dropin for kubelet systemd service config] ***\nMonday 16 February 2026  23:05:38 +0000 (0:00:00.427)       0:02:34.835 *******\nok: [instance]\n\nTASK [vexxhost.kubernetes.kubelet : Check swap status] *************************\nMonday 16 February 2026  23:05:38 +0000 (0:00:00.434)       0:02:35.269 *******\nok: [instance]\n\nTASK [vexxhost.kubernetes.kubelet : Disable swap] ******************************\nMonday 16 February 2026  23:05:39 +0000 (0:00:00.227)       0:02:35.496 *******\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.kubelet : Remove swapfile from /etc/fstab] ***********\nMonday 16 February 2026  23:05:39 +0000 (0:00:00.034)       0:02:35.531 *******\nok: [instance] => (item=swap)\nok: [instance] => (item=none)\n\nTASK [vexxhost.kubernetes.kubelet : Create noswap systemd service config file] ***\nMonday 16 February 2026  23:05:39 +0000 (0:00:00.540)       0:02:36.071 *******\nok: [instance]\n\nTASK [vexxhost.kubernetes.kubelet : Enable noswap service] *********************\nMonday 16 February 2026  23:05:40 +0000 (0:00:00.435)       0:02:36.506 *******\nok: [instance]\n\nTASK [vexxhost.kubernetes.kubelet : Force any restarts if necessary] ***********\nMonday 16 February 2026  23:05:40 +0000 (0:00:00.343)       0:02:36.850 *******\n\nTASK [vexxhost.kubernetes.kubelet : Enable and start kubelet service] **********\nMonday 16 February 2026  23:05:40 +0000 (0:00:00.011)       0:02:36.861 *******\nok: [instance]\n\nTASK [vexxhost.kubernetes.kubelet : Ensure availability of dbus on Debian] *****\nMonday 16 February 2026  
23:05:40 +0000 (0:00:00.353)       0:02:37.215 *******\nok: [instance]\n\nTASK [vexxhost.kubernetes.kubelet : Configure short hostname] ******************\nMonday 16 February 2026  23:05:41 +0000 (0:00:01.080)       0:02:38.295 *******\nok: [instance]\n\nTASK [vexxhost.kubernetes.kubelet : Ensure hostname inside hosts file] *********\nMonday 16 February 2026  23:05:42 +0000 (0:00:00.913)       0:02:39.209 *******\nok: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Remove kubernetes repository] ***********\nMonday 16 February 2026  23:05:43 +0000 (0:00:00.213)       0:02:39.423 *******\nok: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Setup control plane] ********************\nMonday 16 February 2026  23:05:43 +0000 (0:00:00.483)       0:02:39.906 *******\nincluded: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/kubernetes/tasks/control-plane.yml for instance\n\nTASK [vexxhost.kubernetes.kubernetes : Bootstrap cluster] **********************\nMonday 16 February 2026  23:05:43 +0000 (0:00:00.095)       0:02:40.001 *******\nincluded: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/kubernetes/tasks/bootstrap-cluster.yml for instance\n\nTASK [vexxhost.kubernetes.kubernetes : Check if any control plane is bootstrapped] ***\nMonday 16 February 2026  23:05:43 +0000 (0:00:00.095)       0:02:40.097 *******\nok: [instance] => (item=instance)\n\nTASK [vexxhost.kubernetes.kubernetes : Pick node from pre-existing cluster] ****\nMonday 16 February 2026  23:05:43 +0000 (0:00:00.222)       0:02:40.320 *******\nok: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Select first node to initialize cluster] ***\nMonday 16 February 2026  23:05:44 +0000 (0:00:00.047)       0:02:40.368 *******\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Print selected bootstrap node] **********\nMonday 16 February 2026  23:05:44 +0000 (0:00:00.040)       0:02:40.408 *******\nok: [instance] => {\n    \"msg\": 
\"instance\"\n}\n\nTASK [vexxhost.kubernetes.kubernetes : Upload cluster configuration for bootstrap node] ***\nMonday 16 February 2026  23:05:44 +0000 (0:00:00.037)       0:02:40.445 *******\nok: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Create pki folder] **********************\nMonday 16 February 2026  23:05:44 +0000 (0:00:00.543)       0:02:40.989 *******\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Create kubernetes ca key] ***************\nMonday 16 February 2026  23:05:44 +0000 (0:00:00.037)       0:02:41.027 *******\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Create kubernetes ca cert] **************\nMonday 16 February 2026  23:05:44 +0000 (0:00:00.036)       0:02:41.063 *******\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Create etcd-ca key] *********************\nMonday 16 February 2026  23:05:44 +0000 (0:00:00.034)       0:02:41.098 *******\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Create etcd-ca cert] ********************\nMonday 16 February 2026  23:05:44 +0000 (0:00:00.040)       0:02:41.138 *******\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Create front-proxy-ca key] **************\nMonday 16 February 2026  23:05:44 +0000 (0:00:00.031)       0:02:41.170 *******\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Create front-proxy-ca cert] *************\nMonday 16 February 2026  23:05:44 +0000 (0:00:00.034)       0:02:41.205 *******\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Initialize cluster] *********************\nMonday 16 February 2026  23:05:44 +0000 (0:00:00.035)       0:02:41.240 *******\nok: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Check if the node is already part of the cluster] ***\nMonday 16 February 2026  23:05:45 +0000 (0:00:00.199)       0:02:41.439 *******\nok: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Join cluster] ***************************\nMonday 16 February 2026 
 23:05:45 +0000 (0:00:00.222)       0:02:41.662 *******\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Create folder for admin configuration] ***\nMonday 16 February 2026  23:05:45 +0000 (0:00:00.077)       0:02:41.739 *******\nok: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Creating a symlink for admin configuration file] ***\nMonday 16 February 2026  23:05:45 +0000 (0:00:00.192)       0:02:41.931 *******\nok: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Add bash autocomplete for kubectl] ******\nMonday 16 February 2026  23:05:45 +0000 (0:00:00.199)       0:02:42.131 *******\nok: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Install PIP] ****************************\nMonday 16 February 2026  23:05:45 +0000 (0:00:00.198)       0:02:42.329 *******\nok: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Install Kubernetes Python package using pip on supported systems] ***\nMonday 16 February 2026  23:05:47 +0000 (0:00:01.073)       0:02:43.403 *******\nok: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Enable EPEL repository] *****************\nMonday 16 February 2026  23:05:48 +0000 (0:00:01.310)       0:02:44.713 *******\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Install Kubernetes Python package using package manager on supported systems] ***\nMonday 16 February 2026  23:05:48 +0000 (0:00:00.048)       0:02:44.762 *******\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Allow workload on control plane node] ***\nMonday 16 February 2026  23:05:48 +0000 (0:00:00.047)       0:02:44.809 *******\nok: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Remove kube-proxy resources] ************\nMonday 16 February 2026  23:05:49 +0000 (0:00:00.742)       0:02:45.551 *******\nskipping: [instance] => (item=DaemonSet)\nskipping: [instance] => (item=ConfigMap)\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Start an upgrade] ***********************\nMonday 16 February 2026  
23:05:49 +0000 (0:00:00.050)       0:02:45.602 *******\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Check if the Kubernetes API services is up to date] ***\nMonday 16 February 2026  23:05:49 +0000 (0:00:00.045)       0:02:45.647 *******\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Trigger an upgrade of the Kubernetes API services] ***\nMonday 16 February 2026  23:05:49 +0000 (0:00:00.049)       0:02:45.696 *******\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Setup nodes] ****************************\nMonday 16 February 2026  23:05:49 +0000 (0:00:00.053)       0:02:45.750 *******\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Upgrade & restart Kubelet node for upgrade (if needed)] ***\nMonday 16 February 2026  23:05:49 +0000 (0:00:00.036)       0:02:45.786 *******\nok: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Set node selector for CoreDNS components] ***\nMonday 16 February 2026  23:05:49 +0000 (0:00:00.392)       0:02:46.179 *******\nok: [instance]\n\nPLAY [Install control-plane components] ****************************************\n\nTASK [Gathering Facts] *********************************************************\nMonday 16 February 2026  23:05:50 +0000 (0:00:00.903)       0:02:47.083 *******\nok: [instance]\n\nTASK [vexxhost.containers.forget_package : Forget package] *********************\nMonday 16 February 2026  23:05:51 +0000 (0:00:01.071)       0:02:48.154 *******\nok: [instance]\n\nTASK [vexxhost.containers.package : Update state for tar] **********************\nMonday 16 February 2026  23:05:52 +0000 (0:00:00.213)       0:02:48.368 *******\nok: [instance]\n\nTASK [vexxhost.containers.directory : Create directory (/var/lib/downloads)] ***\nMonday 16 February 2026  23:05:53 +0000 (0:00:01.088)       0:02:49.456 *******\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Starting download of file] *******\nMonday 16 February 2026  23:05:53 +0000 (0:00:00.222)     
  0:02:49.678 *******\nok: [instance] => {\n    \"msg\": \"https://get.helm.sh/helm-v3.11.2-linux-amd64.tar.gz\"\n}\n\nTASK [vexxhost.containers.download_artifact : Download item] *******************\nMonday 16 February 2026  23:05:53 +0000 (0:00:00.040)       0:02:49.718 *******\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Extract archive] *****************\nMonday 16 February 2026  23:05:53 +0000 (0:00:00.322)       0:02:50.041 *******\nok: [instance]\n\nTASK [vexxhost.kubernetes.helm : Remove Helm repository] ***********************\nMonday 16 February 2026  23:05:54 +0000 (0:00:01.248)       0:02:51.289 *******\nok: [instance]\n\nTASK [vexxhost.kubernetes.helm : Add bash autocomplete for helm] ***************\nMonday 16 February 2026  23:05:55 +0000 (0:00:00.347)       0:02:51.636 *******\nok: [instance]\n\nTASK [vexxhost.kubernetes.helm : Get Helm plugins dir] *************************\nMonday 16 February 2026  23:05:55 +0000 (0:00:00.208)       0:02:51.845 *******\nok: [instance]\n\nTASK [vexxhost.kubernetes.helm : Create Helm plugins directory if it does not exist] ***\nMonday 16 February 2026  23:05:55 +0000 (0:00:00.258)       0:02:52.103 *******\nok: [instance]\n\nTASK [vexxhost.kubernetes.helm : Remove Helm diff plugin installed by kubernetes.core.helm_plugin] ***\nMonday 16 February 2026  23:05:55 +0000 (0:00:00.205)       0:02:52.309 *******\nok: [instance]\n\nTASK [Install plugin] **********************************************************\nMonday 16 February 2026  23:05:56 +0000 (0:00:00.208)       0:02:52.517 *******\nincluded: vexxhost.containers.download_artifact for instance\n\nTASK [vexxhost.containers.download_artifact : Starting download of file] *******\nMonday 16 February 2026  23:05:56 +0000 (0:00:00.051)       0:02:52.569 *******\nok: [instance] => {\n    \"msg\": \"https://github.com/databus23/helm-diff/releases/download/v3.8.1/helm-diff-linux-amd64.tgz\"\n}\n\nTASK [vexxhost.containers.download_artifact : Download 
item] *******************\nMonday 16 February 2026  23:05:56 +0000 (0:00:00.055)       0:02:52.624 *******\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Extract archive] *****************\nMonday 16 February 2026  23:05:56 +0000 (0:00:00.316)       0:02:52.940 *******\nok: [instance]\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***\nMonday 16 February 2026  23:05:58 +0000 (0:00:01.441)       0:02:54.382 *******\nincluded: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************\nMonday 16 February 2026  23:05:58 +0000 (0:00:00.044)       0:02:54.426 *******\nok: [instance]\n\nTASK [vexxhost.kubernetes.cilium : Get current Kubernetes version] *************\nMonday 16 February 2026  23:05:58 +0000 (0:00:00.635)       0:02:55.061 *******\nok: [instance]\n\nTASK [vexxhost.kubernetes.cilium : Deploy Helm chart] **************************\nMonday 16 February 2026  23:05:59 +0000 (0:00:00.805)       0:02:55.867 *******\nok: [instance]\n\nPLAY [all] *********************************************************************\n\nTASK [Gathering Facts] *********************************************************\nMonday 16 February 2026  23:06:01 +0000 (0:00:01.621)       0:02:57.489 *******\nok: [instance]\n\nTASK [vexxhost.atmosphere.kubernetes_node_labels : Add labels to node] *********\nMonday 16 February 2026  23:06:02 +0000 (0:00:01.063)       0:02:58.552 *******\nok: [instance]\n\nPLAY [all] *********************************************************************\n\nTASK [Uninstall unattended-upgrades] *******************************************\nMonday 16 February 2026  23:06:02 +0000 (0:00:00.711)       0:02:59.264 *******\nok: [instance]\n\nPLAY [controllers] *************************************************************\n\nTASK [Gathering Facts] 
*********************************************************\nMonday 16 February 2026  23:06:03 +0000 (0:00:00.692)       0:02:59.957 *******\nok: [instance]\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***\nMonday 16 February 2026  23:06:04 +0000 (0:00:01.178)       0:03:01.135 *******\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.local_path_provisioner : Deploy Helm chart] **********\nMonday 16 February 2026  23:06:04 +0000 (0:00:00.038)       0:03:01.174 *******\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***\nMonday 16 February 2026  23:06:04 +0000 (0:00:00.049)       0:03:01.224 *******\nincluded: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************\nMonday 16 February 2026  23:06:04 +0000 (0:00:00.035)       0:03:01.259 *******\nok: [instance]\n\nTASK [vexxhost.atmosphere.ceph_csi_rbd : Collect \"ceph mon dump\" output from a monitor] ***\nMonday 16 February 2026  23:06:05 +0000 (0:00:00.466)       0:03:01.726 *******\nok: [instance]\n\nTASK [vexxhost.atmosphere.ceph_csi_rbd : Generate fact with list of Ceph monitors] ***\nMonday 16 February 2026  23:06:19 +0000 (0:00:14.359)       0:03:16.085 *******\nok: [instance]\n\nTASK [vexxhost.atmosphere.ceph_csi_rbd : Create Ceph pool] *********************\nMonday 16 February 2026  23:06:19 +0000 (0:00:00.057)       0:03:16.143 *******\nok: [instance]\n\nTASK [vexxhost.atmosphere.ceph_csi_rbd : Create user client.kube] **************\nMonday 16 February 2026  23:06:22 +0000 (0:00:03.043)       0:03:19.187 *******\nok: [instance]\n\nTASK [vexxhost.atmosphere.ceph_csi_rbd : Retrieve keyring for client.kube] *****\nMonday 16 February 2026  23:06:24 +0000 (0:00:01.360)       0:03:20.547 *******\nok: [instance]\n\nTASK 
[vexxhost.atmosphere.ceph_csi_rbd : Store keyring inside fact] ************\nMonday 16 February 2026  23:06:25 +0000 (0:00:01.166)       0:03:21.713 *******\nok: [instance]\n\nTASK [vexxhost.atmosphere.ceph_csi_rbd : Deploy Helm chart] ********************\nMonday 16 February 2026  23:06:25 +0000 (0:00:00.042)       0:03:21.756 *******\nok: [instance]\n\nTASK [vexxhost.atmosphere.powerstore_csi : Clone PowerStore CSI from GitHub] ***\nMonday 16 February 2026  23:06:26 +0000 (0:00:01.229)       0:03:22.986 *******\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.powerstore_csi : Create Secret] **********************\nMonday 16 February 2026  23:06:26 +0000 (0:00:00.030)       0:03:23.016 *******\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.powerstore_csi : Create StorageClass] ****************\nMonday 16 February 2026  23:06:26 +0000 (0:00:00.028)       0:03:23.045 *******\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.powerstore_csi : Deploy Helm chart] ******************\nMonday 16 February 2026  23:06:26 +0000 (0:00:00.027)       0:03:23.072 *******\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.multipathd : Add backports PPA] **********************\nMonday 16 February 2026  23:06:26 +0000 (0:00:00.048)       0:03:23.121 *******\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.multipathd : Install the multipathd package] *********\nMonday 16 February 2026  23:06:26 +0000 (0:00:00.022)       0:03:23.143 *******\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.multipathd : Install the configuration file] *********\nMonday 16 February 2026  23:06:26 +0000 (0:00:00.029)       0:03:23.172 *******\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.portworx : Detect if InitiatorName is set] ***********\nMonday 16 February 2026  23:06:26 +0000 (0:00:00.038)       0:03:23.210 *******\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.portworx : Generate a new InitiatorName] *************\nMonday 16 February 2026  23:06:26 +0000 (0:00:00.033)       
0:03:23.244 *******\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.portworx : Write the new InitiatorName] **************\nMonday 16 February 2026  23:06:26 +0000 (0:00:00.028)       0:03:23.273 *******\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.portworx : Create namespace] *************************\nMonday 16 February 2026  23:06:26 +0000 (0:00:00.030)       0:03:23.303 *******\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.portworx : Install Portworx] *************************\nMonday 16 February 2026  23:06:26 +0000 (0:00:00.021)       0:03:23.325 *******\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.portworx : Wait till the CRDs are created] ***********\nMonday 16 February 2026  23:06:27 +0000 (0:00:00.024)       0:03:23.350 *******\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.portworx : Create Portworx Storage Cluster] **********\nMonday 16 February 2026  23:06:27 +0000 (0:00:00.020)       0:03:23.370 *******\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.storpool_csi : Deploy CSI RBAC] **********************\nMonday 16 February 2026  23:06:27 +0000 (0:00:00.041)       0:03:23.412 *******\nskipping: [instance] => (item={'name': 'controllerplugin'})\nskipping: [instance] => (item={'name': 'nodeplugin'})\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.storpool_csi : Deploy CSI] ***************************\nMonday 16 February 2026  23:06:27 +0000 (0:00:00.034)       0:03:23.446 *******\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.storpool_csi : Create StorageClass] ******************\nMonday 16 February 2026  23:06:27 +0000 (0:00:00.025)       0:03:23.472 *******\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.ibm_block_csi_driver : Deploy CSI] *******************\nMonday 16 February 2026  23:06:27 +0000 (0:00:00.038)       0:03:23.510 *******\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.ibm_block_csi_driver : Create Secret] ****************\nMonday 16 February 2026  23:06:27 +0000 (0:00:00.022)       0:03:23.532 
*******\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.ibm_block_csi_driver : Create StorageClass] **********\nMonday 16 February 2026  23:06:27 +0000 (0:00:00.019)       0:03:23.552 *******\nskipping: [instance]\n\nPLAY RECAP *********************************************************************\ninstance                   : ok=235  changed=0    unreachable=0    failed=0    skipped=82   rescued=0    ignored=0\n\nMonday 16 February 2026  23:06:27 +0000 (0:00:00.036)       0:03:23.588 *******\n===============================================================================\nvexxhost.atmosphere.ceph_csi_rbd : Collect \"ceph mon dump\" output from a monitor -- 14.36s\nvexxhost.ceph.osd : Get `ceph-volume lvm list` status ------------------ 10.41s\nvexxhost.ceph.mon : Validate monitor exist ----------------------------- 10.21s\nvexxhost.ceph.osd : Adopt OSDs to cluster ------------------------------ 10.06s\nvexxhost.ceph.mon : Get `cephadm ls` status ----------------------------- 5.68s\nvexxhost.ceph.osd : Get `cephadm ls` status ----------------------------- 5.46s\nvexxhost.ceph.osd : Ensure all OSDs are non-legacy ---------------------- 5.44s\nvexxhost.containers.download_artifact : Extract archive ----------------- 3.80s\nvexxhost.atmosphere.sysctl : Configure sysctl values -------------------- 3.66s\nvexxhost.containers.download_artifact : Extract archive ----------------- 3.32s\nvexxhost.atmosphere.ceph_csi_rbd : Create Ceph pool --------------------- 3.04s\nvexxhost.containers.download_artifact : Extract archive ----------------- 2.50s\nvexxhost.containers.download_artifact : Extract archive ----------------- 2.43s\nvexxhost.ceph.cephadm_host : Add new host to Ceph ----------------------- 2.29s\nvexxhost.containers.download_artifact : Extract archive ----------------- 2.10s\nvexxhost.ceph.cephadm_host : Add new host to Ceph ----------------------- 2.06s\nvexxhost.containers.download_artifact : Extract archive ----------------- 2.04s\nvexxhost.ceph.cephadm_host 
: Add new host to Ceph ----------------------- 2.00s\nvexxhost.ceph.mgr : Enable the Ceph Manager prometheus module ----------- 1.96s\nvexxhost.ceph.osd : Get mon dump ---------------------------------------- 1.93s\nINFO     [csi > idempotence] Executed: Successful\nINFO     [csi > side_effect] Executing\nWARNING  [csi > side_effect] Executed: Missing playbook (Remove from test_sequence to suppress)\nINFO     [csi > verify] Executing\n\nPLAY [Verify] ******************************************************************\n\nTASK [Gathering Facts] *********************************************************\nMonday 16 February 2026  23:06:28 +0000 (0:00:00.033)       0:00:00.033 *******\n[WARNING]: Platform linux on host instance is using the discovered Python\ninterpreter at /usr/bin/python3.10, but future installation of another Python\ninterpreter could change the meaning of that path. See\nhttps://docs.ansible.com/ansible-\ncore/2.17/reference_appendices/interpreter_discovery.html for more information.\nok: [instance]\n\nTASK [Create a persistent volume] **********************************************\nMonday 16 February 2026  23:06:29 +0000 (0:00:01.298)       0:00:01.331 *******\nchanged: [instance]\n\nTASK [Create a pod] ************************************************************\nMonday 16 February 2026  23:06:30 +0000 (0:00:00.830)       0:00:02.161 *******\nchanged: [instance]\n\nTASK [Delete the pod] **********************************************************\nMonday 16 February 2026  23:06:45 +0000 (0:00:15.683)       0:00:17.845 *******\nchanged: [instance]\n\nTASK [Delete the persistent volume] ********************************************\nMonday 16 February 2026  23:06:46 +0000 (0:00:00.715)       0:00:18.560 *******\nok: [instance]\n\nPLAY RECAP *********************************************************************\ninstance                   : ok=5    changed=3    unreachable=0    failed=0    skipped=0    rescued=0    ignored=0\n\nMonday 16 February 2026  
23:06:47 +0000 (0:00:00.708)       0:00:19.269 *******\n===============================================================================\nCreate a pod ----------------------------------------------------------- 15.68s\nGathering Facts --------------------------------------------------------- 1.30s\nCreate a persistent volume ---------------------------------------------- 0.83s\nDelete the pod ---------------------------------------------------------- 0.72s\nDelete the persistent volume -------------------------------------------- 0.71s\nINFO     [csi > verify] Executed: Successful\nINFO     [csi > cleanup] Executing\nWARNING  [csi > cleanup] Executed: Missing playbook (Remove from test_sequence to suppress)\nINFO     [csi > destroy] Executing\nWARNING  [csi > destroy] Skipping, '--destroy=never' requested.\nINFO     [csi > destroy] Executed: Successful\nWARNING  Molecule executed 1 scenario (1 missing files)",
                            "stdout_lines": [
                                "Using CPython 3.10.12 interpreter at: /usr/bin/python3",
                                "Creating virtual environment at: .venv",
                                "   Building atmosphere @ file:///home/zuul/src/github.com/vexxhost/atmosphere",
                                "Downloading cryptography (4.2MiB)",
                                "Downloading pygments (1.2MiB)",
                                "Downloading ansible-core (2.1MiB)",
                                "Downloading kubernetes (1.9MiB)",
                                "Downloading netaddr (2.2MiB)",
                                "Downloading openstacksdk (1.7MiB)",
                                "Downloading setuptools (1.1MiB)",
                                "Downloading rjsonnet (1.2MiB)",
                                "   Building pyperclip==1.9.0",
                                " Downloading rjsonnet",
                                " Downloading pygments",
                                " Downloading netaddr",
                                " Downloading cryptography",
                                " Downloading setuptools",
                                " Downloading kubernetes",
                                " Downloading ansible-core",
                                " Downloading openstacksdk",
                                "      Built pyperclip==1.9.0",
                                "      Built atmosphere @ file:///home/zuul/src/github.com/vexxhost/atmosphere",
                                "Installed 79 packages in 62ms",
                                "WARNING  Molecule scenarios should migrate to 'extensions/molecule'",
                                "INFO     [csi > discovery] scenario test matrix: dependency, cleanup, destroy, syntax, create, prepare, converge, idempotence, side_effect, verify, cleanup, destroy",
                                "INFO     [csi > prerun] Performing prerun with role_name_check=0...",
                                "INFO     [csi > dependency] Executing",
                                "WARNING  [csi > dependency] Missing roles requirements file: requirements.yml",
                                "WARNING  [csi > dependency] Missing collections requirements file: collections.yml",
                                "WARNING  [csi > dependency] Executed: 2 missing (Remove from test_sequence to suppress)",
                                "INFO     [csi > cleanup] Executing",
                                "WARNING  [csi > cleanup] Executed: Missing playbook (Remove from test_sequence to suppress)",
                                "INFO     [csi > destroy] Executing",
                                "WARNING  [csi > destroy] Skipping, '--destroy=never' requested.",
                                "INFO     [csi > destroy] Executed: Successful",
                                "INFO     [csi > syntax] Executing",
                                "",
                                "playbook: /home/zuul/src/github.com/vexxhost/atmosphere/molecule/csi/converge.yml",
                                "INFO     [csi > syntax] Executed: Successful",
                                "INFO     [csi > create] Executing",
                                "WARNING  [csi > create] Executed: Missing playbook (Remove from test_sequence to suppress)",
                                "INFO     [csi > prepare] Executing",
                                "",
                                "PLAY [Prepare] *****************************************************************",
                                "",
                                "TASK [Gathering Facts] *********************************************************",
                                "Monday 16 February 2026  22:55:18 +0000 (0:00:00.027)       0:00:00.027 *******",
                                "[WARNING]: Platform linux on host instance is using the discovered Python",
                                "interpreter at /usr/bin/python3.10, but future installation of another Python",
                                "interpreter could change the meaning of that path. See",
                                "https://docs.ansible.com/ansible-",
                                "core/2.17/reference_appendices/interpreter_discovery.html for more information.",
                                "ok: [instance]",
                                "",
                                "TASK [Configure short hostname] ************************************************",
                                "Monday 16 February 2026  22:55:19 +0000 (0:00:01.045)       0:00:01.073 *******",
                                "changed: [instance]",
                                "",
                                "TASK [Ensure hostname inside hosts file] ***************************************",
                                "Monday 16 February 2026  22:55:19 +0000 (0:00:00.633)       0:00:01.706 *******",
                                "[WARNING]: Module remote_tmp /root/.ansible/tmp did not exist and was created",
                                "with a mode of 0700, this may cause issues when running as another user. To",
                                "avoid this, create the remote_tmp dir with the correct permissions manually",
                                "changed: [instance]",
                                "",
                                "TASK [Purge \"snapd\" package] ***************************************************",
                                "Monday 16 February 2026  22:55:20 +0000 (0:00:00.263)       0:00:01.969 *******",
                                "ok: [instance]",
                                "",
                                "PLAY [Create devices for Ceph] *************************************************",
                                "",
                                "TASK [Gathering Facts] *********************************************************",
                                "Monday 16 February 2026  22:55:20 +0000 (0:00:00.730)       0:00:02.700 *******",
                                "ok: [instance]",
                                "",
                                "TASK [Install depedencies] *****************************************************",
                                "Monday 16 February 2026  22:55:21 +0000 (0:00:00.653)       0:00:03.354 *******",
                                "changed: [instance]",
                                "",
                                "TASK [Start up service] ********************************************************",
                                "Monday 16 February 2026  22:55:40 +0000 (0:00:19.601)       0:00:22.956 *******",
                                "ok: [instance]",
                                "",
                                "TASK [Generate lvm.conf] *******************************************************",
                                "Monday 16 February 2026  22:55:41 +0000 (0:00:00.519)       0:00:23.475 *******",
                                "ok: [instance]",
                                "",
                                "TASK [Write /etc/lvm/lvm.conf] *************************************************",
                                "Monday 16 February 2026  22:55:41 +0000 (0:00:00.277)       0:00:23.753 *******",
                                "changed: [instance]",
                                "",
                                "TASK [Get list of all loopback devices] ****************************************",
                                "Monday 16 February 2026  22:55:42 +0000 (0:00:00.568)       0:00:24.321 *******",
                                "ok: [instance]",
                                "",
                                "TASK [Fail if there is any existing loopback devices] **************************",
                                "Monday 16 February 2026  22:55:42 +0000 (0:00:00.175)       0:00:24.497 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [Create devices for Ceph] *************************************************",
                                "Monday 16 February 2026  22:55:42 +0000 (0:00:00.018)       0:00:24.516 *******",
                                "changed: [instance] => (item=osd0)",
                                "changed: [instance] => (item=osd1)",
                                "changed: [instance] => (item=osd2)",
                                "",
                                "TASK [Set permissions on loopback devices] *************************************",
                                "Monday 16 February 2026  22:55:43 +0000 (0:00:00.519)       0:00:25.035 *******",
                                "changed: [instance] => (item=osd0)",
                                "changed: [instance] => (item=osd1)",
                                "changed: [instance] => (item=osd2)",
                                "",
                                "TASK [Start loop devices] ******************************************************",
                                "Monday 16 February 2026  22:55:43 +0000 (0:00:00.590)       0:00:25.626 *******",
                                "changed: [instance] => (item=osd0)",
                                "changed: [instance] => (item=osd1)",
                                "changed: [instance] => (item=osd2)",
                                "",
                                "TASK [Create a volume group for each loop device] ******************************",
                                "Monday 16 February 2026  22:55:44 +0000 (0:00:00.636)       0:00:26.262 *******",
                                "changed: [instance] => (item=osd0)",
                                "changed: [instance] => (item=osd1)",
                                "changed: [instance] => (item=osd2)",
                                "",
                                "TASK [Create a logical volume for each loop device] ****************************",
                                "Monday 16 February 2026  22:55:46 +0000 (0:00:02.453)       0:00:28.715 *******",
                                "changed: [instance] => (item=ceph-instance-osd0)",
                                "changed: [instance] => (item=ceph-instance-osd1)",
                                "changed: [instance] => (item=ceph-instance-osd2)",
                                "",
                                "PLAY RECAP *********************************************************************",
                                "instance                   : ok=15   changed=9    unreachable=0    failed=0    skipped=1    rescued=0    ignored=0",
                                "",
                                "Monday 16 February 2026  22:55:48 +0000 (0:00:01.655)       0:00:30.371 *******",
                                "===============================================================================",
                                "Install depedencies ---------------------------------------------------- 19.60s",
                                "Create a volume group for each loop device ------------------------------ 2.45s",
                                "Create a logical volume for each loop device ---------------------------- 1.66s",
                                "Gathering Facts --------------------------------------------------------- 1.05s",
                                "Purge \"snapd\" package --------------------------------------------------- 0.73s",
                                "Gathering Facts --------------------------------------------------------- 0.65s",
                                "Start loop devices ------------------------------------------------------ 0.64s",
                                "Configure short hostname ------------------------------------------------ 0.63s",
                                "Set permissions on loopback devices ------------------------------------- 0.59s",
                                "Write /etc/lvm/lvm.conf ------------------------------------------------- 0.57s",
                                "Start up service -------------------------------------------------------- 0.52s",
                                "Create devices for Ceph ------------------------------------------------- 0.52s",
                                "Generate lvm.conf ------------------------------------------------------- 0.28s",
                                "Ensure hostname inside hosts file --------------------------------------- 0.26s",
                                "Get list of all loopback devices ---------------------------------------- 0.18s",
                                "Fail if there is any existing loopback devices -------------------------- 0.02s",
                                "INFO     [csi > prepare] Executed: Successful",
                                "INFO     [csi > converge] Executing",
                                "",
                                "PLAY [Debug CSI driver value] **************************************************",
                                "",
                                "TASK [Gathering Facts] *********************************************************",
                                "Monday 16 February 2026  22:55:49 +0000 (0:00:00.035)       0:00:00.035 *******",
                                "[WARNING]: Platform linux on host instance is using the discovered Python",
                                "interpreter at /usr/bin/python3.10, but future installation of another Python",
                                "interpreter could change the meaning of that path. See",
                                "https://docs.ansible.com/ansible-",
                                "core/2.17/reference_appendices/interpreter_discovery.html for more information.",
                                "ok: [instance]",
                                "",
                                "TASK [Display CSI driver value and environment variable] ***********************",
                                "Monday 16 February 2026  22:55:50 +0000 (0:00:00.899)       0:00:00.935 *******",
                                "ok: [instance] => {",
                                "    \"msg\": \"csi_driver=rbd, MOLECULE_CSI_DRIVER=\"",
                                "}",
                                "",
                                "PLAY [all] *********************************************************************",
                                "",
                                "TASK [Gathering Facts] *********************************************************",
                                "Monday 16 February 2026  22:55:50 +0000 (0:00:00.036)       0:00:00.971 *******",
                                "ok: [instance]",
                                "",
                                "TASK [Set a fact with the \"atmosphere_images\" for other plays] *****************",
                                "Monday 16 February 2026  22:55:51 +0000 (0:00:00.803)       0:00:01.774 *******",
                                "ok: [instance]",
                                "",
                                "PLAY [Deploy Ceph monitors & managers] *****************************************",
                                "",
                                "TASK [Gathering Facts] *********************************************************",
                                "Monday 16 February 2026  22:55:51 +0000 (0:00:00.170)       0:00:01.944 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.forget_package : Forget package] *********************",
                                "Monday 16 February 2026  22:55:52 +0000 (0:00:00.813)       0:00:02.758 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.package : Update state for tar] **********************",
                                "Monday 16 February 2026  22:55:52 +0000 (0:00:00.271)       0:00:03.030 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.containers.directory : Create directory (/var/lib/downloads)] ***",
                                "Monday 16 February 2026  22:55:52 +0000 (0:00:00.038)       0:00:03.068 *******",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Starting download of file] *******",
                                "Monday 16 February 2026  22:55:53 +0000 (0:00:00.271)       0:00:03.340 *******",
                                "ok: [instance] => {",
                                "    \"msg\": \"https://github.com/opencontainers/runc/releases/download/v1.4.0/runc.amd64\"",
                                "}",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Download item] *******************",
                                "Monday 16 February 2026  22:55:53 +0000 (0:00:00.058)       0:00:03.399 *******",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Extract archive] *****************",
                                "Monday 16 February 2026  22:55:53 +0000 (0:00:00.607)       0:00:04.006 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.containers.package : Update state for tar] **********************",
                                "Monday 16 February 2026  22:55:53 +0000 (0:00:00.050)       0:00:04.056 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.containers.forget_package : Forget package] *********************",
                                "Monday 16 February 2026  22:55:53 +0000 (0:00:00.048)       0:00:04.105 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.package : Update state for tar] **********************",
                                "Monday 16 February 2026  22:55:54 +0000 (0:00:00.191)       0:00:04.297 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Starting download of file] *******",
                                "Monday 16 February 2026  22:55:55 +0000 (0:00:01.086)       0:00:05.384 *******",
                                "ok: [instance] => {",
                                "    \"msg\": \"https://github.com/containerd/containerd/releases/download/v2.2.0/containerd-2.2.0-linux-amd64.tar.gz\"",
                                "}",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Download item] *******************",
                                "Monday 16 February 2026  22:55:55 +0000 (0:00:00.061)       0:00:05.445 *******",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Extract archive] *****************",
                                "Monday 16 February 2026  22:55:55 +0000 (0:00:00.729)       0:00:06.175 *******",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Install SELinux packages] ***************",
                                "Monday 16 February 2026  22:55:58 +0000 (0:00:02.779)       0:00:08.955 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Set SELinux to permissive at runtime] ***",
                                "Monday 16 February 2026  22:55:58 +0000 (0:00:00.034)       0:00:08.989 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Persist SELinux permissive mode] ********",
                                "Monday 16 February 2026  22:55:58 +0000 (0:00:00.027)       0:00:09.017 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Install AppArmor packages] **************",
                                "Monday 16 February 2026  22:55:58 +0000 (0:00:00.030)       0:00:09.047 *******",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Create systemd service file for containerd] ***",
                                "Monday 16 February 2026  22:56:03 +0000 (0:00:04.639)       0:00:13.687 *******",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Create folders for configuration] *******",
                                "Monday 16 February 2026  22:56:03 +0000 (0:00:00.485)       0:00:14.172 *******",
                                "changed: [instance] => (item={'path': '/etc/containerd'})",
                                "changed: [instance] => (item={'path': '/var/lib/containerd', 'mode': '0o700'})",
                                "changed: [instance] => (item={'path': '/run/containerd', 'mode': '0o711'})",
                                "changed: [instance] => (item={'path': '/run/containerd/io.containerd.grpc.v1.cri', 'mode': '0o700'})",
                                "changed: [instance] => (item={'path': '/run/containerd/io.containerd.sandbox.controller.v1.shim', 'mode': '0o700'})",
                                "",
                                "TASK [vexxhost.containers.containerd : Create containerd config file] **********",
                                "Monday 16 February 2026  22:56:04 +0000 (0:00:00.859)       0:00:15.031 *******",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Force any restarts if necessary] ********",
                                "Monday 16 February 2026  22:56:05 +0000 (0:00:00.483)       0:00:15.515 *******",
                                "",
                                "RUNNING HANDLER [vexxhost.containers.containerd : Reload systemd] **************",
                                "Monday 16 February 2026  22:56:05 +0000 (0:00:00.017)       0:00:15.532 *******",
                                "ok: [instance]",
                                "",
                                "RUNNING HANDLER [vexxhost.containers.containerd : Restart containerd] **********",
                                "Monday 16 February 2026  22:56:06 +0000 (0:00:00.953)       0:00:16.486 *******",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Enable and start service] ***************",
                                "Monday 16 February 2026  22:56:06 +0000 (0:00:00.564)       0:00:17.050 *******",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.containers.forget_package : Forget package] *********************",
                                "Monday 16 February 2026  22:56:07 +0000 (0:00:00.559)       0:00:17.610 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Starting download of file] *******",
                                "Monday 16 February 2026  22:56:07 +0000 (0:00:00.197)       0:00:17.807 *******",
                                "ok: [instance] => {",
                                "    \"msg\": \"https://download.docker.com/linux/static/stable/x86_64/docker-24.0.9.tgz\"",
                                "}",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Download item] *******************",
                                "Monday 16 February 2026  22:56:07 +0000 (0:00:00.053)       0:00:17.861 *******",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Extract archive] *****************",
                                "Monday 16 February 2026  22:56:08 +0000 (0:00:00.755)       0:00:18.617 *******",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.containers.docker : Install AppArmor packages] ******************",
                                "Monday 16 February 2026  22:56:12 +0000 (0:00:04.272)       0:00:22.889 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.docker : Ensure group \"docker\" exists] ***************",
                                "Monday 16 February 2026  22:56:13 +0000 (0:00:00.925)       0:00:23.815 *******",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.containers.docker : Create systemd service file for docker] *****",
                                "Monday 16 February 2026  22:56:13 +0000 (0:00:00.319)       0:00:24.135 *******",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.containers.docker : Create folders for configuration] ***********",
                                "Monday 16 February 2026  22:56:14 +0000 (0:00:00.411)       0:00:24.547 *******",
                                "changed: [instance] => (item={'path': '/etc/docker'})",
                                "changed: [instance] => (item={'path': '/var/lib/docker', 'mode': '0o710'})",
                                "changed: [instance] => (item={'path': '/run/docker', 'mode': '0o711'})",
                                "",
                                "TASK [vexxhost.containers.docker : Create systemd socket file for docker] ******",
                                "Monday 16 February 2026  22:56:14 +0000 (0:00:00.526)       0:00:25.073 *******",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.containers.docker : Create docker daemon config file] ***********",
                                "Monday 16 February 2026  22:56:15 +0000 (0:00:00.431)       0:00:25.504 *******",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.containers.docker : Force any restarts if necessary] ************",
                                "Monday 16 February 2026  22:56:15 +0000 (0:00:00.401)       0:00:25.905 *******",
                                "",
                                "RUNNING HANDLER [vexxhost.containers.containerd : Reload systemd] **************",
                                "Monday 16 February 2026  22:56:15 +0000 (0:00:00.016)       0:00:25.922 *******",
                                "ok: [instance]",
                                "",
                                "RUNNING HANDLER [vexxhost.containers.docker : Restart docker] ******************",
                                "Monday 16 February 2026  22:56:16 +0000 (0:00:00.773)       0:00:26.696 *******",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.containers.docker : Enable and start service] *******************",
                                "Monday 16 February 2026  22:56:17 +0000 (0:00:00.839)       0:00:27.536 *******",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.ceph.cephadm : Gather variables for each operating system] ******",
                                "Monday 16 February 2026  22:56:17 +0000 (0:00:00.528)       0:00:28.064 *******",
                                "ok: [instance] => (item=/home/zuul/.ansible/collections/ansible_collections/vexxhost/ceph/roles/cephadm/vars/ubuntu-22.04.yml)",
                                "",
                                "TASK [vexxhost.ceph.cephadm : Install packages] ********************************",
                                "Monday 16 February 2026  22:56:17 +0000 (0:00:00.053)       0:00:28.118 *******",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.ceph.cephadm : Ensure services are started] *********************",
                                "Monday 16 February 2026  22:56:23 +0000 (0:00:05.130)       0:00:33.248 *******",
                                "ok: [instance] => (item=chronyd)",
                                "ok: [instance] => (item=sshd)",
                                "",
                                "TASK [vexxhost.ceph.cephadm : Download \"cephadm\"] ******************************",
                                "Monday 16 February 2026  22:56:23 +0000 (0:00:00.623)       0:00:33.872 *******",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.ceph.cephadm : Remove cephadm from old path] ********************",
                                "Monday 16 February 2026  22:56:24 +0000 (0:00:01.283)       0:00:35.155 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.cephadm : Ensure \"cephadm\" user is present] ****************",
                                "Monday 16 February 2026  22:56:25 +0000 (0:00:00.189)       0:00:35.344 *******",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.ceph.cephadm : Allow \"cephadm\" user to have passwordless sudo] ***",
                                "Monday 16 February 2026  22:56:25 +0000 (0:00:00.428)       0:00:35.773 *******",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.ceph.mon : Get `cephadm ls` status] *****************************",
                                "Monday 16 February 2026  22:56:25 +0000 (0:00:00.320)       0:00:36.093 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.mon : Parse the `cephadm ls` output] ***********************",
                                "Monday 16 February 2026  22:56:27 +0000 (0:00:01.603)       0:00:37.696 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.mon : Assimilate existing configs in `ceph.conf`] **********",
                                "Monday 16 February 2026  22:56:27 +0000 (0:00:00.042)       0:00:37.738 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.ceph.mon : Adopt monitor to cluster] ****************************",
                                "Monday 16 February 2026  22:56:27 +0000 (0:00:00.028)       0:00:37.767 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.ceph.mon : Adopt manager to cluster] ****************************",
                                "Monday 16 February 2026  22:56:27 +0000 (0:00:00.026)       0:00:37.794 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.ceph.mon : Enable \"cephadm\" mgr module] *************************",
                                "Monday 16 February 2026  22:56:27 +0000 (0:00:00.028)       0:00:37.823 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.ceph.mon : Set orchestrator backend to \"cephadm\"] ***************",
                                "Monday 16 February 2026  22:56:27 +0000 (0:00:00.027)       0:00:37.850 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.ceph.mon : Use `cephadm` user for cephadm] **********************",
                                "Monday 16 February 2026  22:56:27 +0000 (0:00:00.033)       0:00:37.884 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.ceph.mon : Generate \"cephadm\" key] ******************************",
                                "Monday 16 February 2026  22:56:27 +0000 (0:00:00.029)       0:00:37.913 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.ceph.mon : Set Ceph Monitor IP address] *************************",
                                "Monday 16 February 2026  22:56:27 +0000 (0:00:00.028)       0:00:37.942 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.mon : Check if any node is bootstrapped] *******************",
                                "Monday 16 February 2026  22:56:27 +0000 (0:00:00.097)       0:00:38.039 *******",
                                "ok: [instance] => (item=instance)",
                                "",
                                "TASK [vexxhost.ceph.mon : Select pre-existing bootstrap node if exists] ********",
                                "Monday 16 February 2026  22:56:28 +0000 (0:00:00.188)       0:00:38.227 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.mon : Bootstrap cluster] ***********************************",
                                "Monday 16 February 2026  22:56:28 +0000 (0:00:00.040)       0:00:38.268 *******",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/ceph/roles/mon/tasks/bootstrap-ceph.yml for instance",
                                "",
                                "TASK [vexxhost.ceph.mon : Generate temporary file for \"ceph.conf\"] *************",
                                "Monday 16 February 2026  22:56:28 +0000 (0:00:00.056)       0:00:38.325 *******",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.ceph.mon : Include extra configuration values] ******************",
                                "Monday 16 February 2026  22:56:28 +0000 (0:00:00.289)       0:00:38.614 *******",
                                "changed: [instance] => (item={'option': 'mon allow pool size one', 'section': 'global', 'value': True})",
                                "changed: [instance] => (item={'option': 'osd crush chooseleaf type', 'section': 'global', 'value': 0})",
                                "changed: [instance] => (item={'option': 'auth allow insecure global id reclaim', 'section': 'mon', 'value': False})",
                                "",
                                "TASK [vexxhost.ceph.mon : Run Bootstrap command] *******************************",
                                "Monday 16 February 2026  22:56:29 +0000 (0:00:00.646)       0:00:39.261 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.mon : Remove temporary file for \"ceph.conf\"] ***************",
                                "Monday 16 February 2026  22:57:53 +0000 (0:01:24.772)       0:02:04.033 *******",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.ceph.mon : Set bootstrap node] **********************************",
                                "Monday 16 February 2026  22:57:54 +0000 (0:00:00.208)       0:02:04.242 *******",
                                "ok: [instance]",
                                "",
                                "TASK [Install Ceph host] *******************************************************",
                                "Monday 16 February 2026  22:57:54 +0000 (0:00:00.036)       0:02:04.279 *******",
                                "included: vexxhost.ceph.cephadm_host for instance",
                                "",
                                "TASK [vexxhost.ceph.cephadm_host : Get public SSH key for \"cephadm\" user] ******",
                                "Monday 16 February 2026  22:57:54 +0000 (0:00:00.076)       0:02:04.355 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.cephadm_host : Set fact with public SSH key for \"cephadm\" user] ***",
                                "Monday 16 February 2026  22:57:55 +0000 (0:00:01.492)       0:02:05.848 *******",
                                "ok: [instance] => (item=instance)",
                                "",
                                "TASK [vexxhost.ceph.cephadm_host : Set authorized key for \"cephadm\"] ***********",
                                "Monday 16 February 2026  22:57:55 +0000 (0:00:00.052)       0:02:05.900 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.cephadm_host : Add new host to Ceph] ***********************",
                                "Monday 16 February 2026  22:57:56 +0000 (0:00:00.370)       0:02:06.271 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.mon : Configure \"mon\" label for monitors] ******************",
                                "Monday 16 February 2026  22:57:57 +0000 (0:00:01.761)       0:02:08.033 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.mon : Validate monitor exists] *****************************",
                                "Monday 16 February 2026  22:57:59 +0000 (0:00:01.466)       0:02:09.499 *******",
                                "ok: [instance]",
                                "",
                                "TASK [Install Ceph host] *******************************************************",
                                "Monday 16 February 2026  22:58:09 +0000 (0:00:09.980)       0:02:19.480 *******",
                                "included: vexxhost.ceph.cephadm_host for instance",
                                "",
                                "TASK [vexxhost.ceph.cephadm_host : Get public SSH key for \"cephadm\" user] ******",
                                "Monday 16 February 2026  22:58:09 +0000 (0:00:00.080)       0:02:19.560 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.ceph.cephadm_host : Set fact with public SSH key for \"cephadm\" user] ***",
                                "Monday 16 February 2026  22:58:09 +0000 (0:00:00.049)       0:02:19.610 *******",
                                "skipping: [instance] => (item=instance)",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.ceph.cephadm_host : Set authorized key for \"cephadm\"] ***********",
                                "Monday 16 February 2026  22:58:09 +0000 (0:00:00.043)       0:02:19.653 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.cephadm_host : Add new host to Ceph] ***********************",
                                "Monday 16 February 2026  22:58:09 +0000 (0:00:00.263)       0:02:19.917 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.mgr : Configure \"mgr\" label for managers] ******************",
                                "Monday 16 February 2026  22:58:11 +0000 (0:00:02.244)       0:02:22.162 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.mgr : Validate manager exists] *****************************",
                                "Monday 16 February 2026  22:58:13 +0000 (0:00:01.413)       0:02:23.576 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.mgr : Enable the Ceph Manager prometheus module] ***********",
                                "Monday 16 February 2026  22:58:14 +0000 (0:00:01.310)       0:02:24.886 *******",
                                "ok: [instance]",
                                "",
                                "PLAY [Deploy Ceph OSDs] ********************************************************",
                                "",
                                "TASK [Gathering Facts] *********************************************************",
                                "Monday 16 February 2026  22:58:17 +0000 (0:00:02.307)       0:02:27.193 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.forget_package : Forget package] *********************",
                                "Monday 16 February 2026  22:58:17 +0000 (0:00:00.785)       0:02:27.979 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.package : Update state for tar] **********************",
                                "Monday 16 February 2026  22:58:17 +0000 (0:00:00.198)       0:02:28.178 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.containers.directory : Create directory (/var/lib/downloads)] ***",
                                "Monday 16 February 2026  22:58:18 +0000 (0:00:00.030)       0:02:28.208 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Starting download of file] *******",
                                "Monday 16 February 2026  22:58:18 +0000 (0:00:00.203)       0:02:28.412 *******",
                                "ok: [instance] => {",
                                "    \"msg\": \"https://github.com/opencontainers/runc/releases/download/v1.4.0/runc.amd64\"",
                                "}",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Download item] *******************",
                                "Monday 16 February 2026  22:58:18 +0000 (0:00:00.044)       0:02:28.456 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Extract archive] *****************",
                                "Monday 16 February 2026  22:58:18 +0000 (0:00:00.280)       0:02:28.737 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.containers.package : Update state for tar] **********************",
                                "Monday 16 February 2026  22:58:18 +0000 (0:00:00.035)       0:02:28.772 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.containers.forget_package : Forget package] *********************",
                                "Monday 16 February 2026  22:58:18 +0000 (0:00:00.037)       0:02:28.809 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.package : Update state for tar] **********************",
                                "Monday 16 February 2026  22:58:18 +0000 (0:00:00.188)       0:02:28.998 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Starting download of file] *******",
                                "Monday 16 February 2026  22:58:19 +0000 (0:00:00.979)       0:02:29.977 *******",
                                "ok: [instance] => {",
                                "    \"msg\": \"https://github.com/containerd/containerd/releases/download/v2.2.0/containerd-2.2.0-linux-amd64.tar.gz\"",
                                "}",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Download item] *******************",
                                "Monday 16 February 2026  22:58:19 +0000 (0:00:00.052)       0:02:30.029 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Extract archive] *****************",
                                "Monday 16 February 2026  22:58:20 +0000 (0:00:00.314)       0:02:30.344 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Install SELinux packages] ***************",
                                "Monday 16 February 2026  22:58:22 +0000 (0:00:01.900)       0:02:32.244 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Set SELinux to permissive at runtime] ***",
                                "Monday 16 February 2026  22:58:22 +0000 (0:00:00.022)       0:02:32.266 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Persist SELinux permissive mode] ********",
                                "Monday 16 February 2026  22:58:22 +0000 (0:00:00.021)       0:02:32.288 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Install AppArmor packages] **************",
                                "Monday 16 February 2026  22:58:22 +0000 (0:00:00.022)       0:02:32.310 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Create systemd service file for containerd] ***",
                                "Monday 16 February 2026  22:58:23 +0000 (0:00:00.951)       0:02:33.262 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Create folders for configuration] *******",
                                "Monday 16 February 2026  22:58:23 +0000 (0:00:00.395)       0:02:33.657 *******",
                                "ok: [instance] => (item={'path': '/etc/containerd'})",
                                "ok: [instance] => (item={'path': '/var/lib/containerd', 'mode': '0o700'})",
                                "ok: [instance] => (item={'path': '/run/containerd', 'mode': '0o711'})",
                                "ok: [instance] => (item={'path': '/run/containerd/io.containerd.grpc.v1.cri', 'mode': '0o700'})",
                                "ok: [instance] => (item={'path': '/run/containerd/io.containerd.sandbox.controller.v1.shim', 'mode': '0o700'})",
                                "",
                                "TASK [vexxhost.containers.containerd : Create containerd config file] **********",
                                "Monday 16 February 2026  22:58:24 +0000 (0:00:00.875)       0:02:34.533 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Force any restarts if necessary] ********",
                                "Monday 16 February 2026  22:58:24 +0000 (0:00:00.598)       0:02:35.132 *******",
                                "",
                                "TASK [vexxhost.containers.containerd : Enable and start service] ***************",
                                "Monday 16 February 2026  22:58:24 +0000 (0:00:00.032)       0:02:35.165 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.forget_package : Forget package] *********************",
                                "Monday 16 February 2026  22:58:25 +0000 (0:00:00.385)       0:02:35.550 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Starting download of file] *******",
                                "Monday 16 February 2026  22:58:25 +0000 (0:00:00.200)       0:02:35.750 *******",
                                "ok: [instance] => {",
                                "    \"msg\": \"https://download.docker.com/linux/static/stable/x86_64/docker-24.0.9.tgz\"",
                                "}",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Download item] *******************",
                                "Monday 16 February 2026  22:58:25 +0000 (0:00:00.048)       0:02:35.798 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Extract archive] *****************",
                                "Monday 16 February 2026  22:58:25 +0000 (0:00:00.320)       0:02:36.119 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.docker : Install AppArmor packages] ******************",
                                "Monday 16 February 2026  22:58:28 +0000 (0:00:03.013)       0:02:39.132 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.docker : Ensure group \"docker\" exists] ***************",
                                "Monday 16 February 2026  22:58:30 +0000 (0:00:01.056)       0:02:40.188 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.docker : Create systemd service file for docker] *****",
                                "Monday 16 February 2026  22:58:30 +0000 (0:00:00.174)       0:02:40.363 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.docker : Create folders for configuration] ***********",
                                "Monday 16 February 2026  22:58:30 +0000 (0:00:00.426)       0:02:40.789 *******",
                                "ok: [instance] => (item={'path': '/etc/docker'})",
                                "ok: [instance] => (item={'path': '/var/lib/docker', 'mode': '0o710'})",
                                "ok: [instance] => (item={'path': '/run/docker', 'mode': '0o711'})",
                                "",
                                "TASK [vexxhost.containers.docker : Create systemd socket file for docker] ******",
                                "Monday 16 February 2026  22:58:31 +0000 (0:00:00.585)       0:02:41.374 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.docker : Create docker daemon config file] ***********",
                                "Monday 16 February 2026  22:58:31 +0000 (0:00:00.445)       0:02:41.820 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.docker : Force any restarts if necessary] ************",
                                "Monday 16 February 2026  22:58:32 +0000 (0:00:00.383)       0:02:42.203 *******",
                                "",
                                "TASK [vexxhost.containers.docker : Enable and start service] *******************",
                                "Monday 16 February 2026  22:58:32 +0000 (0:00:00.016)       0:02:42.219 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.cephadm : Gather variables for each operating system] ******",
                                "Monday 16 February 2026  22:58:32 +0000 (0:00:00.358)       0:02:42.577 *******",
                                "ok: [instance] => (item=/home/zuul/.ansible/collections/ansible_collections/vexxhost/ceph/roles/cephadm/vars/ubuntu-22.04.yml)",
                                "",
                                "TASK [vexxhost.ceph.cephadm : Install packages] ********************************",
                                "Monday 16 February 2026  22:58:32 +0000 (0:00:00.050)       0:02:42.628 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.cephadm : Ensure services are started] *********************",
                                "Monday 16 February 2026  22:58:33 +0000 (0:00:00.994)       0:02:43.623 *******",
                                "ok: [instance] => (item=chronyd)",
                                "ok: [instance] => (item=sshd)",
                                "",
                                "TASK [vexxhost.ceph.cephadm : Download \"cephadm\"] ******************************",
                                "Monday 16 February 2026  22:58:34 +0000 (0:00:00.631)       0:02:44.254 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.cephadm : Remove cephadm from old path] ********************",
                                "Monday 16 February 2026  22:58:34 +0000 (0:00:00.773)       0:02:45.028 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.cephadm : Ensure \"cephadm\" user is present] ****************",
                                "Monday 16 February 2026  22:58:35 +0000 (0:00:00.184)       0:02:45.212 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.cephadm : Allow \"cephadm\" user to have passwordless sudo] ***",
                                "Monday 16 February 2026  22:58:35 +0000 (0:00:00.229)       0:02:45.442 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.osd : Get monitor status] **********************************",
                                "Monday 16 February 2026  22:58:35 +0000 (0:00:00.196)       0:02:45.639 *******",
                                "ok: [instance] => (item=instance)",
                                "",
                                "TASK [vexxhost.ceph.osd : Select admin host] ***********************************",
                                "Monday 16 February 2026  22:58:35 +0000 (0:00:00.215)       0:02:45.854 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.osd : Get `cephadm ls` status] *****************************",
                                "Monday 16 February 2026  22:58:35 +0000 (0:00:00.047)       0:02:45.902 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.osd : Parse the `cephadm ls` output] ***********************",
                                "Monday 16 February 2026  22:58:40 +0000 (0:00:05.274)       0:02:51.177 *******",
                                "ok: [instance]",
                                "",
                                "TASK [Install Ceph host] *******************************************************",
                                "Monday 16 February 2026  22:58:41 +0000 (0:00:00.045)       0:02:51.222 *******",
                                "included: vexxhost.ceph.cephadm_host for instance",
                                "",
                                "TASK [vexxhost.ceph.cephadm_host : Get public SSH key for \"cephadm\" user] ******",
                                "Monday 16 February 2026  22:58:41 +0000 (0:00:00.057)       0:02:51.280 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.ceph.cephadm_host : Set fact with public SSH key for \"cephadm\" user] ***",
                                "Monday 16 February 2026  22:58:41 +0000 (0:00:00.037)       0:02:51.318 *******",
                                "skipping: [instance] => (item=instance)",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.ceph.cephadm_host : Set authorized key for \"cephadm\"] ***********",
                                "Monday 16 February 2026  22:58:41 +0000 (0:00:00.040)       0:02:51.359 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.cephadm_host : Add new host to Ceph] ***********************",
                                "Monday 16 February 2026  22:58:41 +0000 (0:00:00.247)       0:02:51.606 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.osd : Adopt OSDs to cluster] *******************************",
                                "Monday 16 February 2026  22:58:43 +0000 (0:00:01.756)       0:02:53.362 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.ceph.osd : Wait until OSD added to cephadm] *********************",
                                "Monday 16 February 2026  22:58:43 +0000 (0:00:00.026)       0:02:53.389 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.ceph.osd : Ensure all OSDs are non-legacy] **********************",
                                "Monday 16 February 2026  22:58:43 +0000 (0:00:00.023)       0:02:53.412 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.osd : Get `ceph-volume lvm list` status] *******************",
                                "Monday 16 February 2026  22:58:48 +0000 (0:00:05.291)       0:02:58.704 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.osd : Install OSDs] ****************************************",
                                "Monday 16 February 2026  22:58:58 +0000 (0:00:09.906)       0:03:08.610 *******",
                                "ok: [instance] => (item=/dev/ceph-instance-osd0/data)",
                                "ok: [instance] => (item=/dev/ceph-instance-osd1/data)",
                                "ok: [instance] => (item=/dev/ceph-instance-osd2/data)",
                                "",
                                "TASK [vexxhost.ceph.osd : Get mon dump] ****************************************",
                                "Monday 16 February 2026  23:00:19 +0000 (0:01:20.818)       0:04:29.429 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.osd : Mark require osd release] ****************************",
                                "Monday 16 February 2026  23:00:20 +0000 (0:00:01.547)       0:04:30.976 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.osd : Wait for all OSD to be running] **********************",
                                "Monday 16 February 2026  23:00:22 +0000 (0:00:01.418)       0:04:32.395 *******",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/ceph/roles/osd/tasks/check-osds.yml for instance",
                                "",
                                "TASK [vexxhost.ceph.osd : Set the retry count] *********************************",
                                "Monday 16 February 2026  23:00:22 +0000 (0:00:00.063)       0:04:32.458 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.osd : Get `ceph orch ps`] **********************************",
                                "Monday 16 February 2026  23:00:22 +0000 (0:00:00.057)       0:04:32.516 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.osd : OSD daemon list] *************************************",
                                "Monday 16 February 2026  23:00:23 +0000 (0:00:01.507)       0:04:34.024 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.osd : Fail if any OSD not running] *************************",
                                "Monday 16 February 2026  23:00:23 +0000 (0:00:00.038)       0:04:34.063 *******",
                                "skipping: [instance] => (item=1)",
                                "skipping: [instance] => (item=1)",
                                "skipping: [instance] => (item=1)",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.ceph.osd : Fail if any duplicate OSD ID] ************************",
                                "Monday 16 February 2026  23:00:23 +0000 (0:00:00.044)       0:04:34.108 *******",
                                "skipping: [instance]",
                                "",
                                "PLAY [all] *********************************************************************",
                                "",
                                "TASK [Ensure RBD kernel module is loaded] **************************************",
                                "Monday 16 February 2026  23:00:23 +0000 (0:00:00.040)       0:04:34.148 *******",
                                "changed: [instance]",
                                "",
                                "PLAY [all] *********************************************************************",
                                "",
                                "TASK [Gathering Facts] *********************************************************",
                                "Monday 16 February 2026  23:00:24 +0000 (0:00:00.368)       0:04:34.517 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.sysctl : Configure sysctl values] ********************",
                                "Monday 16 February 2026  23:00:25 +0000 (0:00:00.881)       0:04:35.398 *******",
                                "changed: [instance] => (item={'name': 'fs.aio-max-nr', 'value': 1048576})",
                                "changed: [instance] => (item={'name': 'net.ipv4.tcp_timestamps', 'value': 0})",
                                "changed: [instance] => (item={'name': 'net.ipv4.tcp_sack', 'value': 1})",
                                "changed: [instance] => (item={'name': 'net.core.netdev_budget', 'value': 1000})",
                                "changed: [instance] => (item={'name': 'net.core.netdev_max_backlog', 'value': 250000})",
                                "changed: [instance] => (item={'name': 'net.core.rmem_max', 'value': 4194304})",
                                "changed: [instance] => (item={'name': 'net.core.wmem_max', 'value': 4194304})",
                                "changed: [instance] => (item={'name': 'net.core.rmem_default', 'value': 4194304})",
                                "changed: [instance] => (item={'name': 'net.core.wmem_default', 'value': 4194304})",
                                "changed: [instance] => (item={'name': 'net.core.optmem_max', 'value': 4194304})",
                                "changed: [instance] => (item={'name': 'net.ipv4.tcp_rmem', 'value': '4096 87380 4194304'})",
                                "changed: [instance] => (item={'name': 'net.ipv4.tcp_wmem', 'value': '4096 65536 4194304'})",
                                "changed: [instance] => (item={'name': 'net.ipv4.tcp_low_latency', 'value': 1})",
                                "changed: [instance] => (item={'name': 'net.ipv4.tcp_adv_win_scale', 'value': 1})",
                                "changed: [instance] => (item={'name': 'net.ipv4.neigh.default.gc_thresh1', 'value': 128})",
                                "changed: [instance] => (item={'name': 'net.ipv4.neigh.default.gc_thresh2', 'value': 28872})",
                                "changed: [instance] => (item={'name': 'net.ipv4.neigh.default.gc_thresh3', 'value': 32768})",
                                "changed: [instance] => (item={'name': 'net.ipv6.neigh.default.gc_thresh1', 'value': 128})",
                                "changed: [instance] => (item={'name': 'net.ipv6.neigh.default.gc_thresh2', 'value': 28872})",
                                "changed: [instance] => (item={'name': 'net.ipv6.neigh.default.gc_thresh3', 'value': 32768})",
                                "",
                                "TASK [vexxhost.atmosphere.ethtool : Create folder for persistent configuration] ***",
                                "Monday 16 February 2026  23:00:28 +0000 (0:00:03.614)       0:04:39.013 *******",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.ethtool : Install persistent \"ethtool\" tuning] *******",
                                "Monday 16 February 2026  23:00:29 +0000 (0:00:00.180)       0:04:39.193 *******",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.ethtool : Run \"ethtool\" tuning] **********************",
                                "Monday 16 February 2026  23:00:29 +0000 (0:00:00.413)       0:04:39.607 *******",
                                "ok: [instance]",
                                "",
                                "TASK [Set a fact with the \"atmosphere_images\" for other plays] *****************",
                                "Monday 16 February 2026  23:00:29 +0000 (0:00:00.212)       0:04:39.819 *******",
                                "ok: [instance]",
                                "",
                                "PLAY [Configure Kubernetes VIP] ************************************************",
                                "",
                                "TASK [Gathering Facts] *********************************************************",
                                "Monday 16 February 2026  23:00:29 +0000 (0:00:00.044)       0:04:39.864 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.directory : Create directory (/etc/kubernetes/manifests)] ***",
                                "Monday 16 February 2026  23:00:30 +0000 (0:00:00.831)       0:04:40.696 *******",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kube_vip : Uninstall legacy HA stack] ****************",
                                "Monday 16 February 2026  23:00:30 +0000 (0:00:00.185)       0:04:40.881 *******",
                                "ok: [instance] => (item=/etc/keepalived/keepalived.conf)",
                                "ok: [instance] => (item=/etc/keepalived/check_apiserver.sh)",
                                "ok: [instance] => (item=/etc/kubernetes/manifests/keepalived.yaml)",
                                "ok: [instance] => (item=/etc/haproxy/haproxy.cfg)",
                                "ok: [instance] => (item=/etc/kubernetes/manifests/haproxy.yaml)",
                                "",
                                "TASK [vexxhost.kubernetes.kube_vip : Switch API server to run on port 6443] ****",
                                "Monday 16 February 2026  23:00:31 +0000 (0:00:00.859)       0:04:41.741 *******",
                                "failed: [instance] (item=/etc/kubernetes/manifests/kube-apiserver.yaml) => {\"ansible_loop_var\": \"item\", \"changed\": false, \"item\": \"/etc/kubernetes/manifests/kube-apiserver.yaml\", \"msg\": \"Path /etc/kubernetes/manifests/kube-apiserver.yaml does not exist !\", \"rc\": 257}",
                                "failed: [instance] (item=/etc/kubernetes/controller-manager.conf) => {\"ansible_loop_var\": \"item\", \"changed\": false, \"item\": \"/etc/kubernetes/controller-manager.conf\", \"msg\": \"Path /etc/kubernetes/controller-manager.conf does not exist !\", \"rc\": 257}",
                                "failed: [instance] (item=/etc/kubernetes/scheduler.conf) => {\"ansible_loop_var\": \"item\", \"changed\": false, \"item\": \"/etc/kubernetes/scheduler.conf\", \"msg\": \"Path /etc/kubernetes/scheduler.conf does not exist !\", \"rc\": 257}",
                                "...ignoring",
                                "",
                                "TASK [vexxhost.kubernetes.kube_vip : Check if super-admin.conf exists] *********",
                                "Monday 16 February 2026  23:00:32 +0000 (0:00:00.478)       0:04:42.220 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kube_vip : Check if kubeadm has already run] *********",
                                "Monday 16 February 2026  23:00:32 +0000 (0:00:00.164)       0:04:42.384 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kube_vip : Set fact with KUBECONFIG path] ************",
                                "Monday 16 February 2026  23:00:32 +0000 (0:00:00.197)       0:04:42.582 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kube_vip : Set fact with KUBECONFIG path (with super-admin.conf)] ***",
                                "Monday 16 February 2026  23:00:32 +0000 (0:00:00.029)       0:04:42.612 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kube_vip : Upload Kubernetes manifest] ***************",
                                "Monday 16 February 2026  23:00:32 +0000 (0:00:00.035)       0:04:42.647 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kube_vip : Ensure kube-vip configuration file] *******",
                                "Monday 16 February 2026  23:00:32 +0000 (0:00:00.516)       0:04:43.163 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kube_vip : Flush handlers] ***************************",
                                "Monday 16 February 2026  23:00:33 +0000 (0:00:00.188)       0:04:43.352 *******",
                                "",
                                "PLAY [Install Kubernetes] ******************************************************",
                                "",
                                "TASK [Gathering Facts] *********************************************************",
                                "Monday 16 February 2026  23:00:33 +0000 (0:00:00.095)       0:04:43.447 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.forget_package : Forget package] *********************",
                                "Monday 16 February 2026  23:00:34 +0000 (0:00:00.822)       0:04:44.269 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.package : Update state for tar] **********************",
                                "Monday 16 February 2026  23:00:34 +0000 (0:00:00.205)       0:04:44.475 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.containers.directory : Create directory (/var/lib/downloads)] ***",
                                "Monday 16 February 2026  23:00:34 +0000 (0:00:00.040)       0:04:44.515 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Starting download of file] *******",
                                "Monday 16 February 2026  23:00:34 +0000 (0:00:00.199)       0:04:44.715 *******",
                                "ok: [instance] => {",
                                "    \"msg\": \"https://github.com/opencontainers/runc/releases/download/v1.4.0/runc.amd64\"",
                                "}",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Download item] *******************",
                                "Monday 16 February 2026  23:00:34 +0000 (0:00:00.048)       0:04:44.763 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Extract archive] *****************",
                                "Monday 16 February 2026  23:00:34 +0000 (0:00:00.291)       0:04:45.055 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.containers.forget_package : Forget package] *********************",
                                "Monday 16 February 2026  23:00:34 +0000 (0:00:00.035)       0:04:45.091 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.package : Update state for tar] **********************",
                                "Monday 16 February 2026  23:00:35 +0000 (0:00:00.195)       0:04:45.286 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Starting download of file] *******",
                                "Monday 16 February 2026  23:00:36 +0000 (0:00:01.037)       0:04:46.324 *******",
                                "ok: [instance] => {",
                                "    \"msg\": \"https://github.com/containerd/containerd/releases/download/v2.2.0/containerd-2.2.0-linux-amd64.tar.gz\"",
                                "}",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Download item] *******************",
                                "Monday 16 February 2026  23:00:36 +0000 (0:00:00.061)       0:04:46.385 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Extract archive] *****************",
                                "Monday 16 February 2026  23:00:36 +0000 (0:00:00.320)       0:04:46.705 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Install SELinux packages] ***************",
                                "Monday 16 February 2026  23:00:38 +0000 (0:00:01.904)       0:04:48.610 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Set SELinux to permissive at runtime] ***",
                                "Monday 16 February 2026  23:00:38 +0000 (0:00:00.032)       0:04:48.642 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Persist SELinux permissive mode] ********",
                                "Monday 16 February 2026  23:00:38 +0000 (0:00:00.030)       0:04:48.672 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Install AppArmor packages] **************",
                                "Monday 16 February 2026  23:00:38 +0000 (0:00:00.026)       0:04:48.699 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Create systemd service file for containerd] ***",
                                "Monday 16 February 2026  23:00:39 +0000 (0:00:00.973)       0:04:49.672 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Create folders for configuration] *******",
                                "Monday 16 February 2026  23:00:39 +0000 (0:00:00.403)       0:04:50.076 *******",
                                "ok: [instance] => (item={'path': '/etc/containerd'})",
                                "ok: [instance] => (item={'path': '/var/lib/containerd', 'mode': '0o700'})",
                                "ok: [instance] => (item={'path': '/run/containerd', 'mode': '0o711'})",
                                "ok: [instance] => (item={'path': '/run/containerd/io.containerd.grpc.v1.cri', 'mode': '0o700'})",
                                "ok: [instance] => (item={'path': '/run/containerd/io.containerd.sandbox.controller.v1.shim', 'mode': '0o700'})",
                                "",
                                "TASK [vexxhost.containers.containerd : Create containerd config file] **********",
                                "Monday 16 February 2026  23:00:40 +0000 (0:00:00.886)       0:04:50.962 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Force any restarts if necessary] ********",
                                "Monday 16 February 2026  23:00:41 +0000 (0:00:00.443)       0:04:51.406 *******",
                                "",
                                "TASK [vexxhost.containers.containerd : Enable and start service] ***************",
                                "Monday 16 February 2026  23:00:41 +0000 (0:00:00.006)       0:04:51.413 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes_upgrade_check : Retrieve the \"kubeadm-config\" ConfigMap] ***",
                                "Monday 16 February 2026  23:00:41 +0000 (0:00:00.332)       0:04:51.745 *******",
                                "fatal: [instance]: FAILED! => {\"changed\": false, \"msg\": \"Failed to import the required Python library (kubernetes) on instance's Python /usr/bin/python3.10. Please read the module documentation and install it in the appropriate location. If the required library is installed, but Ansible is using the wrong Python interpreter, please consult the documentation on ansible_python_interpreter\"}",
                                "...ignoring",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes_upgrade_check : Parse the ClusterConfiguration] ***",
                                "Monday 16 February 2026  23:00:42 +0000 (0:00:00.532)       0:04:52.278 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes_upgrade_check : Retrieve the current Kubernetes version] ***",
                                "Monday 16 February 2026  23:00:42 +0000 (0:00:00.032)       0:04:52.310 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes_upgrade_check : Extract major, minor, and patch versions] ***",
                                "Monday 16 February 2026  23:00:42 +0000 (0:00:00.036)       0:04:52.346 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes_upgrade_check : Fail if we're jumping more than one minor version] ***",
                                "Monday 16 February 2026  23:00:42 +0000 (0:00:00.033)       0:04:52.380 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes_upgrade_check : Set fact if we need to upgrade] ***",
                                "Monday 16 February 2026  23:00:42 +0000 (0:00:00.034)       0:04:52.415 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.containers.forget_package : Forget package] *********************",
                                "Monday 16 February 2026  23:00:42 +0000 (0:00:00.045)       0:04:52.460 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Starting download of file] *******",
                                "Monday 16 February 2026  23:00:42 +0000 (0:00:00.192)       0:04:52.652 *******",
                                "ok: [instance] => {",
                                "    \"msg\": \"https://dl.k8s.io/release/v1.28.13/bin/linux/amd64/kubeadm\"",
                                "}",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Download item] *******************",
                                "Monday 16 February 2026  23:00:42 +0000 (0:00:00.035)       0:04:52.688 *******",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Extract archive] *****************",
                                "Monday 16 February 2026  23:00:43 +0000 (0:00:00.842)       0:04:53.531 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.containers.forget_package : Forget package] *********************",
                                "Monday 16 February 2026  23:00:43 +0000 (0:00:00.041)       0:04:53.572 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Starting download of file] *******",
                                "Monday 16 February 2026  23:00:43 +0000 (0:00:00.189)       0:04:53.762 *******",
                                "ok: [instance] => {",
                                "    \"msg\": \"https://dl.k8s.io/release/v1.28.13/bin/linux/amd64/kubectl\"",
                                "}",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Download item] *******************",
                                "Monday 16 February 2026  23:00:43 +0000 (0:00:00.035)       0:04:53.798 *******",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Extract archive] *****************",
                                "Monday 16 February 2026  23:00:44 +0000 (0:00:00.807)       0:04:54.605 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Install SELinux packages] ***************",
                                "Monday 16 February 2026  23:00:44 +0000 (0:00:00.053)       0:04:54.659 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Set SELinux to permissive at runtime] ***",
                                "Monday 16 February 2026  23:00:44 +0000 (0:00:00.028)       0:04:54.687 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Persist SELinux permissive mode] ********",
                                "Monday 16 February 2026  23:00:44 +0000 (0:00:00.035)       0:04:54.722 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Install AppArmor packages] **************",
                                "Monday 16 February 2026  23:00:44 +0000 (0:00:00.031)       0:04:54.754 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Create systemd service file for containerd] ***",
                                "Monday 16 February 2026  23:00:45 +0000 (0:00:00.934)       0:04:55.689 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Create folders for configuration] *******",
                                "Monday 16 February 2026  23:00:45 +0000 (0:00:00.390)       0:04:56.079 *******",
                                "ok: [instance] => (item={'path': '/etc/containerd'})",
                                "ok: [instance] => (item={'path': '/var/lib/containerd', 'mode': '0o700'})",
                                "ok: [instance] => (item={'path': '/run/containerd', 'mode': '0o711'})",
                                "ok: [instance] => (item={'path': '/run/containerd/io.containerd.grpc.v1.cri', 'mode': '0o700'})",
                                "ok: [instance] => (item={'path': '/run/containerd/io.containerd.sandbox.controller.v1.shim', 'mode': '0o700'})",
                                "",
                                "TASK [vexxhost.containers.containerd : Create containerd config file] **********",
                                "Monday 16 February 2026  23:00:46 +0000 (0:00:00.854)       0:04:56.934 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Force any restarts if necessary] ********",
                                "Monday 16 February 2026  23:00:47 +0000 (0:00:00.450)       0:04:57.385 *******",
                                "",
                                "TASK [vexxhost.containers.containerd : Enable and start service] ***************",
                                "Monday 16 February 2026  23:00:47 +0000 (0:00:00.008)       0:04:57.393 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.forget_package : Forget package] *********************",
                                "Monday 16 February 2026  23:00:47 +0000 (0:00:00.341)       0:04:57.734 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Starting download of file] *******",
                                "Monday 16 February 2026  23:00:47 +0000 (0:00:00.197)       0:04:57.932 *******",
                                "ok: [instance] => {",
                                "    \"msg\": \"https://github.com/kubernetes-sigs/cri-tools/releases/download/v1.34.0/crictl-v1.34.0-linux-amd64.tar.gz\"",
                                "}",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Download item] *******************",
                                "Monday 16 February 2026  23:00:47 +0000 (0:00:00.043)       0:04:57.976 *******",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Extract archive] *****************",
                                "Monday 16 February 2026  23:00:48 +0000 (0:00:00.602)       0:04:58.579 *******",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Starting download of file] *******",
                                "Monday 16 February 2026  23:00:49 +0000 (0:00:01.333)       0:04:59.912 *******",
                                "ok: [instance] => {",
                                "    \"msg\": \"https://github.com/kubernetes-sigs/cri-tools/releases/download/v1.34.0/critest-v1.34.0-linux-amd64.tar.gz\"",
                                "}",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Download item] *******************",
                                "Monday 16 February 2026  23:00:49 +0000 (0:00:00.048)       0:04:59.961 *******",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Extract archive] *****************",
                                "Monday 16 February 2026  23:00:50 +0000 (0:00:00.589)       0:05:00.550 *******",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.containers.cri_tools : Create crictl config] ********************",
                                "Monday 16 February 2026  23:00:51 +0000 (0:00:01.349)       0:05:01.900 *******",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.containers.directory : Create directory (/opt/cni/bin)] *********",
                                "Monday 16 February 2026  23:00:52 +0000 (0:00:00.412)       0:05:02.312 *******",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.containers.forget_package : Forget package] *********************",
                                "Monday 16 February 2026  23:00:52 +0000 (0:00:00.188)       0:05:02.501 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Starting download of file] *******",
                                "Monday 16 February 2026  23:00:52 +0000 (0:00:00.194)       0:05:02.695 *******",
                                "ok: [instance] => {",
                                "    \"msg\": \"https://github.com/containernetworking/plugins/releases/download/v1.8.0/cni-plugins-linux-amd64-v1.8.0.tgz\"",
                                "}",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Download item] *******************",
                                "Monday 16 February 2026  23:00:52 +0000 (0:00:00.054)       0:05:02.750 *******",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Extract archive] *****************",
                                "Monday 16 February 2026  23:00:53 +0000 (0:00:00.814)       0:05:03.564 *******",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.containers.cni_plugins : Gather variables for each operating system] ***",
                                "Monday 16 February 2026  23:00:56 +0000 (0:00:02.633)       0:05:06.198 *******",
                                "ok: [instance] => (item=/home/zuul/.ansible/collections/ansible_collections/vexxhost/containers/roles/cni_plugins/vars/debian.yml)",
                                "",
                                "TASK [vexxhost.containers.cni_plugins : Install additional packages] ***********",
                                "Monday 16 February 2026  23:00:56 +0000 (0:00:00.045)       0:05:06.243 *******",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.containers.cni_plugins : Ensure IPv6 is enabled] ****************",
                                "Monday 16 February 2026  23:00:58 +0000 (0:00:02.873)       0:05:09.117 *******",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.containers.cni_plugins : Enable kernel modules on-boot] *********",
                                "Monday 16 February 2026  23:00:59 +0000 (0:00:00.197)       0:05:09.314 *******",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.containers.cni_plugins : Enable kernel modules in runtime] ******",
                                "Monday 16 February 2026  23:00:59 +0000 (0:00:00.415)       0:05:09.729 *******",
                                "changed: [instance] => (item=br_netfilter)",
                                "ok: [instance] => (item=ip_tables)",
                                "changed: [instance] => (item=ip6_tables)",
                                "changed: [instance] => (item=nf_conntrack)",
                                "",
                                "TASK [vexxhost.containers.forget_package : Forget package] *********************",
                                "Monday 16 February 2026  23:01:00 +0000 (0:00:00.778)       0:05:10.508 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Starting download of file] *******",
                                "Monday 16 February 2026  23:01:00 +0000 (0:00:00.188)       0:05:10.697 *******",
                                "ok: [instance] => {",
                                "    \"msg\": \"https://dl.k8s.io/release/v1.28.13/bin/linux/amd64/kubelet\"",
                                "}",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Download item] *******************",
                                "Monday 16 February 2026  23:01:00 +0000 (0:00:00.041)       0:05:10.738 *******",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Extract archive] *****************",
                                "Monday 16 February 2026  23:01:02 +0000 (0:00:01.560)       0:05:12.299 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubelet : Gather variables for each operating system] ***",
                                "Monday 16 February 2026  23:01:02 +0000 (0:00:00.038)       0:05:12.337 *******",
                                "ok: [instance] => (item=/home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/kubelet/vars/debian.yml)",
                                "",
                                "TASK [vexxhost.kubernetes.kubelet : Install coreutils] *************************",
                                "Monday 16 February 2026  23:01:02 +0000 (0:00:00.051)       0:05:12.389 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubelet : Install additional packages] ***************",
                                "Monday 16 February 2026  23:01:02 +0000 (0:00:00.031)       0:05:12.420 *******",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubelet : Configure sysctl values] *******************",
                                "Monday 16 February 2026  23:01:05 +0000 (0:00:03.543)       0:05:15.964 *******",
                                "changed: [instance] => (item={'name': 'net.ipv4.ip_forward', 'value': 1})",
                                "changed: [instance] => (item={'name': 'net.bridge.bridge-nf-call-iptables', 'value': 1})",
                                "changed: [instance] => (item={'name': 'net.bridge.bridge-nf-call-ip6tables', 'value': 1})",
                                "changed: [instance] => (item={'name': 'net.ipv4.conf.all.rp_filter', 'value': 0})",
                                "changed: [instance] => (item={'name': 'fs.inotify.max_queued_events', 'value': 1048576})",
                                "changed: [instance] => (item={'name': 'fs.inotify.max_user_instances', 'value': 8192})",
                                "changed: [instance] => (item={'name': 'fs.inotify.max_user_watches', 'value': 1048576})",
                                "",
                                "TASK [vexxhost.kubernetes.kubelet : Create folders for kubernetes configuration] ***",
                                "Monday 16 February 2026  23:01:06 +0000 (0:00:01.190)       0:05:17.154 *******",
                                "changed: [instance] => (item=/etc/systemd/system/kubelet.service.d)",
                                "ok: [instance] => (item=/etc/kubernetes)",
                                "ok: [instance] => (item=/etc/kubernetes/manifests)",
                                "",
                                "TASK [vexxhost.kubernetes.kubelet : Add kubelet systemd service config] ********",
                                "Monday 16 February 2026  23:01:07 +0000 (0:00:00.523)       0:05:17.678 *******",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubelet : Add kubeadm dropin for kubelet systemd service config] ***",
                                "Monday 16 February 2026  23:01:07 +0000 (0:00:00.401)       0:05:18.080 *******",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubelet : Check swap status] *************************",
                                "Monday 16 February 2026  23:01:08 +0000 (0:00:00.400)       0:05:18.480 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubelet : Disable swap] ******************************",
                                "Monday 16 February 2026  23:01:08 +0000 (0:00:00.197)       0:05:18.678 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubelet : Remove swapfile from /etc/fstab] ***********",
                                "Monday 16 February 2026  23:01:08 +0000 (0:00:00.031)       0:05:18.710 *******",
                                "ok: [instance] => (item=swap)",
                                "ok: [instance] => (item=none)",
                                "",
                                "TASK [vexxhost.kubernetes.kubelet : Create noswap systemd service config file] ***",
                                "Monday 16 February 2026  23:01:08 +0000 (0:00:00.455)       0:05:19.165 *******",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubelet : Enable noswap service] *********************",
                                "Monday 16 February 2026  23:01:09 +0000 (0:00:00.399)       0:05:19.565 *******",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubelet : Force any restarts if necessary] ***********",
                                "Monday 16 February 2026  23:01:09 +0000 (0:00:00.586)       0:05:20.152 *******",
                                "",
                                "RUNNING HANDLER [vexxhost.kubernetes.kubelet : Reload systemd] *****************",
                                "Monday 16 February 2026  23:01:09 +0000 (0:00:00.008)       0:05:20.160 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubelet : Enable and start kubelet service] **********",
                                "Monday 16 February 2026  23:01:10 +0000 (0:00:00.788)       0:05:20.948 *******",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubelet : Ensure availability of dbus on Debian] *****",
                                "Monday 16 February 2026  23:01:11 +0000 (0:00:00.619)       0:05:21.568 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubelet : Configure short hostname] ******************",
                                "Monday 16 February 2026  23:01:12 +0000 (0:00:00.997)       0:05:22.565 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubelet : Ensure hostname inside hosts file] *********",
                                "Monday 16 February 2026  23:01:13 +0000 (0:00:00.737)       0:05:23.303 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Remove kubernetes repository] ***********",
                                "Monday 16 February 2026  23:01:13 +0000 (0:00:00.187)       0:05:23.491 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Setup control plane] ********************",
                                "Monday 16 February 2026  23:01:13 +0000 (0:00:00.443)       0:05:23.934 *******",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/kubernetes/tasks/control-plane.yml for instance",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Bootstrap cluster] **********************",
                                "Monday 16 February 2026  23:01:13 +0000 (0:00:00.071)       0:05:24.006 *******",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/kubernetes/tasks/bootstrap-cluster.yml for instance",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Check if any control plane is bootstrapped] ***",
                                "Monday 16 February 2026  23:01:13 +0000 (0:00:00.074)       0:05:24.081 *******",
                                "ok: [instance] => (item=instance)",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Pick node from pre-existing cluster] ****",
                                "Monday 16 February 2026  23:01:14 +0000 (0:00:00.187)       0:05:24.268 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Select first node to initialize cluster] ***",
                                "Monday 16 February 2026  23:01:14 +0000 (0:00:00.041)       0:05:24.309 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Print selected bootstrap node] **********",
                                "Monday 16 February 2026  23:01:14 +0000 (0:00:00.049)       0:05:24.359 *******",
                                "ok: [instance] => {",
                                "    \"msg\": \"instance\"",
                                "}",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Upload cluster configuration for bootstrap node] ***",
                                "Monday 16 February 2026  23:01:14 +0000 (0:00:00.038)       0:05:24.397 *******",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Create pki folder] **********************",
                                "Monday 16 February 2026  23:01:14 +0000 (0:00:00.488)       0:05:24.885 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Create kubernetes ca key] ***************",
                                "Monday 16 February 2026  23:01:14 +0000 (0:00:00.029)       0:05:24.915 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Create kubernetes ca cert] **************",
                                "Monday 16 February 2026  23:01:14 +0000 (0:00:00.029)       0:05:24.944 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Create etcd-ca key] *********************",
                                "Monday 16 February 2026  23:01:14 +0000 (0:00:00.033)       0:05:24.978 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Create etcd-ca cert] ********************",
                                "Monday 16 February 2026  23:01:14 +0000 (0:00:00.030)       0:05:25.008 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Create front-proxy-ca key] **************",
                                "Monday 16 February 2026  23:01:14 +0000 (0:00:00.031)       0:05:25.040 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Create front-proxy-ca cert] *************",
                                "Monday 16 February 2026  23:01:14 +0000 (0:00:00.030)       0:05:25.070 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Initialize cluster] *********************",
                                "Monday 16 February 2026  23:01:14 +0000 (0:00:00.031)       0:05:25.101 *******",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Check if the node is already part of the cluster] ***",
                                "Monday 16 February 2026  23:01:40 +0000 (0:00:25.460)       0:05:50.562 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Join cluster] ***************************",
                                "Monday 16 February 2026  23:01:40 +0000 (0:00:00.217)       0:05:50.779 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Create folder for admin configuration] ***",
                                "Monday 16 February 2026  23:01:40 +0000 (0:00:00.033)       0:05:50.812 *******",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Creating a symlink for admin configuration file] ***",
                                "Monday 16 February 2026  23:01:40 +0000 (0:00:00.192)       0:05:51.005 *******",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Add bash autocomplete for kubectl] ******",
                                "Monday 16 February 2026  23:01:41 +0000 (0:00:00.204)       0:05:51.210 *******",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Install PIP] ****************************",
                                "Monday 16 February 2026  23:01:41 +0000 (0:00:00.228)       0:05:51.438 *******",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Install Kubernetes Python package using pip on supported systems] ***",
                                "Monday 16 February 2026  23:01:45 +0000 (0:00:04.019)       0:05:55.458 *******",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Enable EPEL repository] *****************",
                                "Monday 16 February 2026  23:01:48 +0000 (0:00:03.229)       0:05:58.688 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Install Kubernetes Python package using package manager on supported systems] ***",
                                "Monday 16 February 2026  23:01:48 +0000 (0:00:00.035)       0:05:58.723 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Allow workload on control plane node] ***",
                                "Monday 16 February 2026  23:01:48 +0000 (0:00:00.037)       0:05:58.761 *******",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Remove kube-proxy resources] ************",
                                "Monday 16 February 2026  23:01:49 +0000 (0:00:00.726)       0:05:59.487 *******",
                                "skipping: [instance] => (item=DaemonSet)",
                                "skipping: [instance] => (item=ConfigMap)",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Start an upgrade] ***********************",
                                "Monday 16 February 2026  23:01:49 +0000 (0:00:00.036)       0:05:59.523 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Check if the Kubernetes API services is up to date] ***",
                                "Monday 16 February 2026  23:01:49 +0000 (0:00:00.026)       0:05:59.550 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Trigger an upgrade of the Kubernetes API services] ***",
                                "Monday 16 February 2026  23:01:49 +0000 (0:00:00.037)       0:05:59.588 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Setup nodes] ****************************",
                                "Monday 16 February 2026  23:01:49 +0000 (0:00:00.029)       0:05:59.617 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Upgrade & restart Kubelet node for upgrade (if needed)] ***",
                                "Monday 16 February 2026  23:01:49 +0000 (0:00:00.027)       0:05:59.644 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Set node selector for CoreDNS components] ***",
                                "Monday 16 February 2026  23:01:49 +0000 (0:00:00.383)       0:06:00.028 *******",
                                "changed: [instance]",
                                "",
                                "PLAY [Install control-plane components] ****************************************",
                                "",
                                "TASK [Gathering Facts] *********************************************************",
                                "Monday 16 February 2026  23:01:50 +0000 (0:00:01.010)       0:06:01.039 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.forget_package : Forget package] *********************",
                                "Monday 16 February 2026  23:01:51 +0000 (0:00:00.884)       0:06:01.923 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.package : Update state for tar] **********************",
                                "Monday 16 February 2026  23:01:51 +0000 (0:00:00.197)       0:06:02.120 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.directory : Create directory (/var/lib/downloads)] ***",
                                "Monday 16 February 2026  23:01:52 +0000 (0:00:01.051)       0:06:03.172 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Starting download of file] *******",
                                "Monday 16 February 2026  23:01:53 +0000 (0:00:00.216)       0:06:03.389 *******",
                                "ok: [instance] => {",
                                "    \"msg\": \"https://get.helm.sh/helm-v3.11.2-linux-amd64.tar.gz\"",
                                "}",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Download item] *******************",
                                "Monday 16 February 2026  23:01:53 +0000 (0:00:00.046)       0:06:03.435 *******",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Extract archive] *****************",
                                "Monday 16 February 2026  23:01:53 +0000 (0:00:00.539)       0:06:03.974 *******",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.helm : Remove Helm repository] ***********************",
                                "Monday 16 February 2026  23:01:55 +0000 (0:00:01.493)       0:06:05.467 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.helm : Add bash autocomplete for helm] ***************",
                                "Monday 16 February 2026  23:01:55 +0000 (0:00:00.363)       0:06:05.831 *******",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.helm : Get Helm plugins dir] *************************",
                                "Monday 16 February 2026  23:01:55 +0000 (0:00:00.171)       0:06:06.003 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.helm : Create Helm plugins directory if it does not exist] ***",
                                "Monday 16 February 2026  23:01:56 +0000 (0:00:00.232)       0:06:06.236 *******",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.helm : Remove Helm diff plugin installed by kubernetes.core.helm_plugin] ***",
                                "Monday 16 February 2026  23:01:56 +0000 (0:00:00.195)       0:06:06.431 *******",
                                "ok: [instance]",
                                "",
                                "TASK [Install plugin] **********************************************************",
                                "Monday 16 February 2026  23:01:56 +0000 (0:00:00.192)       0:06:06.624 *******",
                                "included: vexxhost.containers.download_artifact for instance",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Starting download of file] *******",
                                "Monday 16 February 2026  23:01:56 +0000 (0:00:00.045)       0:06:06.669 *******",
                                "ok: [instance] => {",
                                "    \"msg\": \"https://github.com/databus23/helm-diff/releases/download/v3.8.1/helm-diff-linux-amd64.tgz\"",
                                "}",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Download item] *******************",
                                "Monday 16 February 2026  23:01:56 +0000 (0:00:00.040)       0:06:06.710 *******",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Extract archive] *****************",
                                "Monday 16 February 2026  23:01:57 +0000 (0:00:00.684)       0:06:07.394 *******",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***",
                                "Monday 16 February 2026  23:01:59 +0000 (0:00:01.851)       0:06:09.246 *******",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************",
                                "Monday 16 February 2026  23:01:59 +0000 (0:00:00.036)       0:06:09.282 *******",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.cilium : Get current Kubernetes version] *************",
                                "Monday 16 February 2026  23:01:59 +0000 (0:00:00.558)       0:06:09.840 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.cilium : Deploy Helm chart] **************************",
                                "Monday 16 February 2026  23:02:00 +0000 (0:00:00.750)       0:06:10.590 *******",
                                "changed: [instance]",
                                "",
                                "PLAY [all] *********************************************************************",
                                "",
                                "TASK [Gathering Facts] *********************************************************",
                                "Monday 16 February 2026  23:02:02 +0000 (0:00:02.012)       0:06:12.603 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.kubernetes_node_labels : Add labels to node] *********",
                                "Monday 16 February 2026  23:02:03 +0000 (0:00:00.893)       0:06:13.497 *******",
                                "changed: [instance]",
                                "",
                                "PLAY [all] *********************************************************************",
                                "",
                                "TASK [Uninstall unattended-upgrades] *******************************************",
                                "Monday 16 February 2026  23:02:04 +0000 (0:00:00.915)       0:06:14.412 *******",
                                "ok: [instance]",
                                "",
                                "PLAY [controllers] *************************************************************",
                                "",
                                "TASK [Gathering Facts] *********************************************************",
                                "Monday 16 February 2026  23:02:04 +0000 (0:00:00.760)       0:06:15.172 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***",
                                "Monday 16 February 2026  23:02:06 +0000 (0:00:01.040)       0:06:16.213 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.local_path_provisioner : Deploy Helm chart] **********",
                                "Monday 16 February 2026  23:02:06 +0000 (0:00:00.026)       0:06:16.240 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***",
                                "Monday 16 February 2026  23:02:06 +0000 (0:00:00.030)       0:06:16.270 *******",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************",
                                "Monday 16 February 2026  23:02:06 +0000 (0:00:00.033)       0:06:16.304 *******",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.ceph_csi_rbd : Collect \"ceph mon dump\" output from a monitor] ***",
                                "Monday 16 February 2026  23:02:06 +0000 (0:00:00.426)       0:06:16.731 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.ceph_csi_rbd : Generate fact with list of Ceph monitors] ***",
                                "Monday 16 February 2026  23:02:21 +0000 (0:00:14.752)       0:06:31.484 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.ceph_csi_rbd : Create Ceph pool] *********************",
                                "Monday 16 February 2026  23:02:21 +0000 (0:00:00.074)       0:06:31.559 *******",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.ceph_csi_rbd : Create user client.kube] **************",
                                "Monday 16 February 2026  23:02:55 +0000 (0:00:34.410)       0:07:05.969 *******",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.ceph_csi_rbd : Retrieve keyring for client.kube] *****",
                                "Monday 16 February 2026  23:02:58 +0000 (0:00:02.521)       0:07:08.490 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.ceph_csi_rbd : Store keyring inside fact] ************",
                                "Monday 16 February 2026  23:02:59 +0000 (0:00:01.155)       0:07:09.646 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.ceph_csi_rbd : Deploy Helm chart] ********************",
                                "Monday 16 February 2026  23:02:59 +0000 (0:00:00.037)       0:07:09.684 *******",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.powerstore_csi : Clone PowerStore CSI from GitHub] ***",
                                "Monday 16 February 2026  23:03:01 +0000 (0:00:01.785)       0:07:11.469 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.powerstore_csi : Create Secret] **********************",
                                "Monday 16 February 2026  23:03:01 +0000 (0:00:00.031)       0:07:11.501 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.powerstore_csi : Create StorageClass] ****************",
                                "Monday 16 February 2026  23:03:01 +0000 (0:00:00.036)       0:07:11.538 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.powerstore_csi : Deploy Helm chart] ******************",
                                "Monday 16 February 2026  23:03:01 +0000 (0:00:00.037)       0:07:11.575 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.multipathd : Add backports PPA] **********************",
                                "Monday 16 February 2026  23:03:01 +0000 (0:00:00.056)       0:07:11.632 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.multipathd : Install the multipathd package] *********",
                                "Monday 16 February 2026  23:03:01 +0000 (0:00:00.022)       0:07:11.655 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.multipathd : Install the configuration file] *********",
                                "Monday 16 February 2026  23:03:01 +0000 (0:00:00.024)       0:07:11.679 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.portworx : Detect if InitiatorName is set] ***********",
                                "Monday 16 February 2026  23:03:01 +0000 (0:00:00.024)       0:07:11.704 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.portworx : Generate a new InitiatorName] *************",
                                "Monday 16 February 2026  23:03:01 +0000 (0:00:00.023)       0:07:11.727 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.portworx : Write the new InitiatorName] **************",
                                "Monday 16 February 2026  23:03:01 +0000 (0:00:00.030)       0:07:11.758 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.portworx : Create namespace] *************************",
                                "Monday 16 February 2026  23:03:01 +0000 (0:00:00.025)       0:07:11.783 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.portworx : Install Portworx] *************************",
                                "Monday 16 February 2026  23:03:01 +0000 (0:00:00.030)       0:07:11.814 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.portworx : Wait till the CRDs are created] ***********",
                                "Monday 16 February 2026  23:03:01 +0000 (0:00:00.027)       0:07:11.841 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.portworx : Create Portworx Storage Cluster] **********",
                                "Monday 16 February 2026  23:03:01 +0000 (0:00:00.024)       0:07:11.865 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.storpool_csi : Deploy CSI RBAC] **********************",
                                "Monday 16 February 2026  23:03:01 +0000 (0:00:00.037)       0:07:11.903 *******",
                                "skipping: [instance] => (item={'name': 'controllerplugin'})",
                                "skipping: [instance] => (item={'name': 'nodeplugin'})",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.storpool_csi : Deploy CSI] ***************************",
                                "Monday 16 February 2026  23:03:01 +0000 (0:00:00.038)       0:07:11.941 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.storpool_csi : Create StorageClass] ******************",
                                "Monday 16 February 2026  23:03:01 +0000 (0:00:00.029)       0:07:11.970 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.ibm_block_csi_driver : Deploy CSI] *******************",
                                "Monday 16 February 2026  23:03:01 +0000 (0:00:00.041)       0:07:12.012 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.ibm_block_csi_driver : Create Secret] ****************",
                                "Monday 16 February 2026  23:03:01 +0000 (0:00:00.029)       0:07:12.042 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.ibm_block_csi_driver : Create StorageClass] **********",
                                "Monday 16 February 2026  23:03:01 +0000 (0:00:00.030)       0:07:12.073 *******",
                                "skipping: [instance]",
                                "",
                                "PLAY RECAP *********************************************************************",
                                "instance                   : ok=244  changed=76   unreachable=0    failed=0    skipped=83   rescued=0    ignored=2",
                                "",
                                "Monday 16 February 2026  23:03:01 +0000 (0:00:00.033)       0:07:12.106 *******",
                                "===============================================================================",
                                "vexxhost.ceph.mon : Run Bootstrap coomand ------------------------------ 84.77s",
                                "vexxhost.ceph.osd : Install OSDs --------------------------------------- 80.82s",
                                "vexxhost.atmosphere.ceph_csi_rbd : Create Ceph pool -------------------- 34.41s",
                                "vexxhost.kubernetes.kubernetes : Initialize cluster -------------------- 25.46s",
                                "vexxhost.atmosphere.ceph_csi_rbd : Collect \"ceph mon dump\" output from a monitor -- 14.75s",
                                "vexxhost.ceph.mon : Validate monitor exist ------------------------------ 9.98s",
                                "vexxhost.ceph.osd : Get `ceph-volume lvm list` status ------------------- 9.91s",
                                "vexxhost.ceph.osd : Ensure all OSDs are non-legacy ---------------------- 5.29s",
                                "vexxhost.ceph.osd : Get `cephadm ls` status ----------------------------- 5.27s",
                                "vexxhost.ceph.cephadm : Install packages -------------------------------- 5.13s",
                                "vexxhost.containers.containerd : Install AppArmor packages -------------- 4.64s",
                                "vexxhost.containers.download_artifact : Extract archive ----------------- 4.27s",
                                "vexxhost.kubernetes.kubernetes : Install PIP ---------------------------- 4.02s",
                                "vexxhost.atmosphere.sysctl : Configure sysctl values -------------------- 3.61s",
                                "vexxhost.kubernetes.kubelet : Install additional packages --------------- 3.54s",
                                "vexxhost.kubernetes.kubernetes : Install Kubernetes Python package using pip on supported systems --- 3.23s",
                                "vexxhost.containers.download_artifact : Extract archive ----------------- 3.01s",
                                "vexxhost.containers.cni_plugins : Install additional packages ----------- 2.87s",
                                "vexxhost.containers.download_artifact : Extract archive ----------------- 2.78s",
                                "vexxhost.containers.download_artifact : Extract archive ----------------- 2.63s",
                                "INFO     [csi > converge] Executed: Successful",
                                "INFO     [csi > idempotence] Executing",
                                "",
                                "PLAY [Debug CSI driver value] **************************************************",
                                "",
                                "TASK [Gathering Facts] *********************************************************",
                                "Monday 16 February 2026  23:03:03 +0000 (0:00:00.044)       0:00:00.044 *******",
                                "[WARNING]: Platform linux on host instance is using the discovered Python",
                                "interpreter at /usr/bin/python3.10, but future installation of another Python",
                                "interpreter could change the meaning of that path. See",
                                "https://docs.ansible.com/ansible-",
                                "core/2.17/reference_appendices/interpreter_discovery.html for more information.",
                                "ok: [instance]",
                                "",
                                "TASK [Display CSI driver value and environment variable] ***********************",
                                "Monday 16 February 2026  23:03:04 +0000 (0:00:01.264)       0:00:01.308 *******",
                                "ok: [instance] => {",
                                "    \"msg\": \"csi_driver=rbd, MOLECULE_CSI_DRIVER=\"",
                                "}",
                                "",
                                "PLAY [all] *********************************************************************",
                                "",
                                "TASK [Gathering Facts] *********************************************************",
                                "Monday 16 February 2026  23:03:05 +0000 (0:00:00.048)       0:00:01.356 *******",
                                "ok: [instance]",
                                "",
                                "TASK [Set a fact with the \"atmosphere_images\" for other plays] *****************",
                                "Monday 16 February 2026  23:03:06 +0000 (0:00:00.993)       0:00:02.350 *******",
                                "ok: [instance]",
                                "",
                                "PLAY [Deploy Ceph monitors & managers] *****************************************",
                                "",
                                "TASK [Gathering Facts] *********************************************************",
                                "Monday 16 February 2026  23:03:06 +0000 (0:00:00.208)       0:00:02.559 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.forget_package : Forget package] *********************",
                                "Monday 16 February 2026  23:03:07 +0000 (0:00:00.974)       0:00:03.533 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.package : Update state for tar] **********************",
                                "Monday 16 February 2026  23:03:07 +0000 (0:00:00.318)       0:00:03.851 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.containers.directory : Create directory (/var/lib/downloads)] ***",
                                "Monday 16 February 2026  23:03:07 +0000 (0:00:00.042)       0:00:03.893 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Starting download of file] *******",
                                "Monday 16 February 2026  23:03:07 +0000 (0:00:00.302)       0:00:04.196 *******",
                                "ok: [instance] => {",
                                "    \"msg\": \"https://github.com/opencontainers/runc/releases/download/v1.4.0/runc.amd64\"",
                                "}",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Download item] *******************",
                                "Monday 16 February 2026  23:03:07 +0000 (0:00:00.063)       0:00:04.260 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Extract archive] *****************",
                                "Monday 16 February 2026  23:03:08 +0000 (0:00:00.509)       0:00:04.769 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.containers.package : Update state for tar] **********************",
                                "Monday 16 February 2026  23:03:08 +0000 (0:00:00.054)       0:00:04.824 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.containers.forget_package : Forget package] *********************",
                                "Monday 16 February 2026  23:03:08 +0000 (0:00:00.054)       0:00:04.878 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.package : Update state for tar] **********************",
                                "Monday 16 February 2026  23:03:08 +0000 (0:00:00.240)       0:00:05.119 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Starting download of file] *******",
                                "Monday 16 February 2026  23:03:10 +0000 (0:00:01.230)       0:00:06.350 *******",
                                "ok: [instance] => {",
                                "    \"msg\": \"https://github.com/containerd/containerd/releases/download/v2.2.0/containerd-2.2.0-linux-amd64.tar.gz\"",
                                "}",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Download item] *******************",
                                "Monday 16 February 2026  23:03:10 +0000 (0:00:00.064)       0:00:06.415 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Extract archive] *****************",
                                "Monday 16 February 2026  23:03:10 +0000 (0:00:00.421)       0:00:06.836 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Install SELinux packages] ***************",
                                "Monday 16 February 2026  23:03:12 +0000 (0:00:02.497)       0:00:09.334 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Set SELinux to permissive at runtime] ***",
                                "Monday 16 February 2026  23:03:13 +0000 (0:00:00.033)       0:00:09.368 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Persist SELinux permissive mode] ********",
                                "Monday 16 February 2026  23:03:13 +0000 (0:00:00.032)       0:00:09.400 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Install AppArmor packages] **************",
                                "Monday 16 February 2026  23:03:13 +0000 (0:00:00.033)       0:00:09.434 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Create systemd service file for containerd] ***",
                                "Monday 16 February 2026  23:03:14 +0000 (0:00:01.012)       0:00:10.446 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Create folders for configuration] *******",
                                "Monday 16 February 2026  23:03:14 +0000 (0:00:00.468)       0:00:10.915 *******",
                                "ok: [instance] => (item={'path': '/etc/containerd'})",
                                "ok: [instance] => (item={'path': '/var/lib/containerd', 'mode': '0o700'})",
                                "ok: [instance] => (item={'path': '/run/containerd', 'mode': '0o711'})",
                                "ok: [instance] => (item={'path': '/run/containerd/io.containerd.grpc.v1.cri', 'mode': '0o700'})",
                                "ok: [instance] => (item={'path': '/run/containerd/io.containerd.sandbox.controller.v1.shim', 'mode': '0o700'})",
                                "",
                                "TASK [vexxhost.containers.containerd : Create containerd config file] **********",
                                "Monday 16 February 2026  23:03:15 +0000 (0:00:00.935)       0:00:11.850 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Force any restarts if necessary] ********",
                                "Monday 16 February 2026  23:03:16 +0000 (0:00:00.500)       0:00:12.351 *******",
                                "",
                                "TASK [vexxhost.containers.containerd : Enable and start service] ***************",
                                "Monday 16 February 2026  23:03:16 +0000 (0:00:00.021)       0:00:12.372 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.forget_package : Forget package] *********************",
                                "Monday 16 February 2026  23:03:16 +0000 (0:00:00.610)       0:00:12.983 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Starting download of file] *******",
                                "Monday 16 February 2026  23:03:16 +0000 (0:00:00.200)       0:00:13.183 *******",
                                "ok: [instance] => {",
                                "    \"msg\": \"https://download.docker.com/linux/static/stable/x86_64/docker-24.0.9.tgz\"",
                                "}",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Download item] *******************",
                                "Monday 16 February 2026  23:03:16 +0000 (0:00:00.077)       0:00:13.261 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Extract archive] *****************",
                                "Monday 16 February 2026  23:03:17 +0000 (0:00:01.054)       0:00:14.315 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.docker : Install AppArmor packages] ******************",
                                "Monday 16 February 2026  23:03:21 +0000 (0:00:03.796)       0:00:18.112 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.docker : Ensure group \"docker\" exists] ***************",
                                "Monday 16 February 2026  23:03:22 +0000 (0:00:01.184)       0:00:19.296 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.docker : Create systemd service file for docker] *****",
                                "Monday 16 February 2026  23:03:23 +0000 (0:00:00.356)       0:00:19.653 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.docker : Create folders for configuration] ***********",
                                "Monday 16 February 2026  23:03:23 +0000 (0:00:00.425)       0:00:20.078 *******",
                                "ok: [instance] => (item={'path': '/etc/docker'})",
                                "ok: [instance] => (item={'path': '/var/lib/docker', 'mode': '0o710'})",
                                "ok: [instance] => (item={'path': '/run/docker', 'mode': '0o711'})",
                                "",
                                "TASK [vexxhost.containers.docker : Create systemd socket file for docker] ******",
                                "Monday 16 February 2026  23:03:24 +0000 (0:00:00.613)       0:00:20.692 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.docker : Create docker daemon config file] ***********",
                                "Monday 16 February 2026  23:03:24 +0000 (0:00:00.461)       0:00:21.153 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.docker : Force any restarts if necessary] ************",
                                "Monday 16 February 2026  23:03:25 +0000 (0:00:00.468)       0:00:21.622 *******",
                                "",
                                "TASK [vexxhost.containers.docker : Enable and start service] *******************",
                                "Monday 16 February 2026  23:03:25 +0000 (0:00:00.030)       0:00:21.653 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.cephadm : Gather variables for each operating system] ******",
                                "Monday 16 February 2026  23:03:25 +0000 (0:00:00.397)       0:00:22.050 *******",
                                "ok: [instance] => (item=/home/zuul/.ansible/collections/ansible_collections/vexxhost/ceph/roles/cephadm/vars/ubuntu-22.04.yml)",
                                "",
                                "TASK [vexxhost.ceph.cephadm : Install packages] ********************************",
                                "Monday 16 February 2026  23:03:25 +0000 (0:00:00.049)       0:00:22.100 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.cephadm : Ensure services are started] *********************",
                                "Monday 16 February 2026  23:03:26 +0000 (0:00:01.060)       0:00:23.161 *******",
                                "ok: [instance] => (item=chronyd)",
                                "ok: [instance] => (item=sshd)",
                                "",
                                "TASK [vexxhost.ceph.cephadm : Download \"cephadm\"] ******************************",
                                "Monday 16 February 2026  23:03:27 +0000 (0:00:00.818)       0:00:23.979 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.cephadm : Remove cephadm from old path] ********************",
                                "Monday 16 February 2026  23:03:27 +0000 (0:00:00.313)       0:00:24.293 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.cephadm : Ensure \"cephadm\" user is present] ****************",
                                "Monday 16 February 2026  23:03:28 +0000 (0:00:00.247)       0:00:24.540 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.cephadm : Allow \"cephadm\" user to have passwordless sudo] ***",
                                "Monday 16 February 2026  23:03:28 +0000 (0:00:00.410)       0:00:24.951 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.mon : Get `cephadm ls` status] *****************************",
                                "Monday 16 February 2026  23:03:28 +0000 (0:00:00.378)       0:00:25.330 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.mon : Parse the `cephadm ls` output] ***********************",
                                "Monday 16 February 2026  23:03:34 +0000 (0:00:05.677)       0:00:31.007 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.mon : Assimilate existing configs in `ceph.conf`] **********",
                                "Monday 16 February 2026  23:03:34 +0000 (0:00:00.058)       0:00:31.066 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.ceph.mon : Adopt monitor to cluster] ****************************",
                                "Monday 16 February 2026  23:03:34 +0000 (0:00:00.038)       0:00:31.105 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.ceph.mon : Adopt manager to cluster] ****************************",
                                "Monday 16 February 2026  23:03:34 +0000 (0:00:00.042)       0:00:31.148 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.ceph.mon : Enable \"cephadm\" mgr module] *************************",
                                "Monday 16 February 2026  23:03:34 +0000 (0:00:00.030)       0:00:31.179 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.ceph.mon : Set orchestrator backend to \"cephadm\"] ***************",
                                "Monday 16 February 2026  23:03:34 +0000 (0:00:00.037)       0:00:31.216 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.ceph.mon : Use `cephadm` user for cephadm] **********************",
                                "Monday 16 February 2026  23:03:34 +0000 (0:00:00.039)       0:00:31.255 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.ceph.mon : Generate \"cephadm\" key] ******************************",
                                "Monday 16 February 2026  23:03:34 +0000 (0:00:00.034)       0:00:31.290 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.ceph.mon : Set Ceph Monitor IP address] *************************",
                                "Monday 16 February 2026  23:03:34 +0000 (0:00:00.037)       0:00:31.328 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.mon : Check if any node is bootstrapped] *******************",
                                "Monday 16 February 2026  23:03:35 +0000 (0:00:00.080)       0:00:31.408 *******",
                                "ok: [instance] => (item=instance)",
                                "",
                                "TASK [vexxhost.ceph.mon : Select pre-existing bootstrap node if exists] ********",
                                "Monday 16 February 2026  23:03:35 +0000 (0:00:00.271)       0:00:31.680 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.mon : Bootstrap cluster] ***********************************",
                                "Monday 16 February 2026  23:03:35 +0000 (0:00:00.048)       0:00:31.729 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [Install Ceph host] *******************************************************",
                                "Monday 16 February 2026  23:03:35 +0000 (0:00:00.045)       0:00:31.775 *******",
                                "included: vexxhost.ceph.cephadm_host for instance",
                                "",
                                "TASK [vexxhost.ceph.cephadm_host : Get public SSH key for \"cephadm\" user] ******",
                                "Monday 16 February 2026  23:03:35 +0000 (0:00:00.082)       0:00:31.858 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.cephadm_host : Set fact with public SSH key for \"cephadm\" user] ***",
                                "Monday 16 February 2026  23:03:37 +0000 (0:00:01.706)       0:00:33.564 *******",
                                "ok: [instance] => (item=instance)",
                                "",
                                "TASK [vexxhost.ceph.cephadm_host : Set authorized key for \"cephadm\"] ***********",
                                "Monday 16 February 2026  23:03:37 +0000 (0:00:00.065)       0:00:33.630 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.cephadm_host : Add new host to Ceph] ***********************",
                                "Monday 16 February 2026  23:03:37 +0000 (0:00:00.404)       0:00:34.035 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.mon : Configure \"mon\" label for monitors] ******************",
                                "Monday 16 February 2026  23:03:39 +0000 (0:00:02.000)       0:00:36.035 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.mon : Validate monitor exist] ******************************",
                                "Monday 16 February 2026  23:03:41 +0000 (0:00:01.620)       0:00:37.656 *******",
                                "ok: [instance]",
                                "",
                                "TASK [Install Ceph host] *******************************************************",
                                "Monday 16 February 2026  23:03:51 +0000 (0:00:10.205)       0:00:47.861 *******",
                                "included: vexxhost.ceph.cephadm_host for instance",
                                "",
                                "TASK [vexxhost.ceph.cephadm_host : Get public SSH key for \"cephadm\" user] ******",
                                "Monday 16 February 2026  23:03:51 +0000 (0:00:00.085)       0:00:47.946 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.ceph.cephadm_host : Set fact with public SSH key for \"cephadm\" user] ***",
                                "Monday 16 February 2026  23:03:51 +0000 (0:00:00.051)       0:00:47.998 *******",
                                "skipping: [instance] => (item=instance)",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.ceph.cephadm_host : Set authorized key for \"cephadm\"] ***********",
                                "Monday 16 February 2026  23:03:51 +0000 (0:00:00.058)       0:00:48.057 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.cephadm_host : Add new host to Ceph] ***********************",
                                "Monday 16 February 2026  23:03:52 +0000 (0:00:00.284)       0:00:48.341 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.mgr : Configure \"mgr\" label for managers] ******************",
                                "Monday 16 February 2026  23:03:54 +0000 (0:00:02.058)       0:00:50.399 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.mgr : Validate manager exist] ******************************",
                                "Monday 16 February 2026  23:03:55 +0000 (0:00:01.636)       0:00:52.035 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.mgr : Enable the Ceph Manager prometheus module] ***********",
                                "Monday 16 February 2026  23:03:57 +0000 (0:00:01.456)       0:00:53.492 *******",
                                "ok: [instance]",
                                "",
                                "PLAY [Deploy Ceph OSDs] ********************************************************",
                                "",
                                "TASK [Gathering Facts] *********************************************************",
                                "Monday 16 February 2026  23:03:59 +0000 (0:00:01.959)       0:00:55.452 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.forget_package : Forget package] *********************",
                                "Monday 16 February 2026  23:04:00 +0000 (0:00:01.075)       0:00:56.527 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.package : Update state for tar] **********************",
                                "Monday 16 February 2026  23:04:00 +0000 (0:00:00.232)       0:00:56.760 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.containers.directory : Create directory (/var/lib/downloads)] ***",
                                "Monday 16 February 2026  23:04:00 +0000 (0:00:00.046)       0:00:56.806 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Starting download of file] *******",
                                "Monday 16 February 2026  23:04:00 +0000 (0:00:00.217)       0:00:57.024 *******",
                                "ok: [instance] => {",
                                "    \"msg\": \"https://github.com/opencontainers/runc/releases/download/v1.4.0/runc.amd64\"",
                                "}",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Download item] *******************",
                                "Monday 16 February 2026  23:04:00 +0000 (0:00:00.066)       0:00:57.090 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Extract archive] *****************",
                                "Monday 16 February 2026  23:04:01 +0000 (0:00:00.323)       0:00:57.414 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.containers.package : Update state for tar] **********************",
                                "Monday 16 February 2026  23:04:01 +0000 (0:00:00.050)       0:00:57.464 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.containers.forget_package : Forget package] *********************",
                                "Monday 16 February 2026  23:04:01 +0000 (0:00:00.050)       0:00:57.515 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.package : Update state for tar] **********************",
                                "Monday 16 February 2026  23:04:01 +0000 (0:00:00.221)       0:00:57.737 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Starting download of file] *******",
                                "Monday 16 February 2026  23:04:02 +0000 (0:00:01.071)       0:00:58.808 *******",
                                "ok: [instance] => {",
                                "    \"msg\": \"https://github.com/containerd/containerd/releases/download/v2.2.0/containerd-2.2.0-linux-amd64.tar.gz\"",
                                "}",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Download item] *******************",
                                "Monday 16 February 2026  23:04:02 +0000 (0:00:00.079)       0:00:58.888 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Extract archive] *****************",
                                "Monday 16 February 2026  23:04:02 +0000 (0:00:00.356)       0:00:59.244 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Install SELinux packages] ***************",
                                "Monday 16 February 2026  23:04:05 +0000 (0:00:02.095)       0:01:01.339 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Set SELinux to permissive at runtime] ***",
                                "Monday 16 February 2026  23:04:05 +0000 (0:00:00.032)       0:01:01.371 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Persist SELinux permissive mode] ********",
                                "Monday 16 February 2026  23:04:05 +0000 (0:00:00.038)       0:01:01.410 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Install AppArmor packages] **************",
                                "Monday 16 February 2026  23:04:05 +0000 (0:00:00.030)       0:01:01.441 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Create systemd service file for containerd] ***",
                                "Monday 16 February 2026  23:04:06 +0000 (0:00:01.081)       0:01:02.522 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Create folders for configuration] *******",
                                "Monday 16 February 2026  23:04:06 +0000 (0:00:00.445)       0:01:02.968 *******",
                                "ok: [instance] => (item={'path': '/etc/containerd'})",
                                "ok: [instance] => (item={'path': '/var/lib/containerd', 'mode': '0o700'})",
                                "ok: [instance] => (item={'path': '/run/containerd', 'mode': '0o711'})",
                                "ok: [instance] => (item={'path': '/run/containerd/io.containerd.grpc.v1.cri', 'mode': '0o700'})",
                                "ok: [instance] => (item={'path': '/run/containerd/io.containerd.sandbox.controller.v1.shim', 'mode': '0o700'})",
                                "",
                                "TASK [vexxhost.containers.containerd : Create containerd config file] **********",
                                "Monday 16 February 2026  23:04:07 +0000 (0:00:00.930)       0:01:03.898 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Force any restarts if necessary] ********",
                                "Monday 16 February 2026  23:04:08 +0000 (0:00:00.465)       0:01:04.363 *******",
                                "",
                                "TASK [vexxhost.containers.containerd : Enable and start service] ***************",
                                "Monday 16 February 2026  23:04:08 +0000 (0:00:00.021)       0:01:04.385 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.forget_package : Forget package] *********************",
                                "Monday 16 February 2026  23:04:08 +0000 (0:00:00.389)       0:01:04.774 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Starting download of file] *******",
                                "Monday 16 February 2026  23:04:08 +0000 (0:00:00.228)       0:01:05.002 *******",
                                "ok: [instance] => {",
                                "    \"msg\": \"https://download.docker.com/linux/static/stable/x86_64/docker-24.0.9.tgz\"",
                                "}",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Download item] *******************",
                                "Monday 16 February 2026  23:04:08 +0000 (0:00:00.049)       0:01:05.052 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Extract archive] *****************",
                                "Monday 16 February 2026  23:04:09 +0000 (0:00:00.370)       0:01:05.422 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.docker : Install AppArmor packages] ******************",
                                "Monday 16 February 2026  23:04:12 +0000 (0:00:03.315)       0:01:08.738 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.docker : Ensure group \"docker\" exists] ***************",
                                "Monday 16 February 2026  23:04:13 +0000 (0:00:01.065)       0:01:09.803 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.docker : Create systemd service file for docker] *****",
                                "Monday 16 February 2026  23:04:13 +0000 (0:00:00.196)       0:01:10.000 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.docker : Create folders for configuration] ***********",
                                "Monday 16 February 2026  23:04:14 +0000 (0:00:00.403)       0:01:10.404 *******",
                                "ok: [instance] => (item={'path': '/etc/docker'})",
                                "ok: [instance] => (item={'path': '/var/lib/docker', 'mode': '0o710'})",
                                "ok: [instance] => (item={'path': '/run/docker', 'mode': '0o711'})",
                                "",
                                "TASK [vexxhost.containers.docker : Create systemd socket file for docker] ******",
                                "Monday 16 February 2026  23:04:14 +0000 (0:00:00.557)       0:01:10.962 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.docker : Create docker daemon config file] ***********",
                                "Monday 16 February 2026  23:04:15 +0000 (0:00:00.411)       0:01:11.373 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.docker : Force any restarts if necessary] ************",
                                "Monday 16 February 2026  23:04:15 +0000 (0:00:00.410)       0:01:11.784 *******",
                                "",
                                "TASK [vexxhost.containers.docker : Enable and start service] *******************",
                                "Monday 16 February 2026  23:04:15 +0000 (0:00:00.021)       0:01:11.805 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.cephadm : Gather variables for each operating system] ******",
                                "Monday 16 February 2026  23:04:15 +0000 (0:00:00.381)       0:01:12.186 *******",
                                "ok: [instance] => (item=/home/zuul/.ansible/collections/ansible_collections/vexxhost/ceph/roles/cephadm/vars/ubuntu-22.04.yml)",
                                "",
                                "TASK [vexxhost.ceph.cephadm : Install packages] ********************************",
                                "Monday 16 February 2026  23:04:15 +0000 (0:00:00.065)       0:01:12.252 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.cephadm : Ensure services are started] *********************",
                                "Monday 16 February 2026  23:04:17 +0000 (0:00:01.106)       0:01:13.358 *******",
                                "ok: [instance] => (item=chronyd)",
                                "ok: [instance] => (item=sshd)",
                                "",
                                "TASK [vexxhost.ceph.cephadm : Download \"cephadm\"] ******************************",
                                "Monday 16 February 2026  23:04:17 +0000 (0:00:00.667)       0:01:14.026 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.cephadm : Remove cephadm from old path] ********************",
                                "Monday 16 February 2026  23:04:17 +0000 (0:00:00.297)       0:01:14.324 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.cephadm : Ensure \"cephadm\" user is present] ****************",
                                "Monday 16 February 2026  23:04:18 +0000 (0:00:00.216)       0:01:14.541 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.cephadm : Allow \"cephadm\" user to have passwordless sudo] ***",
                                "Monday 16 February 2026  23:04:18 +0000 (0:00:00.298)       0:01:14.839 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.osd : Get monitor status] **********************************",
                                "Monday 16 February 2026  23:04:18 +0000 (0:00:00.251)       0:01:15.090 *******",
                                "ok: [instance] => (item=instance)",
                                "",
                                "TASK [vexxhost.ceph.osd : Select admin host] ***********************************",
                                "Monday 16 February 2026  23:04:18 +0000 (0:00:00.240)       0:01:15.331 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.osd : Get `cephadm ls` status] *****************************",
                                "Monday 16 February 2026  23:04:19 +0000 (0:00:00.054)       0:01:15.385 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.osd : Parse the `cephadm ls` output] ***********************",
                                "Monday 16 February 2026  23:04:24 +0000 (0:00:05.458)       0:01:20.844 *******",
                                "ok: [instance]",
                                "",
                                "TASK [Install Ceph host] *******************************************************",
                                "Monday 16 February 2026  23:04:24 +0000 (0:00:00.059)       0:01:20.903 *******",
                                "included: vexxhost.ceph.cephadm_host for instance",
                                "",
                                "TASK [vexxhost.ceph.cephadm_host : Get public SSH key for \"cephadm\" user] ******",
                                "Monday 16 February 2026  23:04:24 +0000 (0:00:00.075)       0:01:20.978 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.ceph.cephadm_host : Set fact with public SSH key for \"cephadm\" user] ***",
                                "Monday 16 February 2026  23:04:24 +0000 (0:00:00.054)       0:01:21.032 *******",
                                "skipping: [instance] => (item=instance)",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.ceph.cephadm_host : Set authorized key for \"cephadm\"] ***********",
                                "Monday 16 February 2026  23:04:24 +0000 (0:00:00.053)       0:01:21.086 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.cephadm_host : Add new host to Ceph] ***********************",
                                "Monday 16 February 2026  23:04:25 +0000 (0:00:00.275)       0:01:21.361 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.osd : Adopt OSDs to cluster] *******************************",
                                "Monday 16 February 2026  23:04:27 +0000 (0:00:02.293)       0:01:23.655 *******",
                                "skipping: [instance] => (item=osd.2)",
                                "skipping: [instance] => (item=osd.1)",
                                "skipping: [instance] => (item=osd.0)",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.ceph.osd : Wait until OSD added to cephadm] *********************",
                                "Monday 16 February 2026  23:04:37 +0000 (0:00:10.060)       0:01:33.716 *******",
                                "skipping: [instance] => (item=osd.2)",
                                "skipping: [instance] => (item=osd.1)",
                                "skipping: [instance] => (item=osd.0)",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.ceph.osd : Ensure all OSDs are non-legacy] **********************",
                                "Monday 16 February 2026  23:04:37 +0000 (0:00:00.085)       0:01:33.802 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.osd : Get `ceph-volume lvm list` status] *******************",
                                "Monday 16 February 2026  23:04:42 +0000 (0:00:05.435)       0:01:39.237 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.osd : Install OSDs] ****************************************",
                                "Monday 16 February 2026  23:04:53 +0000 (0:00:10.410)       0:01:49.648 *******",
                                "skipping: [instance] => (item=/dev/ceph-instance-osd0/data)",
                                "skipping: [instance] => (item=/dev/ceph-instance-osd1/data)",
                                "skipping: [instance] => (item=/dev/ceph-instance-osd2/data)",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.ceph.osd : Get mon dump] ****************************************",
                                "Monday 16 February 2026  23:04:53 +0000 (0:00:00.078)       0:01:49.727 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.osd : Mark require osd release] ****************************",
                                "Monday 16 February 2026  23:04:55 +0000 (0:00:01.930)       0:01:51.658 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.osd : Wait for all OSD to be running] **********************",
                                "Monday 16 February 2026  23:04:56 +0000 (0:00:01.486)       0:01:53.145 *******",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/ceph/roles/osd/tasks/check-osds.yml for instance",
                                "",
                                "TASK [vexxhost.ceph.osd : Set the retry count] *********************************",
                                "Monday 16 February 2026  23:04:56 +0000 (0:00:00.071)       0:01:53.216 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.osd : Get `ceph orch ps`] **********************************",
                                "Monday 16 February 2026  23:04:56 +0000 (0:00:00.061)       0:01:53.277 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.osd : OSD daemon list] *************************************",
                                "Monday 16 February 2026  23:04:58 +0000 (0:00:01.459)       0:01:54.736 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.osd : Fail if any OSD not running] *************************",
                                "Monday 16 February 2026  23:04:58 +0000 (0:00:00.045)       0:01:54.782 *******",
                                "skipping: [instance] => (item=1)",
                                "skipping: [instance] => (item=1)",
                                "skipping: [instance] => (item=1)",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.ceph.osd : Fail if any duplicate OSD ID] ************************",
                                "Monday 16 February 2026  23:04:58 +0000 (0:00:00.046)       0:01:54.828 *******",
                                "skipping: [instance]",
                                "",
                                "PLAY [all] *********************************************************************",
                                "",
                                "TASK [Ensure RBD kernel module is loaded] **************************************",
                                "Monday 16 February 2026  23:04:58 +0000 (0:00:00.066)       0:01:54.895 *******",
                                "ok: [instance]",
                                "",
                                "PLAY [all] *********************************************************************",
                                "",
                                "TASK [Gathering Facts] *********************************************************",
                                "Monday 16 February 2026  23:04:58 +0000 (0:00:00.336)       0:01:55.231 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.sysctl : Configure sysctl values] ********************",
                                "Monday 16 February 2026  23:05:00 +0000 (0:00:01.137)       0:01:56.368 *******",
                                "ok: [instance] => (item={'name': 'fs.aio-max-nr', 'value': 1048576})",
                                "ok: [instance] => (item={'name': 'net.ipv4.tcp_timestamps', 'value': 0})",
                                "ok: [instance] => (item={'name': 'net.ipv4.tcp_sack', 'value': 1})",
                                "ok: [instance] => (item={'name': 'net.core.netdev_budget', 'value': 1000})",
                                "ok: [instance] => (item={'name': 'net.core.netdev_max_backlog', 'value': 250000})",
                                "ok: [instance] => (item={'name': 'net.core.rmem_max', 'value': 4194304})",
                                "ok: [instance] => (item={'name': 'net.core.wmem_max', 'value': 4194304})",
                                "ok: [instance] => (item={'name': 'net.core.rmem_default', 'value': 4194304})",
                                "ok: [instance] => (item={'name': 'net.core.wmem_default', 'value': 4194304})",
                                "ok: [instance] => (item={'name': 'net.core.optmem_max', 'value': 4194304})",
                                "ok: [instance] => (item={'name': 'net.ipv4.tcp_rmem', 'value': '4096 87380 4194304'})",
                                "ok: [instance] => (item={'name': 'net.ipv4.tcp_wmem', 'value': '4096 65536 4194304'})",
                                "ok: [instance] => (item={'name': 'net.ipv4.tcp_low_latency', 'value': 1})",
                                "ok: [instance] => (item={'name': 'net.ipv4.tcp_adv_win_scale', 'value': 1})",
                                "ok: [instance] => (item={'name': 'net.ipv4.neigh.default.gc_thresh1', 'value': 128})",
                                "ok: [instance] => (item={'name': 'net.ipv4.neigh.default.gc_thresh2', 'value': 28872})",
                                "ok: [instance] => (item={'name': 'net.ipv4.neigh.default.gc_thresh3', 'value': 32768})",
                                "ok: [instance] => (item={'name': 'net.ipv6.neigh.default.gc_thresh1', 'value': 128})",
                                "ok: [instance] => (item={'name': 'net.ipv6.neigh.default.gc_thresh2', 'value': 28872})",
                                "ok: [instance] => (item={'name': 'net.ipv6.neigh.default.gc_thresh3', 'value': 32768})",
                                "",
                                "TASK [vexxhost.atmosphere.ethtool : Create folder for persistent configuration] ***",
                                "Monday 16 February 2026  23:05:03 +0000 (0:00:03.659)       0:02:00.027 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.ethtool : Install persistent \"ethtool\" tuning] *******",
                                "Monday 16 February 2026  23:05:03 +0000 (0:00:00.236)       0:02:00.264 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.ethtool : Run \"ethtool\" tuning] **********************",
                                "Monday 16 February 2026  23:05:04 +0000 (0:00:00.405)       0:02:00.669 *******",
                                "ok: [instance]",
                                "",
                                "TASK [Set a fact with the \"atmosphere_images\" for other plays] *****************",
                                "Monday 16 February 2026  23:05:04 +0000 (0:00:00.266)       0:02:00.936 *******",
                                "ok: [instance]",
                                "",
                                "PLAY [Configure Kubernetes VIP] ************************************************",
                                "",
                                "TASK [Gathering Facts] *********************************************************",
                                "Monday 16 February 2026  23:05:04 +0000 (0:00:00.058)       0:02:00.995 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.directory : Create directory (/etc/kubernetes/manifests)] ***",
                                "Monday 16 February 2026  23:05:05 +0000 (0:00:01.030)       0:02:02.026 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kube_vip : Uninstall legacy HA stack] ****************",
                                "Monday 16 February 2026  23:05:05 +0000 (0:00:00.200)       0:02:02.226 *******",
                                "ok: [instance] => (item=/etc/keepalived/keepalived.conf)",
                                "ok: [instance] => (item=/etc/keepalived/check_apiserver.sh)",
                                "ok: [instance] => (item=/etc/kubernetes/manifests/keepalived.yaml)",
                                "ok: [instance] => (item=/etc/haproxy/haproxy.cfg)",
                                "ok: [instance] => (item=/etc/kubernetes/manifests/haproxy.yaml)",
                                "",
                                "TASK [vexxhost.kubernetes.kube_vip : Switch API server to run on port 6443] ****",
                                "Monday 16 February 2026  23:05:06 +0000 (0:00:00.925)       0:02:03.151 *******",
                                "ok: [instance] => (item=/etc/kubernetes/manifests/kube-apiserver.yaml)",
                                "ok: [instance] => (item=/etc/kubernetes/controller-manager.conf)",
                                "ok: [instance] => (item=/etc/kubernetes/scheduler.conf)",
                                "",
                                "TASK [vexxhost.kubernetes.kube_vip : Check if super-admin.conf exists] *********",
                                "Monday 16 February 2026  23:05:07 +0000 (0:00:00.525)       0:02:03.676 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kube_vip : Check if kubeadm has already run] *********",
                                "Monday 16 February 2026  23:05:07 +0000 (0:00:00.182)       0:02:03.859 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kube_vip : Set fact with KUBECONFIG path] ************",
                                "Monday 16 February 2026  23:05:07 +0000 (0:00:00.187)       0:02:04.047 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kube_vip : Set fact with KUBECONFIG path (with super-admin.conf)] ***",
                                "Monday 16 February 2026  23:05:07 +0000 (0:00:00.030)       0:02:04.077 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kube_vip : Upload Kubernetes manifest] ***************",
                                "Monday 16 February 2026  23:05:07 +0000 (0:00:00.039)       0:02:04.117 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kube_vip : Ensure kube-vip configuration file] *******",
                                "Monday 16 February 2026  23:05:08 +0000 (0:00:00.477)       0:02:04.594 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kube_vip : Flush handlers] ***************************",
                                "Monday 16 February 2026  23:05:08 +0000 (0:00:00.197)       0:02:04.791 *******",
                                "",
                                "PLAY [Install Kubernetes] ******************************************************",
                                "",
                                "TASK [Gathering Facts] *********************************************************",
                                "Monday 16 February 2026  23:05:08 +0000 (0:00:00.108)       0:02:04.899 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.forget_package : Forget package] *********************",
                                "Monday 16 February 2026  23:05:09 +0000 (0:00:01.060)       0:02:05.960 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.package : Update state for tar] **********************",
                                "Monday 16 February 2026  23:05:09 +0000 (0:00:00.220)       0:02:06.181 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.containers.directory : Create directory (/var/lib/downloads)] ***",
                                "Monday 16 February 2026  23:05:09 +0000 (0:00:00.041)       0:02:06.222 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Starting download of file] *******",
                                "Monday 16 February 2026  23:05:10 +0000 (0:00:00.229)       0:02:06.451 *******",
                                "ok: [instance] => {",
                                "    \"msg\": \"https://github.com/opencontainers/runc/releases/download/v1.4.0/runc.amd64\"",
                                "}",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Download item] *******************",
                                "Monday 16 February 2026  23:05:10 +0000 (0:00:00.056)       0:02:06.507 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Extract archive] *****************",
                                "Monday 16 February 2026  23:05:10 +0000 (0:00:00.317)       0:02:06.825 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.containers.forget_package : Forget package] *********************",
                                "Monday 16 February 2026  23:05:10 +0000 (0:00:00.062)       0:02:06.887 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.package : Update state for tar] **********************",
                                "Monday 16 February 2026  23:05:10 +0000 (0:00:00.216)       0:02:07.104 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Starting download of file] *******",
                                "Monday 16 February 2026  23:05:11 +0000 (0:00:01.012)       0:02:08.116 *******",
                                "ok: [instance] => {",
                                "    \"msg\": \"https://github.com/containerd/containerd/releases/download/v2.2.0/containerd-2.2.0-linux-amd64.tar.gz\"",
                                "}",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Download item] *******************",
                                "Monday 16 February 2026  23:05:11 +0000 (0:00:00.064)       0:02:08.181 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Extract archive] *****************",
                                "Monday 16 February 2026  23:05:12 +0000 (0:00:00.331)       0:02:08.512 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Install SELinux packages] ***************",
                                "Monday 16 February 2026  23:05:14 +0000 (0:00:02.040)       0:02:10.554 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Set SELinux to permissive at runtime] ***",
                                "Monday 16 February 2026  23:05:14 +0000 (0:00:00.037)       0:02:10.591 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Persist SELinux permissive mode] ********",
                                "Monday 16 February 2026  23:05:14 +0000 (0:00:00.033)       0:02:10.625 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Install AppArmor packages] **************",
                                "Monday 16 February 2026  23:05:14 +0000 (0:00:00.030)       0:02:10.655 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Create systemd service file for containerd] ***",
                                "Monday 16 February 2026  23:05:15 +0000 (0:00:01.071)       0:02:11.726 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Create folders for configuration] *******",
                                "Monday 16 February 2026  23:05:15 +0000 (0:00:00.426)       0:02:12.152 *******",
                                "ok: [instance] => (item={'path': '/etc/containerd'})",
                                "ok: [instance] => (item={'path': '/var/lib/containerd', 'mode': '0o700'})",
                                "ok: [instance] => (item={'path': '/run/containerd', 'mode': '0o711'})",
                                "ok: [instance] => (item={'path': '/run/containerd/io.containerd.grpc.v1.cri', 'mode': '0o700'})",
                                "ok: [instance] => (item={'path': '/run/containerd/io.containerd.sandbox.controller.v1.shim', 'mode': '0o700'})",
                                "",
                                "TASK [vexxhost.containers.containerd : Create containerd config file] **********",
                                "Monday 16 February 2026  23:05:16 +0000 (0:00:00.970)       0:02:13.122 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Force any restarts if necessary] ********",
                                "Monday 16 February 2026  23:05:17 +0000 (0:00:00.498)       0:02:13.620 *******",
                                "",
                                "TASK [vexxhost.containers.containerd : Enable and start service] ***************",
                                "Monday 16 February 2026  23:05:17 +0000 (0:00:00.010)       0:02:13.631 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes_upgrade_check : Retrieve the \"kubeadm-config\" ConfigMap] ***",
                                "Monday 16 February 2026  23:05:17 +0000 (0:00:00.351)       0:02:13.982 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes_upgrade_check : Parse the ClusterConfiguration] ***",
                                "Monday 16 February 2026  23:05:18 +0000 (0:00:00.873)       0:02:14.856 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes_upgrade_check : Retrieve the current Kubernetes version] ***",
                                "Monday 16 February 2026  23:05:18 +0000 (0:00:00.052)       0:02:14.909 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes_upgrade_check : Extract major, minor, and patch versions] ***",
                                "Monday 16 February 2026  23:05:18 +0000 (0:00:00.047)       0:02:14.956 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes_upgrade_check : Fail if we're jumping more than one minor version] ***",
                                "Monday 16 February 2026  23:05:18 +0000 (0:00:00.060)       0:02:15.016 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes_upgrade_check : Set fact if we need to upgrade] ***",
                                "Monday 16 February 2026  23:05:18 +0000 (0:00:00.037)       0:02:15.053 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.forget_package : Forget package] *********************",
                                "Monday 16 February 2026  23:05:18 +0000 (0:00:00.054)       0:02:15.108 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Starting download of file] *******",
                                "Monday 16 February 2026  23:05:18 +0000 (0:00:00.211)       0:02:15.319 *******",
                                "ok: [instance] => {",
                                "    \"msg\": \"https://dl.k8s.io/release/v1.28.13/bin/linux/amd64/kubeadm\"",
                                "}",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Download item] *******************",
                                "Monday 16 February 2026  23:05:19 +0000 (0:00:00.050)       0:02:15.370 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Extract archive] *****************",
                                "Monday 16 February 2026  23:05:19 +0000 (0:00:00.471)       0:02:15.841 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.containers.forget_package : Forget package] *********************",
                                "Monday 16 February 2026  23:05:19 +0000 (0:00:00.050)       0:02:15.892 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Starting download of file] *******",
                                "Monday 16 February 2026  23:05:19 +0000 (0:00:00.219)       0:02:16.112 *******",
                                "ok: [instance] => {",
                                "    \"msg\": \"https://dl.k8s.io/release/v1.28.13/bin/linux/amd64/kubectl\"",
                                "}",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Download item] *******************",
                                "Monday 16 February 2026  23:05:19 +0000 (0:00:00.049)       0:02:16.162 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Extract archive] *****************",
                                "Monday 16 February 2026  23:05:20 +0000 (0:00:00.638)       0:02:16.801 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Install SELinux packages] ***************",
                                "Monday 16 February 2026  23:05:20 +0000 (0:00:00.075)       0:02:16.876 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Set SELinux to permissive at runtime] ***",
                                "Monday 16 February 2026  23:05:20 +0000 (0:00:00.037)       0:02:16.914 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Persist SELinux permissive mode] ********",
                                "Monday 16 February 2026  23:05:20 +0000 (0:00:00.032)       0:02:16.947 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Install AppArmor packages] **************",
                                "Monday 16 February 2026  23:05:20 +0000 (0:00:00.034)       0:02:16.981 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Create systemd service file for containerd] ***",
                                "Monday 16 February 2026  23:05:21 +0000 (0:00:01.064)       0:02:18.046 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Create folders for configuration] *******",
                                "Monday 16 February 2026  23:05:22 +0000 (0:00:00.422)       0:02:18.468 *******",
                                "ok: [instance] => (item={'path': '/etc/containerd'})",
                                "ok: [instance] => (item={'path': '/var/lib/containerd', 'mode': '0o700'})",
                                "ok: [instance] => (item={'path': '/run/containerd', 'mode': '0o711'})",
                                "ok: [instance] => (item={'path': '/run/containerd/io.containerd.grpc.v1.cri', 'mode': '0o700'})",
                                "ok: [instance] => (item={'path': '/run/containerd/io.containerd.sandbox.controller.v1.shim', 'mode': '0o700'})",
                                "",
                                "TASK [vexxhost.containers.containerd : Create containerd config file] **********",
                                "Monday 16 February 2026  23:05:23 +0000 (0:00:00.934)       0:02:19.402 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Force any restarts if necessary] ********",
                                "Monday 16 February 2026  23:05:23 +0000 (0:00:00.496)       0:02:19.899 *******",
                                "",
                                "TASK [vexxhost.containers.containerd : Enable and start service] ***************",
                                "Monday 16 February 2026  23:05:23 +0000 (0:00:00.009)       0:02:19.909 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.forget_package : Forget package] *********************",
                                "Monday 16 February 2026  23:05:23 +0000 (0:00:00.366)       0:02:20.275 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Starting download of file] *******",
                                "Monday 16 February 2026  23:05:24 +0000 (0:00:00.261)       0:02:20.536 *******",
                                "ok: [instance] => {",
                                "    \"msg\": \"https://github.com/kubernetes-sigs/cri-tools/releases/download/v1.34.0/crictl-v1.34.0-linux-amd64.tar.gz\"",
                                "}",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Download item] *******************",
                                "Monday 16 February 2026  23:05:24 +0000 (0:00:00.065)       0:02:20.601 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Extract archive] *****************",
                                "Monday 16 February 2026  23:05:24 +0000 (0:00:00.341)       0:02:20.943 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Starting download of file] *******",
                                "Monday 16 February 2026  23:05:25 +0000 (0:00:01.345)       0:02:22.288 *******",
                                "ok: [instance] => {",
                                "    \"msg\": \"https://github.com/kubernetes-sigs/cri-tools/releases/download/v1.34.0/critest-v1.34.0-linux-amd64.tar.gz\"",
                                "}",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Download item] *******************",
                                "Monday 16 February 2026  23:05:26 +0000 (0:00:00.065)       0:02:22.354 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Extract archive] *****************",
                                "Monday 16 February 2026  23:05:26 +0000 (0:00:00.425)       0:02:22.779 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.cri_tools : Create crictl config] ********************",
                                "Monday 16 February 2026  23:05:27 +0000 (0:00:01.316)       0:02:24.096 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.directory : Create directory (/opt/cni/bin)] *********",
                                "Monday 16 February 2026  23:05:28 +0000 (0:00:00.402)       0:02:24.498 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.forget_package : Forget package] *********************",
                                "Monday 16 February 2026  23:05:28 +0000 (0:00:00.241)       0:02:24.739 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Starting download of file] *******",
                                "Monday 16 February 2026  23:05:28 +0000 (0:00:00.218)       0:02:24.958 *******",
                                "ok: [instance] => {",
                                "    \"msg\": \"https://github.com/containernetworking/plugins/releases/download/v1.8.0/cni-plugins-linux-amd64-v1.8.0.tgz\"",
                                "}",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Download item] *******************",
                                "Monday 16 February 2026  23:05:28 +0000 (0:00:00.079)       0:02:25.038 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Extract archive] *****************",
                                "Monday 16 February 2026  23:05:29 +0000 (0:00:00.463)       0:02:25.502 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.cni_plugins : Gather variables for each operating system] ***",
                                "Monday 16 February 2026  23:05:31 +0000 (0:00:02.426)       0:02:27.928 *******",
                                "ok: [instance] => (item=/home/zuul/.ansible/collections/ansible_collections/vexxhost/containers/roles/cni_plugins/vars/debian.yml)",
                                "",
                                "TASK [vexxhost.containers.cni_plugins : Install additional packages] ***********",
                                "Monday 16 February 2026  23:05:31 +0000 (0:00:00.061)       0:02:27.990 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.cni_plugins : Ensure IPv6 is enabled] ****************",
                                "Monday 16 February 2026  23:05:32 +0000 (0:00:01.112)       0:02:29.102 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.cni_plugins : Enable kernel modules on-boot] *********",
                                "Monday 16 February 2026  23:05:32 +0000 (0:00:00.222)       0:02:29.324 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.cni_plugins : Enable kernel modules in runtime] ******",
                                "Monday 16 February 2026  23:05:33 +0000 (0:00:00.440)       0:02:29.765 *******",
                                "ok: [instance] => (item=br_netfilter)",
                                "ok: [instance] => (item=ip_tables)",
                                "ok: [instance] => (item=ip6_tables)",
                                "ok: [instance] => (item=nf_conntrack)",
                                "",
                                "TASK [vexxhost.containers.forget_package : Forget package] *********************",
                                "Monday 16 February 2026  23:05:34 +0000 (0:00:00.756)       0:02:30.522 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Starting download of file] *******",
                                "Monday 16 February 2026  23:05:34 +0000 (0:00:00.215)       0:02:30.737 *******",
                                "ok: [instance] => {",
                                "    \"msg\": \"https://dl.k8s.io/release/v1.28.13/bin/linux/amd64/kubelet\"",
                                "}",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Download item] *******************",
                                "Monday 16 February 2026  23:05:34 +0000 (0:00:00.049)       0:02:30.787 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Extract archive] *****************",
                                "Monday 16 February 2026  23:05:35 +0000 (0:00:00.599)       0:02:31.387 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubelet : Gather variables for each operating system] ***",
                                "Monday 16 February 2026  23:05:35 +0000 (0:00:00.046)       0:02:31.433 *******",
                                "ok: [instance] => (item=/home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/kubelet/vars/debian.yml)",
                                "",
                                "TASK [vexxhost.kubernetes.kubelet : Install coreutils] *************************",
                                "Monday 16 February 2026  23:05:35 +0000 (0:00:00.058)       0:02:31.491 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubelet : Install additional packages] ***************",
                                "Monday 16 February 2026  23:05:35 +0000 (0:00:00.029)       0:02:31.521 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubelet : Configure sysctl values] *******************",
                                "Monday 16 February 2026  23:05:36 +0000 (0:00:01.077)       0:02:32.598 *******",
                                "ok: [instance] => (item={'name': 'net.ipv4.ip_forward', 'value': 1})",
                                "ok: [instance] => (item={'name': 'net.bridge.bridge-nf-call-iptables', 'value': 1})",
                                "ok: [instance] => (item={'name': 'net.bridge.bridge-nf-call-ip6tables', 'value': 1})",
                                "ok: [instance] => (item={'name': 'net.ipv4.conf.all.rp_filter', 'value': 0})",
                                "ok: [instance] => (item={'name': 'fs.inotify.max_queued_events', 'value': 1048576})",
                                "ok: [instance] => (item={'name': 'fs.inotify.max_user_instances', 'value': 8192})",
                                "ok: [instance] => (item={'name': 'fs.inotify.max_user_watches', 'value': 1048576})",
                                "",
                                "TASK [vexxhost.kubernetes.kubelet : Create folders for kubernetes configuration] ***",
                                "Monday 16 February 2026  23:05:37 +0000 (0:00:01.273)       0:02:33.872 *******",
                                "ok: [instance] => (item=/etc/systemd/system/kubelet.service.d)",
                                "ok: [instance] => (item=/etc/kubernetes)",
                                "ok: [instance] => (item=/etc/kubernetes/manifests)",
                                "",
                                "TASK [vexxhost.kubernetes.kubelet : Add kubelet systemd service config] ********",
                                "Monday 16 February 2026  23:05:38 +0000 (0:00:00.535)       0:02:34.407 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubelet : Add kubeadm dropin for kubelet systemd service config] ***",
                                "Monday 16 February 2026  23:05:38 +0000 (0:00:00.427)       0:02:34.835 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubelet : Check swap status] *************************",
                                "Monday 16 February 2026  23:05:38 +0000 (0:00:00.434)       0:02:35.269 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubelet : Disable swap] ******************************",
                                "Monday 16 February 2026  23:05:39 +0000 (0:00:00.227)       0:02:35.496 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubelet : Remove swapfile from /etc/fstab] ***********",
                                "Monday 16 February 2026  23:05:39 +0000 (0:00:00.034)       0:02:35.531 *******",
                                "ok: [instance] => (item=swap)",
                                "ok: [instance] => (item=none)",
                                "",
                                "TASK [vexxhost.kubernetes.kubelet : Create noswap systemd service config file] ***",
                                "Monday 16 February 2026  23:05:39 +0000 (0:00:00.540)       0:02:36.071 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubelet : Enable noswap service] *********************",
                                "Monday 16 February 2026  23:05:40 +0000 (0:00:00.435)       0:02:36.506 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubelet : Force any restarts if necessary] ***********",
                                "Monday 16 February 2026  23:05:40 +0000 (0:00:00.343)       0:02:36.850 *******",
                                "",
                                "TASK [vexxhost.kubernetes.kubelet : Enable and start kubelet service] **********",
                                "Monday 16 February 2026  23:05:40 +0000 (0:00:00.011)       0:02:36.861 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubelet : Ensure availability of dbus on Debian] *****",
                                "Monday 16 February 2026  23:05:40 +0000 (0:00:00.353)       0:02:37.215 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubelet : Configure short hostname] ******************",
                                "Monday 16 February 2026  23:05:41 +0000 (0:00:01.080)       0:02:38.295 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubelet : Ensure hostname inside hosts file] *********",
                                "Monday 16 February 2026  23:05:42 +0000 (0:00:00.913)       0:02:39.209 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Remove kubernetes repository] ***********",
                                "Monday 16 February 2026  23:05:43 +0000 (0:00:00.213)       0:02:39.423 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Setup control plane] ********************",
                                "Monday 16 February 2026  23:05:43 +0000 (0:00:00.483)       0:02:39.906 *******",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/kubernetes/tasks/control-plane.yml for instance",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Bootstrap cluster] **********************",
                                "Monday 16 February 2026  23:05:43 +0000 (0:00:00.095)       0:02:40.001 *******",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/kubernetes/tasks/bootstrap-cluster.yml for instance",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Check if any control plane is bootstrapped] ***",
                                "Monday 16 February 2026  23:05:43 +0000 (0:00:00.095)       0:02:40.097 *******",
                                "ok: [instance] => (item=instance)",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Pick node from pre-existing cluster] ****",
                                "Monday 16 February 2026  23:05:43 +0000 (0:00:00.222)       0:02:40.320 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Select first node to initialize cluster] ***",
                                "Monday 16 February 2026  23:05:44 +0000 (0:00:00.047)       0:02:40.368 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Print selected bootstrap node] **********",
                                "Monday 16 February 2026  23:05:44 +0000 (0:00:00.040)       0:02:40.408 *******",
                                "ok: [instance] => {",
                                "    \"msg\": \"instance\"",
                                "}",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Upload cluster configuration for bootstrap node] ***",
                                "Monday 16 February 2026  23:05:44 +0000 (0:00:00.037)       0:02:40.445 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Create pki folder] **********************",
                                "Monday 16 February 2026  23:05:44 +0000 (0:00:00.543)       0:02:40.989 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Create kubernetes ca key] ***************",
                                "Monday 16 February 2026  23:05:44 +0000 (0:00:00.037)       0:02:41.027 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Create kubernetes ca cert] **************",
                                "Monday 16 February 2026  23:05:44 +0000 (0:00:00.036)       0:02:41.063 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Create etcd-ca key] *********************",
                                "Monday 16 February 2026  23:05:44 +0000 (0:00:00.034)       0:02:41.098 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Create etcd-ca cert] ********************",
                                "Monday 16 February 2026  23:05:44 +0000 (0:00:00.040)       0:02:41.138 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Create front-proxy-ca key] **************",
                                "Monday 16 February 2026  23:05:44 +0000 (0:00:00.031)       0:02:41.170 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Create front-proxy-ca cert] *************",
                                "Monday 16 February 2026  23:05:44 +0000 (0:00:00.034)       0:02:41.205 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Initialize cluster] *********************",
                                "Monday 16 February 2026  23:05:44 +0000 (0:00:00.035)       0:02:41.240 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Check if the node is already part of the cluster] ***",
                                "Monday 16 February 2026  23:05:45 +0000 (0:00:00.199)       0:02:41.439 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Join cluster] ***************************",
                                "Monday 16 February 2026  23:05:45 +0000 (0:00:00.222)       0:02:41.662 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Create folder for admin configuration] ***",
                                "Monday 16 February 2026  23:05:45 +0000 (0:00:00.077)       0:02:41.739 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Creating a symlink for admin configuration file] ***",
                                "Monday 16 February 2026  23:05:45 +0000 (0:00:00.192)       0:02:41.931 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Add bash autocomplete for kubectl] ******",
                                "Monday 16 February 2026  23:05:45 +0000 (0:00:00.199)       0:02:42.131 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Install PIP] ****************************",
                                "Monday 16 February 2026  23:05:45 +0000 (0:00:00.198)       0:02:42.329 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Install Kubernetes Python package using pip on supported systems] ***",
                                "Monday 16 February 2026  23:05:47 +0000 (0:00:01.073)       0:02:43.403 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Enable EPEL repository] *****************",
                                "Monday 16 February 2026  23:05:48 +0000 (0:00:01.310)       0:02:44.713 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Install Kubernetes Python package using package manager on supported systems] ***",
                                "Monday 16 February 2026  23:05:48 +0000 (0:00:00.048)       0:02:44.762 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Allow workload on control plane node] ***",
                                "Monday 16 February 2026  23:05:48 +0000 (0:00:00.047)       0:02:44.809 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Remove kube-proxy resources] ************",
                                "Monday 16 February 2026  23:05:49 +0000 (0:00:00.742)       0:02:45.551 *******",
                                "skipping: [instance] => (item=DaemonSet)",
                                "skipping: [instance] => (item=ConfigMap)",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Start an upgrade] ***********************",
                                "Monday 16 February 2026  23:05:49 +0000 (0:00:00.050)       0:02:45.602 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Check if the Kubernetes API services is up to date] ***",
                                "Monday 16 February 2026  23:05:49 +0000 (0:00:00.045)       0:02:45.647 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Trigger an upgrade of the Kubernetes API services] ***",
                                "Monday 16 February 2026  23:05:49 +0000 (0:00:00.049)       0:02:45.696 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Setup nodes] ****************************",
                                "Monday 16 February 2026  23:05:49 +0000 (0:00:00.053)       0:02:45.750 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Upgrade & restart Kubelet node for upgrade (if needed)] ***",
                                "Monday 16 February 2026  23:05:49 +0000 (0:00:00.036)       0:02:45.786 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Set node selector for CoreDNS components] ***",
                                "Monday 16 February 2026  23:05:49 +0000 (0:00:00.392)       0:02:46.179 *******",
                                "ok: [instance]",
                                "",
                                "PLAY [Install control-plane components] ****************************************",
                                "",
                                "TASK [Gathering Facts] *********************************************************",
                                "Monday 16 February 2026  23:05:50 +0000 (0:00:00.903)       0:02:47.083 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.forget_package : Forget package] *********************",
                                "Monday 16 February 2026  23:05:51 +0000 (0:00:01.071)       0:02:48.154 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.package : Update state for tar] **********************",
                                "Monday 16 February 2026  23:05:52 +0000 (0:00:00.213)       0:02:48.368 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.directory : Create directory (/var/lib/downloads)] ***",
                                "Monday 16 February 2026  23:05:53 +0000 (0:00:01.088)       0:02:49.456 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Starting download of file] *******",
                                "Monday 16 February 2026  23:05:53 +0000 (0:00:00.222)       0:02:49.678 *******",
                                "ok: [instance] => {",
                                "    \"msg\": \"https://get.helm.sh/helm-v3.11.2-linux-amd64.tar.gz\"",
                                "}",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Download item] *******************",
                                "Monday 16 February 2026  23:05:53 +0000 (0:00:00.040)       0:02:49.718 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Extract archive] *****************",
                                "Monday 16 February 2026  23:05:53 +0000 (0:00:00.322)       0:02:50.041 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.helm : Remove Helm repository] ***********************",
                                "Monday 16 February 2026  23:05:54 +0000 (0:00:01.248)       0:02:51.289 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.helm : Add bash autocomplete for helm] ***************",
                                "Monday 16 February 2026  23:05:55 +0000 (0:00:00.347)       0:02:51.636 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.helm : Get Helm plugins dir] *************************",
                                "Monday 16 February 2026  23:05:55 +0000 (0:00:00.208)       0:02:51.845 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.helm : Create Helm plugins directory if it does not exist] ***",
                                "Monday 16 February 2026  23:05:55 +0000 (0:00:00.258)       0:02:52.103 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.helm : Remove Helm diff plugin installed by kubernetes.core.helm_plugin] ***",
                                "Monday 16 February 2026  23:05:55 +0000 (0:00:00.205)       0:02:52.309 *******",
                                "ok: [instance]",
                                "",
                                "TASK [Install plugin] **********************************************************",
                                "Monday 16 February 2026  23:05:56 +0000 (0:00:00.208)       0:02:52.517 *******",
                                "included: vexxhost.containers.download_artifact for instance",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Starting download of file] *******",
                                "Monday 16 February 2026  23:05:56 +0000 (0:00:00.051)       0:02:52.569 *******",
                                "ok: [instance] => {",
                                "    \"msg\": \"https://github.com/databus23/helm-diff/releases/download/v3.8.1/helm-diff-linux-amd64.tgz\"",
                                "}",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Download item] *******************",
                                "Monday 16 February 2026  23:05:56 +0000 (0:00:00.055)       0:02:52.624 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Extract archive] *****************",
                                "Monday 16 February 2026  23:05:56 +0000 (0:00:00.316)       0:02:52.940 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***",
                                "Monday 16 February 2026  23:05:58 +0000 (0:00:01.441)       0:02:54.382 *******",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************",
                                "Monday 16 February 2026  23:05:58 +0000 (0:00:00.044)       0:02:54.426 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.cilium : Get current Kubernetes version] *************",
                                "Monday 16 February 2026  23:05:58 +0000 (0:00:00.635)       0:02:55.061 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.cilium : Deploy Helm chart] **************************",
                                "Monday 16 February 2026  23:05:59 +0000 (0:00:00.805)       0:02:55.867 *******",
                                "ok: [instance]",
                                "",
                                "PLAY [all] *********************************************************************",
                                "",
                                "TASK [Gathering Facts] *********************************************************",
                                "Monday 16 February 2026  23:06:01 +0000 (0:00:01.621)       0:02:57.489 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.kubernetes_node_labels : Add labels to node] *********",
                                "Monday 16 February 2026  23:06:02 +0000 (0:00:01.063)       0:02:58.552 *******",
                                "ok: [instance]",
                                "",
                                "PLAY [all] *********************************************************************",
                                "",
                                "TASK [Uninstall unattended-upgrades] *******************************************",
                                "Monday 16 February 2026  23:06:02 +0000 (0:00:00.711)       0:02:59.264 *******",
                                "ok: [instance]",
                                "",
                                "PLAY [controllers] *************************************************************",
                                "",
                                "TASK [Gathering Facts] *********************************************************",
                                "Monday 16 February 2026  23:06:03 +0000 (0:00:00.692)       0:02:59.957 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***",
                                "Monday 16 February 2026  23:06:04 +0000 (0:00:01.178)       0:03:01.135 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.local_path_provisioner : Deploy Helm chart] **********",
                                "Monday 16 February 2026  23:06:04 +0000 (0:00:00.038)       0:03:01.174 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***",
                                "Monday 16 February 2026  23:06:04 +0000 (0:00:00.049)       0:03:01.224 *******",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************",
                                "Monday 16 February 2026  23:06:04 +0000 (0:00:00.035)       0:03:01.259 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.ceph_csi_rbd : Collect \"ceph mon dump\" output from a monitor] ***",
                                "Monday 16 February 2026  23:06:05 +0000 (0:00:00.466)       0:03:01.726 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.ceph_csi_rbd : Generate fact with list of Ceph monitors] ***",
                                "Monday 16 February 2026  23:06:19 +0000 (0:00:14.359)       0:03:16.085 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.ceph_csi_rbd : Create Ceph pool] *********************",
                                "Monday 16 February 2026  23:06:19 +0000 (0:00:00.057)       0:03:16.143 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.ceph_csi_rbd : Create user client.kube] **************",
                                "Monday 16 February 2026  23:06:22 +0000 (0:00:03.043)       0:03:19.187 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.ceph_csi_rbd : Retrieve keyring for client.kube] *****",
                                "Monday 16 February 2026  23:06:24 +0000 (0:00:01.360)       0:03:20.547 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.ceph_csi_rbd : Store keyring inside fact] ************",
                                "Monday 16 February 2026  23:06:25 +0000 (0:00:01.166)       0:03:21.713 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.ceph_csi_rbd : Deploy Helm chart] ********************",
                                "Monday 16 February 2026  23:06:25 +0000 (0:00:00.042)       0:03:21.756 *******",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.powerstore_csi : Clone PowerStore CSI from GitHub] ***",
                                "Monday 16 February 2026  23:06:26 +0000 (0:00:01.229)       0:03:22.986 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.powerstore_csi : Create Secret] **********************",
                                "Monday 16 February 2026  23:06:26 +0000 (0:00:00.030)       0:03:23.016 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.powerstore_csi : Create StorageClass] ****************",
                                "Monday 16 February 2026  23:06:26 +0000 (0:00:00.028)       0:03:23.045 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.powerstore_csi : Deploy Helm chart] ******************",
                                "Monday 16 February 2026  23:06:26 +0000 (0:00:00.027)       0:03:23.072 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.multipathd : Add backports PPA] **********************",
                                "Monday 16 February 2026  23:06:26 +0000 (0:00:00.048)       0:03:23.121 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.multipathd : Install the multipathd package] *********",
                                "Monday 16 February 2026  23:06:26 +0000 (0:00:00.022)       0:03:23.143 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.multipathd : Install the configuration file] *********",
                                "Monday 16 February 2026  23:06:26 +0000 (0:00:00.029)       0:03:23.172 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.portworx : Detect if InitiatorName is set] ***********",
                                "Monday 16 February 2026  23:06:26 +0000 (0:00:00.038)       0:03:23.210 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.portworx : Generate a new InitiatorName] *************",
                                "Monday 16 February 2026  23:06:26 +0000 (0:00:00.033)       0:03:23.244 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.portworx : Write the new InitiatorName] **************",
                                "Monday 16 February 2026  23:06:26 +0000 (0:00:00.028)       0:03:23.273 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.portworx : Create namespace] *************************",
                                "Monday 16 February 2026  23:06:26 +0000 (0:00:00.030)       0:03:23.303 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.portworx : Install Portworx] *************************",
                                "Monday 16 February 2026  23:06:26 +0000 (0:00:00.021)       0:03:23.325 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.portworx : Wait till the CRDs are created] ***********",
                                "Monday 16 February 2026  23:06:27 +0000 (0:00:00.024)       0:03:23.350 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.portworx : Create Portworx Storage Cluster] **********",
                                "Monday 16 February 2026  23:06:27 +0000 (0:00:00.020)       0:03:23.370 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.storpool_csi : Deploy CSI RBAC] **********************",
                                "Monday 16 February 2026  23:06:27 +0000 (0:00:00.041)       0:03:23.412 *******",
                                "skipping: [instance] => (item={'name': 'controllerplugin'})",
                                "skipping: [instance] => (item={'name': 'nodeplugin'})",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.storpool_csi : Deploy CSI] ***************************",
                                "Monday 16 February 2026  23:06:27 +0000 (0:00:00.034)       0:03:23.446 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.storpool_csi : Create StorageClass] ******************",
                                "Monday 16 February 2026  23:06:27 +0000 (0:00:00.025)       0:03:23.472 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.ibm_block_csi_driver : Deploy CSI] *******************",
                                "Monday 16 February 2026  23:06:27 +0000 (0:00:00.038)       0:03:23.510 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.ibm_block_csi_driver : Create Secret] ****************",
                                "Monday 16 February 2026  23:06:27 +0000 (0:00:00.022)       0:03:23.532 *******",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.ibm_block_csi_driver : Create StorageClass] **********",
                                "Monday 16 February 2026  23:06:27 +0000 (0:00:00.019)       0:03:23.552 *******",
                                "skipping: [instance]",
                                "",
                                "PLAY RECAP *********************************************************************",
                                "instance                   : ok=235  changed=0    unreachable=0    failed=0    skipped=82   rescued=0    ignored=0",
                                "",
                                "Monday 16 February 2026  23:06:27 +0000 (0:00:00.036)       0:03:23.588 *******",
                                "===============================================================================",
                                "vexxhost.atmosphere.ceph_csi_rbd : Collect \"ceph mon dump\" output from a monitor -- 14.36s",
                                "vexxhost.ceph.osd : Get `ceph-volume lvm list` status ------------------ 10.41s",
                                "vexxhost.ceph.mon : Validate monitor exist ----------------------------- 10.21s",
                                "vexxhost.ceph.osd : Adopt OSDs to cluster ------------------------------ 10.06s",
                                "vexxhost.ceph.mon : Get `cephadm ls` status ----------------------------- 5.68s",
                                "vexxhost.ceph.osd : Get `cephadm ls` status ----------------------------- 5.46s",
                                "vexxhost.ceph.osd : Ensure all OSDs are non-legacy ---------------------- 5.44s",
                                "vexxhost.containers.download_artifact : Extract archive ----------------- 3.80s",
                                "vexxhost.atmosphere.sysctl : Configure sysctl values -------------------- 3.66s",
                                "vexxhost.containers.download_artifact : Extract archive ----------------- 3.32s",
                                "vexxhost.atmosphere.ceph_csi_rbd : Create Ceph pool --------------------- 3.04s",
                                "vexxhost.containers.download_artifact : Extract archive ----------------- 2.50s",
                                "vexxhost.containers.download_artifact : Extract archive ----------------- 2.43s",
                                "vexxhost.ceph.cephadm_host : Add new host to Ceph ----------------------- 2.29s",
                                "vexxhost.containers.download_artifact : Extract archive ----------------- 2.10s",
                                "vexxhost.ceph.cephadm_host : Add new host to Ceph ----------------------- 2.06s",
                                "vexxhost.containers.download_artifact : Extract archive ----------------- 2.04s",
                                "vexxhost.ceph.cephadm_host : Add new host to Ceph ----------------------- 2.00s",
                                "vexxhost.ceph.mgr : Enable the Ceph Manager prometheus module ----------- 1.96s",
                                "vexxhost.ceph.osd : Get mon dump ---------------------------------------- 1.93s",
                                "INFO     [csi > idempotence] Executed: Successful",
                                "INFO     [csi > side_effect] Executing",
                                "WARNING  [csi > side_effect] Executed: Missing playbook (Remove from test_sequence to suppress)",
                                "INFO     [csi > verify] Executing",
                                "",
                                "PLAY [Verify] ******************************************************************",
                                "",
                                "TASK [Gathering Facts] *********************************************************",
                                "Monday 16 February 2026  23:06:28 +0000 (0:00:00.033)       0:00:00.033 *******",
                                "[WARNING]: Platform linux on host instance is using the discovered Python",
                                "interpreter at /usr/bin/python3.10, but future installation of another Python",
                                "interpreter could change the meaning of that path. See",
                                "https://docs.ansible.com/ansible-",
                                "core/2.17/reference_appendices/interpreter_discovery.html for more information.",
                                "ok: [instance]",
                                "",
                                "TASK [Create a persistent volume] **********************************************",
                                "Monday 16 February 2026  23:06:29 +0000 (0:00:01.298)       0:00:01.331 *******",
                                "changed: [instance]",
                                "",
                                "TASK [Create a pod] ************************************************************",
                                "Monday 16 February 2026  23:06:30 +0000 (0:00:00.830)       0:00:02.161 *******",
                                "changed: [instance]",
                                "",
                                "TASK [Delete the pod] **********************************************************",
                                "Monday 16 February 2026  23:06:45 +0000 (0:00:15.683)       0:00:17.845 *******",
                                "changed: [instance]",
                                "",
                                "TASK [Delete the persistent volume] ********************************************",
                                "Monday 16 February 2026  23:06:46 +0000 (0:00:00.715)       0:00:18.560 *******",
                                "ok: [instance]",
                                "",
                                "PLAY RECAP *********************************************************************",
                                "instance                   : ok=5    changed=3    unreachable=0    failed=0    skipped=0    rescued=0    ignored=0",
                                "",
                                "Monday 16 February 2026  23:06:47 +0000 (0:00:00.708)       0:00:19.269 *******",
                                "===============================================================================",
                                "Create a pod ----------------------------------------------------------- 15.68s",
                                "Gathering Facts --------------------------------------------------------- 1.30s",
                                "Create a persistent volume ---------------------------------------------- 0.83s",
                                "Delete the pod ---------------------------------------------------------- 0.72s",
                                "Delete the persistent volume -------------------------------------------- 0.71s",
                                "INFO     [csi > verify] Executed: Successful",
                                "INFO     [csi > cleanup] Executing",
                                "WARNING  [csi > cleanup] Executed: Missing playbook (Remove from test_sequence to suppress)",
                                "INFO     [csi > destroy] Executing",
                                "WARNING  [csi > destroy] Skipping, '--destroy=never' requested.",
                                "INFO     [csi > destroy] Executed: Successful",
                                "WARNING  Molecule executed 1 scenario (1 missing files)"
                            ],
                            "zuul_log_id": "0242ac17-0010-434f-163f-000000000006-1-instance"
                        }
                    },
                    "task": {
                        "duration": {
                            "end": "2026-02-16T23:06:47.903223Z",
                            "start": "2026-02-16T22:54:35.419426Z"
                        },
                        "id": "0242ac17-0010-434f-163f-000000000006",
                        "name": "Run Molecule scenario"
                    }
                }
            ]
        }
    ],
    "stats": {
        "instance": {
            "changed": 3,
            "failures": 0,
            "ignored": 0,
            "ok": 3,
            "rescued": 0,
            "skipped": 0,
            "unreachable": 0
        }
    },
    "trusted": false
},
{
    "branch": "stable/2024.1",
    "index": "0",
    "phase": "post",
    "playbook": "github.com/vexxhost/atmosphere/test-playbooks/molecule/post.yml",
    "plays": [
        {
            "play": {
                "duration": {
                    "end": "2026-02-16T23:07:33.857173Z",
                    "start": "2026-02-16T23:06:48.651085Z"
                },
                "id": "0242ac17-0010-0a3c-9efc-000000000002",
                "name": "all"
            },
            "tasks": [
                {
                    "hosts": {
                        "instance": {
                            "action": "file",
                            "changed": true,
                            "diff": {
                                "after": {
                                    "path": "/tmp/logs/system",
                                    "state": "directory"
                                },
                                "before": {
                                    "path": "/tmp/logs/system",
                                    "state": "absent"
                                }
                            },
                            "gid": 0,
                            "group": "root",
                            "invocation": {
                                "module_args": {
                                    "_diff_peek": null,
                                    "_original_basename": null,
                                    "access_time": null,
                                    "access_time_format": "%Y%m%d%H%M.%S",
                                    "attributes": null,
                                    "follow": true,
                                    "force": false,
                                    "group": null,
                                    "mode": null,
                                    "modification_time": null,
                                    "modification_time_format": "%Y%m%d%H%M.%S",
                                    "owner": null,
                                    "path": "/tmp/logs/system",
                                    "recurse": false,
                                    "selevel": null,
                                    "serole": null,
                                    "setype": null,
                                    "seuser": null,
                                    "src": null,
                                    "state": "directory",
                                    "unsafe_writes": false
                                }
                            },
                            "mode": "0755",
                            "owner": "root",
                            "path": "/tmp/logs/system",
                            "size": 4096,
                            "state": "directory",
                            "uid": 0
                        }
                    },
                    "role": {
                        "id": "0242ac17-0010-0a3c-9efc-00000000000a",
                        "name": "gather-host-logs",
                        "path": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/untrusted/project_4/opendev.org/openstack/openstack-helm/roles/gather-host-logs"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-02-16T23:06:49.025893Z",
                            "start": "2026-02-16T23:06:48.665644Z"
                        },
                        "id": "0242ac17-0010-0a3c-9efc-00000000000c",
                        "name": "creating directory for system status"
                    }
                },
                {
                    "hosts": {
                        "instance": {
                            "action": "shell",
                            "changed": true,
                            "cmd": "set -x\nsystemd-cgls --full --all --no-pager > /tmp/logs/system/systemd-cgls.txt\nip addr > /tmp/logs/system/ip-addr.txt\nip route > /tmp/logs/system/ip-route.txt\nlsblk > /tmp/logs/system/lsblk.txt\nmount > /tmp/logs/system/mount.txt\ndocker images > /tmp/logs/system/docker-images.txt\nbrctl show > /tmp/logs/system/brctl-show.txt\nps aux --sort=-%mem > /tmp/logs/system/ps.txt\ndpkg -l > /tmp/logs/system/packages.txt\nCONTAINERS=($(docker ps -a --format '{{ .Names }}' --filter label=zuul))\nif [ ! -z \"$CONTAINERS\" ]; then\n  mkdir -p \"/tmp/logs/system/containers\"\n  for CONTAINER in ${CONTAINERS}; do\n    docker logs \"${CONTAINER}\" > \"/tmp/logs/system/containers/${CONTAINER}.txt\"\n  done\nfi",
                            "delta": "0:00:00.081833",
                            "end": "2026-02-16 23:06:49.461554",
                            "invocation": {
                                "module_args": {
                                    "_raw_params": "set -x\nsystemd-cgls --full --all --no-pager > /tmp/logs/system/systemd-cgls.txt\nip addr > /tmp/logs/system/ip-addr.txt\nip route > /tmp/logs/system/ip-route.txt\nlsblk > /tmp/logs/system/lsblk.txt\nmount > /tmp/logs/system/mount.txt\ndocker images > /tmp/logs/system/docker-images.txt\nbrctl show > /tmp/logs/system/brctl-show.txt\nps aux --sort=-%mem > /tmp/logs/system/ps.txt\ndpkg -l > /tmp/logs/system/packages.txt\nCONTAINERS=($(docker ps -a --format '{{ .Names }}' --filter label=zuul))\nif [ ! -z \"$CONTAINERS\" ]; then\n  mkdir -p \"/tmp/logs/system/containers\"\n  for CONTAINER in ${CONTAINERS}; do\n    docker logs \"${CONTAINER}\" > \"/tmp/logs/system/containers/${CONTAINER}.txt\"\n  done\nfi",
                                    "_uses_shell": true,
                                    "argv": null,
                                    "chdir": null,
                                    "creates": null,
                                    "executable": "/bin/bash",
                                    "expand_argument_vars": true,
                                    "removes": null,
                                    "stdin": null,
                                    "stdin_add_newline": true,
                                    "strip_empty_ends": true,
                                    "zuul_ansible_split_streams": false,
                                    "zuul_log_id": "0242ac17-0010-0a3c-9efc-00000000000d-1-instance",
                                    "zuul_no_log": false,
                                    "zuul_output_max_bytes": 1073741824
                                }
                            },
                            "msg": "",
                            "rc": 0,
                            "start": "2026-02-16 23:06:49.379721",
                            "stderr": "",
                            "stderr_lines": [],
                            "stdout": "+ systemd-cgls --full --all --no-pager\n+ ip addr\n+ ip route\n+ lsblk\n+ mount\n+ docker images\n+ brctl show\n/bin/bash: line 8: brctl: command not found\n+ ps aux --sort=-%mem\n+ dpkg -l\n+ CONTAINERS=($(docker ps -a --format '{{ .Names }}' --filter label=zuul))\n++ docker ps -a --format '{{ .Names }}' --filter label=zuul\n+ '[' '!' -z '' ']'",
                            "stdout_lines": [
                                "+ systemd-cgls --full --all --no-pager",
                                "+ ip addr",
                                "+ ip route",
                                "+ lsblk",
                                "+ mount",
                                "+ docker images",
                                "+ brctl show",
                                "/bin/bash: line 8: brctl: command not found",
                                "+ ps aux --sort=-%mem",
                                "+ dpkg -l",
                                "+ CONTAINERS=($(docker ps -a --format '{{ .Names }}' --filter label=zuul))",
                                "++ docker ps -a --format '{{ .Names }}' --filter label=zuul",
                                "+ '[' '!' -z '' ']'"
                            ],
                            "zuul_log_id": "0242ac17-0010-0a3c-9efc-00000000000d-1-instance"
                        }
                    },
                    "role": {
                        "id": "0242ac17-0010-0a3c-9efc-00000000000a",
                        "name": "gather-host-logs",
                        "path": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/untrusted/project_4/opendev.org/openstack/openstack-helm/roles/gather-host-logs"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-02-16T23:06:49.568261Z",
                            "start": "2026-02-16T23:06:49.051024Z"
                        },
                        "id": "0242ac17-0010-0a3c-9efc-00000000000d",
                        "name": "Get logs for each host"
                    }
                },
                {
                    "hosts": {
                        "instance": {
                            "action": "synchronize",
                            "changed": true,
                            "cmd": "/usr/bin/rsync --delay-updates -F --compress --archive --rsh='/usr/bin/ssh -S none -o Port=22 -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null' --rsync-path='sudo -u root rsync' --out-format='<<CHANGED>>%i %n%L' zuul@199.204.45.216:/tmp/logs/system /var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/work/logs/instance",
                            "invocation": {
                                "module_args": {
                                    "_local_rsync_password": null,
                                    "_local_rsync_path": "rsync",
                                    "_ssh_args": null,
                                    "_substitute_controller": false,
                                    "archive": true,
                                    "checksum": false,
                                    "compress": true,
                                    "copy_links": false,
                                    "delay_updates": true,
                                    "delete": false,
                                    "dest": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/work/logs/instance",
                                    "dest_port": 22,
                                    "dirs": false,
                                    "existing_only": false,
                                    "group": null,
                                    "link_dest": null,
                                    "links": null,
                                    "mode": "pull",
                                    "owner": null,
                                    "partial": false,
                                    "perms": null,
                                    "private_key": null,
                                    "recursive": null,
                                    "rsync_opts": [],
                                    "rsync_path": "sudo -u root rsync",
                                    "rsync_timeout": 0,
                                    "set_remote_user": true,
                                    "src": "zuul@199.204.45.216:/tmp/logs/system",
                                    "ssh_connection_multiplexing": false,
                                    "times": null,
                                    "use_ssh_args": false,
                                    "verify_host": false
                                }
                            },
                            "msg": "created directory /var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/work/logs/instance\ncd+++++++++ system/\n>f+++++++++ system/brctl-show.txt\n>f+++++++++ system/docker-images.txt\n>f+++++++++ system/ip-addr.txt\n>f+++++++++ system/ip-route.txt\n>f+++++++++ system/lsblk.txt\n>f+++++++++ system/mount.txt\n>f+++++++++ system/packages.txt\n>f+++++++++ system/ps.txt\n>f+++++++++ system/systemd-cgls.txt\n",
                            "rc": 0,
                            "stdout_lines": [
                                "created directory /var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/work/logs/instance",
                                "cd+++++++++ system/",
                                ">f+++++++++ system/brctl-show.txt",
                                ">f+++++++++ system/docker-images.txt",
                                ">f+++++++++ system/ip-addr.txt",
                                ">f+++++++++ system/ip-route.txt",
                                ">f+++++++++ system/lsblk.txt",
                                ">f+++++++++ system/mount.txt",
                                ">f+++++++++ system/packages.txt",
                                ">f+++++++++ system/ps.txt",
                                ">f+++++++++ system/systemd-cgls.txt"
                            ]
                        }
                    },
                    "role": {
                        "id": "0242ac17-0010-0a3c-9efc-00000000000a",
                        "name": "gather-host-logs",
                        "path": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/untrusted/project_4/opendev.org/openstack/openstack-helm/roles/gather-host-logs"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-02-16T23:06:50.206090Z",
                            "start": "2026-02-16T23:06:49.574938Z"
                        },
                        "id": "0242ac17-0010-0a3c-9efc-00000000000e",
                        "name": "Downloads logs to executor"
                    }
                },
                {
                    "hosts": {
                        "instance": {
                            "action": "file",
                            "changed": true,
                            "msg": "All items completed",
                            "results": [
                                {
                                    "ansible_loop_var": "directory",
                                    "changed": true,
                                    "diff": {
                                        "after": {
                                            "path": "/tmp/logs/helm/values",
                                            "state": "directory"
                                        },
                                        "before": {
                                            "path": "/tmp/logs/helm/values",
                                            "state": "absent"
                                        }
                                    },
                                    "directory": "values",
                                    "failed": false,
                                    "gid": 0,
                                    "group": "root",
                                    "invocation": {
                                        "module_args": {
                                            "_diff_peek": null,
                                            "_original_basename": null,
                                            "access_time": null,
                                            "access_time_format": "%Y%m%d%H%M.%S",
                                            "attributes": null,
                                            "follow": true,
                                            "force": false,
                                            "group": null,
                                            "mode": null,
                                            "modification_time": null,
                                            "modification_time_format": "%Y%m%d%H%M.%S",
                                            "owner": null,
                                            "path": "/tmp/logs/helm/values",
                                            "recurse": false,
                                            "selevel": null,
                                            "serole": null,
                                            "setype": null,
                                            "seuser": null,
                                            "src": null,
                                            "state": "directory",
                                            "unsafe_writes": false
                                        }
                                    },
                                    "mode": "0755",
                                    "owner": "root",
                                    "path": "/tmp/logs/helm/values",
                                    "size": 4096,
                                    "state": "directory",
                                    "uid": 0
                                },
                                {
                                    "ansible_loop_var": "directory",
                                    "changed": true,
                                    "diff": {
                                        "after": {
                                            "path": "/tmp/logs/helm/releases",
                                            "state": "directory"
                                        },
                                        "before": {
                                            "path": "/tmp/logs/helm/releases",
                                            "state": "absent"
                                        }
                                    },
                                    "directory": "releases",
                                    "failed": false,
                                    "gid": 0,
                                    "group": "root",
                                    "invocation": {
                                        "module_args": {
                                            "_diff_peek": null,
                                            "_original_basename": null,
                                            "access_time": null,
                                            "access_time_format": "%Y%m%d%H%M.%S",
                                            "attributes": null,
                                            "follow": true,
                                            "force": false,
                                            "group": null,
                                            "mode": null,
                                            "modification_time": null,
                                            "modification_time_format": "%Y%m%d%H%M.%S",
                                            "owner": null,
                                            "path": "/tmp/logs/helm/releases",
                                            "recurse": false,
                                            "selevel": null,
                                            "serole": null,
                                            "setype": null,
                                            "seuser": null,
                                            "src": null,
                                            "state": "directory",
                                            "unsafe_writes": false
                                        }
                                    },
                                    "mode": "0755",
                                    "owner": "root",
                                    "path": "/tmp/logs/helm/releases",
                                    "size": 4096,
                                    "state": "directory",
                                    "uid": 0
                                }
                            ]
                        }
                    },
                    "role": {
                        "id": "0242ac17-0010-0a3c-9efc-000000000010",
                        "name": "helm-release-status",
                        "path": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/untrusted/project_4/opendev.org/openstack/openstack-helm/roles/helm-release-status"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-02-16T23:06:50.602424Z",
                            "start": "2026-02-16T23:06:50.215334Z"
                        },
                        "id": "0242ac17-0010-0a3c-9efc-000000000012",
                        "name": "creating directory for helm release status"
                    }
                },
                {
                    "hosts": {
                        "instance": {
                            "action": "shell",
                            "changed": true,
                            "cmd": "set -e\n\nfor namespace in $(kubectl get namespaces --no-headers --output custom-columns=\":metadata.name\"); do\n      # get all Helm releases including pending and failed releases\n      for release in $(helm list --all --short --namespace $namespace); do\n              # Make respective directories only when a Helm release actually exists in the namespace\n              # to prevent uploading a bunch of empty directories for namespaces without a Helm release.\n              mkdir -p /tmp/logs/helm/releases/$namespace\n              mkdir -p /tmp/logs/helm/values/$namespace\n\n              helm status $release --namespace $namespace >> /tmp/logs/helm/releases/$namespace/$release.txt\n              helm get values $release --namespace $namespace --all >> /tmp/logs/helm/values/$namespace/$release.yaml\n      done\ndone",
                            "delta": "0:00:00.669379",
                            "end": "2026-02-16 23:06:51.498300",
                            "invocation": {
                                "module_args": {
                                    "_raw_params": "set -e\n\nfor namespace in $(kubectl get namespaces --no-headers --output custom-columns=\":metadata.name\"); do\n      # get all Helm releases including pending and failed releases\n      for release in $(helm list --all --short --namespace $namespace); do\n              # Make respective directories only when a Helm release actually exists in the namespace\n              # to prevent uploading a bunch of empty directories for namespaces without a Helm release.\n              mkdir -p /tmp/logs/helm/releases/$namespace\n              mkdir -p /tmp/logs/helm/values/$namespace\n\n              helm status $release --namespace $namespace >> /tmp/logs/helm/releases/$namespace/$release.txt\n              helm get values $release --namespace $namespace --all >> /tmp/logs/helm/values/$namespace/$release.yaml\n      done\ndone",
                                    "_uses_shell": true,
                                    "argv": null,
                                    "chdir": null,
                                    "creates": null,
                                    "executable": "/bin/bash",
                                    "expand_argument_vars": true,
                                    "removes": null,
                                    "stdin": null,
                                    "stdin_add_newline": true,
                                    "strip_empty_ends": true,
                                    "zuul_ansible_split_streams": false,
                                    "zuul_log_id": "0242ac17-0010-0a3c-9efc-000000000014-1-instance",
                                    "zuul_no_log": false,
                                    "zuul_output_max_bytes": 1073741824
                                }
                            },
                            "msg": "",
                            "rc": 0,
                            "start": "2026-02-16 23:06:50.828921",
                            "stderr": "",
                            "stderr_lines": [],
                            "stdout": "",
                            "stdout_lines": [],
                            "zuul_log_id": "0242ac17-0010-0a3c-9efc-000000000014-1-instance"
                        }
                    },
                    "role": {
                        "id": "0242ac17-0010-0a3c-9efc-000000000010",
                        "name": "helm-release-status",
                        "path": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/untrusted/project_4/opendev.org/openstack/openstack-helm/roles/helm-release-status"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-02-16T23:06:51.650486Z",
                            "start": "2026-02-16T23:06:50.630169Z"
                        },
                        "id": "0242ac17-0010-0a3c-9efc-000000000014",
                        "name": "Gather get release status for helm charts"
                    }
                },
                {
                    "hosts": {
                        "instance": {
                            "action": "synchronize",
                            "changed": true,
                            "cmd": "/usr/bin/rsync --delay-updates -F --compress --archive --rsh='/usr/bin/ssh -S none -o Port=22 -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null' --rsync-path='sudo -u root rsync' --out-format='<<CHANGED>>%i %n%L' zuul@199.204.45.216:/tmp/logs/helm /var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/work/logs/instance",
                            "invocation": {
                                "module_args": {
                                    "_local_rsync_password": null,
                                    "_local_rsync_path": "rsync",
                                    "_ssh_args": null,
                                    "_substitute_controller": false,
                                    "archive": true,
                                    "checksum": false,
                                    "compress": true,
                                    "copy_links": false,
                                    "delay_updates": true,
                                    "delete": false,
                                    "dest": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/work/logs/instance",
                                    "dest_port": 22,
                                    "dirs": false,
                                    "existing_only": false,
                                    "group": null,
                                    "link_dest": null,
                                    "links": null,
                                    "mode": "pull",
                                    "owner": null,
                                    "partial": false,
                                    "perms": null,
                                    "private_key": null,
                                    "recursive": null,
                                    "rsync_opts": [],
                                    "rsync_path": "sudo -u root rsync",
                                    "rsync_timeout": 0,
                                    "set_remote_user": true,
                                    "src": "zuul@199.204.45.216:/tmp/logs/helm",
                                    "ssh_connection_multiplexing": false,
                                    "times": null,
                                    "use_ssh_args": false,
                                    "verify_host": false
                                }
                            },
                            "msg": "cd+++++++++ helm/\ncd+++++++++ helm/releases/\ncd+++++++++ helm/releases/kube-system/\n>f+++++++++ helm/releases/kube-system/ceph-csi-rbd.txt\n>f+++++++++ helm/releases/kube-system/cilium.txt\ncd+++++++++ helm/values/\ncd+++++++++ helm/values/kube-system/\n>f+++++++++ helm/values/kube-system/ceph-csi-rbd.yaml\n>f+++++++++ helm/values/kube-system/cilium.yaml\n",
                            "rc": 0,
                            "stdout_lines": [
                                "cd+++++++++ helm/",
                                "cd+++++++++ helm/releases/",
                                "cd+++++++++ helm/releases/kube-system/",
                                ">f+++++++++ helm/releases/kube-system/ceph-csi-rbd.txt",
                                ">f+++++++++ helm/releases/kube-system/cilium.txt",
                                "cd+++++++++ helm/values/",
                                "cd+++++++++ helm/values/kube-system/",
                                ">f+++++++++ helm/values/kube-system/ceph-csi-rbd.yaml",
                                ">f+++++++++ helm/values/kube-system/cilium.yaml"
                            ]
                        }
                    },
                    "role": {
                        "id": "0242ac17-0010-0a3c-9efc-000000000010",
                        "name": "helm-release-status",
                        "path": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/untrusted/project_4/opendev.org/openstack/openstack-helm/roles/helm-release-status"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-02-16T23:06:52.148178Z",
                            "start": "2026-02-16T23:06:51.655913Z"
                        },
                        "id": "0242ac17-0010-0a3c-9efc-000000000015",
                        "name": "Downloads logs to executor"
                    }
                },
                {
                    "hosts": {
                        "instance": {
                            "action": "file",
                            "changed": true,
                            "diff": {
                                "after": {
                                    "path": "/tmp/logs/objects/cluster",
                                    "state": "directory"
                                },
                                "before": {
                                    "path": "/tmp/logs/objects/cluster",
                                    "state": "absent"
                                }
                            },
                            "gid": 0,
                            "group": "root",
                            "invocation": {
                                "module_args": {
                                    "_diff_peek": null,
                                    "_original_basename": null,
                                    "access_time": null,
                                    "access_time_format": "%Y%m%d%H%M.%S",
                                    "attributes": null,
                                    "follow": true,
                                    "force": false,
                                    "group": null,
                                    "mode": null,
                                    "modification_time": null,
                                    "modification_time_format": "%Y%m%d%H%M.%S",
                                    "owner": null,
                                    "path": "/tmp/logs/objects/cluster",
                                    "recurse": false,
                                    "selevel": null,
                                    "serole": null,
                                    "setype": null,
                                    "seuser": null,
                                    "src": null,
                                    "state": "directory",
                                    "unsafe_writes": false
                                }
                            },
                            "mode": "0755",
                            "owner": "root",
                            "path": "/tmp/logs/objects/cluster",
                            "size": 4096,
                            "state": "directory",
                            "uid": 0
                        }
                    },
                    "role": {
                        "id": "0242ac17-0010-0a3c-9efc-000000000017",
                        "name": "describe-kubernetes-objects",
                        "path": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/untrusted/project_4/opendev.org/openstack/openstack-helm/roles/describe-kubernetes-objects"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-02-16T23:06:52.367370Z",
                            "start": "2026-02-16T23:06:52.158768Z"
                        },
                        "id": "0242ac17-0010-0a3c-9efc-000000000019",
                        "name": "creating directory for cluster scoped objects"
                    }
                },
                {
                    "hosts": {
                        "instance": {
                            "action": "shell",
                            "changed": true,
                            "cmd": "set -e\nexport OBJECT_TYPE=node,clusterrole,clusterrolebinding,storageclass,namespace\nexport PARALLELISM_FACTOR=2\n\nfunction list_objects () {\n  printf ${OBJECT_TYPE} | xargs -d ',' -I {} -P1 -n1 bash -c 'echo \"$@\"' _ {}\n}\nexport -f list_objects\n\nfunction name_objects () {\n  export OBJECT=$1\n  kubectl get ${OBJECT} -o name | xargs -L1 -I {} -P1 -n1 bash -c 'echo \"${OBJECT} ${1#*/}\"' _ {}\n}\nexport -f name_objects\n\nfunction get_objects () {\n  input=($1)\n  export OBJECT=${input[0]}\n  export NAME=${input[1]#*/}\n  echo \"${OBJECT}/${NAME}\"\n  DIR=\"/tmp/logs/objects/cluster/${OBJECT}\"\n  mkdir -p ${DIR}\n  kubectl get ${OBJECT} ${NAME} -o yaml > \"${DIR}/${NAME}.yaml\"\n  kubectl describe ${OBJECT} ${NAME} > \"${DIR}/${NAME}.txt\"\n}\nexport -f get_objects\n\nlist_objects |  xargs -r -n 1 -P ${PARALLELISM_FACTOR} -I {} bash -c 'name_objects \"$@\"' _ {} |  xargs -r -n 1 -P ${PARALLELISM_FACTOR} -I {} bash -c 'get_objects \"$@\"' _ {}",
                            "delta": "0:00:12.035737",
                            "end": "2026-02-16 23:07:04.615329",
                            "invocation": {
                                "module_args": {
                                    "_raw_params": "set -e\nexport OBJECT_TYPE=node,clusterrole,clusterrolebinding,storageclass,namespace\nexport PARALLELISM_FACTOR=2\n\nfunction list_objects () {\n  printf ${OBJECT_TYPE} | xargs -d ',' -I {} -P1 -n1 bash -c 'echo \"$@\"' _ {}\n}\nexport -f list_objects\n\nfunction name_objects () {\n  export OBJECT=$1\n  kubectl get ${OBJECT} -o name | xargs -L1 -I {} -P1 -n1 bash -c 'echo \"${OBJECT} ${1#*/}\"' _ {}\n}\nexport -f name_objects\n\nfunction get_objects () {\n  input=($1)\n  export OBJECT=${input[0]}\n  export NAME=${input[1]#*/}\n  echo \"${OBJECT}/${NAME}\"\n  DIR=\"/tmp/logs/objects/cluster/${OBJECT}\"\n  mkdir -p ${DIR}\n  kubectl get ${OBJECT} ${NAME} -o yaml > \"${DIR}/${NAME}.yaml\"\n  kubectl describe ${OBJECT} ${NAME} > \"${DIR}/${NAME}.txt\"\n}\nexport -f get_objects\n\nlist_objects |  xargs -r -n 1 -P ${PARALLELISM_FACTOR} -I {} bash -c 'name_objects \"$@\"' _ {} |  xargs -r -n 1 -P ${PARALLELISM_FACTOR} -I {} bash -c 'get_objects \"$@\"' _ {}",
                                    "_uses_shell": true,
                                    "argv": null,
                                    "chdir": null,
                                    "creates": null,
                                    "executable": "/bin/bash",
                                    "expand_argument_vars": true,
                                    "removes": null,
                                    "stdin": null,
                                    "stdin_add_newline": true,
                                    "strip_empty_ends": true,
                                    "zuul_ansible_split_streams": false,
                                    "zuul_log_id": "0242ac17-0010-0a3c-9efc-00000000001a-1-instance",
                                    "zuul_no_log": false,
                                    "zuul_output_max_bytes": 1073741824
                                }
                            },
                            "msg": "",
                            "rc": 0,
                            "start": "2026-02-16 23:06:52.579592",
                            "stderr": "",
                            "stderr_lines": [],
                            "stdout": "xargs: warning: options --max-args and --replace/-I/-i are mutually exclusive, ignoring previous --max-args value\nxargs: warning: options --max-args and --replace/-I/-i are mutually exclusive, ignoring previous --max-args value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nnode/instance\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nclusterrole/admin\nclusterrole/ceph-csi-rbd-nodeplugin\nclusterrole/ceph-csi-rbd-provisioner\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nclusterrole/cilium\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nclusterrole/cilium-operator\nclusterrole/cluster-admin\nclusterrole/edit\nclusterrole/kubeadm:get-nodes\nclusterrole/system:aggregate-to-admin\nclusterrole/system:aggregate-to-edit\nclusterrole/system:aggregate-to-view\nclusterrole/system:auth-delegator\nclusterrole/system:basic-user\nclusterrole/system:certificates.k8s.io:certificatesigningrequests:nodeclient\nclusterrole/system:certificates.k8s.io:certificatesigningrequests:selfnodeclient\nclusterrole/system:certificates.k8s.io:kube-apiserver-client-approver\nclusterrolebinding/ceph-csi-rbd-nodeplugin\nclusterrole/system:certificates.k8s.io:kube-apiserver-client-kubelet-approver\nclusterrolebinding/ceph-csi-rbd-provisioner\nclusterrolebinding/cilium\nclusterrolebinding/cilium-operator\nclusterrolebinding/cluster-admin\nclusterrolebinding/kubeadm:get-nodes\nclusterrolebinding/kubeadm:kubelet-bootstrap\nclusterrolebinding/kubeadm:node-autoapprove-bootstrap\nclusterrolebinding/kubeadm:node-autoapprove-certificate-rotation\nclusterrolebinding/kubeadm:nod
e-proxier\nclusterrole/system:certificates.k8s.io:kubelet-serving-approver\nclusterrolebinding/system:basic-user\nclusterrolebinding/system:controller:attachdetach-controller\nclusterrolebinding/system:controller:certificate-controller\nclusterrolebinding/system:controller:clusterrole-aggregation-controller\nclusterrolebinding/system:controller:cronjob-controller\nclusterrolebinding/system:controller:daemon-set-controller\nclusterrolebinding/system:controller:deployment-controller\nclusterrolebinding/system:controller:disruption-controller\nclusterrolebinding/system:controller:endpoint-controller\nclusterrolebinding/system:controller:endpointslice-controller\nclusterrole/system:certificates.k8s.io:legacy-unknown-approver\nclusterrolebinding/system:controller:endpointslicemirroring-controller\nclusterrole/system:controller:attachdetach-controller\nclusterrolebinding/system:controller:ephemeral-volume-controller\nclusterrole/system:controller:certificate-controller\nclusterrolebinding/system:controller:expand-controller\nclusterrole/system:controller:clusterrole-aggregation-controller\nclusterrolebinding/system:controller:generic-garbage-collector\nclusterrolebinding/system:controller:horizontal-pod-autoscaler\nclusterrolebinding/system:controller:job-controller\nclusterrole/system:controller:cronjob-controller\nclusterrole/system:controller:daemon-set-controller\nclusterrolebinding/system:controller:namespace-controller\nclusterrole/system:controller:deployment-controller\nclusterrolebinding/system:controller:node-controller\nclusterrole/system:controller:disruption-controller\nclusterrole/system:controller:endpoint-controller\nclusterrole/system:controller:endpointslice-controller\nclusterrole/system:controller:endpointslicemirroring-controller\nclusterrole/system:controller:ephemeral-volume-controller\nclusterrole/system:controller:expand-controller\nclusterrole/system:controller:generic-garbage-collector\nclusterrole/system:controller:horizontal-pod-autoscaler\ncl
usterrolebinding/system:controller:persistent-volume-binder\nclusterrole/system:controller:job-controller\nclusterrolebinding/system:controller:pod-garbage-collector\nclusterrolebinding/system:controller:pv-protection-controller\nclusterrolebinding/system:controller:pvc-protection-controller\nclusterrole/system:controller:namespace-controller\nclusterrolebinding/system:controller:replicaset-controller\nclusterrole/system:controller:node-controller\nclusterrolebinding/system:controller:replication-controller\nclusterrole/system:controller:persistent-volume-binder\nclusterrolebinding/system:controller:resourcequota-controller\nclusterrole/system:controller:pod-garbage-collector\nclusterrolebinding/system:controller:root-ca-cert-publisher\nclusterrole/system:controller:pv-protection-controller\nclusterrolebinding/system:controller:route-controller\nclusterrole/system:controller:pvc-protection-controller\nclusterrole/system:controller:replicaset-controller\nclusterrole/system:controller:replication-controller\nclusterrolebinding/system:controller:service-account-controller\nclusterrolebinding/system:controller:service-controller\nclusterrole/system:controller:resourcequota-controller\nclusterrolebinding/system:controller:statefulset-controller\nclusterrole/system:controller:root-ca-cert-publisher\nclusterrolebinding/system:controller:ttl-after-finished-controller\nclusterrole/system:controller:route-controller\nclusterrolebinding/system:controller:ttl-controller\nclusterrole/system:controller:service-account-controller\nclusterrolebinding/system:coredns\nclusterrole/system:controller:service-controller\nclusterrolebinding/system:discovery\nclusterrole/system:controller:statefulset-controller\nclusterrolebinding/system:kube-controller-manager\nclusterrole/system:controller:ttl-after-finished-controller\nclusterrolebinding/system:kube-dns\nclusterrole/system:controller:ttl-controller\nclusterrolebinding/system:kube-scheduler\nclusterrole/system:coredns\nclusterrole/system
:discovery\nclusterrole/system:heapster\nclusterrolebinding/system:monitoring\nclusterrolebinding/system:node\nclusterrole/system:kube-aggregator\nclusterrolebinding/system:node-proxier\nclusterrolebinding/system:public-info-viewer\nclusterrolebinding/system:service-account-issuer-discovery\nclusterrole/system:kube-controller-manager\nclusterrole/system:kube-dns\nclusterrolebinding/system:volume-scheduler\nclusterrole/system:kube-scheduler\nclusterrole/system:kubelet-api-admin\nclusterrole/system:monitoring\nclusterrole/system:node\nclusterrole/system:node-bootstrapper\nclusterrole/system:node-problem-detector\nclusterrole/system:node-proxier\nclusterrole/system:persistent-volume-provisioner\nclusterrole/system:public-info-viewer\nclusterrole/system:service-account-issuer-discovery\nclusterrole/system:volume-scheduler\nclusterrole/view\nstorageclass/general\nnamespace/default\nnamespace/kube-node-lease\nnamespace/kube-public\nnamespace/kube-system",
                            "stdout_lines": [
                                "xargs: warning: options --max-args and --replace/-I/-i are mutually exclusive, ignoring previous --max-args value",
                                "xargs: warning: options --max-args and --replace/-I/-i are mutually exclusive, ignoring previous --max-args value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "node/instance",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "clusterrole/admin",
                                "clusterrole/ceph-csi-rbd-nodeplugin",
                                "clusterrole/ceph-csi-rbd-provisioner",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "clusterrole/cilium",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "clusterrole/cilium-operator",
                                "clusterrole/cluster-admin",
                                "clusterrole/edit",
                                "clusterrole/kubeadm:get-nodes",
                                "clusterrole/system:aggregate-to-admin",
                                "clusterrole/system:aggregate-to-edit",
                                "clusterrole/system:aggregate-to-view",
                                "clusterrole/system:auth-delegator",
                                "clusterrole/system:basic-user",
                                "clusterrole/system:certificates.k8s.io:certificatesigningrequests:nodeclient",
                                "clusterrole/system:certificates.k8s.io:certificatesigningrequests:selfnodeclient",
                                "clusterrole/system:certificates.k8s.io:kube-apiserver-client-approver",
                                "clusterrolebinding/ceph-csi-rbd-nodeplugin",
                                "clusterrole/system:certificates.k8s.io:kube-apiserver-client-kubelet-approver",
                                "clusterrolebinding/ceph-csi-rbd-provisioner",
                                "clusterrolebinding/cilium",
                                "clusterrolebinding/cilium-operator",
                                "clusterrolebinding/cluster-admin",
                                "clusterrolebinding/kubeadm:get-nodes",
                                "clusterrolebinding/kubeadm:kubelet-bootstrap",
                                "clusterrolebinding/kubeadm:node-autoapprove-bootstrap",
                                "clusterrolebinding/kubeadm:node-autoapprove-certificate-rotation",
                                "clusterrolebinding/kubeadm:node-proxier",
                                "clusterrole/system:certificates.k8s.io:kubelet-serving-approver",
                                "clusterrolebinding/system:basic-user",
                                "clusterrolebinding/system:controller:attachdetach-controller",
                                "clusterrolebinding/system:controller:certificate-controller",
                                "clusterrolebinding/system:controller:clusterrole-aggregation-controller",
                                "clusterrolebinding/system:controller:cronjob-controller",
                                "clusterrolebinding/system:controller:daemon-set-controller",
                                "clusterrolebinding/system:controller:deployment-controller",
                                "clusterrolebinding/system:controller:disruption-controller",
                                "clusterrolebinding/system:controller:endpoint-controller",
                                "clusterrolebinding/system:controller:endpointslice-controller",
                                "clusterrole/system:certificates.k8s.io:legacy-unknown-approver",
                                "clusterrolebinding/system:controller:endpointslicemirroring-controller",
                                "clusterrole/system:controller:attachdetach-controller",
                                "clusterrolebinding/system:controller:ephemeral-volume-controller",
                                "clusterrole/system:controller:certificate-controller",
                                "clusterrolebinding/system:controller:expand-controller",
                                "clusterrole/system:controller:clusterrole-aggregation-controller",
                                "clusterrolebinding/system:controller:generic-garbage-collector",
                                "clusterrolebinding/system:controller:horizontal-pod-autoscaler",
                                "clusterrolebinding/system:controller:job-controller",
                                "clusterrole/system:controller:cronjob-controller",
                                "clusterrole/system:controller:daemon-set-controller",
                                "clusterrolebinding/system:controller:namespace-controller",
                                "clusterrole/system:controller:deployment-controller",
                                "clusterrolebinding/system:controller:node-controller",
                                "clusterrole/system:controller:disruption-controller",
                                "clusterrole/system:controller:endpoint-controller",
                                "clusterrole/system:controller:endpointslice-controller",
                                "clusterrole/system:controller:endpointslicemirroring-controller",
                                "clusterrole/system:controller:ephemeral-volume-controller",
                                "clusterrole/system:controller:expand-controller",
                                "clusterrole/system:controller:generic-garbage-collector",
                                "clusterrole/system:controller:horizontal-pod-autoscaler",
                                "clusterrolebinding/system:controller:persistent-volume-binder",
                                "clusterrole/system:controller:job-controller",
                                "clusterrolebinding/system:controller:pod-garbage-collector",
                                "clusterrolebinding/system:controller:pv-protection-controller",
                                "clusterrolebinding/system:controller:pvc-protection-controller",
                                "clusterrole/system:controller:namespace-controller",
                                "clusterrolebinding/system:controller:replicaset-controller",
                                "clusterrole/system:controller:node-controller",
                                "clusterrolebinding/system:controller:replication-controller",
                                "clusterrole/system:controller:persistent-volume-binder",
                                "clusterrolebinding/system:controller:resourcequota-controller",
                                "clusterrole/system:controller:pod-garbage-collector",
                                "clusterrolebinding/system:controller:root-ca-cert-publisher",
                                "clusterrole/system:controller:pv-protection-controller",
                                "clusterrolebinding/system:controller:route-controller",
                                "clusterrole/system:controller:pvc-protection-controller",
                                "clusterrole/system:controller:replicaset-controller",
                                "clusterrole/system:controller:replication-controller",
                                "clusterrolebinding/system:controller:service-account-controller",
                                "clusterrolebinding/system:controller:service-controller",
                                "clusterrole/system:controller:resourcequota-controller",
                                "clusterrolebinding/system:controller:statefulset-controller",
                                "clusterrole/system:controller:root-ca-cert-publisher",
                                "clusterrolebinding/system:controller:ttl-after-finished-controller",
                                "clusterrole/system:controller:route-controller",
                                "clusterrolebinding/system:controller:ttl-controller",
                                "clusterrole/system:controller:service-account-controller",
                                "clusterrolebinding/system:coredns",
                                "clusterrole/system:controller:service-controller",
                                "clusterrolebinding/system:discovery",
                                "clusterrole/system:controller:statefulset-controller",
                                "clusterrolebinding/system:kube-controller-manager",
                                "clusterrole/system:controller:ttl-after-finished-controller",
                                "clusterrolebinding/system:kube-dns",
                                "clusterrole/system:controller:ttl-controller",
                                "clusterrolebinding/system:kube-scheduler",
                                "clusterrole/system:coredns",
                                "clusterrole/system:discovery",
                                "clusterrole/system:heapster",
                                "clusterrolebinding/system:monitoring",
                                "clusterrolebinding/system:node",
                                "clusterrole/system:kube-aggregator",
                                "clusterrolebinding/system:node-proxier",
                                "clusterrolebinding/system:public-info-viewer",
                                "clusterrolebinding/system:service-account-issuer-discovery",
                                "clusterrole/system:kube-controller-manager",
                                "clusterrole/system:kube-dns",
                                "clusterrolebinding/system:volume-scheduler",
                                "clusterrole/system:kube-scheduler",
                                "clusterrole/system:kubelet-api-admin",
                                "clusterrole/system:monitoring",
                                "clusterrole/system:node",
                                "clusterrole/system:node-bootstrapper",
                                "clusterrole/system:node-problem-detector",
                                "clusterrole/system:node-proxier",
                                "clusterrole/system:persistent-volume-provisioner",
                                "clusterrole/system:public-info-viewer",
                                "clusterrole/system:service-account-issuer-discovery",
                                "clusterrole/system:volume-scheduler",
                                "clusterrole/view",
                                "storageclass/general",
                                "namespace/default",
                                "namespace/kube-node-lease",
                                "namespace/kube-public",
                                "namespace/kube-system"
                            ],
                            "zuul_log_id": "0242ac17-0010-0a3c-9efc-00000000001a-1-instance"
                        }
                    },
                    "role": {
                        "id": "0242ac17-0010-0a3c-9efc-000000000017",
                        "name": "describe-kubernetes-objects",
                        "path": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/untrusted/project_4/opendev.org/openstack/openstack-helm/roles/describe-kubernetes-objects"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-02-16T23:07:04.922619Z",
                            "start": "2026-02-16T23:06:52.390110Z"
                        },
                        "id": "0242ac17-0010-0a3c-9efc-00000000001a",
                        "name": "Gathering descriptions for cluster scoped objects"
                    }
                },
                {
                    "hosts": {
                        "instance": {
                            "action": "file",
                            "changed": true,
                            "diff": {
                                "after": {
                                    "path": "/tmp/logs/objects/namespaced",
                                    "state": "directory"
                                },
                                "before": {
                                    "path": "/tmp/logs/objects/namespaced",
                                    "state": "absent"
                                }
                            },
                            "gid": 0,
                            "group": "root",
                            "invocation": {
                                "module_args": {
                                    "_diff_peek": null,
                                    "_original_basename": null,
                                    "access_time": null,
                                    "access_time_format": "%Y%m%d%H%M.%S",
                                    "attributes": null,
                                    "follow": true,
                                    "force": false,
                                    "group": null,
                                    "mode": null,
                                    "modification_time": null,
                                    "modification_time_format": "%Y%m%d%H%M.%S",
                                    "owner": null,
                                    "path": "/tmp/logs/objects/namespaced",
                                    "recurse": false,
                                    "selevel": null,
                                    "serole": null,
                                    "setype": null,
                                    "seuser": null,
                                    "src": null,
                                    "state": "directory",
                                    "unsafe_writes": false
                                }
                            },
                            "mode": "0755",
                            "owner": "root",
                            "path": "/tmp/logs/objects/namespaced",
                            "size": 4096,
                            "state": "directory",
                            "uid": 0
                        }
                    },
                    "role": {
                        "id": "0242ac17-0010-0a3c-9efc-000000000017",
                        "name": "describe-kubernetes-objects",
                        "path": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/untrusted/project_4/opendev.org/openstack/openstack-helm/roles/describe-kubernetes-objects"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-02-16T23:07:05.126880Z",
                            "start": "2026-02-16T23:07:04.929944Z"
                        },
                        "id": "0242ac17-0010-0a3c-9efc-00000000001b",
                        "name": "creating directory for namespace scoped objects"
                    }
                },
                {
                    "hosts": {
                        "instance": {
                            "action": "shell",
                            "changed": true,
                            "cmd": "set -e\nexport OBJECT_TYPE=configmaps,cronjobs,daemonsets,deployment,endpoints,ingresses,jobs,networkpolicies,pods,podsecuritypolicies,persistentvolumeclaims,rolebindings,roles,secrets,serviceaccounts,services,statefulsets\nexport PARALLELISM_FACTOR=2\nfunction get_namespaces () {\n  kubectl get namespaces -o name | awk -F '/' '{ print $NF }'\n}\n\nfunction list_namespaced_objects () {\n  export NAMESPACE=$1\n  printf ${OBJECT_TYPE} | xargs -d ',' -I {} -P1 -n1 bash -c 'echo \"${NAMESPACE} $@\"' _ {}\n}\nexport -f list_namespaced_objects\n\nfunction name_objects () {\n  input=($1)\n  export NAMESPACE=${input[0]}\n  export OBJECT=${input[1]}\n  kubectl get -n ${NAMESPACE} ${OBJECT} -o name | xargs -L1 -I {} -P1 -n1 bash -c 'echo \"${NAMESPACE} ${OBJECT} $@\"' _ {}\n}\nexport -f name_objects\n\nfunction get_objects () {\n  input=($1)\n  export NAMESPACE=${input[0]}\n  export OBJECT=${input[1]}\n  export NAME=${input[2]#*/}\n  echo \"${NAMESPACE}/${OBJECT}/${NAME}\"\n  DIR=\"/tmp/logs/objects/namespaced/${NAMESPACE}/${OBJECT}\"\n  mkdir -p ${DIR}\n  kubectl get -n ${NAMESPACE} ${OBJECT} ${NAME} -o yaml > \"${DIR}/${NAME}.yaml\"\n  kubectl describe -n ${NAMESPACE} ${OBJECT} ${NAME} > \"${DIR}/${NAME}.txt\"\n}\nexport -f get_objects\n\nget_namespaces |  xargs -r -n 1 -P ${PARALLELISM_FACTOR} -I {} bash -c 'list_namespaced_objects \"$@\"' _ {} |  xargs -r -n 1 -P ${PARALLELISM_FACTOR} -I {} bash -c 'name_objects \"$@\"' _ {} |  xargs -r -n 1 -P ${PARALLELISM_FACTOR} -I {} bash -c 'get_objects \"$@\"' _ {}",
                            "delta": "0:00:15.649081",
                            "end": "2026-02-16 23:07:21.032771",
                            "invocation": {
                                "module_args": {
                                    "_raw_params": "set -e\nexport OBJECT_TYPE=configmaps,cronjobs,daemonsets,deployment,endpoints,ingresses,jobs,networkpolicies,pods,podsecuritypolicies,persistentvolumeclaims,rolebindings,roles,secrets,serviceaccounts,services,statefulsets\nexport PARALLELISM_FACTOR=2\nfunction get_namespaces () {\n  kubectl get namespaces -o name | awk -F '/' '{ print $NF }'\n}\n\nfunction list_namespaced_objects () {\n  export NAMESPACE=$1\n  printf ${OBJECT_TYPE} | xargs -d ',' -I {} -P1 -n1 bash -c 'echo \"${NAMESPACE} $@\"' _ {}\n}\nexport -f list_namespaced_objects\n\nfunction name_objects () {\n  input=($1)\n  export NAMESPACE=${input[0]}\n  export OBJECT=${input[1]}\n  kubectl get -n ${NAMESPACE} ${OBJECT} -o name | xargs -L1 -I {} -P1 -n1 bash -c 'echo \"${NAMESPACE} ${OBJECT} $@\"' _ {}\n}\nexport -f name_objects\n\nfunction get_objects () {\n  input=($1)\n  export NAMESPACE=${input[0]}\n  export OBJECT=${input[1]}\n  export NAME=${input[2]#*/}\n  echo \"${NAMESPACE}/${OBJECT}/${NAME}\"\n  DIR=\"/tmp/logs/objects/namespaced/${NAMESPACE}/${OBJECT}\"\n  mkdir -p ${DIR}\n  kubectl get -n ${NAMESPACE} ${OBJECT} ${NAME} -o yaml > \"${DIR}/${NAME}.yaml\"\n  kubectl describe -n ${NAMESPACE} ${OBJECT} ${NAME} > \"${DIR}/${NAME}.txt\"\n}\nexport -f get_objects\n\nget_namespaces |  xargs -r -n 1 -P ${PARALLELISM_FACTOR} -I {} bash -c 'list_namespaced_objects \"$@\"' _ {} |  xargs -r -n 1 -P ${PARALLELISM_FACTOR} -I {} bash -c 'name_objects \"$@\"' _ {} |  xargs -r -n 1 -P ${PARALLELISM_FACTOR} -I {} bash -c 'get_objects \"$@\"' _ {}",
                                    "_uses_shell": true,
                                    "argv": null,
                                    "chdir": null,
                                    "creates": null,
                                    "executable": "/bin/bash",
                                    "expand_argument_vars": true,
                                    "removes": null,
                                    "stdin": null,
                                    "stdin_add_newline": true,
                                    "strip_empty_ends": true,
                                    "zuul_ansible_split_streams": false,
                                    "zuul_log_id": "0242ac17-0010-0a3c-9efc-00000000001c-1-instance",
                                    "zuul_no_log": false,
                                    "zuul_output_max_bytes": 1073741824
                                }
                            },
                            "msg": "",
                            "rc": 0,
                            "start": "2026-02-16 23:07:05.383690",
                            "stderr": "",
                            "stderr_lines": [],
                            "stdout": "xargs: warning: options --max-args and --replace/-I/-i are mutually exclusive, ignoring previous --max-args value\nxargs: warning: options --max-args and --replace/-I/-i are mutually exclusive, ignoring previous --max-args value\nxargs: warning: options --max-args and --replace/-I/-i are mutually exclusive, ignoring previous --max-args value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\ndefault/configmaps/kube-root-ca.crt\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nkube-node-lease/configmaps/kube-root-ca.crt\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines 
value\ndefault/endpoints/kubernetes\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nerror: the server doesn't have a resource type \"podsecuritypolicies\"\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nerror: the server doesn't have a resource type \"podsecuritypolicies\"\ndefault/persistentvolumeclaims/test-pvc\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines 
value\nkube-node-lease/serviceaccounts/default\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nkube-public/configmaps/cluster-info\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nkube-public/configmaps/kube-root-ca.crt\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\ndefault/serviceaccounts/default\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\ndefault/services/kubernetes\nerror: the server doesn't have a resource type 
\"podsecuritypolicies\"\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nkube-public/rolebindings/kubeadm:bootstrap-signer-clusterinfo\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nkube-public/rolebindings/system:controller:bootstrap-signer\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nkube-public/roles/kubeadm:bootstrap-signer-clusterinfo\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nkube-public/roles/system:controller:bootstrap-signer\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nkube-public/serviceaccounts/default\nkube-system/configmaps/ceph-config\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines 
value\nkube-system/configmaps/ceph-csi-config\nkube-system/configmaps/ceph-csi-encryption-kms-config\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nkube-system/configmaps/cilium-config\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nkube-system/configmaps/coredns\nerror: the server doesn't have a resource type \"podsecuritypolicies\"\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nkube-system/configmaps/extension-apiserver-authentication\nkube-system/configmaps/kube-apiserver-legacy-service-account-token-tracking\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nkube-system/configmaps/kube-proxy\nkube-system/configmaps/kube-root-ca.crt\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nkube-system/configmaps/kubeadm-config\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nkube-system/configmaps/kubelet-config\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines 
value\nkube-system/daemonsets/ceph-csi-rbd-nodeplugin\nkube-system/daemonsets/cilium\nkube-system/daemonsets/kube-proxy\nkube-system/deployment/ceph-csi-rbd-provisioner\nkube-system/deployment/cilium-operator\nkube-system/deployment/coredns\nkube-system/endpoints/ceph-csi-rbd-nodeplugin-http-metrics\nkube-system/endpoints/ceph-csi-rbd-provisioner-http-metrics\nkube-system/endpoints/kube-dns\nkube-system/pods/ceph-csi-rbd-nodeplugin-knshx\nkube-system/pods/ceph-csi-rbd-provisioner-795cfbf7fb-rj9pz\nkube-system/pods/cilium-cpp9j\nkube-system/pods/cilium-operator-7fcd8d6ffd-bb5zb\nkube-system/pods/coredns-77cccfdc44-lvlzr\nkube-system/pods/coredns-77cccfdc44-vxkbg\nkube-system/pods/etcd-instance\nkube-system/pods/kube-apiserver-instance\nkube-system/pods/kube-controller-manager-instance\nkube-system/pods/kube-proxy-k2ngf\nkube-system/pods/kube-scheduler-instance\nkube-system/pods/kube-vip-instance\nkube-system/roles/ceph-csi-rbd-provisioner\nkube-system/roles/cilium-config-agent\nkube-system/roles/extension-apiserver-authentication-reader\nkube-system/roles/kube-proxy\nkube-system/roles/kubeadm:kubeadm-certs\nkube-system/roles/kubeadm:kubelet-config\nkube-system/rolebindings/ceph-csi-rbd-provisioner\nkube-system/rolebindings/cilium-config-agent\nkube-system/rolebindings/kube-proxy\nkube-system/rolebindings/kubeadm:kubeadm-certs\nkube-system/rolebindings/kubeadm:kubelet-config\nkube-system/rolebindings/kubeadm:nodes-kubeadm-config\nkube-system/rolebindings/system::extension-apiserver-authentication-reader\nkube-system/roles/kubeadm:nodes-kubeadm-config\nkube-system/rolebindings/system::leader-locking-kube-controller-manager\nkube-system/roles/system::leader-locking-kube-controller-manager\nkube-system/rolebindings/system::leader-locking-kube-scheduler\nkube-system/roles/system::leader-locking-kube-scheduler\nkube-system/rolebindings/system:controller:bootstrap-signer\nkube-system/roles/system:controller:bootstrap-signer\nkube-system/rolebindings/system:controller:cloud-
provider\nkube-system/roles/system:controller:cloud-provider\nkube-system/rolebindings/system:controller:token-cleaner\nkube-system/roles/system:controller:token-cleaner\nkube-system/serviceaccounts/attachdetach-controller\nkube-system/serviceaccounts/bootstrap-signer\nkube-system/serviceaccounts/ceph-csi-rbd-nodeplugin\nkube-system/serviceaccounts/ceph-csi-rbd-provisioner\nkube-system/serviceaccounts/certificate-controller\nkube-system/serviceaccounts/cilium\nkube-system/serviceaccounts/cilium-operator\nkube-system/serviceaccounts/clusterrole-aggregation-controller\nkube-system/serviceaccounts/coredns\nkube-system/serviceaccounts/cronjob-controller\nkube-system/serviceaccounts/daemon-set-controller\nkube-system/secrets/bootstrap-token-b39e70\nkube-system/secrets/bootstrap-token-k7l80b\nkube-system/serviceaccounts/default\nkube-system/secrets/csi-rbd-secret\nkube-system/secrets/kubeadm-certs\nkube-system/secrets/sh.helm.release.v1.ceph-csi-rbd.v1\nkube-system/secrets/sh.helm.release.v1.cilium.v1\nkube-system/serviceaccounts/deployment-controller\nkube-system/serviceaccounts/disruption-controller\nkube-system/serviceaccounts/endpoint-controller\nkube-system/serviceaccounts/endpointslice-controller\nkube-system/serviceaccounts/endpointslicemirroring-controller\nkube-system/serviceaccounts/ephemeral-volume-controller\nkube-system/serviceaccounts/expand-controller\nkube-system/serviceaccounts/generic-garbage-collector\nkube-system/serviceaccounts/horizontal-pod-autoscaler\nkube-system/serviceaccounts/job-controller\nkube-system/serviceaccounts/kube-proxy\nkube-system/serviceaccounts/namespace-controller\nkube-system/serviceaccounts/node-controller\nkube-system/serviceaccounts/persistent-volume-binder\nkube-system/serviceaccounts/pod-garbage-collector\nkube-system/serviceaccounts/pv-protection-controller\nkube-system/services/ceph-csi-rbd-nodeplugin-http-metrics\nkube-system/serviceaccounts/pvc-protection-controller\nkube-system/serviceaccounts/replicaset-controller\nkub
e-system/services/ceph-csi-rbd-provisioner-http-metrics\nkube-system/serviceaccounts/replication-controller\nkube-system/services/kube-dns\nkube-system/serviceaccounts/resourcequota-controller\nkube-system/serviceaccounts/root-ca-cert-publisher\nkube-system/serviceaccounts/service-account-controller\nkube-system/serviceaccounts/service-controller\nkube-system/serviceaccounts/statefulset-controller\nkube-system/serviceaccounts/token-cleaner\nkube-system/serviceaccounts/ttl-after-finished-controller\nkube-system/serviceaccounts/ttl-controller",
                            "stdout_lines": [
                                "xargs: warning: options --max-args and --replace/-I/-i are mutually exclusive, ignoring previous --max-args value",
                                "xargs: warning: options --max-args and --replace/-I/-i are mutually exclusive, ignoring previous --max-args value",
                                "xargs: warning: options --max-args and --replace/-I/-i are mutually exclusive, ignoring previous --max-args value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "default/configmaps/kube-root-ca.crt",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "kube-node-lease/configmaps/kube-root-ca.crt",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "default/endpoints/kubernetes",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "error: the server doesn't have a resource type \"podsecuritypolicies\"",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "error: the server doesn't have a resource type \"podsecuritypolicies\"",
                                "default/persistentvolumeclaims/test-pvc",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "kube-node-lease/serviceaccounts/default",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "kube-public/configmaps/cluster-info",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "kube-public/configmaps/kube-root-ca.crt",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "default/serviceaccounts/default",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "default/services/kubernetes",
                                "error: the server doesn't have a resource type \"podsecuritypolicies\"",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "kube-public/rolebindings/kubeadm:bootstrap-signer-clusterinfo",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "kube-public/rolebindings/system:controller:bootstrap-signer",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "kube-public/roles/kubeadm:bootstrap-signer-clusterinfo",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "kube-public/roles/system:controller:bootstrap-signer",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "kube-public/serviceaccounts/default",
                                "kube-system/configmaps/ceph-config",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "kube-system/configmaps/ceph-csi-config",
                                "kube-system/configmaps/ceph-csi-encryption-kms-config",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "kube-system/configmaps/cilium-config",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "kube-system/configmaps/coredns",
                                "error: the server doesn't have a resource type \"podsecuritypolicies\"",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "kube-system/configmaps/extension-apiserver-authentication",
                                "kube-system/configmaps/kube-apiserver-legacy-service-account-token-tracking",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "kube-system/configmaps/kube-proxy",
                                "kube-system/configmaps/kube-root-ca.crt",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "kube-system/configmaps/kubeadm-config",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "kube-system/configmaps/kubelet-config",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "kube-system/daemonsets/ceph-csi-rbd-nodeplugin",
                                "kube-system/daemonsets/cilium",
                                "kube-system/daemonsets/kube-proxy",
                                "kube-system/deployment/ceph-csi-rbd-provisioner",
                                "kube-system/deployment/cilium-operator",
                                "kube-system/deployment/coredns",
                                "kube-system/endpoints/ceph-csi-rbd-nodeplugin-http-metrics",
                                "kube-system/endpoints/ceph-csi-rbd-provisioner-http-metrics",
                                "kube-system/endpoints/kube-dns",
                                "kube-system/pods/ceph-csi-rbd-nodeplugin-knshx",
                                "kube-system/pods/ceph-csi-rbd-provisioner-795cfbf7fb-rj9pz",
                                "kube-system/pods/cilium-cpp9j",
                                "kube-system/pods/cilium-operator-7fcd8d6ffd-bb5zb",
                                "kube-system/pods/coredns-77cccfdc44-lvlzr",
                                "kube-system/pods/coredns-77cccfdc44-vxkbg",
                                "kube-system/pods/etcd-instance",
                                "kube-system/pods/kube-apiserver-instance",
                                "kube-system/pods/kube-controller-manager-instance",
                                "kube-system/pods/kube-proxy-k2ngf",
                                "kube-system/pods/kube-scheduler-instance",
                                "kube-system/pods/kube-vip-instance",
                                "kube-system/roles/ceph-csi-rbd-provisioner",
                                "kube-system/roles/cilium-config-agent",
                                "kube-system/roles/extension-apiserver-authentication-reader",
                                "kube-system/roles/kube-proxy",
                                "kube-system/roles/kubeadm:kubeadm-certs",
                                "kube-system/roles/kubeadm:kubelet-config",
                                "kube-system/rolebindings/ceph-csi-rbd-provisioner",
                                "kube-system/rolebindings/cilium-config-agent",
                                "kube-system/rolebindings/kube-proxy",
                                "kube-system/rolebindings/kubeadm:kubeadm-certs",
                                "kube-system/rolebindings/kubeadm:kubelet-config",
                                "kube-system/rolebindings/kubeadm:nodes-kubeadm-config",
                                "kube-system/rolebindings/system::extension-apiserver-authentication-reader",
                                "kube-system/roles/kubeadm:nodes-kubeadm-config",
                                "kube-system/rolebindings/system::leader-locking-kube-controller-manager",
                                "kube-system/roles/system::leader-locking-kube-controller-manager",
                                "kube-system/rolebindings/system::leader-locking-kube-scheduler",
                                "kube-system/roles/system::leader-locking-kube-scheduler",
                                "kube-system/rolebindings/system:controller:bootstrap-signer",
                                "kube-system/roles/system:controller:bootstrap-signer",
                                "kube-system/rolebindings/system:controller:cloud-provider",
                                "kube-system/roles/system:controller:cloud-provider",
                                "kube-system/rolebindings/system:controller:token-cleaner",
                                "kube-system/roles/system:controller:token-cleaner",
                                "kube-system/serviceaccounts/attachdetach-controller",
                                "kube-system/serviceaccounts/bootstrap-signer",
                                "kube-system/serviceaccounts/ceph-csi-rbd-nodeplugin",
                                "kube-system/serviceaccounts/ceph-csi-rbd-provisioner",
                                "kube-system/serviceaccounts/certificate-controller",
                                "kube-system/serviceaccounts/cilium",
                                "kube-system/serviceaccounts/cilium-operator",
                                "kube-system/serviceaccounts/clusterrole-aggregation-controller",
                                "kube-system/serviceaccounts/coredns",
                                "kube-system/serviceaccounts/cronjob-controller",
                                "kube-system/serviceaccounts/daemon-set-controller",
                                "kube-system/secrets/bootstrap-token-b39e70",
                                "kube-system/secrets/bootstrap-token-k7l80b",
                                "kube-system/serviceaccounts/default",
                                "kube-system/secrets/csi-rbd-secret",
                                "kube-system/secrets/kubeadm-certs",
                                "kube-system/secrets/sh.helm.release.v1.ceph-csi-rbd.v1",
                                "kube-system/secrets/sh.helm.release.v1.cilium.v1",
                                "kube-system/serviceaccounts/deployment-controller",
                                "kube-system/serviceaccounts/disruption-controller",
                                "kube-system/serviceaccounts/endpoint-controller",
                                "kube-system/serviceaccounts/endpointslice-controller",
                                "kube-system/serviceaccounts/endpointslicemirroring-controller",
                                "kube-system/serviceaccounts/ephemeral-volume-controller",
                                "kube-system/serviceaccounts/expand-controller",
                                "kube-system/serviceaccounts/generic-garbage-collector",
                                "kube-system/serviceaccounts/horizontal-pod-autoscaler",
                                "kube-system/serviceaccounts/job-controller",
                                "kube-system/serviceaccounts/kube-proxy",
                                "kube-system/serviceaccounts/namespace-controller",
                                "kube-system/serviceaccounts/node-controller",
                                "kube-system/serviceaccounts/persistent-volume-binder",
                                "kube-system/serviceaccounts/pod-garbage-collector",
                                "kube-system/serviceaccounts/pv-protection-controller",
                                "kube-system/services/ceph-csi-rbd-nodeplugin-http-metrics",
                                "kube-system/serviceaccounts/pvc-protection-controller",
                                "kube-system/serviceaccounts/replicaset-controller",
                                "kube-system/services/ceph-csi-rbd-provisioner-http-metrics",
                                "kube-system/serviceaccounts/replication-controller",
                                "kube-system/services/kube-dns",
                                "kube-system/serviceaccounts/resourcequota-controller",
                                "kube-system/serviceaccounts/root-ca-cert-publisher",
                                "kube-system/serviceaccounts/service-account-controller",
                                "kube-system/serviceaccounts/service-controller",
                                "kube-system/serviceaccounts/statefulset-controller",
                                "kube-system/serviceaccounts/token-cleaner",
                                "kube-system/serviceaccounts/ttl-after-finished-controller",
                                "kube-system/serviceaccounts/ttl-controller"
                            ],
                            "zuul_log_id": "0242ac17-0010-0a3c-9efc-00000000001c-1-instance"
                        }
                    },
                    "role": {
                        "id": "0242ac17-0010-0a3c-9efc-000000000017",
                        "name": "describe-kubernetes-objects",
                        "path": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/untrusted/project_4/opendev.org/openstack/openstack-helm/roles/describe-kubernetes-objects"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-02-16T23:07:21.225347Z",
                            "start": "2026-02-16T23:07:05.186925Z"
                        },
                        "id": "0242ac17-0010-0a3c-9efc-00000000001c",
                        "name": "Gathering descriptions for namespace scoped objects"
                    }
                },
                {
                    "hosts": {
                        "instance": {
                            "action": "synchronize",
                            "changed": true,
                            "cmd": "/usr/bin/rsync --delay-updates -F --compress --archive --rsh='/usr/bin/ssh -S none -o Port=22 -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null' --rsync-path='sudo -u root rsync' --out-format='<<CHANGED>>%i %n%L' zuul@199.204.45.216:/tmp/logs/objects /var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/work/logs/instance",
                            "invocation": {
                                "module_args": {
                                    "_local_rsync_password": null,
                                    "_local_rsync_path": "rsync",
                                    "_ssh_args": null,
                                    "_substitute_controller": false,
                                    "archive": true,
                                    "checksum": false,
                                    "compress": true,
                                    "copy_links": false,
                                    "delay_updates": true,
                                    "delete": false,
                                    "dest": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/work/logs/instance",
                                    "dest_port": 22,
                                    "dirs": false,
                                    "existing_only": false,
                                    "group": null,
                                    "link_dest": null,
                                    "links": null,
                                    "mode": "pull",
                                    "owner": null,
                                    "partial": false,
                                    "perms": null,
                                    "private_key": null,
                                    "recursive": null,
                                    "rsync_opts": [],
                                    "rsync_path": "sudo -u root rsync",
                                    "rsync_timeout": 0,
                                    "set_remote_user": true,
                                    "src": "zuul@199.204.45.216:/tmp/logs/objects",
                                    "ssh_connection_multiplexing": false,
                                    "times": null,
                                    "use_ssh_args": false,
                                    "verify_host": false
                                }
                            },
                            "msg": "cd+++++++++ objects/\ncd+++++++++ objects/cluster/\ncd+++++++++ objects/cluster/clusterrole/\n>f+++++++++ objects/cluster/clusterrole/admin.txt\n>f+++++++++ objects/cluster/clusterrole/admin.yaml\n>f+++++++++ objects/cluster/clusterrole/ceph-csi-rbd-nodeplugin.txt\n>f+++++++++ objects/cluster/clusterrole/ceph-csi-rbd-nodeplugin.yaml\n>f+++++++++ objects/cluster/clusterrole/ceph-csi-rbd-provisioner.txt\n>f+++++++++ objects/cluster/clusterrole/ceph-csi-rbd-provisioner.yaml\n>f+++++++++ objects/cluster/clusterrole/cilium-operator.txt\n>f+++++++++ objects/cluster/clusterrole/cilium-operator.yaml\n>f+++++++++ objects/cluster/clusterrole/cilium.txt\n>f+++++++++ objects/cluster/clusterrole/cilium.yaml\n>f+++++++++ objects/cluster/clusterrole/cluster-admin.txt\n>f+++++++++ objects/cluster/clusterrole/cluster-admin.yaml\n>f+++++++++ objects/cluster/clusterrole/edit.txt\n>f+++++++++ objects/cluster/clusterrole/edit.yaml\n>f+++++++++ objects/cluster/clusterrole/kubeadm:get-nodes.txt\n>f+++++++++ objects/cluster/clusterrole/kubeadm:get-nodes.yaml\n>f+++++++++ objects/cluster/clusterrole/system:aggregate-to-admin.txt\n>f+++++++++ objects/cluster/clusterrole/system:aggregate-to-admin.yaml\n>f+++++++++ objects/cluster/clusterrole/system:aggregate-to-edit.txt\n>f+++++++++ objects/cluster/clusterrole/system:aggregate-to-edit.yaml\n>f+++++++++ objects/cluster/clusterrole/system:aggregate-to-view.txt\n>f+++++++++ objects/cluster/clusterrole/system:aggregate-to-view.yaml\n>f+++++++++ objects/cluster/clusterrole/system:auth-delegator.txt\n>f+++++++++ objects/cluster/clusterrole/system:auth-delegator.yaml\n>f+++++++++ objects/cluster/clusterrole/system:basic-user.txt\n>f+++++++++ objects/cluster/clusterrole/system:basic-user.yaml\n>f+++++++++ objects/cluster/clusterrole/system:certificates.k8s.io:certificatesigningrequests:nodeclient.txt\n>f+++++++++ 
objects/cluster/clusterrole/system:certificates.k8s.io:certificatesigningrequests:nodeclient.yaml\n>f+++++++++ objects/cluster/clusterrole/system:certificates.k8s.io:certificatesigningrequests:selfnodeclient.txt\n>f+++++++++ objects/cluster/clusterrole/system:certificates.k8s.io:certificatesigningrequests:selfnodeclient.yaml\n>f+++++++++ objects/cluster/clusterrole/system:certificates.k8s.io:kube-apiserver-client-approver.txt\n>f+++++++++ objects/cluster/clusterrole/system:certificates.k8s.io:kube-apiserver-client-approver.yaml\n>f+++++++++ objects/cluster/clusterrole/system:certificates.k8s.io:kube-apiserver-client-kubelet-approver.txt\n>f+++++++++ objects/cluster/clusterrole/system:certificates.k8s.io:kube-apiserver-client-kubelet-approver.yaml\n>f+++++++++ objects/cluster/clusterrole/system:certificates.k8s.io:kubelet-serving-approver.txt\n>f+++++++++ objects/cluster/clusterrole/system:certificates.k8s.io:kubelet-serving-approver.yaml\n>f+++++++++ objects/cluster/clusterrole/system:certificates.k8s.io:legacy-unknown-approver.txt\n>f+++++++++ objects/cluster/clusterrole/system:certificates.k8s.io:legacy-unknown-approver.yaml\n>f+++++++++ objects/cluster/clusterrole/system:controller:attachdetach-controller.txt\n>f+++++++++ objects/cluster/clusterrole/system:controller:attachdetach-controller.yaml\n>f+++++++++ objects/cluster/clusterrole/system:controller:certificate-controller.txt\n>f+++++++++ objects/cluster/clusterrole/system:controller:certificate-controller.yaml\n>f+++++++++ objects/cluster/clusterrole/system:controller:clusterrole-aggregation-controller.txt\n>f+++++++++ objects/cluster/clusterrole/system:controller:clusterrole-aggregation-controller.yaml\n>f+++++++++ objects/cluster/clusterrole/system:controller:cronjob-controller.txt\n>f+++++++++ objects/cluster/clusterrole/system:controller:cronjob-controller.yaml\n>f+++++++++ objects/cluster/clusterrole/system:controller:daemon-set-controller.txt\n>f+++++++++ 
objects/cluster/clusterrole/system:controller:daemon-set-controller.yaml\n>f+++++++++ objects/cluster/clusterrole/system:controller:deployment-controller.txt\n>f+++++++++ objects/cluster/clusterrole/system:controller:deployment-controller.yaml\n>f+++++++++ objects/cluster/clusterrole/system:controller:disruption-controller.txt\n>f+++++++++ objects/cluster/clusterrole/system:controller:disruption-controller.yaml\n>f+++++++++ objects/cluster/clusterrole/system:controller:endpoint-controller.txt\n>f+++++++++ objects/cluster/clusterrole/system:controller:endpoint-controller.yaml\n>f+++++++++ objects/cluster/clusterrole/system:controller:endpointslice-controller.txt\n>f+++++++++ objects/cluster/clusterrole/system:controller:endpointslice-controller.yaml\n>f+++++++++ objects/cluster/clusterrole/system:controller:endpointslicemirroring-controller.txt\n>f+++++++++ objects/cluster/clusterrole/system:controller:endpointslicemirroring-controller.yaml\n>f+++++++++ objects/cluster/clusterrole/system:controller:ephemeral-volume-controller.txt\n>f+++++++++ objects/cluster/clusterrole/system:controller:ephemeral-volume-controller.yaml\n>f+++++++++ objects/cluster/clusterrole/system:controller:expand-controller.txt\n>f+++++++++ objects/cluster/clusterrole/system:controller:expand-controller.yaml\n>f+++++++++ objects/cluster/clusterrole/system:controller:generic-garbage-collector.txt\n>f+++++++++ objects/cluster/clusterrole/system:controller:generic-garbage-collector.yaml\n>f+++++++++ objects/cluster/clusterrole/system:controller:horizontal-pod-autoscaler.txt\n>f+++++++++ objects/cluster/clusterrole/system:controller:horizontal-pod-autoscaler.yaml\n>f+++++++++ objects/cluster/clusterrole/system:controller:job-controller.txt\n>f+++++++++ objects/cluster/clusterrole/system:controller:job-controller.yaml\n>f+++++++++ objects/cluster/clusterrole/system:controller:namespace-controller.txt\n>f+++++++++ objects/cluster/clusterrole/system:controller:namespace-controller.yaml\n>f+++++++++ 
objects/cluster/clusterrole/system:controller:node-controller.txt\n>f+++++++++ objects/cluster/clusterrole/system:controller:node-controller.yaml\n>f+++++++++ objects/cluster/clusterrole/system:controller:persistent-volume-binder.txt\n>f+++++++++ objects/cluster/clusterrole/system:controller:persistent-volume-binder.yaml\n>f+++++++++ objects/cluster/clusterrole/system:controller:pod-garbage-collector.txt\n>f+++++++++ objects/cluster/clusterrole/system:controller:pod-garbage-collector.yaml\n>f+++++++++ objects/cluster/clusterrole/system:controller:pv-protection-controller.txt\n>f+++++++++ objects/cluster/clusterrole/system:controller:pv-protection-controller.yaml\n>f+++++++++ objects/cluster/clusterrole/system:controller:pvc-protection-controller.txt\n>f+++++++++ objects/cluster/clusterrole/system:controller:pvc-protection-controller.yaml\n>f+++++++++ objects/cluster/clusterrole/system:controller:replicaset-controller.txt\n>f+++++++++ objects/cluster/clusterrole/system:controller:replicaset-controller.yaml\n>f+++++++++ objects/cluster/clusterrole/system:controller:replication-controller.txt\n>f+++++++++ objects/cluster/clusterrole/system:controller:replication-controller.yaml\n>f+++++++++ objects/cluster/clusterrole/system:controller:resourcequota-controller.txt\n>f+++++++++ objects/cluster/clusterrole/system:controller:resourcequota-controller.yaml\n>f+++++++++ objects/cluster/clusterrole/system:controller:root-ca-cert-publisher.txt\n>f+++++++++ objects/cluster/clusterrole/system:controller:root-ca-cert-publisher.yaml\n>f+++++++++ objects/cluster/clusterrole/system:controller:route-controller.txt\n>f+++++++++ objects/cluster/clusterrole/system:controller:route-controller.yaml\n>f+++++++++ objects/cluster/clusterrole/system:controller:service-account-controller.txt\n>f+++++++++ objects/cluster/clusterrole/system:controller:service-account-controller.yaml\n>f+++++++++ objects/cluster/clusterrole/system:controller:service-controller.txt\n>f+++++++++ 
objects/cluster/clusterrole/system:controller:service-controller.yaml\n>f+++++++++ objects/cluster/clusterrole/system:controller:statefulset-controller.txt\n>f+++++++++ objects/cluster/clusterrole/system:controller:statefulset-controller.yaml\n>f+++++++++ objects/cluster/clusterrole/system:controller:ttl-after-finished-controller.txt\n>f+++++++++ objects/cluster/clusterrole/system:controller:ttl-after-finished-controller.yaml\n>f+++++++++ objects/cluster/clusterrole/system:controller:ttl-controller.txt\n>f+++++++++ objects/cluster/clusterrole/system:controller:ttl-controller.yaml\n>f+++++++++ objects/cluster/clusterrole/system:coredns.txt\n>f+++++++++ objects/cluster/clusterrole/system:coredns.yaml\n>f+++++++++ objects/cluster/clusterrole/system:discovery.txt\n>f+++++++++ objects/cluster/clusterrole/system:discovery.yaml\n>f+++++++++ objects/cluster/clusterrole/system:heapster.txt\n>f+++++++++ objects/cluster/clusterrole/system:heapster.yaml\n>f+++++++++ objects/cluster/clusterrole/system:kube-aggregator.txt\n>f+++++++++ objects/cluster/clusterrole/system:kube-aggregator.yaml\n>f+++++++++ objects/cluster/clusterrole/system:kube-controller-manager.txt\n>f+++++++++ objects/cluster/clusterrole/system:kube-controller-manager.yaml\n>f+++++++++ objects/cluster/clusterrole/system:kube-dns.txt\n>f+++++++++ objects/cluster/clusterrole/system:kube-dns.yaml\n>f+++++++++ objects/cluster/clusterrole/system:kube-scheduler.txt\n>f+++++++++ objects/cluster/clusterrole/system:kube-scheduler.yaml\n>f+++++++++ objects/cluster/clusterrole/system:kubelet-api-admin.txt\n>f+++++++++ objects/cluster/clusterrole/system:kubelet-api-admin.yaml\n>f+++++++++ objects/cluster/clusterrole/system:monitoring.txt\n>f+++++++++ objects/cluster/clusterrole/system:monitoring.yaml\n>f+++++++++ objects/cluster/clusterrole/system:node-bootstrapper.txt\n>f+++++++++ objects/cluster/clusterrole/system:node-bootstrapper.yaml\n>f+++++++++ objects/cluster/clusterrole/system:node-problem-detector.txt\n>f+++++++++ 
objects/cluster/clusterrole/system:node-problem-detector.yaml\n>f+++++++++ objects/cluster/clusterrole/system:node-proxier.txt\n>f+++++++++ objects/cluster/clusterrole/system:node-proxier.yaml\n>f+++++++++ objects/cluster/clusterrole/system:node.txt\n>f+++++++++ objects/cluster/clusterrole/system:node.yaml\n>f+++++++++ objects/cluster/clusterrole/system:persistent-volume-provisioner.txt\n>f+++++++++ objects/cluster/clusterrole/system:persistent-volume-provisioner.yaml\n>f+++++++++ objects/cluster/clusterrole/system:public-info-viewer.txt\n>f+++++++++ objects/cluster/clusterrole/system:public-info-viewer.yaml\n>f+++++++++ objects/cluster/clusterrole/system:service-account-issuer-discovery.txt\n>f+++++++++ objects/cluster/clusterrole/system:service-account-issuer-discovery.yaml\n>f+++++++++ objects/cluster/clusterrole/system:volume-scheduler.txt\n>f+++++++++ objects/cluster/clusterrole/system:volume-scheduler.yaml\n>f+++++++++ objects/cluster/clusterrole/view.txt\n>f+++++++++ objects/cluster/clusterrole/view.yaml\ncd+++++++++ objects/cluster/clusterrolebinding/\n>f+++++++++ objects/cluster/clusterrolebinding/ceph-csi-rbd-nodeplugin.txt\n>f+++++++++ objects/cluster/clusterrolebinding/ceph-csi-rbd-nodeplugin.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/ceph-csi-rbd-provisioner.txt\n>f+++++++++ objects/cluster/clusterrolebinding/ceph-csi-rbd-provisioner.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/cilium-operator.txt\n>f+++++++++ objects/cluster/clusterrolebinding/cilium-operator.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/cilium.txt\n>f+++++++++ objects/cluster/clusterrolebinding/cilium.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/cluster-admin.txt\n>f+++++++++ objects/cluster/clusterrolebinding/cluster-admin.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/kubeadm:get-nodes.txt\n>f+++++++++ objects/cluster/clusterrolebinding/kubeadm:get-nodes.yaml\n>f+++++++++ 
objects/cluster/clusterrolebinding/kubeadm:kubelet-bootstrap.txt\n>f+++++++++ objects/cluster/clusterrolebinding/kubeadm:kubelet-bootstrap.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/kubeadm:node-autoapprove-bootstrap.txt\n>f+++++++++ objects/cluster/clusterrolebinding/kubeadm:node-autoapprove-bootstrap.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/kubeadm:node-autoapprove-certificate-rotation.txt\n>f+++++++++ objects/cluster/clusterrolebinding/kubeadm:node-autoapprove-certificate-rotation.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/kubeadm:node-proxier.txt\n>f+++++++++ objects/cluster/clusterrolebinding/kubeadm:node-proxier.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/system:basic-user.txt\n>f+++++++++ objects/cluster/clusterrolebinding/system:basic-user.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:attachdetach-controller.txt\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:attachdetach-controller.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:certificate-controller.txt\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:certificate-controller.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:clusterrole-aggregation-controller.txt\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:clusterrole-aggregation-controller.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:cronjob-controller.txt\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:cronjob-controller.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:daemon-set-controller.txt\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:daemon-set-controller.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:deployment-controller.txt\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:deployment-controller.yaml\n>f+++++++++ 
objects/cluster/clusterrolebinding/system:controller:disruption-controller.txt\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:disruption-controller.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:endpoint-controller.txt\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:endpoint-controller.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:endpointslice-controller.txt\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:endpointslice-controller.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:endpointslicemirroring-controller.txt\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:endpointslicemirroring-controller.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:ephemeral-volume-controller.txt\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:ephemeral-volume-controller.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:expand-controller.txt\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:expand-controller.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:generic-garbage-collector.txt\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:generic-garbage-collector.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:horizontal-pod-autoscaler.txt\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:horizontal-pod-autoscaler.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:job-controller.txt\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:job-controller.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:namespace-controller.txt\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:namespace-controller.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:node-controller.txt\n>f+++++++++ 
objects/cluster/clusterrolebinding/system:controller:node-controller.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:persistent-volume-binder.txt\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:persistent-volume-binder.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:pod-garbage-collector.txt\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:pod-garbage-collector.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:pv-protection-controller.txt\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:pv-protection-controller.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:pvc-protection-controller.txt\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:pvc-protection-controller.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:replicaset-controller.txt\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:replicaset-controller.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:replication-controller.txt\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:replication-controller.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:resourcequota-controller.txt\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:resourcequota-controller.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:root-ca-cert-publisher.txt\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:root-ca-cert-publisher.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:route-controller.txt\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:route-controller.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:service-account-controller.txt\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:service-account-controller.yaml\n>f+++++++++ 
objects/cluster/clusterrolebinding/system:controller:service-controller.txt\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:service-controller.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:statefulset-controller.txt\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:statefulset-controller.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:ttl-after-finished-controller.txt\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:ttl-after-finished-controller.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:ttl-controller.txt\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:ttl-controller.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/system:coredns.txt\n>f+++++++++ objects/cluster/clusterrolebinding/system:coredns.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/system:discovery.txt\n>f+++++++++ objects/cluster/clusterrolebinding/system:discovery.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/system:kube-controller-manager.txt\n>f+++++++++ objects/cluster/clusterrolebinding/system:kube-controller-manager.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/system:kube-dns.txt\n>f+++++++++ objects/cluster/clusterrolebinding/system:kube-dns.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/system:kube-scheduler.txt\n>f+++++++++ objects/cluster/clusterrolebinding/system:kube-scheduler.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/system:monitoring.txt\n>f+++++++++ objects/cluster/clusterrolebinding/system:monitoring.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/system:node-proxier.txt\n>f+++++++++ objects/cluster/clusterrolebinding/system:node-proxier.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/system:node.txt\n>f+++++++++ objects/cluster/clusterrolebinding/system:node.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/system:public-info-viewer.txt\n>f+++++++++ 
objects/cluster/clusterrolebinding/system:public-info-viewer.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/system:service-account-issuer-discovery.txt\n>f+++++++++ objects/cluster/clusterrolebinding/system:service-account-issuer-discovery.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/system:volume-scheduler.txt\n>f+++++++++ objects/cluster/clusterrolebinding/system:volume-scheduler.yaml\ncd+++++++++ objects/cluster/namespace/\n>f+++++++++ objects/cluster/namespace/default.txt\n>f+++++++++ objects/cluster/namespace/default.yaml\n>f+++++++++ objects/cluster/namespace/kube-node-lease.txt\n>f+++++++++ objects/cluster/namespace/kube-node-lease.yaml\n>f+++++++++ objects/cluster/namespace/kube-public.txt\n>f+++++++++ objects/cluster/namespace/kube-public.yaml\n>f+++++++++ objects/cluster/namespace/kube-system.txt\n>f+++++++++ objects/cluster/namespace/kube-system.yaml\ncd+++++++++ objects/cluster/node/\n>f+++++++++ objects/cluster/node/instance.txt\n>f+++++++++ objects/cluster/node/instance.yaml\ncd+++++++++ objects/cluster/storageclass/\n>f+++++++++ objects/cluster/storageclass/general.txt\n>f+++++++++ objects/cluster/storageclass/general.yaml\ncd+++++++++ objects/namespaced/\ncd+++++++++ objects/namespaced/default/\ncd+++++++++ objects/namespaced/default/configmaps/\n>f+++++++++ objects/namespaced/default/configmaps/kube-root-ca.crt.txt\n>f+++++++++ objects/namespaced/default/configmaps/kube-root-ca.crt.yaml\ncd+++++++++ objects/namespaced/default/endpoints/\n>f+++++++++ objects/namespaced/default/endpoints/kubernetes.txt\n>f+++++++++ objects/namespaced/default/endpoints/kubernetes.yaml\ncd+++++++++ objects/namespaced/default/persistentvolumeclaims/\n>f+++++++++ objects/namespaced/default/persistentvolumeclaims/test-pvc.txt\n>f+++++++++ objects/namespaced/default/persistentvolumeclaims/test-pvc.yaml\ncd+++++++++ objects/namespaced/default/serviceaccounts/\n>f+++++++++ objects/namespaced/default/serviceaccounts/default.txt\n>f+++++++++ 
objects/namespaced/default/serviceaccounts/default.yaml\ncd+++++++++ objects/namespaced/default/services/\n>f+++++++++ objects/namespaced/default/services/kubernetes.txt\n>f+++++++++ objects/namespaced/default/services/kubernetes.yaml\ncd+++++++++ objects/namespaced/kube-node-lease/\ncd+++++++++ objects/namespaced/kube-node-lease/configmaps/\n>f+++++++++ objects/namespaced/kube-node-lease/configmaps/kube-root-ca.crt.txt\n>f+++++++++ objects/namespaced/kube-node-lease/configmaps/kube-root-ca.crt.yaml\ncd+++++++++ objects/namespaced/kube-node-lease/serviceaccounts/\n>f+++++++++ objects/namespaced/kube-node-lease/serviceaccounts/default.txt\n>f+++++++++ objects/namespaced/kube-node-lease/serviceaccounts/default.yaml\ncd+++++++++ objects/namespaced/kube-public/\ncd+++++++++ objects/namespaced/kube-public/configmaps/\n>f+++++++++ objects/namespaced/kube-public/configmaps/cluster-info.txt\n>f+++++++++ objects/namespaced/kube-public/configmaps/cluster-info.yaml\n>f+++++++++ objects/namespaced/kube-public/configmaps/kube-root-ca.crt.txt\n>f+++++++++ objects/namespaced/kube-public/configmaps/kube-root-ca.crt.yaml\ncd+++++++++ objects/namespaced/kube-public/rolebindings/\n>f+++++++++ objects/namespaced/kube-public/rolebindings/kubeadm:bootstrap-signer-clusterinfo.txt\n>f+++++++++ objects/namespaced/kube-public/rolebindings/kubeadm:bootstrap-signer-clusterinfo.yaml\n>f+++++++++ objects/namespaced/kube-public/rolebindings/system:controller:bootstrap-signer.txt\n>f+++++++++ objects/namespaced/kube-public/rolebindings/system:controller:bootstrap-signer.yaml\ncd+++++++++ objects/namespaced/kube-public/roles/\n>f+++++++++ objects/namespaced/kube-public/roles/kubeadm:bootstrap-signer-clusterinfo.txt\n>f+++++++++ objects/namespaced/kube-public/roles/kubeadm:bootstrap-signer-clusterinfo.yaml\n>f+++++++++ objects/namespaced/kube-public/roles/system:controller:bootstrap-signer.txt\n>f+++++++++ objects/namespaced/kube-public/roles/system:controller:bootstrap-signer.yaml\ncd+++++++++ 
objects/namespaced/kube-public/serviceaccounts/\n>f+++++++++ objects/namespaced/kube-public/serviceaccounts/default.txt\n>f+++++++++ objects/namespaced/kube-public/serviceaccounts/default.yaml\ncd+++++++++ objects/namespaced/kube-system/\ncd+++++++++ objects/namespaced/kube-system/configmaps/\n>f+++++++++ objects/namespaced/kube-system/configmaps/ceph-config.txt\n>f+++++++++ objects/namespaced/kube-system/configmaps/ceph-config.yaml\n>f+++++++++ objects/namespaced/kube-system/configmaps/ceph-csi-config.txt\n>f+++++++++ objects/namespaced/kube-system/configmaps/ceph-csi-config.yaml\n>f+++++++++ objects/namespaced/kube-system/configmaps/ceph-csi-encryption-kms-config.txt\n>f+++++++++ objects/namespaced/kube-system/configmaps/ceph-csi-encryption-kms-config.yaml\n>f+++++++++ objects/namespaced/kube-system/configmaps/cilium-config.txt\n>f+++++++++ objects/namespaced/kube-system/configmaps/cilium-config.yaml\n>f+++++++++ objects/namespaced/kube-system/configmaps/coredns.txt\n>f+++++++++ objects/namespaced/kube-system/configmaps/coredns.yaml\n>f+++++++++ objects/namespaced/kube-system/configmaps/extension-apiserver-authentication.txt\n>f+++++++++ objects/namespaced/kube-system/configmaps/extension-apiserver-authentication.yaml\n>f+++++++++ objects/namespaced/kube-system/configmaps/kube-apiserver-legacy-service-account-token-tracking.txt\n>f+++++++++ objects/namespaced/kube-system/configmaps/kube-apiserver-legacy-service-account-token-tracking.yaml\n>f+++++++++ objects/namespaced/kube-system/configmaps/kube-proxy.txt\n>f+++++++++ objects/namespaced/kube-system/configmaps/kube-proxy.yaml\n>f+++++++++ objects/namespaced/kube-system/configmaps/kube-root-ca.crt.txt\n>f+++++++++ objects/namespaced/kube-system/configmaps/kube-root-ca.crt.yaml\n>f+++++++++ objects/namespaced/kube-system/configmaps/kubeadm-config.txt\n>f+++++++++ objects/namespaced/kube-system/configmaps/kubeadm-config.yaml\n>f+++++++++ objects/namespaced/kube-system/configmaps/kubelet-config.txt\n>f+++++++++ 
objects/namespaced/kube-system/configmaps/kubelet-config.yaml\ncd+++++++++ objects/namespaced/kube-system/daemonsets/\n>f+++++++++ objects/namespaced/kube-system/daemonsets/ceph-csi-rbd-nodeplugin.txt\n>f+++++++++ objects/namespaced/kube-system/daemonsets/ceph-csi-rbd-nodeplugin.yaml\n>f+++++++++ objects/namespaced/kube-system/daemonsets/cilium.txt\n>f+++++++++ objects/namespaced/kube-system/daemonsets/cilium.yaml\n>f+++++++++ objects/namespaced/kube-system/daemonsets/kube-proxy.txt\n>f+++++++++ objects/namespaced/kube-system/daemonsets/kube-proxy.yaml\ncd+++++++++ objects/namespaced/kube-system/deployment/\n>f+++++++++ objects/namespaced/kube-system/deployment/ceph-csi-rbd-provisioner.txt\n>f+++++++++ objects/namespaced/kube-system/deployment/ceph-csi-rbd-provisioner.yaml\n>f+++++++++ objects/namespaced/kube-system/deployment/cilium-operator.txt\n>f+++++++++ objects/namespaced/kube-system/deployment/cilium-operator.yaml\n>f+++++++++ objects/namespaced/kube-system/deployment/coredns.txt\n>f+++++++++ objects/namespaced/kube-system/deployment/coredns.yaml\ncd+++++++++ objects/namespaced/kube-system/endpoints/\n>f+++++++++ objects/namespaced/kube-system/endpoints/ceph-csi-rbd-nodeplugin-http-metrics.txt\n>f+++++++++ objects/namespaced/kube-system/endpoints/ceph-csi-rbd-nodeplugin-http-metrics.yaml\n>f+++++++++ objects/namespaced/kube-system/endpoints/ceph-csi-rbd-provisioner-http-metrics.txt\n>f+++++++++ objects/namespaced/kube-system/endpoints/ceph-csi-rbd-provisioner-http-metrics.yaml\n>f+++++++++ objects/namespaced/kube-system/endpoints/kube-dns.txt\n>f+++++++++ objects/namespaced/kube-system/endpoints/kube-dns.yaml\ncd+++++++++ objects/namespaced/kube-system/pods/\n>f+++++++++ objects/namespaced/kube-system/pods/ceph-csi-rbd-nodeplugin-knshx.txt\n>f+++++++++ objects/namespaced/kube-system/pods/ceph-csi-rbd-nodeplugin-knshx.yaml\n>f+++++++++ objects/namespaced/kube-system/pods/ceph-csi-rbd-provisioner-795cfbf7fb-rj9pz.txt\n>f+++++++++ 
objects/namespaced/kube-system/pods/ceph-csi-rbd-provisioner-795cfbf7fb-rj9pz.yaml\n>f+++++++++ objects/namespaced/kube-system/pods/cilium-cpp9j.txt\n>f+++++++++ objects/namespaced/kube-system/pods/cilium-cpp9j.yaml\n>f+++++++++ objects/namespaced/kube-system/pods/cilium-operator-7fcd8d6ffd-bb5zb.txt\n>f+++++++++ objects/namespaced/kube-system/pods/cilium-operator-7fcd8d6ffd-bb5zb.yaml\n>f+++++++++ objects/namespaced/kube-system/pods/coredns-77cccfdc44-lvlzr.txt\n>f+++++++++ objects/namespaced/kube-system/pods/coredns-77cccfdc44-lvlzr.yaml\n>f+++++++++ objects/namespaced/kube-system/pods/coredns-77cccfdc44-vxkbg.txt\n>f+++++++++ objects/namespaced/kube-system/pods/coredns-77cccfdc44-vxkbg.yaml\n>f+++++++++ objects/namespaced/kube-system/pods/etcd-instance.txt\n>f+++++++++ objects/namespaced/kube-system/pods/etcd-instance.yaml\n>f+++++++++ objects/namespaced/kube-system/pods/kube-apiserver-instance.txt\n>f+++++++++ objects/namespaced/kube-system/pods/kube-apiserver-instance.yaml\n>f+++++++++ objects/namespaced/kube-system/pods/kube-controller-manager-instance.txt\n>f+++++++++ objects/namespaced/kube-system/pods/kube-controller-manager-instance.yaml\n>f+++++++++ objects/namespaced/kube-system/pods/kube-proxy-k2ngf.txt\n>f+++++++++ objects/namespaced/kube-system/pods/kube-proxy-k2ngf.yaml\n>f+++++++++ objects/namespaced/kube-system/pods/kube-scheduler-instance.txt\n>f+++++++++ objects/namespaced/kube-system/pods/kube-scheduler-instance.yaml\n>f+++++++++ objects/namespaced/kube-system/pods/kube-vip-instance.txt\n>f+++++++++ objects/namespaced/kube-system/pods/kube-vip-instance.yaml\ncd+++++++++ objects/namespaced/kube-system/rolebindings/\n>f+++++++++ objects/namespaced/kube-system/rolebindings/ceph-csi-rbd-provisioner.txt\n>f+++++++++ objects/namespaced/kube-system/rolebindings/ceph-csi-rbd-provisioner.yaml\n>f+++++++++ objects/namespaced/kube-system/rolebindings/cilium-config-agent.txt\n>f+++++++++ 
objects/namespaced/kube-system/rolebindings/cilium-config-agent.yaml\n>f+++++++++ objects/namespaced/kube-system/rolebindings/kube-proxy.txt\n>f+++++++++ objects/namespaced/kube-system/rolebindings/kube-proxy.yaml\n>f+++++++++ objects/namespaced/kube-system/rolebindings/kubeadm:kubeadm-certs.txt\n>f+++++++++ objects/namespaced/kube-system/rolebindings/kubeadm:kubeadm-certs.yaml\n>f+++++++++ objects/namespaced/kube-system/rolebindings/kubeadm:kubelet-config.txt\n>f+++++++++ objects/namespaced/kube-system/rolebindings/kubeadm:kubelet-config.yaml\n>f+++++++++ objects/namespaced/kube-system/rolebindings/kubeadm:nodes-kubeadm-config.txt\n>f+++++++++ objects/namespaced/kube-system/rolebindings/kubeadm:nodes-kubeadm-config.yaml\n>f+++++++++ objects/namespaced/kube-system/rolebindings/system::extension-apiserver-authentication-reader.txt\n>f+++++++++ objects/namespaced/kube-system/rolebindings/system::extension-apiserver-authentication-reader.yaml\n>f+++++++++ objects/namespaced/kube-system/rolebindings/system::leader-locking-kube-controller-manager.txt\n>f+++++++++ objects/namespaced/kube-system/rolebindings/system::leader-locking-kube-controller-manager.yaml\n>f+++++++++ objects/namespaced/kube-system/rolebindings/system::leader-locking-kube-scheduler.txt\n>f+++++++++ objects/namespaced/kube-system/rolebindings/system::leader-locking-kube-scheduler.yaml\n>f+++++++++ objects/namespaced/kube-system/rolebindings/system:controller:bootstrap-signer.txt\n>f+++++++++ objects/namespaced/kube-system/rolebindings/system:controller:bootstrap-signer.yaml\n>f+++++++++ objects/namespaced/kube-system/rolebindings/system:controller:cloud-provider.txt\n>f+++++++++ objects/namespaced/kube-system/rolebindings/system:controller:cloud-provider.yaml\n>f+++++++++ objects/namespaced/kube-system/rolebindings/system:controller:token-cleaner.txt\n>f+++++++++ objects/namespaced/kube-system/rolebindings/system:controller:token-cleaner.yaml\ncd+++++++++ 
objects/namespaced/kube-system/roles/\n>f+++++++++ objects/namespaced/kube-system/roles/ceph-csi-rbd-provisioner.txt\n>f+++++++++ objects/namespaced/kube-system/roles/ceph-csi-rbd-provisioner.yaml\n>f+++++++++ objects/namespaced/kube-system/roles/cilium-config-agent.txt\n>f+++++++++ objects/namespaced/kube-system/roles/cilium-config-agent.yaml\n>f+++++++++ objects/namespaced/kube-system/roles/extension-apiserver-authentication-reader.txt\n>f+++++++++ objects/namespaced/kube-system/roles/extension-apiserver-authentication-reader.yaml\n>f+++++++++ objects/namespaced/kube-system/roles/kube-proxy.txt\n>f+++++++++ objects/namespaced/kube-system/roles/kube-proxy.yaml\n>f+++++++++ objects/namespaced/kube-system/roles/kubeadm:kubeadm-certs.txt\n>f+++++++++ objects/namespaced/kube-system/roles/kubeadm:kubeadm-certs.yaml\n>f+++++++++ objects/namespaced/kube-system/roles/kubeadm:kubelet-config.txt\n>f+++++++++ objects/namespaced/kube-system/roles/kubeadm:kubelet-config.yaml\n>f+++++++++ objects/namespaced/kube-system/roles/kubeadm:nodes-kubeadm-config.txt\n>f+++++++++ objects/namespaced/kube-system/roles/kubeadm:nodes-kubeadm-config.yaml\n>f+++++++++ objects/namespaced/kube-system/roles/system::leader-locking-kube-controller-manager.txt\n>f+++++++++ objects/namespaced/kube-system/roles/system::leader-locking-kube-controller-manager.yaml\n>f+++++++++ objects/namespaced/kube-system/roles/system::leader-locking-kube-scheduler.txt\n>f+++++++++ objects/namespaced/kube-system/roles/system::leader-locking-kube-scheduler.yaml\n>f+++++++++ objects/namespaced/kube-system/roles/system:controller:bootstrap-signer.txt\n>f+++++++++ objects/namespaced/kube-system/roles/system:controller:bootstrap-signer.yaml\n>f+++++++++ objects/namespaced/kube-system/roles/system:controller:cloud-provider.txt\n>f+++++++++ objects/namespaced/kube-system/roles/system:controller:cloud-provider.yaml\n>f+++++++++ objects/namespaced/kube-system/roles/system:controller:token-cleaner.txt\n>f+++++++++ 
objects/namespaced/kube-system/roles/system:controller:token-cleaner.yaml\ncd+++++++++ objects/namespaced/kube-system/secrets/\n>f+++++++++ objects/namespaced/kube-system/secrets/bootstrap-token-b39e70.txt\n>f+++++++++ objects/namespaced/kube-system/secrets/bootstrap-token-b39e70.yaml\n>f+++++++++ objects/namespaced/kube-system/secrets/bootstrap-token-k7l80b.txt\n>f+++++++++ objects/namespaced/kube-system/secrets/bootstrap-token-k7l80b.yaml\n>f+++++++++ objects/namespaced/kube-system/secrets/csi-rbd-secret.txt\n>f+++++++++ objects/namespaced/kube-system/secrets/csi-rbd-secret.yaml\n>f+++++++++ objects/namespaced/kube-system/secrets/kubeadm-certs.txt\n>f+++++++++ objects/namespaced/kube-system/secrets/kubeadm-certs.yaml\n>f+++++++++ objects/namespaced/kube-system/secrets/sh.helm.release.v1.ceph-csi-rbd.v1.txt\n>f+++++++++ objects/namespaced/kube-system/secrets/sh.helm.release.v1.ceph-csi-rbd.v1.yaml\n>f+++++++++ objects/namespaced/kube-system/secrets/sh.helm.release.v1.cilium.v1.txt\n>f+++++++++ objects/namespaced/kube-system/secrets/sh.helm.release.v1.cilium.v1.yaml\ncd+++++++++ objects/namespaced/kube-system/serviceaccounts/\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/attachdetach-controller.txt\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/attachdetach-controller.yaml\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/bootstrap-signer.txt\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/bootstrap-signer.yaml\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/ceph-csi-rbd-nodeplugin.txt\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/ceph-csi-rbd-nodeplugin.yaml\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/ceph-csi-rbd-provisioner.txt\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/ceph-csi-rbd-provisioner.yaml\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/certificate-controller.txt\n>f+++++++++ 
objects/namespaced/kube-system/serviceaccounts/certificate-controller.yaml\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/cilium-operator.txt\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/cilium-operator.yaml\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/cilium.txt\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/cilium.yaml\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/clusterrole-aggregation-controller.txt\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/clusterrole-aggregation-controller.yaml\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/coredns.txt\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/coredns.yaml\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/cronjob-controller.txt\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/cronjob-controller.yaml\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/daemon-set-controller.txt\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/daemon-set-controller.yaml\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/default.txt\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/default.yaml\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/deployment-controller.txt\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/deployment-controller.yaml\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/disruption-controller.txt\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/disruption-controller.yaml\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/endpoint-controller.txt\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/endpoint-controller.yaml\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/endpointslice-controller.txt\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/endpointslice-controller.yaml\n>f+++++++++ 
objects/namespaced/kube-system/serviceaccounts/endpointslicemirroring-controller.txt\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/endpointslicemirroring-controller.yaml\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/ephemeral-volume-controller.txt\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/ephemeral-volume-controller.yaml\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/expand-controller.txt\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/expand-controller.yaml\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/generic-garbage-collector.txt\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/generic-garbage-collector.yaml\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/horizontal-pod-autoscaler.txt\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/horizontal-pod-autoscaler.yaml\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/job-controller.txt\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/job-controller.yaml\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/kube-proxy.txt\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/kube-proxy.yaml\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/namespace-controller.txt\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/namespace-controller.yaml\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/node-controller.txt\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/node-controller.yaml\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/persistent-volume-binder.txt\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/persistent-volume-binder.yaml\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/pod-garbage-collector.txt\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/pod-garbage-collector.yaml\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/pv-protection-controller.txt\n>f+++++++++ 
objects/namespaced/kube-system/serviceaccounts/pv-protection-controller.yaml\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/pvc-protection-controller.txt\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/pvc-protection-controller.yaml\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/replicaset-controller.txt\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/replicaset-controller.yaml\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/replication-controller.txt\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/replication-controller.yaml\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/resourcequota-controller.txt\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/resourcequota-controller.yaml\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/root-ca-cert-publisher.txt\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/root-ca-cert-publisher.yaml\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/service-account-controller.txt\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/service-account-controller.yaml\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/service-controller.txt\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/service-controller.yaml\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/statefulset-controller.txt\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/statefulset-controller.yaml\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/token-cleaner.txt\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/token-cleaner.yaml\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/ttl-after-finished-controller.txt\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/ttl-after-finished-controller.yaml\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/ttl-controller.txt\n>f+++++++++ 
objects/namespaced/kube-system/serviceaccounts/ttl-controller.yaml\ncd+++++++++ objects/namespaced/kube-system/services/\n>f+++++++++ objects/namespaced/kube-system/services/ceph-csi-rbd-nodeplugin-http-metrics.txt\n>f+++++++++ objects/namespaced/kube-system/services/ceph-csi-rbd-nodeplugin-http-metrics.yaml\n>f+++++++++ objects/namespaced/kube-system/services/ceph-csi-rbd-provisioner-http-metrics.txt\n>f+++++++++ objects/namespaced/kube-system/services/ceph-csi-rbd-provisioner-http-metrics.yaml\n>f+++++++++ objects/namespaced/kube-system/services/kube-dns.txt\n>f+++++++++ objects/namespaced/kube-system/services/kube-dns.yaml\n",
                            "rc": 0,
                            "stdout_lines": [
                                "cd+++++++++ objects/",
                                "cd+++++++++ objects/cluster/",
                                "cd+++++++++ objects/cluster/clusterrole/",
                                ">f+++++++++ objects/cluster/clusterrole/admin.txt",
                                ">f+++++++++ objects/cluster/clusterrole/admin.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/ceph-csi-rbd-nodeplugin.txt",
                                ">f+++++++++ objects/cluster/clusterrole/ceph-csi-rbd-nodeplugin.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/ceph-csi-rbd-provisioner.txt",
                                ">f+++++++++ objects/cluster/clusterrole/ceph-csi-rbd-provisioner.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/cilium-operator.txt",
                                ">f+++++++++ objects/cluster/clusterrole/cilium-operator.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/cilium.txt",
                                ">f+++++++++ objects/cluster/clusterrole/cilium.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/cluster-admin.txt",
                                ">f+++++++++ objects/cluster/clusterrole/cluster-admin.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/edit.txt",
                                ">f+++++++++ objects/cluster/clusterrole/edit.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/kubeadm:get-nodes.txt",
                                ">f+++++++++ objects/cluster/clusterrole/kubeadm:get-nodes.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:aggregate-to-admin.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:aggregate-to-admin.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:aggregate-to-edit.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:aggregate-to-edit.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:aggregate-to-view.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:aggregate-to-view.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:auth-delegator.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:auth-delegator.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:basic-user.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:basic-user.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:certificates.k8s.io:certificatesigningrequests:nodeclient.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:certificates.k8s.io:certificatesigningrequests:nodeclient.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:certificates.k8s.io:certificatesigningrequests:selfnodeclient.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:certificates.k8s.io:certificatesigningrequests:selfnodeclient.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:certificates.k8s.io:kube-apiserver-client-approver.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:certificates.k8s.io:kube-apiserver-client-approver.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:certificates.k8s.io:kube-apiserver-client-kubelet-approver.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:certificates.k8s.io:kube-apiserver-client-kubelet-approver.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:certificates.k8s.io:kubelet-serving-approver.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:certificates.k8s.io:kubelet-serving-approver.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:certificates.k8s.io:legacy-unknown-approver.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:certificates.k8s.io:legacy-unknown-approver.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:attachdetach-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:attachdetach-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:certificate-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:certificate-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:clusterrole-aggregation-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:clusterrole-aggregation-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:cronjob-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:cronjob-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:daemon-set-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:daemon-set-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:deployment-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:deployment-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:disruption-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:disruption-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:endpoint-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:endpoint-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:endpointslice-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:endpointslice-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:endpointslicemirroring-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:endpointslicemirroring-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:ephemeral-volume-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:ephemeral-volume-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:expand-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:expand-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:generic-garbage-collector.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:generic-garbage-collector.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:horizontal-pod-autoscaler.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:horizontal-pod-autoscaler.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:job-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:job-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:namespace-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:namespace-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:node-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:node-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:persistent-volume-binder.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:persistent-volume-binder.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:pod-garbage-collector.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:pod-garbage-collector.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:pv-protection-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:pv-protection-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:pvc-protection-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:pvc-protection-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:replicaset-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:replicaset-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:replication-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:replication-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:resourcequota-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:resourcequota-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:root-ca-cert-publisher.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:root-ca-cert-publisher.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:route-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:route-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:service-account-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:service-account-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:service-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:service-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:statefulset-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:statefulset-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:ttl-after-finished-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:ttl-after-finished-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:ttl-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:ttl-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:coredns.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:coredns.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:discovery.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:discovery.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:heapster.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:heapster.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:kube-aggregator.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:kube-aggregator.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:kube-controller-manager.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:kube-controller-manager.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:kube-dns.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:kube-dns.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:kube-scheduler.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:kube-scheduler.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:kubelet-api-admin.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:kubelet-api-admin.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:monitoring.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:monitoring.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:node-bootstrapper.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:node-bootstrapper.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:node-problem-detector.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:node-problem-detector.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:node-proxier.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:node-proxier.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:node.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:node.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:persistent-volume-provisioner.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:persistent-volume-provisioner.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:public-info-viewer.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:public-info-viewer.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:service-account-issuer-discovery.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:service-account-issuer-discovery.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:volume-scheduler.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:volume-scheduler.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/view.txt",
                                ">f+++++++++ objects/cluster/clusterrole/view.yaml",
                                "cd+++++++++ objects/cluster/clusterrolebinding/",
                                ">f+++++++++ objects/cluster/clusterrolebinding/ceph-csi-rbd-nodeplugin.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/ceph-csi-rbd-nodeplugin.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/ceph-csi-rbd-provisioner.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/ceph-csi-rbd-provisioner.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/cilium-operator.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/cilium-operator.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/cilium.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/cilium.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/cluster-admin.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/cluster-admin.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/kubeadm:get-nodes.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/kubeadm:get-nodes.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/kubeadm:kubelet-bootstrap.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/kubeadm:kubelet-bootstrap.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/kubeadm:node-autoapprove-bootstrap.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/kubeadm:node-autoapprove-bootstrap.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/kubeadm:node-autoapprove-certificate-rotation.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/kubeadm:node-autoapprove-certificate-rotation.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/kubeadm:node-proxier.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/kubeadm:node-proxier.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:basic-user.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:basic-user.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:attachdetach-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:attachdetach-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:certificate-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:certificate-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:clusterrole-aggregation-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:clusterrole-aggregation-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:cronjob-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:cronjob-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:daemon-set-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:daemon-set-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:deployment-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:deployment-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:disruption-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:disruption-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:endpoint-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:endpoint-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:endpointslice-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:endpointslice-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:endpointslicemirroring-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:endpointslicemirroring-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:ephemeral-volume-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:ephemeral-volume-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:expand-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:expand-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:generic-garbage-collector.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:generic-garbage-collector.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:horizontal-pod-autoscaler.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:horizontal-pod-autoscaler.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:job-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:job-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:namespace-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:namespace-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:node-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:node-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:persistent-volume-binder.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:persistent-volume-binder.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:pod-garbage-collector.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:pod-garbage-collector.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:pv-protection-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:pv-protection-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:pvc-protection-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:pvc-protection-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:replicaset-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:replicaset-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:replication-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:replication-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:resourcequota-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:resourcequota-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:root-ca-cert-publisher.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:root-ca-cert-publisher.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:route-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:route-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:service-account-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:service-account-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:service-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:service-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:statefulset-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:statefulset-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:ttl-after-finished-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:ttl-after-finished-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:ttl-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:ttl-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:coredns.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:coredns.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:discovery.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:discovery.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:kube-controller-manager.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:kube-controller-manager.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:kube-dns.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:kube-dns.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:kube-scheduler.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:kube-scheduler.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:monitoring.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:monitoring.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:node-proxier.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:node-proxier.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:node.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:node.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:public-info-viewer.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:public-info-viewer.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:service-account-issuer-discovery.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:service-account-issuer-discovery.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:volume-scheduler.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:volume-scheduler.yaml",
                                "cd+++++++++ objects/cluster/namespace/",
                                ">f+++++++++ objects/cluster/namespace/default.txt",
                                ">f+++++++++ objects/cluster/namespace/default.yaml",
                                ">f+++++++++ objects/cluster/namespace/kube-node-lease.txt",
                                ">f+++++++++ objects/cluster/namespace/kube-node-lease.yaml",
                                ">f+++++++++ objects/cluster/namespace/kube-public.txt",
                                ">f+++++++++ objects/cluster/namespace/kube-public.yaml",
                                ">f+++++++++ objects/cluster/namespace/kube-system.txt",
                                ">f+++++++++ objects/cluster/namespace/kube-system.yaml",
                                "cd+++++++++ objects/cluster/node/",
                                ">f+++++++++ objects/cluster/node/instance.txt",
                                ">f+++++++++ objects/cluster/node/instance.yaml",
                                "cd+++++++++ objects/cluster/storageclass/",
                                ">f+++++++++ objects/cluster/storageclass/general.txt",
                                ">f+++++++++ objects/cluster/storageclass/general.yaml",
                                "cd+++++++++ objects/namespaced/",
                                "cd+++++++++ objects/namespaced/default/",
                                "cd+++++++++ objects/namespaced/default/configmaps/",
                                ">f+++++++++ objects/namespaced/default/configmaps/kube-root-ca.crt.txt",
                                ">f+++++++++ objects/namespaced/default/configmaps/kube-root-ca.crt.yaml",
                                "cd+++++++++ objects/namespaced/default/endpoints/",
                                ">f+++++++++ objects/namespaced/default/endpoints/kubernetes.txt",
                                ">f+++++++++ objects/namespaced/default/endpoints/kubernetes.yaml",
                                "cd+++++++++ objects/namespaced/default/persistentvolumeclaims/",
                                ">f+++++++++ objects/namespaced/default/persistentvolumeclaims/test-pvc.txt",
                                ">f+++++++++ objects/namespaced/default/persistentvolumeclaims/test-pvc.yaml",
                                "cd+++++++++ objects/namespaced/default/serviceaccounts/",
                                ">f+++++++++ objects/namespaced/default/serviceaccounts/default.txt",
                                ">f+++++++++ objects/namespaced/default/serviceaccounts/default.yaml",
                                "cd+++++++++ objects/namespaced/default/services/",
                                ">f+++++++++ objects/namespaced/default/services/kubernetes.txt",
                                ">f+++++++++ objects/namespaced/default/services/kubernetes.yaml",
                                "cd+++++++++ objects/namespaced/kube-node-lease/",
                                "cd+++++++++ objects/namespaced/kube-node-lease/configmaps/",
                                ">f+++++++++ objects/namespaced/kube-node-lease/configmaps/kube-root-ca.crt.txt",
                                ">f+++++++++ objects/namespaced/kube-node-lease/configmaps/kube-root-ca.crt.yaml",
                                "cd+++++++++ objects/namespaced/kube-node-lease/serviceaccounts/",
                                ">f+++++++++ objects/namespaced/kube-node-lease/serviceaccounts/default.txt",
                                ">f+++++++++ objects/namespaced/kube-node-lease/serviceaccounts/default.yaml",
                                "cd+++++++++ objects/namespaced/kube-public/",
                                "cd+++++++++ objects/namespaced/kube-public/configmaps/",
                                ">f+++++++++ objects/namespaced/kube-public/configmaps/cluster-info.txt",
                                ">f+++++++++ objects/namespaced/kube-public/configmaps/cluster-info.yaml",
                                ">f+++++++++ objects/namespaced/kube-public/configmaps/kube-root-ca.crt.txt",
                                ">f+++++++++ objects/namespaced/kube-public/configmaps/kube-root-ca.crt.yaml",
                                "cd+++++++++ objects/namespaced/kube-public/rolebindings/",
                                ">f+++++++++ objects/namespaced/kube-public/rolebindings/kubeadm:bootstrap-signer-clusterinfo.txt",
                                ">f+++++++++ objects/namespaced/kube-public/rolebindings/kubeadm:bootstrap-signer-clusterinfo.yaml",
                                ">f+++++++++ objects/namespaced/kube-public/rolebindings/system:controller:bootstrap-signer.txt",
                                ">f+++++++++ objects/namespaced/kube-public/rolebindings/system:controller:bootstrap-signer.yaml",
                                "cd+++++++++ objects/namespaced/kube-public/roles/",
                                ">f+++++++++ objects/namespaced/kube-public/roles/kubeadm:bootstrap-signer-clusterinfo.txt",
                                ">f+++++++++ objects/namespaced/kube-public/roles/kubeadm:bootstrap-signer-clusterinfo.yaml",
                                ">f+++++++++ objects/namespaced/kube-public/roles/system:controller:bootstrap-signer.txt",
                                ">f+++++++++ objects/namespaced/kube-public/roles/system:controller:bootstrap-signer.yaml",
                                "cd+++++++++ objects/namespaced/kube-public/serviceaccounts/",
                                ">f+++++++++ objects/namespaced/kube-public/serviceaccounts/default.txt",
                                ">f+++++++++ objects/namespaced/kube-public/serviceaccounts/default.yaml",
                                "cd+++++++++ objects/namespaced/kube-system/",
                                "cd+++++++++ objects/namespaced/kube-system/configmaps/",
                                ">f+++++++++ objects/namespaced/kube-system/configmaps/ceph-config.txt",
                                ">f+++++++++ objects/namespaced/kube-system/configmaps/ceph-config.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/configmaps/ceph-csi-config.txt",
                                ">f+++++++++ objects/namespaced/kube-system/configmaps/ceph-csi-config.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/configmaps/ceph-csi-encryption-kms-config.txt",
                                ">f+++++++++ objects/namespaced/kube-system/configmaps/ceph-csi-encryption-kms-config.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/configmaps/cilium-config.txt",
                                ">f+++++++++ objects/namespaced/kube-system/configmaps/cilium-config.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/configmaps/coredns.txt",
                                ">f+++++++++ objects/namespaced/kube-system/configmaps/coredns.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/configmaps/extension-apiserver-authentication.txt",
                                ">f+++++++++ objects/namespaced/kube-system/configmaps/extension-apiserver-authentication.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/configmaps/kube-apiserver-legacy-service-account-token-tracking.txt",
                                ">f+++++++++ objects/namespaced/kube-system/configmaps/kube-apiserver-legacy-service-account-token-tracking.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/configmaps/kube-proxy.txt",
                                ">f+++++++++ objects/namespaced/kube-system/configmaps/kube-proxy.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/configmaps/kube-root-ca.crt.txt",
                                ">f+++++++++ objects/namespaced/kube-system/configmaps/kube-root-ca.crt.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/configmaps/kubeadm-config.txt",
                                ">f+++++++++ objects/namespaced/kube-system/configmaps/kubeadm-config.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/configmaps/kubelet-config.txt",
                                ">f+++++++++ objects/namespaced/kube-system/configmaps/kubelet-config.yaml",
                                "cd+++++++++ objects/namespaced/kube-system/daemonsets/",
                                ">f+++++++++ objects/namespaced/kube-system/daemonsets/ceph-csi-rbd-nodeplugin.txt",
                                ">f+++++++++ objects/namespaced/kube-system/daemonsets/ceph-csi-rbd-nodeplugin.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/daemonsets/cilium.txt",
                                ">f+++++++++ objects/namespaced/kube-system/daemonsets/cilium.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/daemonsets/kube-proxy.txt",
                                ">f+++++++++ objects/namespaced/kube-system/daemonsets/kube-proxy.yaml",
                                "cd+++++++++ objects/namespaced/kube-system/deployment/",
                                ">f+++++++++ objects/namespaced/kube-system/deployment/ceph-csi-rbd-provisioner.txt",
                                ">f+++++++++ objects/namespaced/kube-system/deployment/ceph-csi-rbd-provisioner.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/deployment/cilium-operator.txt",
                                ">f+++++++++ objects/namespaced/kube-system/deployment/cilium-operator.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/deployment/coredns.txt",
                                ">f+++++++++ objects/namespaced/kube-system/deployment/coredns.yaml",
                                "cd+++++++++ objects/namespaced/kube-system/endpoints/",
                                ">f+++++++++ objects/namespaced/kube-system/endpoints/ceph-csi-rbd-nodeplugin-http-metrics.txt",
                                ">f+++++++++ objects/namespaced/kube-system/endpoints/ceph-csi-rbd-nodeplugin-http-metrics.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/endpoints/ceph-csi-rbd-provisioner-http-metrics.txt",
                                ">f+++++++++ objects/namespaced/kube-system/endpoints/ceph-csi-rbd-provisioner-http-metrics.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/endpoints/kube-dns.txt",
                                ">f+++++++++ objects/namespaced/kube-system/endpoints/kube-dns.yaml",
                                "cd+++++++++ objects/namespaced/kube-system/pods/",
                                ">f+++++++++ objects/namespaced/kube-system/pods/ceph-csi-rbd-nodeplugin-knshx.txt",
                                ">f+++++++++ objects/namespaced/kube-system/pods/ceph-csi-rbd-nodeplugin-knshx.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/pods/ceph-csi-rbd-provisioner-795cfbf7fb-rj9pz.txt",
                                ">f+++++++++ objects/namespaced/kube-system/pods/ceph-csi-rbd-provisioner-795cfbf7fb-rj9pz.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/pods/cilium-cpp9j.txt",
                                ">f+++++++++ objects/namespaced/kube-system/pods/cilium-cpp9j.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/pods/cilium-operator-7fcd8d6ffd-bb5zb.txt",
                                ">f+++++++++ objects/namespaced/kube-system/pods/cilium-operator-7fcd8d6ffd-bb5zb.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/pods/coredns-77cccfdc44-lvlzr.txt",
                                ">f+++++++++ objects/namespaced/kube-system/pods/coredns-77cccfdc44-lvlzr.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/pods/coredns-77cccfdc44-vxkbg.txt",
                                ">f+++++++++ objects/namespaced/kube-system/pods/coredns-77cccfdc44-vxkbg.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/pods/etcd-instance.txt",
                                ">f+++++++++ objects/namespaced/kube-system/pods/etcd-instance.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/pods/kube-apiserver-instance.txt",
                                ">f+++++++++ objects/namespaced/kube-system/pods/kube-apiserver-instance.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/pods/kube-controller-manager-instance.txt",
                                ">f+++++++++ objects/namespaced/kube-system/pods/kube-controller-manager-instance.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/pods/kube-proxy-k2ngf.txt",
                                ">f+++++++++ objects/namespaced/kube-system/pods/kube-proxy-k2ngf.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/pods/kube-scheduler-instance.txt",
                                ">f+++++++++ objects/namespaced/kube-system/pods/kube-scheduler-instance.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/pods/kube-vip-instance.txt",
                                ">f+++++++++ objects/namespaced/kube-system/pods/kube-vip-instance.yaml",
                                "cd+++++++++ objects/namespaced/kube-system/rolebindings/",
                                ">f+++++++++ objects/namespaced/kube-system/rolebindings/ceph-csi-rbd-provisioner.txt",
                                ">f+++++++++ objects/namespaced/kube-system/rolebindings/ceph-csi-rbd-provisioner.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/rolebindings/cilium-config-agent.txt",
                                ">f+++++++++ objects/namespaced/kube-system/rolebindings/cilium-config-agent.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/rolebindings/kube-proxy.txt",
                                ">f+++++++++ objects/namespaced/kube-system/rolebindings/kube-proxy.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/rolebindings/kubeadm:kubeadm-certs.txt",
                                ">f+++++++++ objects/namespaced/kube-system/rolebindings/kubeadm:kubeadm-certs.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/rolebindings/kubeadm:kubelet-config.txt",
                                ">f+++++++++ objects/namespaced/kube-system/rolebindings/kubeadm:kubelet-config.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/rolebindings/kubeadm:nodes-kubeadm-config.txt",
                                ">f+++++++++ objects/namespaced/kube-system/rolebindings/kubeadm:nodes-kubeadm-config.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/rolebindings/system::extension-apiserver-authentication-reader.txt",
                                ">f+++++++++ objects/namespaced/kube-system/rolebindings/system::extension-apiserver-authentication-reader.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/rolebindings/system::leader-locking-kube-controller-manager.txt",
                                ">f+++++++++ objects/namespaced/kube-system/rolebindings/system::leader-locking-kube-controller-manager.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/rolebindings/system::leader-locking-kube-scheduler.txt",
                                ">f+++++++++ objects/namespaced/kube-system/rolebindings/system::leader-locking-kube-scheduler.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/rolebindings/system:controller:bootstrap-signer.txt",
                                ">f+++++++++ objects/namespaced/kube-system/rolebindings/system:controller:bootstrap-signer.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/rolebindings/system:controller:cloud-provider.txt",
                                ">f+++++++++ objects/namespaced/kube-system/rolebindings/system:controller:cloud-provider.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/rolebindings/system:controller:token-cleaner.txt",
                                ">f+++++++++ objects/namespaced/kube-system/rolebindings/system:controller:token-cleaner.yaml",
                                "cd+++++++++ objects/namespaced/kube-system/roles/",
                                ">f+++++++++ objects/namespaced/kube-system/roles/ceph-csi-rbd-provisioner.txt",
                                ">f+++++++++ objects/namespaced/kube-system/roles/ceph-csi-rbd-provisioner.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/roles/cilium-config-agent.txt",
                                ">f+++++++++ objects/namespaced/kube-system/roles/cilium-config-agent.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/roles/extension-apiserver-authentication-reader.txt",
                                ">f+++++++++ objects/namespaced/kube-system/roles/extension-apiserver-authentication-reader.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/roles/kube-proxy.txt",
                                ">f+++++++++ objects/namespaced/kube-system/roles/kube-proxy.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/roles/kubeadm:kubeadm-certs.txt",
                                ">f+++++++++ objects/namespaced/kube-system/roles/kubeadm:kubeadm-certs.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/roles/kubeadm:kubelet-config.txt",
                                ">f+++++++++ objects/namespaced/kube-system/roles/kubeadm:kubelet-config.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/roles/kubeadm:nodes-kubeadm-config.txt",
                                ">f+++++++++ objects/namespaced/kube-system/roles/kubeadm:nodes-kubeadm-config.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/roles/system::leader-locking-kube-controller-manager.txt",
                                ">f+++++++++ objects/namespaced/kube-system/roles/system::leader-locking-kube-controller-manager.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/roles/system::leader-locking-kube-scheduler.txt",
                                ">f+++++++++ objects/namespaced/kube-system/roles/system::leader-locking-kube-scheduler.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/roles/system:controller:bootstrap-signer.txt",
                                ">f+++++++++ objects/namespaced/kube-system/roles/system:controller:bootstrap-signer.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/roles/system:controller:cloud-provider.txt",
                                ">f+++++++++ objects/namespaced/kube-system/roles/system:controller:cloud-provider.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/roles/system:controller:token-cleaner.txt",
                                ">f+++++++++ objects/namespaced/kube-system/roles/system:controller:token-cleaner.yaml",
                                "cd+++++++++ objects/namespaced/kube-system/secrets/",
                                ">f+++++++++ objects/namespaced/kube-system/secrets/bootstrap-token-b39e70.txt",
                                ">f+++++++++ objects/namespaced/kube-system/secrets/bootstrap-token-b39e70.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/secrets/bootstrap-token-k7l80b.txt",
                                ">f+++++++++ objects/namespaced/kube-system/secrets/bootstrap-token-k7l80b.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/secrets/csi-rbd-secret.txt",
                                ">f+++++++++ objects/namespaced/kube-system/secrets/csi-rbd-secret.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/secrets/kubeadm-certs.txt",
                                ">f+++++++++ objects/namespaced/kube-system/secrets/kubeadm-certs.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/secrets/sh.helm.release.v1.ceph-csi-rbd.v1.txt",
                                ">f+++++++++ objects/namespaced/kube-system/secrets/sh.helm.release.v1.ceph-csi-rbd.v1.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/secrets/sh.helm.release.v1.cilium.v1.txt",
                                ">f+++++++++ objects/namespaced/kube-system/secrets/sh.helm.release.v1.cilium.v1.yaml",
                                "cd+++++++++ objects/namespaced/kube-system/serviceaccounts/",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/attachdetach-controller.txt",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/attachdetach-controller.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/bootstrap-signer.txt",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/bootstrap-signer.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/ceph-csi-rbd-nodeplugin.txt",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/ceph-csi-rbd-nodeplugin.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/ceph-csi-rbd-provisioner.txt",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/ceph-csi-rbd-provisioner.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/certificate-controller.txt",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/certificate-controller.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/cilium-operator.txt",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/cilium-operator.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/cilium.txt",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/cilium.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/clusterrole-aggregation-controller.txt",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/clusterrole-aggregation-controller.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/coredns.txt",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/coredns.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/cronjob-controller.txt",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/cronjob-controller.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/daemon-set-controller.txt",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/daemon-set-controller.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/default.txt",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/default.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/deployment-controller.txt",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/deployment-controller.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/disruption-controller.txt",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/disruption-controller.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/endpoint-controller.txt",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/endpoint-controller.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/endpointslice-controller.txt",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/endpointslice-controller.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/endpointslicemirroring-controller.txt",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/endpointslicemirroring-controller.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/ephemeral-volume-controller.txt",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/ephemeral-volume-controller.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/expand-controller.txt",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/expand-controller.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/generic-garbage-collector.txt",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/generic-garbage-collector.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/horizontal-pod-autoscaler.txt",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/horizontal-pod-autoscaler.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/job-controller.txt",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/job-controller.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/kube-proxy.txt",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/kube-proxy.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/namespace-controller.txt",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/namespace-controller.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/node-controller.txt",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/node-controller.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/persistent-volume-binder.txt",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/persistent-volume-binder.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/pod-garbage-collector.txt",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/pod-garbage-collector.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/pv-protection-controller.txt",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/pv-protection-controller.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/pvc-protection-controller.txt",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/pvc-protection-controller.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/replicaset-controller.txt",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/replicaset-controller.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/replication-controller.txt",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/replication-controller.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/resourcequota-controller.txt",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/resourcequota-controller.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/root-ca-cert-publisher.txt",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/root-ca-cert-publisher.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/service-account-controller.txt",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/service-account-controller.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/service-controller.txt",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/service-controller.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/statefulset-controller.txt",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/statefulset-controller.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/token-cleaner.txt",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/token-cleaner.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/ttl-after-finished-controller.txt",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/ttl-after-finished-controller.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/ttl-controller.txt",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/ttl-controller.yaml",
                                "cd+++++++++ objects/namespaced/kube-system/services/",
                                ">f+++++++++ objects/namespaced/kube-system/services/ceph-csi-rbd-nodeplugin-http-metrics.txt",
                                ">f+++++++++ objects/namespaced/kube-system/services/ceph-csi-rbd-nodeplugin-http-metrics.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/services/ceph-csi-rbd-provisioner-http-metrics.txt",
                                ">f+++++++++ objects/namespaced/kube-system/services/ceph-csi-rbd-provisioner-http-metrics.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/services/kube-dns.txt",
                                ">f+++++++++ objects/namespaced/kube-system/services/kube-dns.yaml"
                            ]
                        }
                    },
                    "role": {
                        "id": "0242ac17-0010-0a3c-9efc-000000000017",
                        "name": "describe-kubernetes-objects",
                        "path": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/untrusted/project_4/opendev.org/openstack/openstack-helm/roles/describe-kubernetes-objects"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-02-16T23:07:21.777338Z",
                            "start": "2026-02-16T23:07:21.231397Z"
                        },
                        "id": "0242ac17-0010-0a3c-9efc-00000000001d",
                        "name": "Downloads logs to executor"
                    }
                },
                {
                    "hosts": {
                        "instance": {
                            "action": "file",
                            "changed": true,
                            "diff": {
                                "after": {
                                    "path": "/tmp/logs/pod-logs",
                                    "state": "directory"
                                },
                                "before": {
                                    "path": "/tmp/logs/pod-logs",
                                    "state": "absent"
                                }
                            },
                            "gid": 0,
                            "group": "root",
                            "invocation": {
                                "module_args": {
                                    "_diff_peek": null,
                                    "_original_basename": null,
                                    "access_time": null,
                                    "access_time_format": "%Y%m%d%H%M.%S",
                                    "attributes": null,
                                    "follow": true,
                                    "force": false,
                                    "group": null,
                                    "mode": null,
                                    "modification_time": null,
                                    "modification_time_format": "%Y%m%d%H%M.%S",
                                    "owner": null,
                                    "path": "/tmp/logs/pod-logs",
                                    "recurse": false,
                                    "selevel": null,
                                    "serole": null,
                                    "setype": null,
                                    "seuser": null,
                                    "src": null,
                                    "state": "directory",
                                    "unsafe_writes": false
                                }
                            },
                            "mode": "0755",
                            "owner": "root",
                            "path": "/tmp/logs/pod-logs",
                            "size": 4096,
                            "state": "directory",
                            "uid": 0
                        }
                    },
                    "role": {
                        "id": "0242ac17-0010-0a3c-9efc-00000000001f",
                        "name": "gather-pod-logs",
                        "path": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/untrusted/project_4/opendev.org/openstack/openstack-helm/roles/gather-pod-logs"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-02-16T23:07:21.996736Z",
                            "start": "2026-02-16T23:07:21.787586Z"
                        },
                        "id": "0242ac17-0010-0a3c-9efc-000000000021",
                        "name": "creating directory for pod logs"
                    }
                },
                {
                    "hosts": {
                        "instance": {
                            "action": "file",
                            "changed": true,
                            "diff": {
                                "after": {
                                    "path": "/tmp/logs/pod-logs/failed-pods",
                                    "state": "directory"
                                },
                                "before": {
                                    "path": "/tmp/logs/pod-logs/failed-pods",
                                    "state": "absent"
                                }
                            },
                            "gid": 0,
                            "group": "root",
                            "invocation": {
                                "module_args": {
                                    "_diff_peek": null,
                                    "_original_basename": null,
                                    "access_time": null,
                                    "access_time_format": "%Y%m%d%H%M.%S",
                                    "attributes": null,
                                    "follow": true,
                                    "force": false,
                                    "group": null,
                                    "mode": null,
                                    "modification_time": null,
                                    "modification_time_format": "%Y%m%d%H%M.%S",
                                    "owner": null,
                                    "path": "/tmp/logs/pod-logs/failed-pods",
                                    "recurse": false,
                                    "selevel": null,
                                    "serole": null,
                                    "setype": null,
                                    "seuser": null,
                                    "src": null,
                                    "state": "directory",
                                    "unsafe_writes": false
                                }
                            },
                            "mode": "0755",
                            "owner": "root",
                            "path": "/tmp/logs/pod-logs/failed-pods",
                            "size": 4096,
                            "state": "directory",
                            "uid": 0
                        }
                    },
                    "role": {
                        "id": "0242ac17-0010-0a3c-9efc-00000000001f",
                        "name": "gather-pod-logs",
                        "path": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/untrusted/project_4/opendev.org/openstack/openstack-helm/roles/gather-pod-logs"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-02-16T23:07:22.198540Z",
                            "start": "2026-02-16T23:07:22.002768Z"
                        },
                        "id": "0242ac17-0010-0a3c-9efc-000000000022",
                        "name": "creating directory for failed pod logs"
                    }
                },
                {
                    "hosts": {
                        "instance": {
                            "action": "shell",
                            "changed": true,
                            "cmd": "set -e\nfunction get_namespaces () {\n  kubectl get namespaces -o name | awk -F '/' '{ print $NF }'\n}\nfunction get_pods () {\n  NAMESPACE=$1\n  kubectl get pods -n ${NAMESPACE} -o name | awk -F '/' '{ print $NF }' | xargs -I {} echo ${NAMESPACE} {}\n}\nexport -f get_pods\nfunction get_pod_logs () {\n  NAMESPACE=${1% *}\n  POD=${1#* }\n  INIT_CONTAINERS=$(kubectl get pod $POD -n ${NAMESPACE} -o json | jq -r '.spec.initContainers[]?.name')\n  CONTAINERS=$(kubectl get pod $POD -n ${NAMESPACE} -o json | jq -r '.spec.containers[].name')\n  for CONTAINER in ${INIT_CONTAINERS} ${CONTAINERS}; do\n    echo \"${NAMESPACE}/${POD}/${CONTAINER}\"\n    mkdir -p \"/tmp/logs/pod-logs/${NAMESPACE}/${POD}\"\n    mkdir -p \"/tmp/logs/pod-logs/failed-pods/${NAMESPACE}/${POD}\"\n    kubectl logs ${POD} -n ${NAMESPACE} -c ${CONTAINER} > \"/tmp/logs/pod-logs/${NAMESPACE}/${POD}/${CONTAINER}.txt\"\n    kubectl logs --previous ${POD} -n ${NAMESPACE} -c ${CONTAINER} > \"/tmp/logs/pod-logs/failed-pods/${NAMESPACE}/${POD}/${CONTAINER}.txt\"\n  done\n}\nexport -f get_pod_logs\nget_namespaces |  xargs -r -I {} bash -c 'get_pods \"$@\"' _ {} |  xargs -r -I {} bash -c 'get_pod_logs \"$@\"' _ {}",
                            "delta": "0:00:06.723105",
                            "end": "2026-02-16 23:07:29.130665",
                            "failed": true,
                            "invocation": {
                                "module_args": {
                                    "_raw_params": "set -e\nfunction get_namespaces () {\n  kubectl get namespaces -o name | awk -F '/' '{ print $NF }'\n}\nfunction get_pods () {\n  NAMESPACE=$1\n  kubectl get pods -n ${NAMESPACE} -o name | awk -F '/' '{ print $NF }' | xargs -I {} echo ${NAMESPACE} {}\n}\nexport -f get_pods\nfunction get_pod_logs () {\n  NAMESPACE=${1% *}\n  POD=${1#* }\n  INIT_CONTAINERS=$(kubectl get pod $POD -n ${NAMESPACE} -o json | jq -r '.spec.initContainers[]?.name')\n  CONTAINERS=$(kubectl get pod $POD -n ${NAMESPACE} -o json | jq -r '.spec.containers[].name')\n  for CONTAINER in ${INIT_CONTAINERS} ${CONTAINERS}; do\n    echo \"${NAMESPACE}/${POD}/${CONTAINER}\"\n    mkdir -p \"/tmp/logs/pod-logs/${NAMESPACE}/${POD}\"\n    mkdir -p \"/tmp/logs/pod-logs/failed-pods/${NAMESPACE}/${POD}\"\n    kubectl logs ${POD} -n ${NAMESPACE} -c ${CONTAINER} > \"/tmp/logs/pod-logs/${NAMESPACE}/${POD}/${CONTAINER}.txt\"\n    kubectl logs --previous ${POD} -n ${NAMESPACE} -c ${CONTAINER} > \"/tmp/logs/pod-logs/failed-pods/${NAMESPACE}/${POD}/${CONTAINER}.txt\"\n  done\n}\nexport -f get_pod_logs\nget_namespaces |  xargs -r -I {} bash -c 'get_pods \"$@\"' _ {} |  xargs -r -I {} bash -c 'get_pod_logs \"$@\"' _ {}",
                                    "_uses_shell": true,
                                    "argv": null,
                                    "chdir": null,
                                    "creates": null,
                                    "executable": "/bin/bash",
                                    "expand_argument_vars": true,
                                    "removes": null,
                                    "stdin": null,
                                    "stdin_add_newline": true,
                                    "strip_empty_ends": true,
                                    "zuul_ansible_split_streams": false,
                                    "zuul_log_id": "0242ac17-0010-0a3c-9efc-000000000023-1-instance",
                                    "zuul_no_log": false,
                                    "zuul_output_max_bytes": 1073741824
                                }
                            },
                            "msg": "non-zero return code",
                            "rc": 123,
                            "start": "2026-02-16 23:07:22.407560",
                            "stderr": "",
                            "stderr_lines": [],
                            "stdout": "kube-system/ceph-csi-rbd-nodeplugin-knshx/csi-rbdplugin\nError from server (BadRequest): previous terminated container \"csi-rbdplugin\" in pod \"ceph-csi-rbd-nodeplugin-knshx\" not found\nkube-system/ceph-csi-rbd-nodeplugin-knshx/driver-registrar\nError from server (BadRequest): previous terminated container \"driver-registrar\" in pod \"ceph-csi-rbd-nodeplugin-knshx\" not found\nkube-system/ceph-csi-rbd-nodeplugin-knshx/liveness-prometheus\nError from server (BadRequest): previous terminated container \"liveness-prometheus\" in pod \"ceph-csi-rbd-nodeplugin-knshx\" not found\nkube-system/ceph-csi-rbd-provisioner-795cfbf7fb-rj9pz/csi-rbdplugin\nError from server (BadRequest): previous terminated container \"csi-rbdplugin\" in pod \"ceph-csi-rbd-provisioner-795cfbf7fb-rj9pz\" not found\nkube-system/ceph-csi-rbd-provisioner-795cfbf7fb-rj9pz/csi-provisioner\nError from server (BadRequest): previous terminated container \"csi-provisioner\" in pod \"ceph-csi-rbd-provisioner-795cfbf7fb-rj9pz\" not found\nkube-system/ceph-csi-rbd-provisioner-795cfbf7fb-rj9pz/csi-resizer\nError from server (BadRequest): previous terminated container \"csi-resizer\" in pod \"ceph-csi-rbd-provisioner-795cfbf7fb-rj9pz\" not found\nkube-system/ceph-csi-rbd-provisioner-795cfbf7fb-rj9pz/csi-snapshotter\nError from server (BadRequest): previous terminated container \"csi-snapshotter\" in pod \"ceph-csi-rbd-provisioner-795cfbf7fb-rj9pz\" not found\nkube-system/ceph-csi-rbd-provisioner-795cfbf7fb-rj9pz/csi-attacher\nError from server (BadRequest): previous terminated container \"csi-attacher\" in pod \"ceph-csi-rbd-provisioner-795cfbf7fb-rj9pz\" not found\nkube-system/ceph-csi-rbd-provisioner-795cfbf7fb-rj9pz/csi-rbdplugin-controller\nError from server (BadRequest): previous terminated container \"csi-rbdplugin-controller\" in pod \"ceph-csi-rbd-provisioner-795cfbf7fb-rj9pz\" not 
found\nkube-system/ceph-csi-rbd-provisioner-795cfbf7fb-rj9pz/liveness-prometheus\nError from server (BadRequest): previous terminated container \"liveness-prometheus\" in pod \"ceph-csi-rbd-provisioner-795cfbf7fb-rj9pz\" not found\nkube-system/cilium-cpp9j/config\nError from server (BadRequest): previous terminated container \"config\" in pod \"cilium-cpp9j\" not found\nkube-system/cilium-cpp9j/mount-cgroup\nError from server (BadRequest): previous terminated container \"mount-cgroup\" in pod \"cilium-cpp9j\" not found\nkube-system/cilium-cpp9j/apply-sysctl-overwrites\nError from server (BadRequest): previous terminated container \"apply-sysctl-overwrites\" in pod \"cilium-cpp9j\" not found\nkube-system/cilium-cpp9j/mount-bpf-fs\nError from server (BadRequest): previous terminated container \"mount-bpf-fs\" in pod \"cilium-cpp9j\" not found\nkube-system/cilium-cpp9j/clean-cilium-state\nError from server (BadRequest): previous terminated container \"clean-cilium-state\" in pod \"cilium-cpp9j\" not found\nkube-system/cilium-cpp9j/install-cni-binaries\nError from server (BadRequest): previous terminated container \"install-cni-binaries\" in pod \"cilium-cpp9j\" not found\nkube-system/cilium-cpp9j/cilium-agent\nError from server (BadRequest): previous terminated container \"cilium-agent\" in pod \"cilium-cpp9j\" not found\nkube-system/cilium-operator-7fcd8d6ffd-bb5zb/cilium-operator\nError from server (BadRequest): previous terminated container \"cilium-operator\" in pod \"cilium-operator-7fcd8d6ffd-bb5zb\" not found\nkube-system/coredns-77cccfdc44-lvlzr/coredns\nError from server (BadRequest): previous terminated container \"coredns\" in pod \"coredns-77cccfdc44-lvlzr\" not found\nkube-system/coredns-77cccfdc44-vxkbg/coredns\nError from server (BadRequest): previous terminated container \"coredns\" in pod \"coredns-77cccfdc44-vxkbg\" not found\nkube-system/etcd-instance/etcd\nError from server (BadRequest): previous terminated container \"etcd\" in pod 
\"etcd-instance\" not found\nkube-system/kube-apiserver-instance/kube-apiserver\nError from server (BadRequest): previous terminated container \"kube-apiserver\" in pod \"kube-apiserver-instance\" not found\nkube-system/kube-controller-manager-instance/kube-controller-manager\nError from server (BadRequest): previous terminated container \"kube-controller-manager\" in pod \"kube-controller-manager-instance\" not found\nkube-system/kube-proxy-k2ngf/kube-proxy\nError from server (BadRequest): previous terminated container \"kube-proxy\" in pod \"kube-proxy-k2ngf\" not found\nkube-system/kube-scheduler-instance/kube-scheduler\nError from server (BadRequest): previous terminated container \"kube-scheduler\" in pod \"kube-scheduler-instance\" not found\nkube-system/kube-vip-instance/kube-vip\nError from server (BadRequest): previous terminated container \"kube-vip\" in pod \"kube-vip-instance\" not found",
                            "stdout_lines": [
                                "kube-system/ceph-csi-rbd-nodeplugin-knshx/csi-rbdplugin",
                                "Error from server (BadRequest): previous terminated container \"csi-rbdplugin\" in pod \"ceph-csi-rbd-nodeplugin-knshx\" not found",
                                "kube-system/ceph-csi-rbd-nodeplugin-knshx/driver-registrar",
                                "Error from server (BadRequest): previous terminated container \"driver-registrar\" in pod \"ceph-csi-rbd-nodeplugin-knshx\" not found",
                                "kube-system/ceph-csi-rbd-nodeplugin-knshx/liveness-prometheus",
                                "Error from server (BadRequest): previous terminated container \"liveness-prometheus\" in pod \"ceph-csi-rbd-nodeplugin-knshx\" not found",
                                "kube-system/ceph-csi-rbd-provisioner-795cfbf7fb-rj9pz/csi-rbdplugin",
                                "Error from server (BadRequest): previous terminated container \"csi-rbdplugin\" in pod \"ceph-csi-rbd-provisioner-795cfbf7fb-rj9pz\" not found",
                                "kube-system/ceph-csi-rbd-provisioner-795cfbf7fb-rj9pz/csi-provisioner",
                                "Error from server (BadRequest): previous terminated container \"csi-provisioner\" in pod \"ceph-csi-rbd-provisioner-795cfbf7fb-rj9pz\" not found",
                                "kube-system/ceph-csi-rbd-provisioner-795cfbf7fb-rj9pz/csi-resizer",
                                "Error from server (BadRequest): previous terminated container \"csi-resizer\" in pod \"ceph-csi-rbd-provisioner-795cfbf7fb-rj9pz\" not found",
                                "kube-system/ceph-csi-rbd-provisioner-795cfbf7fb-rj9pz/csi-snapshotter",
                                "Error from server (BadRequest): previous terminated container \"csi-snapshotter\" in pod \"ceph-csi-rbd-provisioner-795cfbf7fb-rj9pz\" not found",
                                "kube-system/ceph-csi-rbd-provisioner-795cfbf7fb-rj9pz/csi-attacher",
                                "Error from server (BadRequest): previous terminated container \"csi-attacher\" in pod \"ceph-csi-rbd-provisioner-795cfbf7fb-rj9pz\" not found",
                                "kube-system/ceph-csi-rbd-provisioner-795cfbf7fb-rj9pz/csi-rbdplugin-controller",
                                "Error from server (BadRequest): previous terminated container \"csi-rbdplugin-controller\" in pod \"ceph-csi-rbd-provisioner-795cfbf7fb-rj9pz\" not found",
                                "kube-system/ceph-csi-rbd-provisioner-795cfbf7fb-rj9pz/liveness-prometheus",
                                "Error from server (BadRequest): previous terminated container \"liveness-prometheus\" in pod \"ceph-csi-rbd-provisioner-795cfbf7fb-rj9pz\" not found",
                                "kube-system/cilium-cpp9j/config",
                                "Error from server (BadRequest): previous terminated container \"config\" in pod \"cilium-cpp9j\" not found",
                                "kube-system/cilium-cpp9j/mount-cgroup",
                                "Error from server (BadRequest): previous terminated container \"mount-cgroup\" in pod \"cilium-cpp9j\" not found",
                                "kube-system/cilium-cpp9j/apply-sysctl-overwrites",
                                "Error from server (BadRequest): previous terminated container \"apply-sysctl-overwrites\" in pod \"cilium-cpp9j\" not found",
                                "kube-system/cilium-cpp9j/mount-bpf-fs",
                                "Error from server (BadRequest): previous terminated container \"mount-bpf-fs\" in pod \"cilium-cpp9j\" not found",
                                "kube-system/cilium-cpp9j/clean-cilium-state",
                                "Error from server (BadRequest): previous terminated container \"clean-cilium-state\" in pod \"cilium-cpp9j\" not found",
                                "kube-system/cilium-cpp9j/install-cni-binaries",
                                "Error from server (BadRequest): previous terminated container \"install-cni-binaries\" in pod \"cilium-cpp9j\" not found",
                                "kube-system/cilium-cpp9j/cilium-agent",
                                "Error from server (BadRequest): previous terminated container \"cilium-agent\" in pod \"cilium-cpp9j\" not found",
                                "kube-system/cilium-operator-7fcd8d6ffd-bb5zb/cilium-operator",
                                "Error from server (BadRequest): previous terminated container \"cilium-operator\" in pod \"cilium-operator-7fcd8d6ffd-bb5zb\" not found",
                                "kube-system/coredns-77cccfdc44-lvlzr/coredns",
                                "Error from server (BadRequest): previous terminated container \"coredns\" in pod \"coredns-77cccfdc44-lvlzr\" not found",
                                "kube-system/coredns-77cccfdc44-vxkbg/coredns",
                                "Error from server (BadRequest): previous terminated container \"coredns\" in pod \"coredns-77cccfdc44-vxkbg\" not found",
                                "kube-system/etcd-instance/etcd",
                                "Error from server (BadRequest): previous terminated container \"etcd\" in pod \"etcd-instance\" not found",
                                "kube-system/kube-apiserver-instance/kube-apiserver",
                                "Error from server (BadRequest): previous terminated container \"kube-apiserver\" in pod \"kube-apiserver-instance\" not found",
                                "kube-system/kube-controller-manager-instance/kube-controller-manager",
                                "Error from server (BadRequest): previous terminated container \"kube-controller-manager\" in pod \"kube-controller-manager-instance\" not found",
                                "kube-system/kube-proxy-k2ngf/kube-proxy",
                                "Error from server (BadRequest): previous terminated container \"kube-proxy\" in pod \"kube-proxy-k2ngf\" not found",
                                "kube-system/kube-scheduler-instance/kube-scheduler",
                                "Error from server (BadRequest): previous terminated container \"kube-scheduler\" in pod \"kube-scheduler-instance\" not found",
                                "kube-system/kube-vip-instance/kube-vip",
                                "Error from server (BadRequest): previous terminated container \"kube-vip\" in pod \"kube-vip-instance\" not found"
                            ],
                            "zuul_log_id": "0242ac17-0010-0a3c-9efc-000000000023-1-instance"
                        }
                    },
                    "role": {
                        "id": "0242ac17-0010-0a3c-9efc-00000000001f",
                        "name": "gather-pod-logs",
                        "path": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/untrusted/project_4/opendev.org/openstack/openstack-helm/roles/gather-pod-logs"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-02-16T23:07:29.252129Z",
                            "start": "2026-02-16T23:07:22.223767Z"
                        },
                        "id": "0242ac17-0010-0a3c-9efc-000000000023",
                        "name": "retrieve all kubernetes logs, current and previous (if they exist)"
                    }
                },
                {
                    "hosts": {
                        "instance": {
                            "action": "synchronize",
                            "changed": true,
                            "cmd": "/usr/bin/rsync --delay-updates -F --compress --archive --rsh='/usr/bin/ssh -S none -o Port=22 -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null' --rsync-path='sudo -u root rsync' --out-format='<<CHANGED>>%i %n%L' zuul@199.204.45.216:/tmp/logs/pod-logs /var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/work/logs/instance",
                            "invocation": {
                                "module_args": {
                                    "_local_rsync_password": null,
                                    "_local_rsync_path": "rsync",
                                    "_ssh_args": null,
                                    "_substitute_controller": false,
                                    "archive": true,
                                    "checksum": false,
                                    "compress": true,
                                    "copy_links": false,
                                    "delay_updates": true,
                                    "delete": false,
                                    "dest": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/work/logs/instance",
                                    "dest_port": 22,
                                    "dirs": false,
                                    "existing_only": false,
                                    "group": null,
                                    "link_dest": null,
                                    "links": null,
                                    "mode": "pull",
                                    "owner": null,
                                    "partial": false,
                                    "perms": null,
                                    "private_key": null,
                                    "recursive": null,
                                    "rsync_opts": [],
                                    "rsync_path": "sudo -u root rsync",
                                    "rsync_timeout": 0,
                                    "set_remote_user": true,
                                    "src": "zuul@199.204.45.216:/tmp/logs/pod-logs",
                                    "ssh_connection_multiplexing": false,
                                    "times": null,
                                    "use_ssh_args": false,
                                    "verify_host": false
                                }
                            },
                            "msg": "cd+++++++++ pod-logs/\ncd+++++++++ pod-logs/failed-pods/\ncd+++++++++ pod-logs/failed-pods/kube-system/\ncd+++++++++ pod-logs/failed-pods/kube-system/ceph-csi-rbd-nodeplugin-knshx/\n>f+++++++++ pod-logs/failed-pods/kube-system/ceph-csi-rbd-nodeplugin-knshx/csi-rbdplugin.txt\n>f+++++++++ pod-logs/failed-pods/kube-system/ceph-csi-rbd-nodeplugin-knshx/driver-registrar.txt\n>f+++++++++ pod-logs/failed-pods/kube-system/ceph-csi-rbd-nodeplugin-knshx/liveness-prometheus.txt\ncd+++++++++ pod-logs/failed-pods/kube-system/ceph-csi-rbd-provisioner-795cfbf7fb-rj9pz/\n>f+++++++++ pod-logs/failed-pods/kube-system/ceph-csi-rbd-provisioner-795cfbf7fb-rj9pz/csi-attacher.txt\n>f+++++++++ pod-logs/failed-pods/kube-system/ceph-csi-rbd-provisioner-795cfbf7fb-rj9pz/csi-provisioner.txt\n>f+++++++++ pod-logs/failed-pods/kube-system/ceph-csi-rbd-provisioner-795cfbf7fb-rj9pz/csi-rbdplugin-controller.txt\n>f+++++++++ pod-logs/failed-pods/kube-system/ceph-csi-rbd-provisioner-795cfbf7fb-rj9pz/csi-rbdplugin.txt\n>f+++++++++ pod-logs/failed-pods/kube-system/ceph-csi-rbd-provisioner-795cfbf7fb-rj9pz/csi-resizer.txt\n>f+++++++++ pod-logs/failed-pods/kube-system/ceph-csi-rbd-provisioner-795cfbf7fb-rj9pz/csi-snapshotter.txt\n>f+++++++++ pod-logs/failed-pods/kube-system/ceph-csi-rbd-provisioner-795cfbf7fb-rj9pz/liveness-prometheus.txt\ncd+++++++++ pod-logs/failed-pods/kube-system/cilium-cpp9j/\n>f+++++++++ pod-logs/failed-pods/kube-system/cilium-cpp9j/apply-sysctl-overwrites.txt\n>f+++++++++ pod-logs/failed-pods/kube-system/cilium-cpp9j/cilium-agent.txt\n>f+++++++++ pod-logs/failed-pods/kube-system/cilium-cpp9j/clean-cilium-state.txt\n>f+++++++++ pod-logs/failed-pods/kube-system/cilium-cpp9j/config.txt\n>f+++++++++ pod-logs/failed-pods/kube-system/cilium-cpp9j/install-cni-binaries.txt\n>f+++++++++ pod-logs/failed-pods/kube-system/cilium-cpp9j/mount-bpf-fs.txt\n>f+++++++++ pod-logs/failed-pods/kube-system/cilium-cpp9j/mount-cgroup.txt\ncd+++++++++ 
pod-logs/failed-pods/kube-system/cilium-operator-7fcd8d6ffd-bb5zb/\n>f+++++++++ pod-logs/failed-pods/kube-system/cilium-operator-7fcd8d6ffd-bb5zb/cilium-operator.txt\ncd+++++++++ pod-logs/failed-pods/kube-system/coredns-77cccfdc44-lvlzr/\n>f+++++++++ pod-logs/failed-pods/kube-system/coredns-77cccfdc44-lvlzr/coredns.txt\ncd+++++++++ pod-logs/failed-pods/kube-system/coredns-77cccfdc44-vxkbg/\n>f+++++++++ pod-logs/failed-pods/kube-system/coredns-77cccfdc44-vxkbg/coredns.txt\ncd+++++++++ pod-logs/failed-pods/kube-system/etcd-instance/\n>f+++++++++ pod-logs/failed-pods/kube-system/etcd-instance/etcd.txt\ncd+++++++++ pod-logs/failed-pods/kube-system/kube-apiserver-instance/\n>f+++++++++ pod-logs/failed-pods/kube-system/kube-apiserver-instance/kube-apiserver.txt\ncd+++++++++ pod-logs/failed-pods/kube-system/kube-controller-manager-instance/\n>f+++++++++ pod-logs/failed-pods/kube-system/kube-controller-manager-instance/kube-controller-manager.txt\ncd+++++++++ pod-logs/failed-pods/kube-system/kube-proxy-k2ngf/\n>f+++++++++ pod-logs/failed-pods/kube-system/kube-proxy-k2ngf/kube-proxy.txt\ncd+++++++++ pod-logs/failed-pods/kube-system/kube-scheduler-instance/\n>f+++++++++ pod-logs/failed-pods/kube-system/kube-scheduler-instance/kube-scheduler.txt\ncd+++++++++ pod-logs/failed-pods/kube-system/kube-vip-instance/\n>f+++++++++ pod-logs/failed-pods/kube-system/kube-vip-instance/kube-vip.txt\ncd+++++++++ pod-logs/kube-system/\ncd+++++++++ pod-logs/kube-system/ceph-csi-rbd-nodeplugin-knshx/\n>f+++++++++ pod-logs/kube-system/ceph-csi-rbd-nodeplugin-knshx/csi-rbdplugin.txt\n>f+++++++++ pod-logs/kube-system/ceph-csi-rbd-nodeplugin-knshx/driver-registrar.txt\n>f+++++++++ pod-logs/kube-system/ceph-csi-rbd-nodeplugin-knshx/liveness-prometheus.txt\ncd+++++++++ pod-logs/kube-system/ceph-csi-rbd-provisioner-795cfbf7fb-rj9pz/\n>f+++++++++ pod-logs/kube-system/ceph-csi-rbd-provisioner-795cfbf7fb-rj9pz/csi-attacher.txt\n>f+++++++++ 
pod-logs/kube-system/ceph-csi-rbd-provisioner-795cfbf7fb-rj9pz/csi-provisioner.txt\n>f+++++++++ pod-logs/kube-system/ceph-csi-rbd-provisioner-795cfbf7fb-rj9pz/csi-rbdplugin-controller.txt\n>f+++++++++ pod-logs/kube-system/ceph-csi-rbd-provisioner-795cfbf7fb-rj9pz/csi-rbdplugin.txt\n>f+++++++++ pod-logs/kube-system/ceph-csi-rbd-provisioner-795cfbf7fb-rj9pz/csi-resizer.txt\n>f+++++++++ pod-logs/kube-system/ceph-csi-rbd-provisioner-795cfbf7fb-rj9pz/csi-snapshotter.txt\n>f+++++++++ pod-logs/kube-system/ceph-csi-rbd-provisioner-795cfbf7fb-rj9pz/liveness-prometheus.txt\ncd+++++++++ pod-logs/kube-system/cilium-cpp9j/\n>f+++++++++ pod-logs/kube-system/cilium-cpp9j/apply-sysctl-overwrites.txt\n>f+++++++++ pod-logs/kube-system/cilium-cpp9j/cilium-agent.txt\n>f+++++++++ pod-logs/kube-system/cilium-cpp9j/clean-cilium-state.txt\n>f+++++++++ pod-logs/kube-system/cilium-cpp9j/config.txt\n>f+++++++++ pod-logs/kube-system/cilium-cpp9j/install-cni-binaries.txt\n>f+++++++++ pod-logs/kube-system/cilium-cpp9j/mount-bpf-fs.txt\n>f+++++++++ pod-logs/kube-system/cilium-cpp9j/mount-cgroup.txt\ncd+++++++++ pod-logs/kube-system/cilium-operator-7fcd8d6ffd-bb5zb/\n>f+++++++++ pod-logs/kube-system/cilium-operator-7fcd8d6ffd-bb5zb/cilium-operator.txt\ncd+++++++++ pod-logs/kube-system/coredns-77cccfdc44-lvlzr/\n>f+++++++++ pod-logs/kube-system/coredns-77cccfdc44-lvlzr/coredns.txt\ncd+++++++++ pod-logs/kube-system/coredns-77cccfdc44-vxkbg/\n>f+++++++++ pod-logs/kube-system/coredns-77cccfdc44-vxkbg/coredns.txt\ncd+++++++++ pod-logs/kube-system/etcd-instance/\n>f+++++++++ pod-logs/kube-system/etcd-instance/etcd.txt\ncd+++++++++ pod-logs/kube-system/kube-apiserver-instance/\n>f+++++++++ pod-logs/kube-system/kube-apiserver-instance/kube-apiserver.txt\ncd+++++++++ pod-logs/kube-system/kube-controller-manager-instance/\n>f+++++++++ pod-logs/kube-system/kube-controller-manager-instance/kube-controller-manager.txt\ncd+++++++++ pod-logs/kube-system/kube-proxy-k2ngf/\n>f+++++++++ 
pod-logs/kube-system/kube-proxy-k2ngf/kube-proxy.txt\ncd+++++++++ pod-logs/kube-system/kube-scheduler-instance/\n>f+++++++++ pod-logs/kube-system/kube-scheduler-instance/kube-scheduler.txt\ncd+++++++++ pod-logs/kube-system/kube-vip-instance/\n>f+++++++++ pod-logs/kube-system/kube-vip-instance/kube-vip.txt\n",
                            "rc": 0,
                            "stdout_lines": [
                                "cd+++++++++ pod-logs/",
                                "cd+++++++++ pod-logs/failed-pods/",
                                "cd+++++++++ pod-logs/failed-pods/kube-system/",
                                "cd+++++++++ pod-logs/failed-pods/kube-system/ceph-csi-rbd-nodeplugin-knshx/",
                                ">f+++++++++ pod-logs/failed-pods/kube-system/ceph-csi-rbd-nodeplugin-knshx/csi-rbdplugin.txt",
                                ">f+++++++++ pod-logs/failed-pods/kube-system/ceph-csi-rbd-nodeplugin-knshx/driver-registrar.txt",
                                ">f+++++++++ pod-logs/failed-pods/kube-system/ceph-csi-rbd-nodeplugin-knshx/liveness-prometheus.txt",
                                "cd+++++++++ pod-logs/failed-pods/kube-system/ceph-csi-rbd-provisioner-795cfbf7fb-rj9pz/",
                                ">f+++++++++ pod-logs/failed-pods/kube-system/ceph-csi-rbd-provisioner-795cfbf7fb-rj9pz/csi-attacher.txt",
                                ">f+++++++++ pod-logs/failed-pods/kube-system/ceph-csi-rbd-provisioner-795cfbf7fb-rj9pz/csi-provisioner.txt",
                                ">f+++++++++ pod-logs/failed-pods/kube-system/ceph-csi-rbd-provisioner-795cfbf7fb-rj9pz/csi-rbdplugin-controller.txt",
                                ">f+++++++++ pod-logs/failed-pods/kube-system/ceph-csi-rbd-provisioner-795cfbf7fb-rj9pz/csi-rbdplugin.txt",
                                ">f+++++++++ pod-logs/failed-pods/kube-system/ceph-csi-rbd-provisioner-795cfbf7fb-rj9pz/csi-resizer.txt",
                                ">f+++++++++ pod-logs/failed-pods/kube-system/ceph-csi-rbd-provisioner-795cfbf7fb-rj9pz/csi-snapshotter.txt",
                                ">f+++++++++ pod-logs/failed-pods/kube-system/ceph-csi-rbd-provisioner-795cfbf7fb-rj9pz/liveness-prometheus.txt",
                                "cd+++++++++ pod-logs/failed-pods/kube-system/cilium-cpp9j/",
                                ">f+++++++++ pod-logs/failed-pods/kube-system/cilium-cpp9j/apply-sysctl-overwrites.txt",
                                ">f+++++++++ pod-logs/failed-pods/kube-system/cilium-cpp9j/cilium-agent.txt",
                                ">f+++++++++ pod-logs/failed-pods/kube-system/cilium-cpp9j/clean-cilium-state.txt",
                                ">f+++++++++ pod-logs/failed-pods/kube-system/cilium-cpp9j/config.txt",
                                ">f+++++++++ pod-logs/failed-pods/kube-system/cilium-cpp9j/install-cni-binaries.txt",
                                ">f+++++++++ pod-logs/failed-pods/kube-system/cilium-cpp9j/mount-bpf-fs.txt",
                                ">f+++++++++ pod-logs/failed-pods/kube-system/cilium-cpp9j/mount-cgroup.txt",
                                "cd+++++++++ pod-logs/failed-pods/kube-system/cilium-operator-7fcd8d6ffd-bb5zb/",
                                ">f+++++++++ pod-logs/failed-pods/kube-system/cilium-operator-7fcd8d6ffd-bb5zb/cilium-operator.txt",
                                "cd+++++++++ pod-logs/failed-pods/kube-system/coredns-77cccfdc44-lvlzr/",
                                ">f+++++++++ pod-logs/failed-pods/kube-system/coredns-77cccfdc44-lvlzr/coredns.txt",
                                "cd+++++++++ pod-logs/failed-pods/kube-system/coredns-77cccfdc44-vxkbg/",
                                ">f+++++++++ pod-logs/failed-pods/kube-system/coredns-77cccfdc44-vxkbg/coredns.txt",
                                "cd+++++++++ pod-logs/failed-pods/kube-system/etcd-instance/",
                                ">f+++++++++ pod-logs/failed-pods/kube-system/etcd-instance/etcd.txt",
                                "cd+++++++++ pod-logs/failed-pods/kube-system/kube-apiserver-instance/",
                                ">f+++++++++ pod-logs/failed-pods/kube-system/kube-apiserver-instance/kube-apiserver.txt",
                                "cd+++++++++ pod-logs/failed-pods/kube-system/kube-controller-manager-instance/",
                                ">f+++++++++ pod-logs/failed-pods/kube-system/kube-controller-manager-instance/kube-controller-manager.txt",
                                "cd+++++++++ pod-logs/failed-pods/kube-system/kube-proxy-k2ngf/",
                                ">f+++++++++ pod-logs/failed-pods/kube-system/kube-proxy-k2ngf/kube-proxy.txt",
                                "cd+++++++++ pod-logs/failed-pods/kube-system/kube-scheduler-instance/",
                                ">f+++++++++ pod-logs/failed-pods/kube-system/kube-scheduler-instance/kube-scheduler.txt",
                                "cd+++++++++ pod-logs/failed-pods/kube-system/kube-vip-instance/",
                                ">f+++++++++ pod-logs/failed-pods/kube-system/kube-vip-instance/kube-vip.txt",
                                "cd+++++++++ pod-logs/kube-system/",
                                "cd+++++++++ pod-logs/kube-system/ceph-csi-rbd-nodeplugin-knshx/",
                                ">f+++++++++ pod-logs/kube-system/ceph-csi-rbd-nodeplugin-knshx/csi-rbdplugin.txt",
                                ">f+++++++++ pod-logs/kube-system/ceph-csi-rbd-nodeplugin-knshx/driver-registrar.txt",
                                ">f+++++++++ pod-logs/kube-system/ceph-csi-rbd-nodeplugin-knshx/liveness-prometheus.txt",
                                "cd+++++++++ pod-logs/kube-system/ceph-csi-rbd-provisioner-795cfbf7fb-rj9pz/",
                                ">f+++++++++ pod-logs/kube-system/ceph-csi-rbd-provisioner-795cfbf7fb-rj9pz/csi-attacher.txt",
                                ">f+++++++++ pod-logs/kube-system/ceph-csi-rbd-provisioner-795cfbf7fb-rj9pz/csi-provisioner.txt",
                                ">f+++++++++ pod-logs/kube-system/ceph-csi-rbd-provisioner-795cfbf7fb-rj9pz/csi-rbdplugin-controller.txt",
                                ">f+++++++++ pod-logs/kube-system/ceph-csi-rbd-provisioner-795cfbf7fb-rj9pz/csi-rbdplugin.txt",
                                ">f+++++++++ pod-logs/kube-system/ceph-csi-rbd-provisioner-795cfbf7fb-rj9pz/csi-resizer.txt",
                                ">f+++++++++ pod-logs/kube-system/ceph-csi-rbd-provisioner-795cfbf7fb-rj9pz/csi-snapshotter.txt",
                                ">f+++++++++ pod-logs/kube-system/ceph-csi-rbd-provisioner-795cfbf7fb-rj9pz/liveness-prometheus.txt",
                                "cd+++++++++ pod-logs/kube-system/cilium-cpp9j/",
                                ">f+++++++++ pod-logs/kube-system/cilium-cpp9j/apply-sysctl-overwrites.txt",
                                ">f+++++++++ pod-logs/kube-system/cilium-cpp9j/cilium-agent.txt",
                                ">f+++++++++ pod-logs/kube-system/cilium-cpp9j/clean-cilium-state.txt",
                                ">f+++++++++ pod-logs/kube-system/cilium-cpp9j/config.txt",
                                ">f+++++++++ pod-logs/kube-system/cilium-cpp9j/install-cni-binaries.txt",
                                ">f+++++++++ pod-logs/kube-system/cilium-cpp9j/mount-bpf-fs.txt",
                                ">f+++++++++ pod-logs/kube-system/cilium-cpp9j/mount-cgroup.txt",
                                "cd+++++++++ pod-logs/kube-system/cilium-operator-7fcd8d6ffd-bb5zb/",
                                ">f+++++++++ pod-logs/kube-system/cilium-operator-7fcd8d6ffd-bb5zb/cilium-operator.txt",
                                "cd+++++++++ pod-logs/kube-system/coredns-77cccfdc44-lvlzr/",
                                ">f+++++++++ pod-logs/kube-system/coredns-77cccfdc44-lvlzr/coredns.txt",
                                "cd+++++++++ pod-logs/kube-system/coredns-77cccfdc44-vxkbg/",
                                ">f+++++++++ pod-logs/kube-system/coredns-77cccfdc44-vxkbg/coredns.txt",
                                "cd+++++++++ pod-logs/kube-system/etcd-instance/",
                                ">f+++++++++ pod-logs/kube-system/etcd-instance/etcd.txt",
                                "cd+++++++++ pod-logs/kube-system/kube-apiserver-instance/",
                                ">f+++++++++ pod-logs/kube-system/kube-apiserver-instance/kube-apiserver.txt",
                                "cd+++++++++ pod-logs/kube-system/kube-controller-manager-instance/",
                                ">f+++++++++ pod-logs/kube-system/kube-controller-manager-instance/kube-controller-manager.txt",
                                "cd+++++++++ pod-logs/kube-system/kube-proxy-k2ngf/",
                                ">f+++++++++ pod-logs/kube-system/kube-proxy-k2ngf/kube-proxy.txt",
                                "cd+++++++++ pod-logs/kube-system/kube-scheduler-instance/",
                                ">f+++++++++ pod-logs/kube-system/kube-scheduler-instance/kube-scheduler.txt",
                                "cd+++++++++ pod-logs/kube-system/kube-vip-instance/",
                                ">f+++++++++ pod-logs/kube-system/kube-vip-instance/kube-vip.txt"
                            ]
                        }
                    },
                    "role": {
                        "id": "0242ac17-0010-0a3c-9efc-00000000001f",
                        "name": "gather-pod-logs",
                        "path": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/untrusted/project_4/opendev.org/openstack/openstack-helm/roles/gather-pod-logs"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-02-16T23:07:29.755922Z",
                            "start": "2026-02-16T23:07:29.257423Z"
                        },
                        "id": "0242ac17-0010-0a3c-9efc-000000000024",
                        "name": "Downloads pod logs to executor"
                    }
                },
                {
                    "hosts": {
                        "instance": {
                            "action": "file",
                            "changed": true,
                            "diff": {
                                "after": {
                                    "path": "/tmp/logs/prometheus",
                                    "state": "directory"
                                },
                                "before": {
                                    "path": "/tmp/logs/prometheus",
                                    "state": "absent"
                                }
                            },
                            "gid": 0,
                            "group": "root",
                            "invocation": {
                                "module_args": {
                                    "_diff_peek": null,
                                    "_original_basename": null,
                                    "access_time": null,
                                    "access_time_format": "%Y%m%d%H%M.%S",
                                    "attributes": null,
                                    "follow": true,
                                    "force": false,
                                    "group": null,
                                    "mode": null,
                                    "modification_time": null,
                                    "modification_time_format": "%Y%m%d%H%M.%S",
                                    "owner": null,
                                    "path": "/tmp/logs/prometheus",
                                    "recurse": false,
                                    "selevel": null,
                                    "serole": null,
                                    "setype": null,
                                    "seuser": null,
                                    "src": null,
                                    "state": "directory",
                                    "unsafe_writes": false
                                }
                            },
                            "mode": "0755",
                            "owner": "root",
                            "path": "/tmp/logs/prometheus",
                            "size": 4096,
                            "state": "directory",
                            "uid": 0
                        }
                    },
                    "role": {
                        "id": "0242ac17-0010-0a3c-9efc-000000000026",
                        "name": "gather-prom-metrics",
                        "path": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/untrusted/project_4/opendev.org/openstack/openstack-helm/roles/gather-prom-metrics"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-02-16T23:07:29.974943Z",
                            "start": "2026-02-16T23:07:29.765270Z"
                        },
                        "id": "0242ac17-0010-0a3c-9efc-000000000028",
                        "name": "creating directory for helm release descriptions"
                    }
                },
                {
                    "hosts": {
                        "instance": {
                            "action": "shell",
                            "changed": true,
                            "cmd": "set -e\nNAMESPACES=$(kubectl get namespaces -o json | jq -r '.items[].metadata.name')\nfor NS in $NAMESPACES; do\n  SERVICES=$(kubectl get svc -n $NS -o json | jq -r '.items[] | select(.spec.ports[].name==\"metrics\") | .metadata.name')\n  for SVC in $SERVICES; do\n    PORT=$(kubectl get svc $SVC -n $NS -o json | jq -r '.spec.ports[] | select(.name==\"metrics\") | .port')\n    echo \"Scraping $SVC.$NS:$PORT/metrics:\"\n    curl \"$SVC.$NS:$PORT/metrics\" >> \"/tmp/logs\"/prometheus/$NS-$SVC.txt || true\n  done\ndone",
                            "delta": "0:00:00.473660",
                            "end": "2026-02-16 23:07:30.669922",
                            "invocation": {
                                "module_args": {
                                    "_raw_params": "set -e\nNAMESPACES=$(kubectl get namespaces -o json | jq -r '.items[].metadata.name')\nfor NS in $NAMESPACES; do\n  SERVICES=$(kubectl get svc -n $NS -o json | jq -r '.items[] | select(.spec.ports[].name==\"metrics\") | .metadata.name')\n  for SVC in $SERVICES; do\n    PORT=$(kubectl get svc $SVC -n $NS -o json | jq -r '.spec.ports[] | select(.name==\"metrics\") | .port')\n    echo \"Scraping $SVC.$NS:$PORT/metrics:\"\n    curl \"$SVC.$NS:$PORT/metrics\" >> \"/tmp/logs\"/prometheus/$NS-$SVC.txt || true\n  done\ndone",
                                    "_uses_shell": true,
                                    "argv": null,
                                    "chdir": null,
                                    "creates": null,
                                    "executable": "/bin/bash",
                                    "expand_argument_vars": true,
                                    "removes": null,
                                    "stdin": null,
                                    "stdin_add_newline": true,
                                    "strip_empty_ends": true,
                                    "zuul_ansible_split_streams": false,
                                    "zuul_log_id": "0242ac17-0010-0a3c-9efc-000000000029-1-instance",
                                    "zuul_no_log": false,
                                    "zuul_output_max_bytes": 1073741824
                                }
                            },
                            "msg": "",
                            "rc": 0,
                            "start": "2026-02-16 23:07:30.196262",
                            "stderr": "",
                            "stderr_lines": [],
                            "stdout": "Scraping kube-dns.kube-system:9153/metrics:\n/bin/bash: line 8: curl: command not found",
                            "stdout_lines": [
                                "Scraping kube-dns.kube-system:9153/metrics:",
                                "/bin/bash: line 8: curl: command not found"
                            ],
                            "zuul_log_id": "0242ac17-0010-0a3c-9efc-000000000029-1-instance"
                        }
                    },
                    "role": {
                        "id": "0242ac17-0010-0a3c-9efc-000000000026",
                        "name": "gather-prom-metrics",
                        "path": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/untrusted/project_4/opendev.org/openstack/openstack-helm/roles/gather-prom-metrics"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-02-16T23:07:31.019457Z",
                            "start": "2026-02-16T23:07:30.000377Z"
                        },
                        "id": "0242ac17-0010-0a3c-9efc-000000000029",
                        "name": "Get metrics from exporter services in all namespaces"
                    }
                },
                {
                    "hosts": {
                        "instance": {
                            "action": "shell",
                            "changed": true,
                            "cmd": "set -e\nmgr_endpoints=$(kubectl get endpoints -n ceph -l component=manager -o json | jq -r '.items[].subsets[].addresses[].ip')\necho \"ceph-mgr endpoints: $mgr_endpoints\"\nfor endpoint in $mgr_endpoints; do\n  echo \"checking ceph-mgr at $endpoint\"\n  metrics_curl=\"curl $endpoint:9283/metrics\"\n  op=$(eval \"$metrics_curl\")\n  if [[ -n $op ]]; then\n    curl $endpoint:9283/metrics >> \"/tmp/logs\"/prometheus/ceph-ceph-mgr.txt\n    break\n  else\n    echo \"$endpoint is a standby ceph-mgr. Trying next endpoint\"\n  fi\ndone",
                            "delta": "0:00:00.094170",
                            "end": "2026-02-16 23:07:31.329873",
                            "invocation": {
                                "module_args": {
                                    "_raw_params": "set -e\nmgr_endpoints=$(kubectl get endpoints -n ceph -l component=manager -o json | jq -r '.items[].subsets[].addresses[].ip')\necho \"ceph-mgr endpoints: $mgr_endpoints\"\nfor endpoint in $mgr_endpoints; do\n  echo \"checking ceph-mgr at $endpoint\"\n  metrics_curl=\"curl $endpoint:9283/metrics\"\n  op=$(eval \"$metrics_curl\")\n  if [[ -n $op ]]; then\n    curl $endpoint:9283/metrics >> \"/tmp/logs\"/prometheus/ceph-ceph-mgr.txt\n    break\n  else\n    echo \"$endpoint is a standby ceph-mgr. Trying next endpoint\"\n  fi\ndone",
                                    "_uses_shell": true,
                                    "argv": null,
                                    "chdir": null,
                                    "creates": null,
                                    "executable": "/bin/bash",
                                    "expand_argument_vars": true,
                                    "removes": null,
                                    "stdin": null,
                                    "stdin_add_newline": true,
                                    "strip_empty_ends": true,
                                    "zuul_ansible_split_streams": false,
                                    "zuul_log_id": "0242ac17-0010-0a3c-9efc-00000000002a-1-instance",
                                    "zuul_no_log": false,
                                    "zuul_output_max_bytes": 1073741824
                                }
                            },
                            "msg": "",
                            "rc": 0,
                            "start": "2026-02-16 23:07:31.235703",
                            "stderr": "",
                            "stderr_lines": [],
                            "stdout": "ceph-mgr endpoints: ",
                            "stdout_lines": [
                                "ceph-mgr endpoints: "
                            ],
                            "zuul_log_id": "0242ac17-0010-0a3c-9efc-00000000002a-1-instance"
                        }
                    },
                    "role": {
                        "id": "0242ac17-0010-0a3c-9efc-000000000026",
                        "name": "gather-prom-metrics",
                        "path": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/untrusted/project_4/opendev.org/openstack/openstack-helm/roles/gather-prom-metrics"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-02-16T23:07:31.558323Z",
                            "start": "2026-02-16T23:07:31.042218Z"
                        },
                        "id": "0242ac17-0010-0a3c-9efc-00000000002a",
                        "name": "Get ceph metrics from ceph-mgr"
                    }
                },
                {
                    "hosts": {
                        "instance": {
                            "action": "shell",
                            "changed": true,
                            "cmd": "set -e\nNAMESPACE=\"osh-infra\"\nAPP_LABEL=\"fluentd\"\nPODS=$(kubectl get pods -n $NAMESPACE -l application=$APP_LABEL -o json | jq -r '.items[].metadata.name')\nfor POD in $PODS; do\n  IP=$(kubectl get pod -n $NAMESPACE $POD -o json | jq -r '.status.podIP')\n  PORT=$(kubectl get pod -n $NAMESPACE $POD -o json |  jq -r '.spec.containers[0].ports[] | select(.name==\"metrics\") | .containerPort')\n  echo \"Scraping $POD at $IP:$PORT/metrics\"\n  curl \"$IP:$PORT/metrics\" >> \"/tmp/logs\"/prometheus/$POD.txt || true\ndone",
                            "delta": "0:00:00.087785",
                            "end": "2026-02-16 23:07:31.879402",
                            "invocation": {
                                "module_args": {
                                    "_raw_params": "set -e\nNAMESPACE=\"osh-infra\"\nAPP_LABEL=\"fluentd\"\nPODS=$(kubectl get pods -n $NAMESPACE -l application=$APP_LABEL -o json | jq -r '.items[].metadata.name')\nfor POD in $PODS; do\n  IP=$(kubectl get pod -n $NAMESPACE $POD -o json | jq -r '.status.podIP')\n  PORT=$(kubectl get pod -n $NAMESPACE $POD -o json |  jq -r '.spec.containers[0].ports[] | select(.name==\"metrics\") | .containerPort')\n  echo \"Scraping $POD at $IP:$PORT/metrics\"\n  curl \"$IP:$PORT/metrics\" >> \"/tmp/logs\"/prometheus/$POD.txt || true\ndone",
                                    "_uses_shell": true,
                                    "argv": null,
                                    "chdir": null,
                                    "creates": null,
                                    "executable": "/bin/bash",
                                    "expand_argument_vars": true,
                                    "removes": null,
                                    "stdin": null,
                                    "stdin_add_newline": true,
                                    "strip_empty_ends": true,
                                    "zuul_ansible_split_streams": false,
                                    "zuul_log_id": "0242ac17-0010-0a3c-9efc-00000000002b-1-instance",
                                    "zuul_no_log": false,
                                    "zuul_output_max_bytes": 1073741824
                                }
                            },
                            "msg": "",
                            "rc": 0,
                            "start": "2026-02-16 23:07:31.791617",
                            "stderr": "",
                            "stderr_lines": [],
                            "stdout": "",
                            "stdout_lines": [],
                            "zuul_log_id": "0242ac17-0010-0a3c-9efc-00000000002b-1-instance"
                        }
                    },
                    "role": {
                        "id": "0242ac17-0010-0a3c-9efc-000000000026",
                        "name": "gather-prom-metrics",
                        "path": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/untrusted/project_4/opendev.org/openstack/openstack-helm/roles/gather-prom-metrics"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-02-16T23:07:32.103037Z",
                            "start": "2026-02-16T23:07:31.585379Z"
                        },
                        "id": "0242ac17-0010-0a3c-9efc-00000000002b",
                        "name": "Get metrics from fluentd pods"
                    }
                },
                {
                    "hosts": {
                        "instance": {
                            "action": "synchronize",
                            "changed": true,
                            "cmd": "/usr/bin/rsync --delay-updates -F --compress --archive --rsh='/usr/bin/ssh -S none -o Port=22 -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null' --rsync-path='sudo -u root rsync' --out-format='<<CHANGED>>%i %n%L' zuul@199.204.45.216:/tmp/logs/prometheus /var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/work/logs/instance",
                            "invocation": {
                                "module_args": {
                                    "_local_rsync_password": null,
                                    "_local_rsync_path": "rsync",
                                    "_ssh_args": null,
                                    "_substitute_controller": false,
                                    "archive": true,
                                    "checksum": false,
                                    "compress": true,
                                    "copy_links": false,
                                    "delay_updates": true,
                                    "delete": false,
                                    "dest": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/work/logs/instance",
                                    "dest_port": 22,
                                    "dirs": false,
                                    "existing_only": false,
                                    "group": null,
                                    "link_dest": null,
                                    "links": null,
                                    "mode": "pull",
                                    "owner": null,
                                    "partial": false,
                                    "perms": null,
                                    "private_key": null,
                                    "recursive": null,
                                    "rsync_opts": [],
                                    "rsync_path": "sudo -u root rsync",
                                    "rsync_timeout": 0,
                                    "set_remote_user": true,
                                    "src": "zuul@199.204.45.216:/tmp/logs/prometheus",
                                    "ssh_connection_multiplexing": false,
                                    "times": null,
                                    "use_ssh_args": false,
                                    "verify_host": false
                                }
                            },
                            "msg": "cd+++++++++ prometheus/\n>f+++++++++ prometheus/kube-system-kube-dns.txt\n",
                            "rc": 0,
                            "stdout_lines": [
                                "cd+++++++++ prometheus/",
                                ">f+++++++++ prometheus/kube-system-kube-dns.txt"
                            ]
                        }
                    },
                    "role": {
                        "id": "0242ac17-0010-0a3c-9efc-000000000026",
                        "name": "gather-prom-metrics",
                        "path": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/untrusted/project_4/opendev.org/openstack/openstack-helm/roles/gather-prom-metrics"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-02-16T23:07:32.588819Z",
                            "start": "2026-02-16T23:07:32.107641Z"
                        },
                        "id": "0242ac17-0010-0a3c-9efc-00000000002c",
                        "name": "Downloads logs to executor"
                    }
                },
                {
                    "hosts": {
                        "instance": {
                            "action": "file",
                            "changed": true,
                            "diff": {
                                "after": {
                                    "path": "/tmp/logs/selenium",
                                    "state": "directory"
                                },
                                "before": {
                                    "path": "/tmp/logs/selenium",
                                    "state": "absent"
                                }
                            },
                            "gid": 0,
                            "group": "root",
                            "invocation": {
                                "module_args": {
                                    "_diff_peek": null,
                                    "_original_basename": null,
                                    "access_time": null,
                                    "access_time_format": "%Y%m%d%H%M.%S",
                                    "attributes": null,
                                    "follow": true,
                                    "force": false,
                                    "group": null,
                                    "mode": null,
                                    "modification_time": null,
                                    "modification_time_format": "%Y%m%d%H%M.%S",
                                    "owner": null,
                                    "path": "/tmp/logs/selenium",
                                    "recurse": false,
                                    "selevel": null,
                                    "serole": null,
                                    "setype": null,
                                    "seuser": null,
                                    "src": null,
                                    "state": "directory",
                                    "unsafe_writes": false
                                }
                            },
                            "mode": "0755",
                            "owner": "root",
                            "path": "/tmp/logs/selenium",
                            "size": 4096,
                            "state": "directory",
                            "uid": 0
                        }
                    },
                    "role": {
                        "id": "0242ac17-0010-0a3c-9efc-00000000002e",
                        "name": "gather-selenium-data",
                        "path": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/untrusted/project_4/opendev.org/openstack/openstack-helm/roles/gather-selenium-data"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-02-16T23:07:32.804636Z",
                            "start": "2026-02-16T23:07:32.597399Z"
                        },
                        "id": "0242ac17-0010-0a3c-9efc-000000000030",
                        "name": "creating directory for helm release descriptions"
                    }
                },
                {
                    "hosts": {
                        "instance": {
                            "action": "shell",
                            "changed": true,
                            "cmd": "set -x\ncp /tmp/artifacts/* /tmp/logs/selenium/.",
                            "delta": "0:00:00.006478",
                            "end": "2026-02-16 23:07:33.021356",
                            "failed": true,
                            "invocation": {
                                "module_args": {
                                    "_raw_params": "set -x\ncp /tmp/artifacts/* /tmp/logs/selenium/.",
                                    "_uses_shell": true,
                                    "argv": null,
                                    "chdir": null,
                                    "creates": null,
                                    "executable": "/bin/bash",
                                    "expand_argument_vars": true,
                                    "removes": null,
                                    "stdin": null,
                                    "stdin_add_newline": true,
                                    "strip_empty_ends": true,
                                    "zuul_ansible_split_streams": false,
                                    "zuul_log_id": "0242ac17-0010-0a3c-9efc-000000000031-1-instance",
                                    "zuul_no_log": false,
                                    "zuul_output_max_bytes": 1073741824
                                }
                            },
                            "msg": "non-zero return code",
                            "rc": 1,
                            "start": "2026-02-16 23:07:33.014878",
                            "stderr": "",
                            "stderr_lines": [],
                            "stdout": "+ cp '/tmp/artifacts/*' /tmp/logs/selenium/.\ncp: cannot stat '/tmp/artifacts/*': No such file or directory",
                            "stdout_lines": [
                                "+ cp '/tmp/artifacts/*' /tmp/logs/selenium/.",
                                "cp: cannot stat '/tmp/artifacts/*': No such file or directory"
                            ],
                            "zuul_log_id": "0242ac17-0010-0a3c-9efc-000000000031-1-instance"
                        }
                    },
                    "role": {
                        "id": "0242ac17-0010-0a3c-9efc-00000000002e",
                        "name": "gather-selenium-data",
                        "path": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/untrusted/project_4/opendev.org/openstack/openstack-helm/roles/gather-selenium-data"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-02-16T23:07:33.348478Z",
                            "start": "2026-02-16T23:07:32.829038Z"
                        },
                        "id": "0242ac17-0010-0a3c-9efc-000000000031",
                        "name": "Get selenium data"
                    }
                },
                {
                    "hosts": {
                        "instance": {
                            "action": "synchronize",
                            "changed": true,
                            "cmd": "/usr/bin/rsync --delay-updates -F --compress --archive --rsh='/usr/bin/ssh -S none -o Port=22 -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null' --rsync-path='sudo -u root rsync' --out-format='<<CHANGED>>%i %n%L' zuul@199.204.45.216:/tmp/logs/selenium /var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/work/logs/instance",
                            "invocation": {
                                "module_args": {
                                    "_local_rsync_password": null,
                                    "_local_rsync_path": "rsync",
                                    "_ssh_args": null,
                                    "_substitute_controller": false,
                                    "archive": true,
                                    "checksum": false,
                                    "compress": true,
                                    "copy_links": false,
                                    "delay_updates": true,
                                    "delete": false,
                                    "dest": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/work/logs/instance",
                                    "dest_port": 22,
                                    "dirs": false,
                                    "existing_only": false,
                                    "group": null,
                                    "link_dest": null,
                                    "links": null,
                                    "mode": "pull",
                                    "owner": null,
                                    "partial": false,
                                    "perms": null,
                                    "private_key": null,
                                    "recursive": null,
                                    "rsync_opts": [],
                                    "rsync_path": "sudo -u root rsync",
                                    "rsync_timeout": 0,
                                    "set_remote_user": true,
                                    "src": "zuul@199.204.45.216:/tmp/logs/selenium",
                                    "ssh_connection_multiplexing": false,
                                    "times": null,
                                    "use_ssh_args": false,
                                    "verify_host": false
                                }
                            },
                            "msg": "cd+++++++++ selenium/\n",
                            "rc": 0,
                            "stdout_lines": [
                                "cd+++++++++ selenium/"
                            ]
                        }
                    },
                    "role": {
                        "id": "0242ac17-0010-0a3c-9efc-00000000002e",
                        "name": "gather-selenium-data",
                        "path": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/untrusted/project_4/opendev.org/openstack/openstack-helm/roles/gather-selenium-data"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-02-16T23:07:33.857173Z",
                            "start": "2026-02-16T23:07:33.356072Z"
                        },
                        "id": "0242ac17-0010-0a3c-9efc-000000000032",
                        "name": "Downloads logs to executor"
                    }
                }
            ]
        }
    ],
    "stats": {
        "instance": {
            "changed": 23,
            "failures": 0,
            "ignored": 2,
            "ok": 23,
            "rescued": 0,
            "skipped": 0,
            "unreachable": 0
        }
    },
    "trusted": false
},
{
    "branch": "main",
    "index": "1",
    "phase": "post",
    "playbook": "vexxhost.dev/zuul-config/playbooks/base/post.yaml",
    "plays": [
        {
            "play": {
                "duration": {
                    "end": "2026-02-16T23:07:37.721331Z",
                    "start": "2026-02-16T23:07:34.582240Z"
                },
                "id": "0242ac17-0010-4628-8839-000000000002",
                "name": "all"
            },
            "tasks": [
                {
                    "hosts": {
                        "instance": {
                            "action": "set_fact",
                            "changed": false,
                            "false_condition": "groups['all'] | length > 1",
                            "skip_reason": "Conditional result was False",
                            "skipped": true
                        }
                    },
                    "role": {
                        "id": "0242ac17-0010-4628-8839-000000000006",
                        "name": "fetch-output",
                        "path": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/trusted/project_1/opendev.org/zuul/zuul-jobs/roles/fetch-output"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-02-16T23:07:34.639182Z",
                            "start": "2026-02-16T23:07:34.593513Z"
                        },
                        "id": "0242ac17-0010-4628-8839-000000000008",
                        "name": "Set log path for multiple nodes"
                    }
                },
                {
                    "hosts": {
                        "instance": {
                            "action": "set_fact",
                            "ansible_facts": {
                                "log_path": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/work/logs"
                            },
                            "changed": false
                        }
                    },
                    "role": {
                        "id": "0242ac17-0010-4628-8839-000000000006",
                        "name": "fetch-output",
                        "path": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/trusted/project_1/opendev.org/zuul/zuul-jobs/roles/fetch-output"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-02-16T23:07:34.692470Z",
                            "start": "2026-02-16T23:07:34.649881Z"
                        },
                        "id": "0242ac17-0010-4628-8839-000000000009",
                        "name": "Set log path for single node"
                    }
                },
                {
                    "hosts": {
                        "instance": {
                            "action": "file",
                            "changed": true,
                            "msg": "All items completed",
                            "results": [
                                {
                                    "ansible_loop_var": "zj_output_dir",
                                    "changed": false,
                                    "diff": {
                                        "after": {
                                            "path": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/work/logs"
                                        },
                                        "before": {
                                            "path": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/work/logs"
                                        }
                                    },
                                    "failed": false,
                                    "gid": 0,
                                    "group": "root",
                                    "invocation": {
                                        "module_args": {
                                            "_diff_peek": null,
                                            "_original_basename": null,
                                            "access_time": null,
                                            "access_time_format": "%Y%m%d%H%M.%S",
                                            "attributes": null,
                                            "follow": true,
                                            "force": false,
                                            "group": null,
                                            "mode": 493,
                                            "modification_time": null,
                                            "modification_time_format": "%Y%m%d%H%M.%S",
                                            "owner": null,
                                            "path": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/work/logs",
                                            "recurse": false,
                                            "selevel": null,
                                            "serole": null,
                                            "setype": null,
                                            "seuser": null,
                                            "src": null,
                                            "state": "directory",
                                            "unsafe_writes": false
                                        }
                                    },
                                    "mode": "0755",
                                    "owner": "root",
                                    "path": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/work/logs",
                                    "size": 4096,
                                    "state": "directory",
                                    "uid": 0,
                                    "zj_output_dir": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/work/logs"
                                },
                                {
                                    "ansible_loop_var": "zj_output_dir",
                                    "changed": true,
                                    "diff": {
                                        "after": {
                                            "path": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/work/artifacts",
                                            "state": "directory"
                                        },
                                        "before": {
                                            "path": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/work/artifacts",
                                            "state": "absent"
                                        }
                                    },
                                    "failed": false,
                                    "gid": 0,
                                    "group": "root",
                                    "invocation": {
                                        "module_args": {
                                            "_diff_peek": null,
                                            "_original_basename": null,
                                            "access_time": null,
                                            "access_time_format": "%Y%m%d%H%M.%S",
                                            "attributes": null,
                                            "follow": true,
                                            "force": false,
                                            "group": null,
                                            "mode": 493,
                                            "modification_time": null,
                                            "modification_time_format": "%Y%m%d%H%M.%S",
                                            "owner": null,
                                            "path": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/work/artifacts",
                                            "recurse": false,
                                            "selevel": null,
                                            "serole": null,
                                            "setype": null,
                                            "seuser": null,
                                            "src": null,
                                            "state": "directory",
                                            "unsafe_writes": false
                                        }
                                    },
                                    "mode": "0755",
                                    "owner": "root",
                                    "path": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/work/artifacts",
                                    "size": 4096,
                                    "state": "directory",
                                    "uid": 0,
                                    "zj_output_dir": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/work/artifacts"
                                },
                                {
                                    "ansible_loop_var": "zj_output_dir",
                                    "changed": true,
                                    "diff": {
                                        "after": {
                                            "path": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/work/docs",
                                            "state": "directory"
                                        },
                                        "before": {
                                            "path": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/work/docs",
                                            "state": "absent"
                                        }
                                    },
                                    "failed": false,
                                    "gid": 0,
                                    "group": "root",
                                    "invocation": {
                                        "module_args": {
                                            "_diff_peek": null,
                                            "_original_basename": null,
                                            "access_time": null,
                                            "access_time_format": "%Y%m%d%H%M.%S",
                                            "attributes": null,
                                            "follow": true,
                                            "force": false,
                                            "group": null,
                                            "mode": 493,
                                            "modification_time": null,
                                            "modification_time_format": "%Y%m%d%H%M.%S",
                                            "owner": null,
                                            "path": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/work/docs",
                                            "recurse": false,
                                            "selevel": null,
                                            "serole": null,
                                            "setype": null,
                                            "seuser": null,
                                            "src": null,
                                            "state": "directory",
                                            "unsafe_writes": false
                                        }
                                    },
                                    "mode": "0755",
                                    "owner": "root",
                                    "path": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/work/docs",
                                    "size": 4096,
                                    "state": "directory",
                                    "uid": 0,
                                    "zj_output_dir": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/work/docs"
                                }
                            ]
                        }
                    },
                    "role": {
                        "id": "0242ac17-0010-4628-8839-000000000006",
                        "name": "fetch-output",
                        "path": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/trusted/project_1/opendev.org/zuul/zuul-jobs/roles/fetch-output"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-02-16T23:07:35.520390Z",
                            "start": "2026-02-16T23:07:34.698398Z"
                        },
                        "id": "0242ac17-0010-4628-8839-00000000000a",
                        "name": "Ensure local output dirs"
                    }
                },
                {
                    "hosts": {
                        "instance": {
                            "action": "synchronize",
                            "changed": true,
                            "msg": "All items completed",
                            "results": [
                                {
                                    "ansible_loop_var": "zj_output",
                                    "changed": true,
                                    "cmd": "/usr/bin/rsync --delay-updates -F --compress --archive --no-owner --no-group --rsh='/usr/bin/ssh -S none -o Port=22' --out-format='<<CHANGED>>%i %n%L' zuul@199.204.45.216:/home/zuul/zuul-output/logs/ /var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/work/logs/",
                                    "failed": false,
                                    "invocation": {
                                        "module_args": {
                                            "_local_rsync_password": null,
                                            "_local_rsync_path": "rsync",
                                            "_ssh_args": null,
                                            "_substitute_controller": false,
                                            "archive": true,
                                            "checksum": false,
                                            "compress": true,
                                            "copy_links": false,
                                            "delay_updates": true,
                                            "delete": false,
                                            "dest": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/work/logs/",
                                            "dest_port": 22,
                                            "dirs": false,
                                            "existing_only": false,
                                            "group": false,
                                            "link_dest": null,
                                            "links": null,
                                            "mode": "pull",
                                            "owner": false,
                                            "partial": false,
                                            "perms": null,
                                            "private_key": null,
                                            "recursive": null,
                                            "rsync_opts": [],
                                            "rsync_path": null,
                                            "rsync_timeout": 0,
                                            "set_remote_user": true,
                                            "src": "zuul@199.204.45.216:/home/zuul/zuul-output/logs/",
                                            "ssh_connection_multiplexing": false,
                                            "times": null,
                                            "use_ssh_args": false,
                                            "verify_host": true
                                        }
                                    },
                                    "msg": ".d..t...... ./\n",
                                    "rc": 0,
                                    "stdout_lines": [
                                        ".d..t...... ./"
                                    ],
                                    "zj_output": {
                                        "dest": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/work/logs",
                                        "src": "logs"
                                    }
                                },
                                {
                                    "ansible_loop_var": "zj_output",
                                    "changed": true,
                                    "cmd": "/usr/bin/rsync --delay-updates -F --compress --archive --no-owner --no-group --rsh='/usr/bin/ssh -S none -o Port=22' --out-format='<<CHANGED>>%i %n%L' zuul@199.204.45.216:/home/zuul/zuul-output/artifacts/ /var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/work/artifacts/",
                                    "failed": false,
                                    "invocation": {
                                        "module_args": {
                                            "_local_rsync_password": null,
                                            "_local_rsync_path": "rsync",
                                            "_ssh_args": null,
                                            "_substitute_controller": false,
                                            "archive": true,
                                            "checksum": false,
                                            "compress": true,
                                            "copy_links": false,
                                            "delay_updates": true,
                                            "delete": false,
                                            "dest": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/work/artifacts/",
                                            "dest_port": 22,
                                            "dirs": false,
                                            "existing_only": false,
                                            "group": false,
                                            "link_dest": null,
                                            "links": null,
                                            "mode": "pull",
                                            "owner": false,
                                            "partial": false,
                                            "perms": null,
                                            "private_key": null,
                                            "recursive": null,
                                            "rsync_opts": [],
                                            "rsync_path": null,
                                            "rsync_timeout": 0,
                                            "set_remote_user": true,
                                            "src": "zuul@199.204.45.216:/home/zuul/zuul-output/artifacts/",
                                            "ssh_connection_multiplexing": false,
                                            "times": null,
                                            "use_ssh_args": false,
                                            "verify_host": true
                                        }
                                    },
                                    "msg": ".d..t...... ./\n",
                                    "rc": 0,
                                    "stdout_lines": [
                                        ".d..t...... ./"
                                    ],
                                    "zj_output": {
                                        "dest": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/work/artifacts",
                                        "src": "artifacts"
                                    }
                                },
                                {
                                    "ansible_loop_var": "zj_output",
                                    "changed": true,
                                    "cmd": "/usr/bin/rsync --delay-updates -F --compress --archive --no-owner --no-group --rsh='/usr/bin/ssh -S none -o Port=22' --out-format='<<CHANGED>>%i %n%L' zuul@199.204.45.216:/home/zuul/zuul-output/docs/ /var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/work/docs/",
                                    "failed": false,
                                    "invocation": {
                                        "module_args": {
                                            "_local_rsync_password": null,
                                            "_local_rsync_path": "rsync",
                                            "_ssh_args": null,
                                            "_substitute_controller": false,
                                            "archive": true,
                                            "checksum": false,
                                            "compress": true,
                                            "copy_links": false,
                                            "delay_updates": true,
                                            "delete": false,
                                            "dest": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/work/docs/",
                                            "dest_port": 22,
                                            "dirs": false,
                                            "existing_only": false,
                                            "group": false,
                                            "link_dest": null,
                                            "links": null,
                                            "mode": "pull",
                                            "owner": false,
                                            "partial": false,
                                            "perms": null,
                                            "private_key": null,
                                            "recursive": null,
                                            "rsync_opts": [],
                                            "rsync_path": null,
                                            "rsync_timeout": 0,
                                            "set_remote_user": true,
                                            "src": "zuul@199.204.45.216:/home/zuul/zuul-output/docs/",
                                            "ssh_connection_multiplexing": false,
                                            "times": null,
                                            "use_ssh_args": false,
                                            "verify_host": true
                                        }
                                    },
                                    "msg": ".d..t...... ./\n",
                                    "rc": 0,
                                    "stdout_lines": [
                                        ".d..t...... ./"
                                    ],
                                    "zj_output": {
                                        "dest": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/work/docs",
                                        "src": "docs"
                                    }
                                }
                            ]
                        }
                    },
                    "role": {
                        "id": "0242ac17-0010-4628-8839-000000000006",
                        "name": "fetch-output",
                        "path": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/trusted/project_1/opendev.org/zuul/zuul-jobs/roles/fetch-output"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-02-16T23:07:37.029725Z",
                            "start": "2026-02-16T23:07:35.529437Z"
                        },
                        "id": "0242ac17-0010-4628-8839-00000000000c",
                        "name": "Collect logs, artifacts and docs"
                    }
                },
                {
                    "hosts": {
                        "instance": {
                            "action": "shell",
                            "changed": true,
                            "msg": "All items completed",
                            "results": [
                                {
                                    "ansible_loop_var": "zj_item",
                                    "changed": true,
                                    "cmd": "if [ -n \"$(find /var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/work/artifacts -mindepth 1)\" ] ; then\n  # Only create target directory if it is needed.\n  # Do not fail if it is already there.\n  mkdir -p /var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/work/logs/artifacts\n  # Leave the original directory behind so that other roles\n  # operating on the interface directories can simply no-op.\n  mv -f /var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/work/artifacts/* /var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/work/logs/artifacts\nfi\n",
                                    "delta": "0:00:00.007989",
                                    "end": "2026-02-16 23:07:37.442214",
                                    "failed": false,
                                    "invocation": {
                                        "module_args": {
                                            "_raw_params": "if [ -n \"$(find /var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/work/artifacts -mindepth 1)\" ] ; then\n  # Only create target directory if it is needed.\n  # Do not fail if it is already there.\n  mkdir -p /var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/work/logs/artifacts\n  # Leave the original directory behind so that other roles\n  # operating on the interface directories can simply no-op.\n  mv -f /var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/work/artifacts/* /var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/work/logs/artifacts\nfi\n",
                                            "_uses_shell": true,
                                            "argv": null,
                                            "chdir": null,
                                            "creates": null,
                                            "executable": null,
                                            "expand_argument_vars": true,
                                            "removes": null,
                                            "stdin": null,
                                            "stdin_add_newline": true,
                                            "strip_empty_ends": true,
                                            "zuul_ansible_split_streams": false,
                                            "zuul_log_id": "in-loop-ignore",
                                            "zuul_no_log": false,
                                            "zuul_output_max_bytes": 1073741824
                                        }
                                    },
                                    "msg": "",
                                    "rc": 0,
                                    "start": "2026-02-16 23:07:37.434225",
                                    "stderr": "",
                                    "stderr_lines": [],
                                    "stdout": "",
                                    "stdout_lines": [],
                                    "zj_item": "artifacts",
                                    "zuul_log_id": "in-loop-ignore"
                                },
                                {
                                    "ansible_loop_var": "zj_item",
                                    "changed": true,
                                    "cmd": "if [ -n \"$(find /var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/work/docs -mindepth 1)\" ] ; then\n  # Only create target directory if it is needed.\n  # Do not fail if it is already there.\n  mkdir -p /var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/work/logs/docs\n  # Leave the original directory behind so that other roles\n  # operating on the interface directories can simply no-op.\n  mv -f /var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/work/docs/* /var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/work/logs/docs\nfi\n",
                                    "delta": "0:00:00.008379",
                                    "end": "2026-02-16 23:07:37.670352",
                                    "failed": false,
                                    "invocation": {
                                        "module_args": {
                                            "_raw_params": "if [ -n \"$(find /var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/work/docs -mindepth 1)\" ] ; then\n  # Only create target directory if it is needed.\n  # Do not fail if it is already there.\n  mkdir -p /var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/work/logs/docs\n  # Leave the original directory behind so that other roles\n  # operating on the interface directories can simply no-op.\n  mv -f /var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/work/docs/* /var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/work/logs/docs\nfi\n",
                                            "_uses_shell": true,
                                            "argv": null,
                                            "chdir": null,
                                            "creates": null,
                                            "executable": null,
                                            "expand_argument_vars": true,
                                            "removes": null,
                                            "stdin": null,
                                            "stdin_add_newline": true,
                                            "strip_empty_ends": true,
                                            "zuul_ansible_split_streams": false,
                                            "zuul_log_id": "in-loop-ignore",
                                            "zuul_no_log": false,
                                            "zuul_output_max_bytes": 1073741824
                                        }
                                    },
                                    "msg": "",
                                    "rc": 0,
                                    "start": "2026-02-16 23:07:37.661973",
                                    "stderr": "",
                                    "stderr_lines": [],
                                    "stdout": "",
                                    "stdout_lines": [],
                                    "zj_item": "docs",
                                    "zuul_log_id": "in-loop-ignore"
                                }
                            ]
                        }
                    },
                    "role": {
                        "id": "0242ac17-0010-4628-8839-00000000000f",
                        "name": "merge-output-to-logs",
                        "path": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/trusted/project_1/opendev.org/zuul/zuul-jobs/roles/merge-output-to-logs"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-02-16T23:07:37.721331Z",
                            "start": "2026-02-16T23:07:37.048156Z"
                        },
                        "id": "0242ac17-0010-4628-8839-000000000011",
                        "name": "Move artifacts and docs to logs dir"
                    }
                }
            ]
        },
        {
            "play": {
                "duration": {
                    "end": "2026-02-16T23:07:38.180651Z",
                    "start": "2026-02-16T23:07:37.732809Z"
                },
                "id": "0242ac17-0010-4628-8839-000000000013",
                "name": "all"
            },
            "tasks": [
                {
                    "hosts": {
                        "instance": {
                            "action": "authorized_key",
                            "changed": true,
                            "comment": null,
                            "exclusive": false,
                            "follow": false,
                            "invocation": {
                                "module_args": {
                                    "changed": true,
                                    "comment": null,
                                    "exclusive": false,
                                    "follow": false,
                                    "key": "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQCTykJAbMtx1dNz/ZyMSQFCrkGfX64qNr5A+sjyGnCBB7FKVfB7o64ewfKHHWrTu7j8fpcYb6DJJCLKfAlSJYUAzpPJp43tT3McrEL0n3D00ym+132moJaCzUBvYIXcT1aawKrL121JBYtfm5++ux4DaX5PHHJ8i2cFwMdOQNgBK4xGkWk/ZQiK70S/fLx97OUPQobV9VLKj6lXablW4KAoK415b6DCsNrzb42vcp8IyU51m4N9C3sF8jDBKX48GU+IV1qZ+woEB8M0JzdbXQJPqKwJ9iQbRO1ORMyqHRlRJeC+HU8yOmbapOB0Lq0r+sw7/X9Ln81zIMVpeIati1lWqlZy45fKkHzimoh0DKgowUTWimUkSBD5eF5CnsId3hBKoIcfXVepX3eSTaYXIZu1brxfLGLwWUQa5Hgnq7aO7kVvrFxPWuDRxgDpeDDdyWsPeQb2TijNwYbfGrpnDR1A1Uw8aMj9hBGgzQ/KPDwnMUJH4i+H8I+Ps5vj70wsAz8= zuul-build-sshkey",
                                    "key_options": null,
                                    "keyfile": "/home/zuul/.ssh/authorized_keys",
                                    "manage_dir": true,
                                    "path": null,
                                    "state": "absent",
                                    "user": "zuul",
                                    "validate_certs": true
                                }
                            },
                            "key": "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQCTykJAbMtx1dNz/ZyMSQFCrkGfX64qNr5A+sjyGnCBB7FKVfB7o64ewfKHHWrTu7j8fpcYb6DJJCLKfAlSJYUAzpPJp43tT3McrEL0n3D00ym+132moJaCzUBvYIXcT1aawKrL121JBYtfm5++ux4DaX5PHHJ8i2cFwMdOQNgBK4xGkWk/ZQiK70S/fLx97OUPQobV9VLKj6lXablW4KAoK415b6DCsNrzb42vcp8IyU51m4N9C3sF8jDBKX48GU+IV1qZ+woEB8M0JzdbXQJPqKwJ9iQbRO1ORMyqHRlRJeC+HU8yOmbapOB0Lq0r+sw7/X9Ln81zIMVpeIati1lWqlZy45fKkHzimoh0DKgowUTWimUkSBD5eF5CnsId3hBKoIcfXVepX3eSTaYXIZu1brxfLGLwWUQa5Hgnq7aO7kVvrFxPWuDRxgDpeDDdyWsPeQb2TijNwYbfGrpnDR1A1Uw8aMj9hBGgzQ/KPDwnMUJH4i+H8I+Ps5vj70wsAz8= zuul-build-sshkey",
                            "key_options": null,
                            "keyfile": "/home/zuul/.ssh/authorized_keys",
                            "manage_dir": true,
                            "path": null,
                            "state": "absent",
                            "user": "zuul",
                            "validate_certs": true
                        }
                    },
                    "role": {
                        "id": "0242ac17-0010-4628-8839-000000000016",
                        "name": "remove-build-sshkey",
                        "path": "/var/lib/zuul/builds/75357a43b83d49128104b6fefcadd954/trusted/project_1/opendev.org/zuul/zuul-jobs/roles/remove-build-sshkey"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-02-16T23:07:38.180651Z",
                            "start": "2026-02-16T23:07:37.738712Z"
                        },
                        "id": "0242ac17-0010-4628-8839-000000000018",
                        "name": "Remove the build SSH key from all nodes"
                    }
                }
            ]
        }
    ],
    "stats": {
        "instance": {
            "changed": 4,
            "failures": 0,
            "ignored": 0,
            "ok": 5,
            "rescued": 0,
            "skipped": 1,
            "unreachable": 0
        }
    },
    "trusted": true
}
]
